#include "objc-private.h"
#include "objc-runtime-new.h"
#include "objc-file.h"
+#include "objc-cache.h"
+#include <Block.h>
#include <objc/message.h>
#include <mach/shared_region.h>
-#define newcls(cls) ((class_t *)cls)
-#define newmethod(meth) ((method_t *)meth)
-#define newivar(ivar) ((ivar_t *)ivar)
-#define newcategory(cat) ((category_t *)cat)
#define newprotocol(p) ((protocol_t *)p)
-#define newproperty(p) ((property_t *)p)
-
-static const char *getName(class_t *cls);
-static uint32_t unalignedInstanceSize(class_t *cls);
-static uint32_t alignedInstanceSize(class_t *cls);
-static BOOL isMetaClass(class_t *cls);
-static class_t *getSuperclass(class_t *cls);
-static void unload_class(class_t *cls, BOOL isMeta);
-static class_t *setSuperclass(class_t *cls, class_t *newSuper);
-static class_t *realizeClass(class_t *cls);
-static void flushCaches(class_t *cls);
-static void flushVtables(class_t *cls);
-static method_t *getMethodNoSuper_nolock(class_t *cls, SEL sel);
-static method_t *getMethod_nolock(class_t *cls, SEL sel);
-static void changeInfo(class_t *cls, unsigned int set, unsigned int clear);
+
+static void disableTaggedPointers();
+static void detach_class(Class cls, BOOL isMeta);
+static void free_class(Class cls);
+static Class setSuperclass(Class cls, Class newSuper);
+static Class realizeClass(Class cls);
+static method_t *getMethodNoSuper_nolock(Class cls, SEL sel);
+static method_t *getMethod_nolock(Class cls, SEL sel);
static IMP _method_getImplementation(method_t *m);
-static BOOL hasCxxStructors(class_t *cls);
-static IMP addMethod(class_t *cls, SEL name, IMP imp, const char *types, BOOL replace);
+static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, BOOL replace);
static NXHashTable *realizedClasses(void);
-static BOOL isRRSelector(SEL sel);
-
-PRIVATE_EXTERN id objc_noop_imp(id self, SEL _cmd __unused) {
- return self;
-}
-
-/***********************************************************************
-* Lock management
-* Every lock used anywhere must be managed here.
-* Locks not managed here may cause gdb deadlocks.
-**********************************************************************/
-PRIVATE_EXTERN rwlock_t runtimeLock = {0};
-PRIVATE_EXTERN rwlock_t selLock = {0};
-PRIVATE_EXTERN mutex_t cacheUpdateLock = MUTEX_INITIALIZER;
-PRIVATE_EXTERN recursive_mutex_t loadMethodLock = RECURSIVE_MUTEX_INITIALIZER;
-static int debugger_runtimeLock;
-static int debugger_selLock;
-static int debugger_cacheUpdateLock;
-static int debugger_loadMethodLock;
-#define RDONLY 1
-#define RDWR 2
-
-PRIVATE_EXTERN void lock_init(void)
-{
- rwlock_init(&selLock);
- rwlock_init(&runtimeLock);
- recursive_mutex_init(&loadMethodLock);
-}
-
-
-/***********************************************************************
-* startDebuggerMode
-* Attempt to acquire some locks for debugger mode.
-* Returns 0 if debugger mode failed because too many locks are unavailable.
-*
-* Locks successfully acquired are held until endDebuggerMode().
-* Locks not acquired are off-limits until endDebuggerMode(); any
-* attempt to manipulate them will cause a trap.
-* Locks not handled here may cause deadlocks in gdb.
-**********************************************************************/
-PRIVATE_EXTERN int startDebuggerMode(void)
-{
- int result = DEBUGGER_FULL;
-
- // runtimeLock is required (can't do much without it)
- if (rwlock_try_write(&runtimeLock)) {
- debugger_runtimeLock = RDWR;
- } else if (rwlock_try_read(&runtimeLock)) {
- debugger_runtimeLock = RDONLY;
- result = DEBUGGER_PARTIAL;
- } else {
- return DEBUGGER_OFF;
- }
-
- // cacheUpdateLock is required (must not fail a necessary cache flush)
- // must be AFTER runtimeLock to avoid lock inversion
- if (mutex_try_lock(&cacheUpdateLock)) {
- debugger_cacheUpdateLock = RDWR;
- } else {
- rwlock_unlock(&runtimeLock, debugger_runtimeLock);
- debugger_runtimeLock = 0;
- return DEBUGGER_OFF;
- }
+static bool isRRSelector(SEL sel);
+static bool isAWZSelector(SEL sel);
+static bool methodListImplementsRR(const method_list_t *mlist);
+static bool methodListImplementsAWZ(const method_list_t *mlist);
+static void updateCustomRR_AWZ(Class cls, method_t *meth);
+static method_t *search_method_list(const method_list_t *mlist, SEL sel);
+#if SUPPORT_FIXUP
+static void fixupMessageRef(message_ref_t *msg);
+#endif
- // selLock is optional
- if (rwlock_try_write(&selLock)) {
- debugger_selLock = RDWR;
- } else if (rwlock_try_read(&selLock)) {
- debugger_selLock = RDONLY;
- result = DEBUGGER_PARTIAL;
- } else {
- debugger_selLock = 0;
- result = DEBUGGER_PARTIAL;
- }
+static bool MetaclassNSObjectAWZSwizzled;
+static bool ClassNSObjectRRSwizzled;
- // loadMethodLock is optional
- if (recursive_mutex_try_lock(&loadMethodLock)) {
- debugger_loadMethodLock = RDWR;
- } else {
- debugger_loadMethodLock = 0;
- result = DEBUGGER_PARTIAL;
- }
- return result;
+id objc_noop_imp(id self, SEL _cmd __unused) {
+ return self;
}
-/***********************************************************************
-* endDebuggerMode
-* Relinquish locks acquired in startDebuggerMode().
-**********************************************************************/
-PRIVATE_EXTERN void endDebuggerMode(void)
-{
- assert(debugger_runtimeLock != 0);
-
- rwlock_unlock(&runtimeLock, debugger_runtimeLock);
- debugger_runtimeLock = 0;
-
- rwlock_unlock(&selLock, debugger_selLock);
- debugger_selLock = 0;
-
- assert(debugger_cacheUpdateLock == RDWR);
- mutex_unlock(&cacheUpdateLock);
- debugger_cacheUpdateLock = 0;
-
- if (debugger_loadMethodLock) {
- recursive_mutex_unlock(&loadMethodLock);
- debugger_loadMethodLock = 0;
- }
-}
/***********************************************************************
-* isManagedDuringDebugger
-* Returns YES if the given lock is handled specially during debugger
-* mode (i.e. debugger mode tries to acquire it).
+* Lock management
**********************************************************************/
-PRIVATE_EXTERN BOOL isManagedDuringDebugger(void *lock)
-{
- if (lock == &selLock) return YES;
- if (lock == &cacheUpdateLock) return YES;
- if (lock == &runtimeLock) return YES;
- if (lock == &loadMethodLock) return YES;
- return NO;
-}
+rwlock_t runtimeLock;
+rwlock_t selLock;
+mutex_t cacheUpdateLock = MUTEX_INITIALIZER;
+recursive_mutex_t loadMethodLock = RECURSIVE_MUTEX_INITIALIZER;
-/***********************************************************************
-* isLockedDuringDebugger
-* Returns YES if the given mutex was acquired by debugger mode.
-* Locking a managed mutex during debugger mode causes a trap unless
-* this returns YES.
-**********************************************************************/
-PRIVATE_EXTERN BOOL isLockedDuringDebugger(mutex_t *lock)
+void lock_init(void)
{
- assert(DebuggerMode);
-
- if (lock == &cacheUpdateLock) return YES;
- if (lock == (mutex_t *)&loadMethodLock) return YES;
-
- return NO;
+ rwlock_init(&selLock);
+ rwlock_init(&runtimeLock);
+ recursive_mutex_init(&loadMethodLock);
}
-/***********************************************************************
-* isReadingDuringDebugger
-* Returns YES if the given rwlock was read-locked by debugger mode.
-* Read-locking a managed rwlock during debugger mode causes a trap unless
-* this returns YES.
-**********************************************************************/
-PRIVATE_EXTERN BOOL isReadingDuringDebugger(rwlock_t *lock)
-{
- assert(DebuggerMode);
-
- // read-lock is allowed even if debugger mode actually write-locked it
- if (debugger_runtimeLock && lock == &runtimeLock) return YES;
- if (debugger_selLock && lock == &selLock) return YES;
-
- return NO;
-}
/***********************************************************************
-* isWritingDuringDebugger
-* Returns YES if the given rwlock was write-locked by debugger mode.
-* Write-locking a managed rwlock during debugger mode causes a trap unless
-* this returns YES.
+* Non-pointer isa decoding
**********************************************************************/
-PRIVATE_EXTERN BOOL isWritingDuringDebugger(rwlock_t *lock)
-{
- assert(DebuggerMode);
-
- if (debugger_runtimeLock == RDWR && lock == &runtimeLock) return YES;
- if (debugger_selLock == RDWR && lock == &selLock) return YES;
-
- return NO;
-}
-
-
-/***********************************************************************
-* vtable dispatch
-*
-* Every class gets a vtable pointer. The vtable is an array of IMPs.
-* The selectors represented in the vtable are the same for all classes
-* (i.e. no class has a bigger or smaller vtable).
-* Each vtable index has an associated trampoline which dispatches to
-* the IMP at that index for the receiver class's vtable (after
-* checking for NULL). Dispatch fixup uses these trampolines instead
-* of objc_msgSend.
-* Fragility: The vtable size and list of selectors is chosen at launch
-* time. No compiler-generated code depends on any particular vtable
-* configuration, or even the use of vtable dispatch at all.
-* Memory size: If a class's vtable is identical to its superclass's
-* (i.e. the class overrides none of the vtable selectors), then
-* the class points directly to its superclass's vtable. This means
-* selectors to be included in the vtable should be chosen so they are
-* (1) frequently called, but (2) not too frequently overridden. In
-* particular, -dealloc is a bad choice.
-* Forwarding: If a class doesn't implement some vtable selector, that
-* selector's IMP is set to objc_msgSend in that class's vtable.
-* +initialize: Each class keeps the default vtable (which always
-* redirects to objc_msgSend) until its +initialize is completed.
-* Otherwise, the first message to a class could be a vtable dispatch,
-* and the vtable trampoline doesn't include +initialize checking.
-* Changes: Categories, addMethod, and setImplementation all force vtable
-* reconstruction for the class and all of its subclasses, if the
-* vtable selectors are affected.
-**********************************************************************/
-
-/***********************************************************************
-* ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
-* vtable_prototype on x86_64 steals %rax and does not clear %rdx on return
-* This means vtable dispatch must never be used for vararg calls
-* or very large return values.
-* ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
-**********************************************************************/
-
-#define X8(x) \
- x x x x x x x x
-#define X64(x) \
- X8(x) X8(x) X8(x) X8(x) X8(x) X8(x) X8(x) X8(x)
-#define X128(x) \
- X64(x) X64(x)
-
-#define vtableMax 128
-
-// hack to avoid conflicts with compiler's internal declaration
-asm("\n .data"
- "\n .globl __objc_empty_vtable "
- "\n __objc_empty_vtable:"
-#if __LP64__
- X128("\n .quad _objc_msgSend")
-#else
- X128("\n .long _objc_msgSend")
-#endif
- );
-
-#if SUPPORT_VTABLE
+#if SUPPORT_NONPOINTER_ISA
-// Trampoline descriptors for gdb.
+const uintptr_t objc_debug_isa_class_mask = ISA_MASK;
+const uintptr_t objc_debug_isa_magic_mask = ISA_MAGIC_MASK;
+const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;
-objc_trampoline_header *gdb_objc_trampolines = NULL;
-
-void gdb_objc_trampolines_changed(objc_trampoline_header *thdr) __attribute__((noinline));
-void gdb_objc_trampolines_changed(objc_trampoline_header *thdr)
-{
- rwlock_assert_writing(&runtimeLock);
- assert(thdr == gdb_objc_trampolines);
+// die if masks overlap
+STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);
- if (PrintVtables) {
- _objc_inform("VTABLES: gdb_objc_trampolines_changed(%p)", thdr);
- }
-}
+// die if magic is wrong
+STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);
-// fixme workaround for rdar://6667753
-static void appendTrampolines(objc_trampoline_header *thdr) __attribute__((noinline));
+// die if virtual address space bound goes up
+STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0);
-static void appendTrampolines(objc_trampoline_header *thdr)
-{
- rwlock_assert_writing(&runtimeLock);
- assert(thdr->next == NULL);
-
- if (gdb_objc_trampolines != thdr->next) {
- thdr->next = gdb_objc_trampolines;
- }
- gdb_objc_trampolines = thdr;
-
- gdb_objc_trampolines_changed(thdr);
-}
-
-// Vtable management.
-
-static size_t vtableStrlen;
-static size_t vtableCount;
-static SEL *vtableSelectors;
-static IMP *vtableTrampolines;
-static const char * const defaultVtable[] = {
- "allocWithZone:",
- "alloc",
- "class",
- "self",
- "isKindOfClass:",
- "respondsToSelector:",
- "isFlipped",
- "length",
- "objectForKey:",
- "count",
- "objectAtIndex:",
- "isEqualToString:",
- "isEqual:",
- "retain",
- "release",
- "autorelease",
-};
-static const char * const defaultVtableGC[] = {
- "allocWithZone:",
- "alloc",
- "class",
- "self",
- "isKindOfClass:",
- "respondsToSelector:",
- "isFlipped",
- "length",
- "objectForKey:",
- "count",
- "objectAtIndex:",
- "isEqualToString:",
- "isEqual:",
- "hash",
- "addObject:",
- "countByEnumeratingWithState:objects:count:",
-};
-
-OBJC_EXTERN id objc_msgSend_vtable0(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable1(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable2(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable3(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable4(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable5(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable6(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable7(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable8(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable9(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable10(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable11(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable12(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable13(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable14(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_vtable15(id, SEL, ...);
-
-static IMP const defaultVtableTrampolines[] = {
- objc_msgSend_vtable0,
- objc_msgSend_vtable1,
- objc_msgSend_vtable2,
- objc_msgSend_vtable3,
- objc_msgSend_vtable4,
- objc_msgSend_vtable5,
- objc_msgSend_vtable6,
- objc_msgSend_vtable7,
- objc_msgSend_vtable8,
- objc_msgSend_vtable9,
- objc_msgSend_vtable10,
- objc_msgSend_vtable11,
- objc_msgSend_vtable12,
- objc_msgSend_vtable13,
- objc_msgSend_vtable14,
- objc_msgSend_vtable15,
-};
-extern objc_trampoline_header defaultVtableTrampolineDescriptors;
-
-static void check_vtable_size(void) __unused;
-static void check_vtable_size(void)
-{
- // Fail to compile if vtable sizes don't match.
- int c1[sizeof(defaultVtableTrampolines)-sizeof(defaultVtable)] __unused;
- int c2[sizeof(defaultVtable)-sizeof(defaultVtableTrampolines)] __unused;
- int c3[sizeof(defaultVtableTrampolines)-sizeof(defaultVtableGC)] __unused;
- int c4[sizeof(defaultVtableGC)-sizeof(defaultVtableTrampolines)] __unused;
-
- // Fail to compile if vtableMax is too small
- int c5[vtableMax - sizeof(defaultVtable)] __unused;
- int c6[vtableMax - sizeof(defaultVtableGC)] __unused;
-}
-
-
-extern uint8_t vtable_prototype;
-extern uint8_t vtable_ignored;
-extern int vtable_prototype_size;
-extern int vtable_prototype_index_offset;
-extern int vtable_prototype_index2_offset;
-extern int vtable_prototype_tagtable_offset;
-extern int vtable_prototype_tagtable_size;
-static size_t makeVtableTrampoline(uint8_t *dst, size_t index)
-{
- // copy boilerplate
- memcpy(dst, &vtable_prototype, vtable_prototype_size);
-
- // insert indexes
-#if defined(__x86_64__)
- if (index > 255) _objc_fatal("vtable_prototype busted");
- {
- // `jmpq *0x7fff(%rax)` ff a0 ff 7f
- uint16_t *p = (uint16_t *)(dst + vtable_prototype_index_offset + 2);
- if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
- *p = index * 8;
- }
- {
- uint16_t *p = (uint16_t *)(dst + vtable_prototype_index2_offset + 2);
- if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
- *p = index * 8;
- }
-#else
-# warning unknown architecture
-#endif
-
- // insert tagged isa table
-#if defined(__x86_64__)
- {
- // `movq $0x1122334455667788, %r10` 49 ba 88 77 66 55 44 33 22 11
- if (vtable_prototype_tagtable_size != 10) {
- _objc_fatal("vtable_prototype busted");
- }
- uint8_t *p = (uint8_t *)(dst + vtable_prototype_tagtable_offset);
- if (*p++ != 0x49) _objc_fatal("vtable_prototype busted");
- if (*p++ != 0xba) _objc_fatal("vtable_prototype busted");
- if (*(uintptr_t *)p != 0x1122334455667788) {
- _objc_fatal("vtable_prototype busted");
- }
- uintptr_t addr = (uintptr_t)_objc_tagged_isa_table;
- memcpy(p, &addr, sizeof(addr));
- }
#else
-# warning unknown architecture
-#endif
-
- return vtable_prototype_size;
-}
-
-
-static void initVtables(void)
-{
- if (DisableVtables) {
- if (PrintVtables) {
- _objc_inform("VTABLES: vtable dispatch disabled by OBJC_DISABLE_VTABLES");
- }
- vtableCount = 0;
- vtableSelectors = NULL;
- vtableTrampolines = NULL;
- return;
- }
-
- const char * const *names;
- size_t i;
-
- if (UseGC) {
- names = defaultVtableGC;
- vtableCount = sizeof(defaultVtableGC) / sizeof(defaultVtableGC[0]);
- } else {
- names = defaultVtable;
- vtableCount = sizeof(defaultVtable) / sizeof(defaultVtable[0]);
- }
- if (vtableCount > vtableMax) vtableCount = vtableMax;
-
- vtableSelectors = (SEL*)_malloc_internal(vtableCount * sizeof(SEL));
- vtableTrampolines = (IMP*)_malloc_internal(vtableCount * sizeof(IMP));
- // Built-in trampolines and their descriptors
+// These variables exist but enforce pointer alignment only.
+const uintptr_t objc_debug_isa_class_mask = (~WORD_MASK);
+const uintptr_t objc_debug_isa_magic_mask = WORD_MASK;
+const uintptr_t objc_debug_isa_magic_value = 0;
- size_t defaultVtableTrampolineCount =
- sizeof(defaultVtableTrampolines) / sizeof(defaultVtableTrampolines[0]);
-#ifndef NDEBUG
- // debug: use generated code for 3/4 of the table
- // Disabled even in Debug builds to avoid breaking backtrace symbol names.
- // defaultVtableTrampolineCount /= 4;
#endif
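+
+// A minimal decoding sketch. `decodeIsaForDebugger` is an illustrative name,
+// not a runtime symbol; it only shows how a debugger-side tool could use the
+// three exported constants above to recover the class pointer from a raw isa.
+static Class decodeIsaForDebugger(uintptr_t rawIsa) __unused;
+static Class decodeIsaForDebugger(uintptr_t rawIsa)
+{
+    if ((rawIsa & objc_debug_isa_magic_mask) == objc_debug_isa_magic_value) {
+        // Packed (non-pointer) isa, or a plain aligned pointer when
+        // SUPPORT_NONPOINTER_ISA is off: keep only the class-pointer bits.
+        return (Class)(rawIsa & objc_debug_isa_class_mask);
+    }
+    // Otherwise the isa field holds a raw class pointer.
+    return (Class)rawIsa;
+}
+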
- for (i = 0; i < defaultVtableTrampolineCount && i < vtableCount; i++) {
- vtableSelectors[i] = sel_registerName(names[i]);
- vtableTrampolines[i] = defaultVtableTrampolines[i];
- }
- appendTrampolines(&defaultVtableTrampolineDescriptors);
-
-
- // Generated trampolines and their descriptors
-
- if (vtableCount > defaultVtableTrampolineCount) {
- // Memory for trampoline code
- size_t generatedCount =
- vtableCount - defaultVtableTrampolineCount;
-
- const int align = 16;
- size_t codeSize =
- round_page(sizeof(objc_trampoline_header) + align +
- generatedCount * (sizeof(objc_trampoline_descriptor)
- + vtable_prototype_size + align));
- void *codeAddr = mmap(0, codeSize, PROT_READ|PROT_WRITE,
- MAP_PRIVATE|MAP_ANON,
- VM_MAKE_TAG(VM_MEMORY_OBJC_DISPATCHERS), 0);
- uint8_t *t = (uint8_t *)codeAddr;
-
- // Trampoline header
- objc_trampoline_header *thdr = (objc_trampoline_header *)t;
- thdr->headerSize = sizeof(objc_trampoline_header);
- thdr->descSize = sizeof(objc_trampoline_descriptor);
- thdr->descCount = (uint32_t)generatedCount;
- thdr->next = NULL;
-
- // Trampoline descriptors
- objc_trampoline_descriptor *tdesc = (objc_trampoline_descriptor *)(thdr+1);
- t = (uint8_t *)&tdesc[generatedCount];
- t += align - ((uintptr_t)t % align);
-
- // Dispatch code
- size_t tdi;
- for (i = defaultVtableTrampolineCount, tdi = 0;
- i < vtableCount;
- i++, tdi++)
- {
- vtableSelectors[i] = sel_registerName(names[i]);
- if (ignoreSelector(vtableSelectors[i])) {
- vtableTrampolines[i] = (IMP)&vtable_ignored;
- tdesc[tdi].offset = 0;
- tdesc[tdi].flags = 0;
- } else {
- vtableTrampolines[i] = (IMP)t;
- tdesc[tdi].offset =
- (uint32_t)((uintptr_t)t - (uintptr_t)&tdesc[tdi]);
- tdesc[tdi].flags =
- OBJC_TRAMPOLINE_MESSAGE|OBJC_TRAMPOLINE_VTABLE;
-
- t += makeVtableTrampoline(t, i);
- t += align - ((uintptr_t)t % align);
- }
- }
-
- appendTrampolines(thdr);
- sys_icache_invalidate(codeAddr, codeSize);
- mprotect(codeAddr, codeSize, PROT_READ|PROT_EXEC);
- }
-
-
- if (PrintVtables) {
- for (i = 0; i < vtableCount; i++) {
- _objc_inform("VTABLES: vtable[%zu] %p %s",
- i, vtableTrampolines[i],
- sel_getName(vtableSelectors[i]));
- }
- }
-
- if (PrintVtableImages) {
- _objc_inform("VTABLE IMAGES: '#' implemented by class");
- _objc_inform("VTABLE IMAGES: '-' inherited from superclass");
- _objc_inform("VTABLE IMAGES: ' ' not implemented");
- for (i = 0; i <= vtableCount; i++) {
- char spaces[vtableCount+1+1];
- size_t j;
- for (j = 0; j < i; j++) {
- spaces[j] = '|';
- }
- spaces[j] = '\0';
- _objc_inform("VTABLE IMAGES: %s%s", spaces,
- i<vtableCount ? sel_getName(vtableSelectors[i]) : "");
- }
- }
-
- if (PrintVtables || PrintVtableImages) {
- vtableStrlen = 0;
- for (i = 0; i < vtableCount; i++) {
- vtableStrlen += strlen(sel_getName(vtableSelectors[i]));
- }
- }
-}
-
-
-static int vtable_getIndex(SEL sel)
-{
- unsigned int i;
- for (i = 0; i < vtableCount; i++) {
- if (vtableSelectors[i] == sel) return i;
- }
- return -1;
-}
-
-static BOOL vtable_containsSelector(SEL sel)
-{
- return (vtable_getIndex(sel) < 0) ? NO : YES;
-}
-
-static void printVtableOverrides(class_t *cls, class_t *supercls)
-{
- char overrideMap[vtableCount+1];
- unsigned int i;
-
- if (supercls) {
- size_t overridesBufferSize = vtableStrlen + 2*vtableCount + 1;
- char *overrides =
- (char *)_calloc_internal(overridesBufferSize, 1);
- for (i = 0; i < vtableCount; i++) {
- if (ignoreSelector(vtableSelectors[i])) {
- overrideMap[i] = '-';
- continue;
- }
- if (getMethodNoSuper_nolock(cls, vtableSelectors[i])) {
- strlcat(overrides, sel_getName(vtableSelectors[i]), overridesBufferSize);
- strlcat(overrides, ", ", overridesBufferSize);
- overrideMap[i] = '#';
- } else if (getMethod_nolock(cls, vtableSelectors[i])) {
- overrideMap[i] = '-';
- } else {
- overrideMap[i] = ' ';
- }
- }
- if (PrintVtables) {
- _objc_inform("VTABLES: %s%s implements %s",
- getName(cls), isMetaClass(cls) ? "(meta)" : "",
- overrides);
- }
- _free_internal(overrides);
- }
- else {
- for (i = 0; i < vtableCount; i++) {
- overrideMap[i] = '#';
- }
- }
-
- if (PrintVtableImages) {
- overrideMap[vtableCount] = '\0';
- _objc_inform("VTABLE IMAGES: %s %s%s", overrideMap,
- getName(cls), isMetaClass(cls) ? "(meta)" : "");
- }
-}
-
-/***********************************************************************
-* updateVtable
-* Rebuilds vtable for cls, using superclass's vtable if appropriate.
-* Assumes superclass's vtable is up to date.
-* Does nothing to subclass vtables.
-* Locking: runtimeLock must be held by the caller.
-**********************************************************************/
-static void updateVtable(class_t *cls, BOOL force)
-{
- rwlock_assert_writing(&runtimeLock);
-
- // Keep default vtable until +initialize is complete.
- // Default vtable redirects to objc_msgSend, which
- // enforces +initialize locking.
- if (!force && !_class_isInitialized((Class)cls)) {
- /*
- if (PrintVtables) {
- _objc_inform("VTABLES: KEEPING DEFAULT vtable for "
- "uninitialized class %s%s",
- getName(cls), isMetaClass(cls) ? "(meta)" : "");
- }
- */
- return;
- }
-
- // Decide whether this class can share its superclass's vtable.
-
- class_t *supercls = getSuperclass(cls);
- BOOL needVtable = NO;
- unsigned int i;
- if (!supercls) {
- // Root classes always need a vtable
- needVtable = YES;
- }
- else if (cls->data()->flags & RW_SPECIALIZED_VTABLE) {
- // Once you have your own vtable, you never go back
- needVtable = YES;
- }
- else {
- for (i = 0; i < vtableCount; i++) {
- if (ignoreSelector(vtableSelectors[i])) continue;
- method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
- // assume any local implementation differs from super's
- if (m) {
- needVtable = YES;
- break;
- }
- }
- }
-
- // Build a vtable for this class, or not.
-
- if (!needVtable) {
- if (PrintVtables) {
- _objc_inform("VTABLES: USING SUPERCLASS vtable for class %s%s",
- getName(cls), isMetaClass(cls) ? "(meta)" : "");
- }
- cls->vtable = supercls->vtable;
- }
- else {
- if (PrintVtables) {
- _objc_inform("VTABLES: %s vtable for class %s%s",
- (cls->data()->flags & RW_SPECIALIZED_VTABLE) ?
- "UPDATING SPECIALIZED" : "CREATING SPECIALIZED",
- getName(cls), isMetaClass(cls) ? "(meta)" : "");
- }
- if (PrintVtables || PrintVtableImages) {
- printVtableOverrides(cls, supercls);
- }
-
- IMP *new_vtable;
- IMP *super_vtable = supercls ? supercls->vtable : &_objc_empty_vtable;
- // fixme use msgForward (instead of msgSend from empty vtable) ?
-
- if (cls->data()->flags & RW_SPECIALIZED_VTABLE) {
- // update cls->vtable in place
- new_vtable = cls->vtable;
- assert(new_vtable != &_objc_empty_vtable);
- } else {
- // make new vtable
- new_vtable = (IMP*)malloc(vtableCount * sizeof(IMP));
- changeInfo(cls, RW_SPECIALIZED_VTABLE, 0);
- }
-
- for (i = 0; i < vtableCount; i++) {
- if (ignoreSelector(vtableSelectors[i])) {
- new_vtable[i] = (IMP)&vtable_ignored;
- } else {
- method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
- if (m) new_vtable[i] = _method_getImplementation(m);
- else new_vtable[i] = super_vtable[i];
- }
- }
-
- if (cls->vtable != new_vtable) {
- // don't let other threads see uninitialized parts of new_vtable
- OSMemoryBarrier();
- cls->vtable = new_vtable;
- }
- }
-}
-
-// SUPPORT_VTABLE
-#else
-// !SUPPORT_VTABLE
-
-static void initVtables(void)
-{
- if (PrintVtables) {
- _objc_inform("VTABLES: no vtables on this architecture");
- }
-}
-
-static BOOL vtable_containsSelector(SEL sel)
-{
- return NO;
-}
-
-static void updateVtable(class_t *cls, BOOL force)
-{
-}
-
-// !SUPPORT_VTABLE
-#endif
typedef struct {
category_t *cat;
#define FOREACH_METHOD_LIST(_mlist, _cls, code) \
do { \
+ class_rw_t *_data = _cls->data(); \
const method_list_t *_mlist; \
- if (_cls->data()->methods) { \
- method_list_t **_mlistp; \
- for (_mlistp = _cls->data()->methods; *_mlistp; _mlistp++) { \
- _mlist = *_mlistp; \
+ if (_data->method_lists) { \
+ if (_data->flags & RW_METHOD_ARRAY) { \
+ method_list_t **_mlistp; \
+ for (_mlistp=_data->method_lists; _mlistp[0]; _mlistp++){ \
+ _mlist = _mlistp[0]; \
+ code \
+ } \
+ } else { \
+ _mlist = _data->method_list; \
code \
} \
} \
} while (0)
-#define FOREACH_REALIZED_CLASS_AND_SUBCLASS(_c, _cls, code) \
+
+// As above, but skips the class's base method list.
+#define FOREACH_CATEGORY_METHOD_LIST(_mlist, _cls, code) \
do { \
- rwlock_assert_writing(&runtimeLock); \
- class_t *_top = _cls; \
- class_t *_c = _top; \
- if (_c) { \
- while (1) { \
- code \
- if (_c->data()->firstSubclass) { \
- _c = _c->data()->firstSubclass; \
+ class_rw_t *_data = _cls->data(); \
+ const method_list_t *_mlist; \
+ if (_data->method_lists) { \
+ if (_data->flags & RW_METHOD_ARRAY) { \
+ if (_data->ro->baseMethods) { \
+ /* has base methods: use all mlists except the last */ \
+ method_list_t **_mlistp; \
+ for (_mlistp=_data->method_lists; _mlistp[0] && _mlistp[1]; _mlistp++){ \
+ _mlist = _mlistp[0]; \
+ code \
+ } \
} else { \
- while (!_c->data()->nextSiblingClass && _c != _top) { \
- _c = getSuperclass(_c); \
+ /* no base methods: use all mlists including the last */ \
+ method_list_t **_mlistp; \
+ for (_mlistp=_data->method_lists; _mlistp[0]; _mlistp++){ \
+ _mlist = _mlistp[0]; \
+ code \
} \
- if (_c == _top) break; \
- _c = _c->data()->nextSiblingClass; \
} \
- } \
- } else { \
- /* nil means all realized classes */ \
- NXHashTable *_classes = realizedClasses(); \
- NXHashTable *_metaclasses = realizedMetaclasses(); \
- NXHashState _state; \
- _state = NXInitHashState(_classes); \
- while (NXNextHashState(_classes, &_state, (void**)&_c)) \
- { \
- code \
- } \
- _state = NXInitHashState(_metaclasses); \
- while (NXNextHashState(_metaclasses, &_state, (void**)&_c)) \
- { \
+ } else if (!_data->ro->baseMethods) { \
+ /* no base methods: use all mlists including the last */ \
+ _mlist = _data->method_list; \
code \
} \
} \
- } while (0)
+ } while (0)
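+
+// A small usage sketch for FOREACH_METHOD_LIST above. `countAllMethods` is an
+// illustrative name, not a runtime function; the caller must hold runtimeLock
+// and cls must be realized, because the macro reads cls->data() directly.
+static uint32_t countAllMethods(Class cls) __unused;
+static uint32_t countAllMethods(Class cls)
+{
+    uint32_t total = 0;
+    FOREACH_METHOD_LIST(mlist, cls, {
+        total += mlist->count;
+    });
+    return total;
+}
+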
/*
The low two bits of mlist->entsize are used as the fixed-up marker.
PREOPTIMIZED VERSION:
- Fixed-up method lists get entsize&3 == 3.
- dyld shared cache sets this for method lists it preoptimizes.
+ Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
+ (Protocol method lists are not sorted because of their extra parallel data)
+ Runtime fixed-up method lists get 3.
UN-PREOPTIMIZED VERSION:
- Fixed-up method lists get entsize&3 == 1.
- dyld shared cache uses 3, but those aren't trusted.
+ Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted)
+ Shared cache's sorting and uniquing are not trusted, but do affect the
+ location of the selector name string.
+ Runtime fixed-up method lists get 2.
*/
static uint32_t fixed_up_method_list = 3;
-PRIVATE_EXTERN void
+void
disableSharedCacheOptimizations(void)
{
- fixed_up_method_list = 1;
+ fixed_up_method_list = 2;
}
-static BOOL isMethodListFixedUp(const method_list_t *mlist)
+static bool
+isMethodListFixedUp(const method_list_t *mlist)
{
return (mlist->entsize_NEVER_USE & 3) == fixed_up_method_list;
}
-static void setMethodListFixedUp(method_list_t *mlist)
+
+static const char *sel_cname(SEL sel)
+{
+ return (const char *)(void *)sel;
+}
+
+
+static void
+setMethodListFixedUp(method_list_t *mlist)
{
rwlock_assert_writing(&runtimeLock);
assert(!isMethodListFixedUp(mlist));
- mlist->entsize_NEVER_USE = (mlist->entsize_NEVER_USE & ~3) | fixed_up_method_list;
+ mlist->entsize_NEVER_USE =
+ (mlist->entsize_NEVER_USE & ~3) | fixed_up_method_list;
}
/*
// low bit used by dyld shared cache
static uint32_t method_list_entsize(const method_list_t *mlist)
{
- return mlist->entsize_NEVER_USE & ~(uint32_t)3;
+ return mlist->entsize_NEVER_USE & ~3;
}
static size_t method_list_size(const method_list_t *mlist)
static method_t *method_list_nth(const method_list_t *mlist, uint32_t i)
{
- return (method_t *)(i*method_list_entsize(mlist) + (char *)&mlist->first);
+ return &mlist->get(i);
}
+static uint32_t method_list_count(const method_list_t *mlist)
+{
+ return mlist ? mlist->count : 0;
+}
-static size_t ivar_list_size(const ivar_list_t *ilist)
+static void method_list_swap(method_list_t *mlist, uint32_t i, uint32_t j)
{
- return sizeof(ivar_list_t) + (ilist->count-1) * ilist->entsize;
+ size_t entsize = method_list_entsize(mlist);
+ char temp[entsize];
+ memcpy(temp, method_list_nth(mlist, i), entsize);
+ memcpy(method_list_nth(mlist, i), method_list_nth(mlist, j), entsize);
+ memcpy(method_list_nth(mlist, j), temp, entsize);
}
-static ivar_t *ivar_list_nth(const ivar_list_t *ilist, uint32_t i)
+static uint32_t method_list_index(const method_list_t *mlist,const method_t *m)
{
- return (ivar_t *)(i*ilist->entsize + (char *)&ilist->first);
+ uint32_t i = (uint32_t)(((uintptr_t)m - (uintptr_t)mlist) / method_list_entsize(mlist));
+ assert(i < mlist->count);
+ return i;
}
-// part of ivar_t, with non-deprecated alignment
-typedef struct {
- uintptr_t *offset;
- const char *name;
- const char *type;
- uint32_t alignment;
-} ivar_alignment_t;
+static size_t ivar_list_size(const ivar_list_t *ilist)
+{
+ return sizeof(ivar_list_t) + (ilist->count-1) * ilist->entsize;
+}
-static uint32_t ivar_alignment(const ivar_t *ivar)
+static ivar_t *ivar_list_nth(const ivar_list_t *ilist, uint32_t i)
{
- uint32_t alignment = ((ivar_alignment_t *)ivar)->alignment;
- if (alignment == (uint32_t)-1) alignment = (uint32_t)WORD_SHIFT;
- return 1<<alignment;
+ return (ivar_t *)(i*ilist->entsize + (char *)&ilist->first);
}
static method_list_t *cat_method_list(const category_t *cat, BOOL isMeta)
{
- if (!cat) return NULL;
+ if (!cat) return nil;
if (isMeta) return cat->classMethods;
else return cat->instanceMethods;
static method_t *cat_method_nth(const category_t *cat, BOOL isMeta, uint32_t i)
{
method_list_t *cmlist = cat_method_list(cat, isMeta);
- if (!cmlist) return NULL;
+ if (!cmlist) return nil;
return method_list_nth(cmlist, i);
}
}
+static Class
+alloc_class_for_subclass(Class supercls, size_t extraBytes)
+{
+ if (!supercls || !supercls->isSwift()) {
+ return _calloc_class(sizeof(objc_class) + extraBytes);
+ }
+
+ // Superclass is a Swift class. New subclass must duplicate its extra bits.
+
+ // Allocate the new class, with space for super's prefix and suffix
+ // and self's extraBytes.
+ swift_class_t *swiftSupercls = (swift_class_t *)supercls;
+ size_t superSize = swiftSupercls->classSize;
+ void *superBits = swiftSupercls->baseAddress();
+ void *bits = _malloc_internal(superSize + extraBytes);
+
+ // Copy all of the superclass's data to the new class.
+ memcpy(bits, superBits, superSize);
+
+ // Erase the objc data and the Swift description in the new class.
+ swift_class_t *swcls = (swift_class_t *)
+ ((uint8_t *)bits + swiftSupercls->classAddressOffset);
+ bzero(swcls, sizeof(objc_class));
+ swcls->description = nil;
+
+ // Mark this class as Swift-enhanced.
+ swcls->bits.setIsSwift();
+
+ return (Class)swcls;
+}
+
+
+/***********************************************************************
+* object_getIndexedIvars.
+**********************************************************************/
+void *object_getIndexedIvars(id obj)
+{
+ uint8_t *base = (uint8_t *)obj;
+
+ if (!obj) return nil;
+ if (obj->isTaggedPointer()) return nil;
+
+ if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();
+
+ Class cls = (Class)obj;
+ if (!cls->isSwift()) return base + sizeof(objc_class);
+
+ swift_class_t *swcls = (swift_class_t *)cls;
+ return base - swcls->classAddressOffset + word_align(swcls->classSize);
+}
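+
+// Usage sketch; `createInstanceWithTag` is illustrative, not a runtime symbol.
+// Extra bytes requested from class_createInstance() sit past the instance's
+// ivars and are located with object_getIndexedIvars().
+static id createInstanceWithTag(Class cls, int tag) __unused;
+static id createInstanceWithTag(Class cls, int tag)
+{
+    id obj = class_createInstance(cls, sizeof(int));
+    int *extra = (int *)object_getIndexedIvars(obj);
+    if (extra) *extra = tag;
+    return obj;
+}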
+
+
/***********************************************************************
* make_ro_writeable
* Reallocates rw->ro if necessary to make it writeable.
{
rwlock_assert_writing(&runtimeLock);
- static NXMapTable *category_map = NULL;
+ static NXMapTable *category_map = nil;
if (category_map) return category_map;
* Records an unattached category.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static void addUnattachedCategoryForClass(category_t *cat, class_t *cls,
+static void addUnattachedCategoryForClass(category_t *cat, Class cls,
header_info *catHeader)
{
rwlock_assert_writing(&runtimeLock);
BOOL catFromBundle = (catHeader->mhdr->filetype == MH_BUNDLE) ? YES: NO;
- // DO NOT use cat->cls!
- // cls may be cat->cls->isa, or cat->cls may have been remapped.
+ // DO NOT use cat->cls! cls may be cat->cls->isa instead
NXMapTable *cats = unattachedCategories();
category_list *list;
* Removes an unattached category.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static void removeUnattachedCategoryForClass(category_t *cat, class_t *cls)
+static void removeUnattachedCategoryForClass(category_t *cat, Class cls)
{
rwlock_assert_writing(&runtimeLock);
- // DO NOT use cat->cls!
- // cls may be cat->cls->isa, or cat->cls may have been remapped.
+ // DO NOT use cat->cls! cls may be cat->cls->isa instead
NXMapTable *cats = unattachedCategories();
category_list *list;
* The result must be freed by the caller.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static category_list *unattachedCategoriesForClass(class_t *cls)
+static category_list *unattachedCategoriesForClass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
return (category_list *)NXMapRemove(unattachedCategories(), cls);
/***********************************************************************
-* isRealized
-* Returns YES if class cls has been realized.
-* Locking: To prevent concurrent realization, hold runtimeLock.
-**********************************************************************/
-static BOOL isRealized(class_t *cls)
-{
- return (cls->data()->flags & RW_REALIZED) ? YES : NO;
-}
-
-
-/***********************************************************************
-* isFuture
-* Returns YES if class cls is an unrealized future class.
-* Locking: To prevent concurrent realization, hold runtimeLock.
+* classNSObject
+* Returns class NSObject.
+* Locking: none
**********************************************************************/
-#ifndef NDEBUG
-// currently used in asserts only
-static BOOL isFuture(class_t *cls)
+static Class classNSObject(void)
{
- return (cls->data()->flags & RW_FUTURE) ? YES : NO;
+ extern objc_class OBJC_CLASS_$_NSObject;
+ return (Class)&OBJC_CLASS_$_NSObject;
}
-#endif
/***********************************************************************
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
-static void printReplacements(class_t *cls, category_list *cats)
+static void printReplacements(Class cls, category_list *cats)
{
uint32_t c;
- BOOL isMeta = isMetaClass(cls);
+ BOOL isMeta = cls->isMetaClass();
if (!cats) return;
uint32_t m;
for (m = 0; m < cmCount; m++) {
uint32_t c2, m2;
- method_t *meth2 = NULL;
+ method_t *meth2 = nil;
method_t *meth = cat_method_nth(cat, isMeta, m);
- SEL s = sel_registerName((const char *)meth->name);
+ SEL s = sel_registerName(sel_cname(meth->name));
// Don't warn about GC-ignored selectors
if (ignoreSelector(s)) continue;
uint32_t cm2Count = cat_method_count(cat2, isMeta);
for (m2 = 0; m2 < cm2Count; m2++) {
meth2 = cat_method_nth(cat2, isMeta, m2);
- SEL s2 = sel_registerName((const char *)meth2->name);
+ SEL s2 = sel_registerName(sel_cname(meth2->name));
if (s == s2) goto whine;
}
}
FOREACH_METHOD_LIST(mlist, cls, {
for (m2 = 0; m2 < mlist->count; m2++) {
meth2 = method_list_nth(mlist, m2);
- SEL s2 = sel_registerName((const char *)meth2->name);
+ SEL s2 = sel_registerName(sel_cname(meth2->name));
if (s == s2) goto whine;
}
});
whine:
// Found an override.
- logReplacedMethod(getName(cls), s, isMetaClass(cls), cat->name,
+ logReplacedMethod(cls->nameForLogging(), s,
+ cls->isMetaClass(), cat->name,
_method_getImplementation(meth2),
_method_getImplementation(meth));
}
}
-static BOOL isBundleClass(class_t *cls)
+static BOOL isBundleClass(Class cls)
{
return (cls->data()->ro->flags & RO_FROM_BUNDLE) ? YES : NO;
}
static method_list_t *
-fixupMethodList(method_list_t *mlist, BOOL bundleCopy)
+fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
+ rwlock_assert_writing(&runtimeLock);
assert(!isMethodListFixedUp(mlist));
mlist = (method_list_t *)
// fixme lock less in attachMethodLists ?
sel_lock();
-
+
// Unique selectors in list.
uint32_t m;
for (m = 0; m < mlist->count; m++) {
method_t *meth = method_list_nth(mlist, m);
- SEL sel = sel_registerNameNoLock((const char *)meth->name, bundleCopy);
+
+ const char *name = sel_cname(meth->name);
+
+ SEL sel = sel_registerNameNoLock(name, bundleCopy);
meth->name = sel;
-
+
if (ignoreSelector(sel)) {
meth->imp = (IMP)&_objc_ignored_method;
}
}
-
+
sel_unlock();
// Sort by selector address.
- method_t::SortBySELAddress sorter;
- std::stable_sort(mlist->begin(), mlist->end(), sorter);
+ if (sort) {
+ method_t::SortBySELAddress sorter;
+ std::stable_sort(mlist->begin(), mlist->end(), sorter);
+ }
// Mark method list as uniqued and sorted
setMethodListFixedUp(mlist);
static void
-attachMethodLists(class_t *cls, method_list_t **addedLists, int addedCount,
- BOOL methodsFromBundle, BOOL *inoutVtablesAffected)
+attachMethodLists(Class cls, method_list_t **addedLists, int addedCount,
+ bool baseMethods, bool methodsFromBundle,
+ bool flushCaches)
{
rwlock_assert_writing(&runtimeLock);
// Don't scan redundantly
- BOOL scanForCustomRR = !UseGC && !cls->hasCustomRR();
+ bool scanForCustomRR = !UseGC && !cls->hasCustomRR();
+ bool scanForCustomAWZ = !UseGC && !cls->hasCustomAWZ();
- // Method list array is NULL-terminated.
- // Some elements of lists are NULL; we must filter them out.
+ // There exist RR/AWZ special cases for some classes' base methods.
+ // But this code should never need to scan base methods for RR/AWZ:
+ // default RR/AWZ cannot be set before setInitialized().
+ // Therefore we need not handle any special cases here.
+ if (baseMethods) {
+ assert(!scanForCustomRR && !scanForCustomAWZ);
+ }
+
+ // Method list array is nil-terminated.
+ // Some elements of lists are nil; we must filter them out.
- method_list_t **oldLists = cls->data()->methods;
+ method_list_t *oldBuf[2];
+ method_list_t **oldLists;
int oldCount = 0;
+ if (cls->data()->flags & RW_METHOD_ARRAY) {
+ oldLists = cls->data()->method_lists;
+ } else {
+ oldBuf[0] = cls->data()->method_list;
+ oldBuf[1] = nil;
+ oldLists = oldBuf;
+ }
if (oldLists) {
while (oldLists[oldCount]) oldCount++;
}
- int newCount = oldCount + 1; // including NULL terminator
+ int newCount = oldCount;
for (int i = 0; i < addedCount; i++) {
- if (addedLists[i]) newCount++; // only non-NULL entries get added
+ if (addedLists[i]) newCount++; // only non-nil entries get added
}
- method_list_t **newLists = (method_list_t **)
- _malloc_internal(newCount * sizeof(*newLists));
+ method_list_t *newBuf[2];
+ method_list_t **newLists;
+ if (newCount > 1) {
+ newLists = (method_list_t **)
+ _malloc_internal((1 + newCount) * sizeof(*newLists));
+ } else {
+ newLists = newBuf;
+ }
// Add method lists to array.
// Reallocate un-fixed method lists.
// Fixup selectors if necessary
if (!isMethodListFixedUp(mlist)) {
- mlist = fixupMethodList(mlist, methodsFromBundle);
+ mlist = fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
}
- // Scan for vtable updates
- if (inoutVtablesAffected && !*inoutVtablesAffected) {
- uint32_t m;
- for (m = 0; m < mlist->count; m++) {
- SEL sel = method_list_nth(mlist, m)->name;
- if (vtable_containsSelector(sel)) {
- *inoutVtablesAffected = YES;
- break;
- }
- }
+ // Scan for method implementations tracked by the class's flags
+ if (scanForCustomRR && methodListImplementsRR(mlist)) {
+ cls->setHasCustomRR();
+ scanForCustomRR = false;
+ }
+ if (scanForCustomAWZ && methodListImplementsAWZ(mlist)) {
+ cls->setHasCustomAWZ();
+ scanForCustomAWZ = false;
}
- // Scan for method implementations tracked by the class's flags
- if (scanForCustomRR) {
- uint32_t m;
- for (m = 0; m < mlist->count; m++) {
- SEL sel = method_list_nth(mlist, m)->name;
- if (isRRSelector(sel)) {
- cls->setHasCustomRR();
- scanForCustomRR = NO;
- break;
- }
- }
+ // Update method caches
+ if (flushCaches) {
+ cache_eraseMethods(cls, mlist);
}
// Fill method list array
for (i = 0; i < oldCount; i++) {
newLists[newCount++] = oldLists[i];
}
- if (oldLists) free(oldLists);
+ if (oldLists && oldLists != oldBuf) free(oldLists);
+
+ // nil-terminate
+ newLists[newCount] = nil;
- // NULL-terminate
- newLists[newCount++] = NULL;
- cls->data()->methods = newLists;
+ if (newCount > 1) {
+ assert(newLists != newBuf);
+ cls->data()->method_lists = newLists;
+ cls->setInfo(RW_METHOD_ARRAY);
+ } else {
+ assert(newLists == newBuf);
+ cls->data()->method_list = newLists[0];
+ assert(!(cls->data()->flags & RW_METHOD_ARRAY));
+ }
}
static void
-attachCategoryMethods(class_t *cls, category_list *cats,
- BOOL *inoutVtablesAffected)
+attachCategoryMethods(Class cls, category_list *cats, bool flushCaches)
{
if (!cats) return;
if (PrintReplacedMethods) printReplacements(cls, cats);
- BOOL isMeta = isMetaClass(cls);
+ bool isMeta = cls->isMetaClass();
method_list_t **mlists = (method_list_t **)
_malloc_internal(cats->count * sizeof(*mlists));
}
}
- attachMethodLists(cls, mlists, mcount, fromBundle, inoutVtablesAffected);
+ attachMethodLists(cls, mlists, mcount, NO, fromBundle, flushCaches);
_free_internal(mlists);
-
}
static chained_property_list *
buildPropertyList(const property_list_t *plist, category_list *cats, BOOL isMeta)
{
- // Do NOT use cat->cls! It may have been remapped.
chained_property_list *newlist;
uint32_t count = 0;
uint32_t p, c;
}
}
- if (count == 0) return NULL;
+ if (count == 0) return nil;
// Allocate new list.
newlist = (chained_property_list *)
_malloc_internal(sizeof(*newlist) + count * sizeof(property_t));
newlist->count = 0;
- newlist->next = NULL;
+ newlist->next = nil;
// Copy properties; newest categories first, then ordinary properties
if (cats) {
buildProtocolList(category_list *cats, const protocol_list_t *base,
const protocol_list_t **protos)
{
- // Do NOT use cat->cls! It may have been remapped.
const protocol_list_t **p, **newp;
const protocol_list_t **newprotos;
unsigned int count = 0;
count++;
}
- if (count == 0) return NULL;
+ if (count == 0) return nil;
newprotos = (const protocol_list_t **)
_malloc_internal((count+1) * sizeof(protocol_list_t *));
}
}
- *newp = NULL;
+ *newp = nil;
return newprotos;
}
* methodizeClass
* Fixes up cls's method list, protocol list, and property list.
* Attaches any outstanding categories.
-* Builds vtable.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void methodizeClass(class_t *cls)
+static void methodizeClass(Class cls)
{
category_list *cats;
BOOL isMeta;
rwlock_assert_writing(&runtimeLock);
- isMeta = isMetaClass(cls);
+ isMeta = cls->isMetaClass();
// Methodizing for the first time
if (PrintConnecting) {
_objc_inform("CLASS: methodizing class '%s' %s",
- getName(cls), isMeta ? "(meta)" : "");
+ cls->nameForLogging(), isMeta ? "(meta)" : "");
}
// Build method and protocol and property lists.
// Include methods and protocols and properties from categories, if any
- // Do NOT use cat->cls! It may have been remapped.
attachMethodLists(cls, (method_list_t **)&cls->data()->ro->baseMethods, 1,
- isBundleClass(cls), NULL);
+ YES, isBundleClass(cls), NO);
// Root classes get bonus method implementations if they don't have
// them already. These apply before category replacements.
- if (cls->isRootClass()) {
- // root class
- if (!UseGC) {
- // Assume custom RR except NSObject, even without MM method imps.
- if (0 != strcmp(getName(cls), "NSObject")) cls->setHasCustomRR();
- }
- }
- else if (cls->isRootMetaclass()) {
+ if (cls->isRootMetaclass()) {
// root metaclass
addMethod(cls, SEL_initialize, (IMP)&objc_noop_imp, "", NO);
- if (!UseGC) {
- // Assume custom RR always.
- cls->setHasCustomRR();
- }
}
cats = unattachedCategoriesForClass(cls);
- attachCategoryMethods(cls, cats, NULL);
+ attachCategoryMethods(cls, cats, NO);
if (cats || cls->data()->ro->baseProperties) {
cls->data()->properties =
if (cats || cls->data()->ro->baseProtocols) {
cls->data()->protocols =
- buildProtocolList(cats, cls->data()->ro->baseProtocols, NULL);
+ buildProtocolList(cats, cls->data()->ro->baseProtocols, nil);
}
if (PrintConnecting) {
for (i = 0; i < cats->count; i++) {
_objc_inform("CLASS: attached category %c%s(%s)",
isMeta ? '+' : '-',
- getName(cls), cats->list[i].cat->name);
+ cls->nameForLogging(), cats->list[i].cat->name);
}
}
}
if (cats) _free_internal(cats);
- // No vtable until +initialize completes
- assert(cls->vtable == &_objc_empty_vtable);
-
#ifndef NDEBUG
// Debug: sanity-check all SELs; log method list contents
FOREACH_METHOD_LIST(mlist, cls, {
for ( ; iter != end; ++iter) {
if (PrintConnecting) {
_objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
- getName(cls), sel_getName(iter->name));
+ cls->nameForLogging(), sel_getName(iter->name));
}
assert(ignoreSelector(iter->name) || sel_registerName(sel_getName(iter->name))==iter->name);
}
* remethodizeClass
* Attach outstanding categories to an existing class.
* Fixes up cls's method list, protocol list, and property list.
-* Updates method caches and vtables for cls and its subclasses.
+* Updates method caches for cls and its subclasses.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void remethodizeClass(class_t *cls)
+static void remethodizeClass(Class cls)
{
category_list *cats;
BOOL isMeta;
rwlock_assert_writing(&runtimeLock);
- isMeta = isMetaClass(cls);
+ isMeta = cls->isMetaClass();
// Re-methodizing: check for more categories
if ((cats = unattachedCategoriesForClass(cls))) {
if (PrintConnecting) {
_objc_inform("CLASS: attaching categories to class '%s' %s",
- getName(cls), isMeta ? "(meta)" : "");
+ cls->nameForLogging(), isMeta ? "(meta)" : "");
}
// Update methods, properties, protocols
- BOOL vtableAffected = NO;
- attachCategoryMethods(cls, cats, &vtableAffected);
+ attachCategoryMethods(cls, cats, YES);
- newproperties = buildPropertyList(NULL, cats, isMeta);
+ newproperties = buildPropertyList(nil, cats, isMeta);
if (newproperties) {
newproperties->next = cls->data()->properties;
cls->data()->properties = newproperties;
}
- newprotos = buildProtocolList(cats, NULL, cls->data()->protocols);
+ newprotos = buildProtocolList(cats, nil, cls->data()->protocols);
if (cls->data()->protocols && cls->data()->protocols != newprotos) {
_free_internal(cls->data()->protocols);
}
cls->data()->protocols = newprotos;
_free_internal(cats);
+ }
+}
- // Update method caches and vtables
- flushCaches(cls);
- if (vtableAffected) flushVtables(cls);
+
+/***********************************************************************
+* nonMetaClasses
+* Returns the secondary metaclass => class map
+* Used for some cases of +initialize and +resolveClassMethod:.
+* This map does not contain all class and metaclass pairs. It only
+* contains metaclasses whose classes would be in the runtime-allocated
+* named-class table, but are not because some other class with the same name
+* is in that table.
+* Classes with no duplicates are not included.
+* Classes in the preoptimized named-class table are not included.
+* Classes whose duplicates are in the preoptimized table are not included.
+* Most code should use getNonMetaClass() instead of reading this table.
+* Locking: runtimeLock must be read- or write-locked by the caller
+**********************************************************************/
+static NXMapTable *nonmeta_class_map = nil;
+static NXMapTable *nonMetaClasses(void)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ if (nonmeta_class_map) return nonmeta_class_map;
+
+ // nonmeta_class_map is typically small
+ INIT_ONCE_PTR(nonmeta_class_map,
+ NXCreateMapTableFromZone(NXPtrValueMapPrototype, 32,
+ _objc_internal_zone()),
+ NXFreeMapTable(v));
+
+ return nonmeta_class_map;
+}
+
+
+/***********************************************************************
+* addNonMetaClass
+* Adds metacls => cls to the secondary metaclass map
+* Locking: runtimeLock must be held by the caller
+**********************************************************************/
+static void addNonMetaClass(Class cls)
+{
+ rwlock_assert_writing(&runtimeLock);
+ void *old;
+ old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
+
+ assert(!cls->isMetaClass());
+ assert(cls->ISA()->isMetaClass());
+ assert(!old);
+}
+
+
+static void removeNonMetaClass(Class cls)
+{
+ rwlock_assert_writing(&runtimeLock);
+ NXMapRemove(nonMetaClasses(), cls->ISA());
+}
+
+
+static bool scanMangledField(const char *&string, const char *end,
+ const char *&field, int& length)
+{
+ // Leading zero not allowed.
+ if (*string == '0') return false;
+
+ length = 0;
+ field = string;
+ while (field < end) {
+ char c = *field;
+ if (!isdigit(c)) break;
+ field++;
+ if (__builtin_smul_overflow(length, 10, &length)) return false;
+ if (__builtin_sadd_overflow(length, c - '0', &length)) return false;
+ }
+
+ string = field + length;
+ return length > 0 && string <= end;
+}
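+
+// For example, scanning "5MyApp7MyClass" once leaves `field` pointing at the
+// 'M' of "MyApp" with length == 5, and advances `string` to "7MyClass".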
+
+
+/***********************************************************************
+* copySwiftV1DemangledName
+* Returns the pretty form of the given Swift-v1-mangled class or protocol name.
+* Returns nil if the string doesn't look like a mangled Swift v1 name.
+* The result must be freed with free().
+**********************************************************************/
+static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
+{
+ if (!string) return nil;
+
+ // Swift mangling prefix.
+ if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
+ string += 4;
+
+ const char *end = string + strlen(string);
+
+ // Module name.
+ const char *prefix;
+ int prefixLength;
+ if (strncmp(string, "Ss", 2) == 0) {
+ prefix = "Swift";
+ prefixLength = 5;
+ string += 2;
+ } else {
+ if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
+ }
+
+ // Class or protocol name.
+ const char *suffix;
+ int suffixLength;
+ if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
+
+ if (isProtocol) {
+ // Remainder must be "_".
+ if (strcmp(string, "_") != 0) return nil;
+ } else {
+ // Remainder must be empty.
+ if (string != end) return nil;
}
+
+ char *result;
+ asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
+ return result;
}
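+
+// For example, "_TtC5MyApp7MyClass" demangles to "MyApp.MyClass", and
+// "_TtPSs9Equatable_" (with isProtocol) demangles to "Swift.Equatable".
+// Strings that don't match the pattern return nil.
+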
/***********************************************************************
-* changeInfo
-* Atomically sets and clears some bits in cls's info field.
-* set and clear must not overlap.
+* copySwiftV1MangledName
+* Returns the Swift 1.0 mangled form of the given class or protocol name.
+* Returns nil if the string doesn't look like an unmangled Swift name.
+* The result must be freed with free().
**********************************************************************/
-static void changeInfo(class_t *cls, unsigned int set, unsigned int clear)
+static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
{
- uint32_t oldf, newf;
+ if (!string) return nil;
+
+ size_t dotCount = 0;
+ size_t dotIndex;
+ const char *s;
+ for (s = string; *s; s++) {
+ if (*s == '.') {
+ dotCount++;
+ dotIndex = s - string;
+ }
+ }
+ size_t stringLength = s - string;
- assert(isFuture(cls) || isRealized(cls));
+ if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) {
+ return nil;
+ }
+
+ const char *prefix = string;
+ size_t prefixLength = dotIndex;
+ const char *suffix = string + dotIndex + 1;
+ size_t suffixLength = stringLength - (dotIndex + 1);
+
+ char *name;
- do {
- oldf = cls->data()->flags;
- newf = (oldf | set) & ~clear;
- } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&cls->data()->flags));
+ // Only the module literally named "Swift" gets the short "Ss" mangling.
+ if (prefixLength == 5  &&  memcmp(prefix, "Swift", 5) == 0) {
+ asprintf(&name, "_Tt%cSs%zu%.*s%s",
+ isProtocol ? 'P' : 'C',
+ suffixLength, (int)suffixLength, suffix,
+ isProtocol ? "_" : "");
+ } else {
+ asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
+ isProtocol ? 'P' : 'C',
+ prefixLength, (int)prefixLength, prefix,
+ suffixLength, (int)suffixLength, suffix,
+ isProtocol ? "_" : "");
+ }
+ return name;
}
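+
+// The inverse of copySwiftV1DemangledName above: "MyApp.MyClass" becomes
+// "_TtC5MyApp7MyClass", and "Swift.Equatable" (with isProtocol) becomes
+// "_TtPSs9Equatable_". Names without exactly one interior '.' return nil.
+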
/***********************************************************************
-* namedClasses
-* Returns the classname => class map of all non-meta classes.
-* Locking: runtimeLock must be read- or write-locked by the caller
+* getClass
+* Looks up a class by name. The class MIGHT NOT be realized.
+* Demangled Swift names are recognized.
+* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
+// This is a misnomer: gdb_objc_realized_classes is actually a list of
+// named classes not in the dyld shared cache, whether realized or not.
NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
-static NXMapTable *namedClasses(void)
+static Class getClass_impl(const char *name)
{
rwlock_assert_locked(&runtimeLock);
// allocated in _read_images
assert(gdb_objc_realized_classes);
- return gdb_objc_realized_classes;
+ // Try runtime-allocated table
+ Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
+ if (result) return result;
+
+ // Try table from dyld shared cache
+ return getPreoptimizedClass(name);
+}
+
+static Class getClass(const char *name)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ // Try name as-is
+ Class result = getClass_impl(name);
+ if (result) return result;
+
+ // Try Swift-mangled equivalent of the given name.
+ if (char *swName = copySwiftV1MangledName(name)) {
+ result = getClass_impl(swName);
+ free(swName);
+ return result;
+ }
+
+ return nil;
}
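+
+// For example (illustrative name): a lookup of "MyApp.MyClass" that misses in
+// both tables is retried with the mangled form "_TtC5MyApp7MyClass" produced
+// by copySwiftV1MangledName() above.
+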
* Warns about duplicate class names and keeps the old mapping.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void addNamedClass(class_t *cls, const char *name)
+static void addNamedClass(Class cls, const char *name)
{
rwlock_assert_writing(&runtimeLock);
- class_t *old;
- if ((old = (class_t *)NXMapGet(namedClasses(), name))) {
- inform_duplicate(name, (Class)old, (Class)cls);
+ Class old;
+ if ((old = getClass(name))) {
+ inform_duplicate(name, old, cls);
+
+ // getNonMetaClass uses name lookups. Classes not found by name
+ // lookup must be in the secondary meta->nonmeta table.
+ addNonMetaClass(cls);
} else {
- NXMapInsert(namedClasses(), name, cls);
+ NXMapInsert(gdb_objc_realized_classes, name, cls);
}
assert(!(cls->data()->flags & RO_META));
// wrong: constructed classes are already realized when they get here
- // assert(!isRealized(cls));
+ // assert(!cls->isRealized());
}
* Removes cls from the name => cls map.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void removeNamedClass(class_t *cls, const char *name)
+static void removeNamedClass(Class cls, const char *name)
{
rwlock_assert_writing(&runtimeLock);
assert(!(cls->data()->flags & RO_META));
- if (cls == NXMapGet(namedClasses(), name)) {
- NXMapRemove(namedClasses(), name);
+ if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
+ NXMapRemove(gdb_objc_realized_classes, name);
} else {
// cls has a name collision with another class - don't remove the other
+ // but do remove cls from the secondary metaclass->class map.
+ removeNonMetaClass(cls);
}
}
* Returns the class list for realized non-meta classes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-static NXHashTable *realized_class_hash = NULL;
+static NXHashTable *realized_class_hash = nil;
static NXHashTable *realizedClasses(void)
-{
+{
rwlock_assert_locked(&runtimeLock);
// allocated in _read_images
* Returns the class list for realized metaclasses.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-static NXHashTable *realized_metaclass_hash = NULL;
+static NXHashTable *realized_metaclass_hash = nil;
static NXHashTable *realizedMetaclasses(void)
{
rwlock_assert_locked(&runtimeLock);
* Adds cls to the realized non-meta class hash.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void addRealizedClass(class_t *cls)
+static void addRealizedClass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
void *old;
old = NXHashInsert(realizedClasses(), cls);
- objc_addRegisteredClass((Class)cls);
- assert(!isMetaClass(cls));
+ objc_addRegisteredClass(cls);
+ assert(!cls->isMetaClass());
assert(!old);
}
* Removes cls from the realized non-meta class hash.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void removeRealizedClass(class_t *cls)
+static void removeRealizedClass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
- if (isRealized(cls)) {
- assert(!isMetaClass(cls));
+ if (cls->isRealized()) {
+ assert(!cls->isMetaClass());
NXHashRemove(realizedClasses(), cls);
- objc_removeRegisteredClass((Class)cls);
+ objc_removeRegisteredClass(cls);
}
}
* Adds cls to the realized metaclass hash.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void addRealizedMetaclass(class_t *cls)
+static void addRealizedMetaclass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
void *old;
old = NXHashInsert(realizedMetaclasses(), cls);
- assert(isMetaClass(cls));
+ assert(cls->isMetaClass());
assert(!old);
}
* Removes cls from the realized metaclass hash.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void removeRealizedMetaclass(class_t *cls)
+static void removeRealizedMetaclass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
- if (isRealized(cls)) {
- assert(isMetaClass(cls));
+ if (cls->isRealized()) {
+ assert(cls->isMetaClass());
NXHashRemove(realizedMetaclasses(), cls);
}
}
/***********************************************************************
-* uninitializedClasses
-* Returns the metaclass => class map for un-+initialized classes
-* Replaces the 32-bit cls = objc_getName(metacls) during +initialize.
-* Locking: runtimeLock must be read- or write-locked by the caller
-**********************************************************************/
-static NXMapTable *uninitialized_class_map = NULL;
-static NXMapTable *uninitializedClasses(void)
-{
- rwlock_assert_locked(&runtimeLock);
-
- // allocated in _read_images
- assert(uninitialized_class_map);
-
- return uninitialized_class_map;
-}
-
-
-/***********************************************************************
-* addUninitializedClass
-* Adds metacls => cls to the un-+initialized class map
-* Locking: runtimeLock must be held by the caller
-**********************************************************************/
-static void addUninitializedClass(class_t *cls, class_t *metacls)
-{
- rwlock_assert_writing(&runtimeLock);
- void *old;
- old = NXMapInsert(uninitializedClasses(), metacls, cls);
- assert(isRealized(metacls) ? isMetaClass(metacls) : metacls->data()->flags & RO_META);
- assert(! (isRealized(cls) ? isMetaClass(cls) : cls->data()->flags & RO_META));
- assert(!old);
-}
-
-
-static void removeUninitializedClass(class_t *cls)
-{
- rwlock_assert_writing(&runtimeLock);
- NXMapRemove(uninitializedClasses(), cls->isa);
-}
-
-
-/***********************************************************************
-* getNonMetaClass
-* Return the ordinary class for this class or metaclass.
-* Used by +initialize.
-* Locking: runtimeLock must be read- or write-locked by the caller
-**********************************************************************/
-static class_t *getNonMetaClass(class_t *cls)
-{
- rwlock_assert_locked(&runtimeLock);
- if (isMetaClass(cls)) {
- cls = (class_t *)NXMapGet(uninitializedClasses(), cls);
- }
- return cls;
-}
-
-
-/***********************************************************************
-* _class_getNonMetaClass
-* Return the ordinary class for this class or metaclass.
-* Used by +initialize.
-* Locking: acquires runtimeLock
-**********************************************************************/
-PRIVATE_EXTERN Class _class_getNonMetaClass(Class cls_gen)
-{
- class_t *cls = newcls(cls_gen);
- rwlock_write(&runtimeLock);
- cls = getNonMetaClass(cls);
- realizeClass(cls);
- rwlock_unlock_write(&runtimeLock);
-
- return (Class)cls;
-}
-
-
-
-/***********************************************************************
-* futureClasses
+* futureNamedClasses
* Returns the classname => future class map for unrealized future classes.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static NXMapTable *futureClasses(void)
+static NXMapTable *futureNamedClasses(void)
{
rwlock_assert_writing(&runtimeLock);
- static NXMapTable *future_class_map = NULL;
+ static NXMapTable *future_named_class_map = nil;
- if (future_class_map) return future_class_map;
+ if (future_named_class_map) return future_named_class_map;
- // future_class_map is big enough to hold CF's classes and a few others
- future_class_map = NXCreateMapTableFromZone(NXStrValueMapPrototype, 32,
- _objc_internal_zone());
+ // future_named_class_map is big enough for CF's classes and a few others
+ future_named_class_map =
+ NXCreateMapTableFromZone(NXStrValueMapPrototype, 32,
+ _objc_internal_zone());
- return future_class_map;
+ return future_named_class_map;
}
/***********************************************************************
-* addFutureClass
+* addFutureNamedClass
* Installs cls as the class structure to use for the named class if it appears.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void addFutureClass(const char *name, class_t *cls)
+static void addFutureNamedClass(const char *name, Class cls)
{
void *old;
rwlock_assert_writing(&runtimeLock);
if (PrintFuture) {
- _objc_inform("FUTURE: reserving %p for %s", cls, name);
+ _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
}
- cls->setData((class_rw_t *)_calloc_internal(sizeof(*cls->data()), 1));
+ class_rw_t *rw = (class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1);
+ class_ro_t *ro = (class_ro_t *)_calloc_internal(sizeof(class_ro_t), 1);
+ ro->name = _strdup_internal(name);
+ rw->ro = ro;
+ cls->setData(rw);
cls->data()->flags = RO_FUTURE;
- old = NXMapKeyCopyingInsert(futureClasses(), name, cls);
+ old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
assert(!old);
}
/***********************************************************************
-* removeFutureClass
+* removeFutureNamedClass
* Removes the named class from the unrealized future class list,
* because it has been realized.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
-static void removeFutureClass(const char *name)
+static void removeFutureNamedClass(const char *name)
{
rwlock_assert_writing(&runtimeLock);
- NXMapKeyFreeingRemove(futureClasses(), name);
+ NXMapKeyFreeingRemove(futureNamedClasses(), name);
}
/***********************************************************************
* remappedClasses
* Returns the oldClass => newClass map for realized future classes.
-* Returns the oldClass => NULL map for ignored weak-linked classes.
+* Returns the oldClass => nil map for ignored weak-linked classes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXMapTable *remappedClasses(BOOL create)
{
- static NXMapTable *remapped_class_map = NULL;
+ static NXMapTable *remapped_class_map = nil;
rwlock_assert_locked(&runtimeLock);
if (remapped_class_map) return remapped_class_map;
- if (!create) return NULL;
+ if (!create) return nil;
// remapped_class_map is big enough to hold CF's classes and a few others
INIT_ONCE_PTR(remapped_class_map,
{
rwlock_assert_locked(&runtimeLock);
- BOOL result = (remappedClasses(NO) == NULL);
+ BOOL result = (remappedClasses(NO) == nil);
return result;
}
/***********************************************************************
* addRemappedClass
* newcls is a realized future class, replacing oldcls.
-* OR newcls is NULL, replacing ignored weak-linked class oldcls.
+* OR newcls is nil, replacing ignored weak-linked class oldcls.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
-static void addRemappedClass(class_t *oldcls, class_t *newcls)
+static void addRemappedClass(Class oldcls, Class newcls)
{
rwlock_assert_writing(&runtimeLock);
if (PrintFuture) {
_objc_inform("FUTURE: using %p instead of %p for %s",
- oldcls, newcls, getName(newcls));
+ (void*)oldcls, (void*)newcls, oldcls->nameForLogging());
}
void *old;
* remapClass
* Returns the live class pointer for cls, which may be pointing to
* a class struct that has been reallocated.
-* Returns NULL if cls is ignored because of weak linking.
+* Returns nil if cls is ignored because of weak linking.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-static class_t *remapClass(class_t *cls)
+static Class remapClass(Class cls)
{
rwlock_assert_locked(&runtimeLock);
- class_t *c2;
+ Class c2;
- if (!cls) return NULL;
+ if (!cls) return nil;
if (NXMapMember(remappedClasses(YES), cls, (void**)&c2) == NX_MAPNOTAKEY) {
return cls;
}
}
+static Class remapClass(classref_t cls)
+{
+ return remapClass((Class)cls);
+}
+
+Class _class_remap(Class cls)
+{
+ rwlock_read(&runtimeLock);
+ Class result = remapClass(cls);
+ rwlock_unlock_read(&runtimeLock);
+ return result;
+}
/***********************************************************************
* remapClassRef
* or is an ignored weak-linked class.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-static void remapClassRef(class_t **clsref)
+static void remapClassRef(Class *clsref)
{
rwlock_assert_locked(&runtimeLock);
- class_t *newcls = remapClass(*clsref);
+ Class newcls = remapClass(*clsref);
if (*clsref != newcls) *clsref = newcls;
}
+/***********************************************************************
+* getNonMetaClass
+* Return the ordinary class for this class or metaclass.
+* `inst` is an instance of `cls` or a subclass thereof, or nil.
+* Non-nil inst is faster.
+* Used by +initialize.
+* Locking: runtimeLock must be read- or write-locked by the caller
+**********************************************************************/
+static Class getNonMetaClass(Class metacls, id inst)
+{
+ static int total, named, secondary, sharedcache;
+ rwlock_assert_locked(&runtimeLock);
+
+ realizeClass(metacls);
+
+ total++;
+
+ // return cls itself if it's already a non-meta class
+ if (!metacls->isMetaClass()) return metacls;
+
+ // metacls really is a metaclass
+
+ // special case for root metaclass
+ // where inst == inst->ISA() == metacls is possible
+ if (metacls->ISA() == metacls) {
+ Class cls = metacls->superclass;
+ assert(cls->isRealized());
+ assert(!cls->isMetaClass());
+ assert(cls->ISA() == metacls);
+ if (cls->ISA() == metacls) return cls;
+ }
+
+ // use inst if available
+ if (inst) {
+ Class cls = (Class)inst;
+ realizeClass(cls);
+ // cls may be a subclass - find the real class for metacls
+ while (cls && cls->ISA() != metacls) {
+ cls = cls->superclass;
+ realizeClass(cls);
+ }
+ if (cls) {
+ assert(!cls->isMetaClass());
+ assert(cls->ISA() == metacls);
+ return cls;
+ }
+#if !NDEBUG
+ _objc_fatal("cls is not an instance of metacls");
+#else
+ // release build: be forgiving and fall through to slow lookups
+#endif
+ }
+
+ // try name lookup
+ {
+ Class cls = getClass(metacls->mangledName());
+ if (cls->ISA() == metacls) {
+ named++;
+ if (PrintInitializing) {
+ _objc_inform("INITIALIZE: %d/%d (%g%%) "
+ "successful by-name metaclass lookups",
+ named, total, named*100.0/total);
+ }
+
+ realizeClass(cls);
+ return cls;
+ }
+ }
+
+ // try secondary table
+ {
+ Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
+ if (cls) {
+ secondary++;
+ if (PrintInitializing) {
+ _objc_inform("INITIALIZE: %d/%d (%g%%) "
+ "successful secondary metaclass lookups",
+ secondary, total, secondary*100.0/total);
+ }
+
+ assert(cls->ISA() == metacls);
+ realizeClass(cls);
+ return cls;
+ }
+ }
+
+ // try any duplicates in the dyld shared cache
+ {
+ Class cls = nil;
+
+ int count;
+ Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count);
+ if (classes) {
+ for (int i = 0; i < count; i++) {
+ if (classes[i]->ISA() == metacls) {
+ cls = classes[i];
+ break;
+ }
+ }
+ free(classes);
+ }
+
+ if (cls) {
+ sharedcache++;
+ if (PrintInitializing) {
+ _objc_inform("INITIALIZE: %d/%d (%g%%) "
+ "successful shared cache metaclass lookups",
+ sharedcache, total, sharedcache*100.0/total);
+ }
+
+ realizeClass(cls);
+ return cls;
+ }
+ }
+
+ _objc_fatal("no class for metaclass %p", (void*)metacls);
+}
+
+
+/***********************************************************************
+* _class_getNonMetaClass
+* Return the ordinary class for this class or metaclass.
+* Used by +initialize.
+* Locking: acquires runtimeLock
+**********************************************************************/
+Class _class_getNonMetaClass(Class cls, id obj)
+{
+ rwlock_write(&runtimeLock);
+ cls = getNonMetaClass(cls, obj);
+ assert(cls->isRealized());
+ rwlock_unlock_write(&runtimeLock);
+
+ return cls;
+}
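+// Behavior sketch: a non-meta class comes back unchanged; for a
+// metaclass, the matching class is recovered via the instance, the name
+// tables, or the shared cache duplicates, and is realized before being
+// returned. +initialize relies on this to map the receiving metaclass
+// back to the class that should be initialized.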
+
+
/***********************************************************************
* addSubclass
* Adds subcls as a subclass of supercls.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static void addSubclass(class_t *supercls, class_t *subcls)
+static void addSubclass(Class supercls, Class subcls)
{
rwlock_assert_writing(&runtimeLock);
if (supercls && subcls) {
- assert(isRealized(supercls));
- assert(isRealized(subcls));
+ assert(supercls->isRealized());
+ assert(subcls->isRealized());
subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
supercls->data()->firstSubclass = subcls;
- if (supercls->data()->flags & RW_HAS_CXX_STRUCTORS) {
- subcls->data()->flags |= RW_HAS_CXX_STRUCTORS;
+ if (supercls->hasCxxCtor()) {
+ subcls->setHasCxxCtor();
+ }
+
+ if (supercls->hasCxxDtor()) {
+ subcls->setHasCxxDtor();
}
if (supercls->hasCustomRR()) {
- subcls->setHasCustomRR();
+ subcls->setHasCustomRR(true);
+ }
+
+ if (supercls->hasCustomAWZ()) {
+ subcls->setHasCustomAWZ(true);
+ }
+
+ if (supercls->requiresRawIsa()) {
+ subcls->setRequiresRawIsa(true);
}
}
}
* Removes subcls as a subclass of supercls.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static void removeSubclass(class_t *supercls, class_t *subcls)
+static void removeSubclass(Class supercls, Class subcls)
{
rwlock_assert_writing(&runtimeLock);
- assert(getSuperclass(subcls) == supercls);
+ assert(supercls->isRealized());
+ assert(subcls->isRealized());
+ assert(subcls->superclass == supercls);
- class_t **cp;
+ Class *cp;
for (cp = &supercls->data()->firstSubclass;
*cp && *cp != subcls;
cp = &(*cp)->data()->nextSiblingClass)
**********************************************************************/
static NXMapTable *protocols(void)
{
- static NXMapTable *protocol_map = NULL;
+ static NXMapTable *protocol_map = nil;
rwlock_assert_locked(&runtimeLock);
}
+/***********************************************************************
+* getProtocol
+* Looks up a protocol by name. Demangled Swift names are recognized.
+* Locking: runtimeLock must be read- or write-locked by the caller.
+**********************************************************************/
+static Protocol *getProtocol_impl(const char *name)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ return (Protocol *)NXMapGet(protocols(), name);
+}
+
+static Protocol *getProtocol(const char *name)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ // Try name as-is.
+ Protocol *result = getProtocol_impl(name);
+ if (result) return result;
+
+ // Try Swift-mangled equivalent of the given name.
+ if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
+ result = getProtocol_impl(swName);
+ free(swName);
+ return result;
+ }
+
+ return nil;
+}
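+// Like getClass(), a failed protocol lookup is retried with the Swift v1
+// mangled form; e.g. a protocol named "Foo.Bar" (made-up name) would be
+// retried as "_TtP3Foo3Bar_".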
+
+
/***********************************************************************
* remapProtocol
* Returns the live protocol pointer for proto, which may be pointing to
rwlock_assert_locked(&runtimeLock);
protocol_t *newproto = (protocol_t *)
- NXMapGet(protocols(), ((protocol_t *)proto)->name);
+ getProtocol(((protocol_t *)proto)->mangledName);
return newproto ? newproto : (protocol_t *)proto;
}
ivar_t *ivar = ivar_list_nth(ro->ivars, i);
if (!ivar->offset) continue; // anonymous bitfield
- uint32_t alignment = ivar_alignment(ivar);
+ uint32_t alignment = ivar->alignment();
if (alignment > maxAlignment) maxAlignment = alignment;
}
if (PrintIvars) {
_objc_inform("IVARS: offset %u -> %u for %s (size %u, align %u)",
oldOffset, newOffset, ivar->name,
- ivar->size, ivar_alignment(ivar));
+ ivar->size, ivar->alignment());
}
}
* Look up an ivar by name.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
-static ivar_t *getIvar(class_t *cls, const char *name)
+static ivar_t *getIvar(Class cls, const char *name)
{
rwlock_assert_locked(&runtimeLock);
const ivar_list_t *ivars;
- assert(isRealized(cls));
+ assert(cls->isRealized());
if ((ivars = cls->data()->ro->ivars)) {
uint32_t i;
for (i = 0; i < ivars->count; i++) {
ivar_t *ivar = ivar_list_nth(ivars, i);
if (!ivar->offset) continue; // anonymous bitfield
- // ivar->name may be NULL for anonymous bitfields etc.
+ // ivar->name may be nil for anonymous bitfields etc.
if (ivar->name && 0 == strcmp(name, ivar->name)) {
return ivar;
}
}
}
- return NULL;
+ return nil;
}
-static void reconcileInstanceVariables(class_t *cls, class_t *supercls) {
+
+static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
+{
class_rw_t *rw = cls->data();
- const class_ro_t *ro = rw->ro;
+
+ assert(supercls);
+ assert(!cls->isMetaClass());
+
+ /* debug: print them all before sliding
+ if (ro->ivars) {
+ uint32_t i;
+ for (i = 0; i < ro->ivars->count; i++) {
+ ivar_t *ivar = ivar_list_nth(ro->ivars, i);
+ if (!ivar->offset) continue; // anonymous bitfield
+
+ _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
+ ro->name, ivar->name,
+ *ivar->offset, ivar->size, ivar->alignment());
+ }
+ }
+ */
+
+ // Non-fragile ivars - reconcile this class with its superclass
+ layout_bitmap ivarBitmap;
+ layout_bitmap weakBitmap;
+ bool layoutsChanged = NO;
+ bool mergeLayouts = UseGC;
+ const class_ro_t *super_ro = supercls->data()->ro;
- if (supercls) {
- // Non-fragile ivars - reconcile this class with its superclass
- // Does this really need to happen for the isMETA case?
- layout_bitmap ivarBitmap;
- layout_bitmap weakBitmap;
- BOOL layoutsChanged = NO;
- BOOL mergeLayouts = UseGC;
- const class_ro_t *super_ro = supercls->data()->ro;
+ if (DebugNonFragileIvars) {
+ // Debugging: Force non-fragile ivars to slide.
+ // Intended to find compiler, runtime, and program bugs.
+ // If it fails with this and works without, you have a problem.
- if (DebugNonFragileIvars) {
- // Debugging: Force non-fragile ivars to slide.
- // Intended to find compiler, runtime, and program bugs.
- // If it fails with this and works without, you have a problem.
-
- // Operation: Reset everything to 0 + misalignment.
- // Then force the normal sliding logic to push everything back.
-
- // Exceptions: root classes, metaclasses, *NSCF* classes,
- // __CF* classes, NSConstantString, NSSimpleCString
+ // Operation: Reset everything to 0 + misalignment.
+ // Then force the normal sliding logic to push everything back.
+
+ // Exceptions: root classes, metaclasses, *NSCF* classes,
+ // __CF* classes, NSConstantString, NSSimpleCString
+
+ // (already know it's not root because supercls != nil)
+ const char *clsname = cls->mangledName();
+ if (!strstr(clsname, "NSCF") &&
+ 0 != strncmp(clsname, "__CF", 4) &&
+ 0 != strcmp(clsname, "NSConstantString") &&
+ 0 != strcmp(clsname, "NSSimpleCString"))
+ {
+ uint32_t oldStart = ro->instanceStart;
+ uint32_t oldSize = ro->instanceSize;
+ class_ro_t *ro_w = make_ro_writeable(rw);
+ ro = rw->ro;
- // (already know it's not root because supercls != nil)
- if (!strstr(getName(cls), "NSCF") &&
- 0 != strncmp(getName(cls), "__CF", 4) &&
- 0 != strcmp(getName(cls), "NSConstantString") &&
- 0 != strcmp(getName(cls), "NSSimpleCString"))
- {
- uint32_t oldStart = ro->instanceStart;
- uint32_t oldSize = ro->instanceSize;
- class_ro_t *ro_w = make_ro_writeable(rw);
- ro = rw->ro;
-
- // Find max ivar alignment in class.
- // default to word size to simplify ivar update
- uint32_t alignment = 1<<WORD_SHIFT;
- if (ro->ivars) {
- uint32_t i;
- for (i = 0; i < ro->ivars->count; i++) {
- ivar_t *ivar = ivar_list_nth(ro->ivars, i);
- if (ivar_alignment(ivar) > alignment) {
- alignment = ivar_alignment(ivar);
- }
+ // Find max ivar alignment in class.
+ // default to word size to simplify ivar update
+ uint32_t alignment = 1<<WORD_SHIFT;
+ if (ro->ivars) {
+ uint32_t i;
+ for (i = 0; i < ro->ivars->count; i++) {
+ ivar_t *ivar = ivar_list_nth(ro->ivars, i);
+ if (ivar->alignment() > alignment) {
+ alignment = ivar->alignment();
}
}
- uint32_t misalignment = ro->instanceStart % alignment;
- uint32_t delta = ro->instanceStart - misalignment;
- ro_w->instanceStart = misalignment;
- ro_w->instanceSize -= delta;
-
- if (PrintIvars) {
- _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
- "to slide (instanceStart %zu -> %zu)",
- getName(cls), (size_t)oldStart,
- (size_t)ro->instanceStart);
+ }
+ uint32_t misalignment = ro->instanceStart % alignment;
+ uint32_t delta = ro->instanceStart - misalignment;
+ ro_w->instanceStart = misalignment;
+ ro_w->instanceSize -= delta;
+
+ if (PrintIvars) {
+ _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
+ "to slide (instanceStart %zu -> %zu)",
+ cls->nameForLogging(), (size_t)oldStart,
+ (size_t)ro->instanceStart);
+ }
+
+ if (ro->ivars) {
+ uint32_t i;
+ for (i = 0; i < ro->ivars->count; i++) {
+ ivar_t *ivar = ivar_list_nth(ro->ivars, i);
+ if (!ivar->offset) continue; // anonymous bitfield
+ *ivar->offset -= delta;
}
-
- if (ro->ivars) {
- uint32_t i;
- for (i = 0; i < ro->ivars->count; i++) {
- ivar_t *ivar = ivar_list_nth(ro->ivars, i);
- if (!ivar->offset) continue; // anonymous bitfield
- *ivar->offset -= delta;
- }
+ }
+
+ if (mergeLayouts) {
+ layout_bitmap layout;
+ if (ro->ivarLayout) {
+ layout = layout_bitmap_create(ro->ivarLayout,
+ oldSize, oldSize, NO);
+ layout_bitmap_slide_anywhere(&layout,
+ delta >> WORD_SHIFT, 0);
+ ro_w->ivarLayout = layout_string_create(layout);
+ layout_bitmap_free(layout);
}
-
- if (mergeLayouts) {
- layout_bitmap layout;
- if (ro->ivarLayout) {
- layout = layout_bitmap_create(ro->ivarLayout,
- oldSize, oldSize, NO);
- layout_bitmap_slide_anywhere(&layout,
- delta >> WORD_SHIFT, 0);
- ro_w->ivarLayout = layout_string_create(layout);
- layout_bitmap_free(layout);
- }
- if (ro->weakIvarLayout) {
- layout = layout_bitmap_create(ro->weakIvarLayout,
- oldSize, oldSize, YES);
- layout_bitmap_slide_anywhere(&layout,
- delta >> WORD_SHIFT, 0);
- ro_w->weakIvarLayout = layout_string_create(layout);
- layout_bitmap_free(layout);
- }
+ if (ro->weakIvarLayout) {
+ layout = layout_bitmap_create(ro->weakIvarLayout,
+ oldSize, oldSize, YES);
+ layout_bitmap_slide_anywhere(&layout,
+ delta >> WORD_SHIFT, 0);
+ ro_w->weakIvarLayout = layout_string_create(layout);
+ layout_bitmap_free(layout);
}
}
}
-
- // fixme can optimize for "class has no new ivars", etc
+ }
+
+ if (ro->instanceStart >= super_ro->instanceSize && !mergeLayouts) {
+ // Superclass has not overgrown its space, and we don't
+ // need to rebuild GC layouts. We're done here.
+ return;
+ }
+ // fixme can optimize for "class has no new ivars", etc
+
+ if (mergeLayouts) {
// WARNING: gcc c++ sets instanceStart/Size=0 for classes with
// no local ivars, but does provide a layout bitmap.
// Handle that case specially so layout_bitmap_create doesn't die
// how long it is. Force a new layout to be created.
if (PrintIvars) {
_objc_inform("IVARS: instanceStart/Size==0 for class %s; "
- "disregarding ivar layout", ro->name);
+ "disregarding ivar layout", cls->nameForLogging());
}
ivarBitmap = layout_bitmap_create_empty(super_ro->instanceSize, NO);
weakBitmap = layout_bitmap_create_empty(super_ro->instanceSize, YES);
layoutsChanged = YES;
- } else {
+ }
+ else {
ivarBitmap =
- layout_bitmap_create(ro->ivarLayout,
- ro->instanceSize,
- ro->instanceSize, NO);
+ layout_bitmap_create(ro->ivarLayout,
+ ro->instanceSize,
+ ro->instanceSize, NO);
weakBitmap =
- layout_bitmap_create(ro->weakIvarLayout,
- ro->instanceSize,
- ro->instanceSize, YES);
+ layout_bitmap_create(ro->weakIvarLayout,
+ ro->instanceSize,
+ ro->instanceSize, YES);
+ }
+ }
+
+ if (ro->instanceStart < super_ro->instanceSize) {
+ // Superclass has changed size. This class's ivars must move.
+ // Also slide layout bits in parallel.
+ // This code is incapable of compacting the subclass to
+ // compensate for a superclass that shrunk, so don't do that.
+ if (PrintIvars) {
+ _objc_inform("IVARS: sliding ivars for class %s "
+ "(superclass was %u bytes, now %u)",
+ cls->nameForLogging(), ro->instanceStart,
+ super_ro->instanceSize);
}
+ class_ro_t *ro_w = make_ro_writeable(rw);
+ ro = rw->ro;
+ moveIvars(ro_w, super_ro->instanceSize,
+ mergeLayouts ? &ivarBitmap : nil,
+ mergeLayouts ? &weakBitmap : nil);
+ gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
+ layoutsChanged = YES;
+ }
+
+ if (mergeLayouts) {
+ // Check superclass's layout against this class's layout.
+ // This needs to be done even if the superclass is not bigger.
+ layout_bitmap superBitmap;
- if (ro->instanceStart < super_ro->instanceSize) {
- // Superclass has changed size. This class's ivars must move.
- // Also slide layout bits in parallel.
- // This code is incapable of compacting the subclass to
- // compensate for a superclass that shrunk, so don't do that.
- if (PrintIvars) {
- _objc_inform("IVARS: sliding ivars for class %s "
- "(superclass was %u bytes, now %u)",
- ro->name, ro->instanceStart,
- super_ro->instanceSize);
- }
- class_ro_t *ro_w = make_ro_writeable(rw);
- ro = rw->ro;
- moveIvars(ro_w, super_ro->instanceSize,
- mergeLayouts ? &ivarBitmap : NULL, mergeLayouts ? &weakBitmap : NULL);
- gdb_objc_class_changed((Class)cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
- layoutsChanged = mergeLayouts;
- }
+ superBitmap = layout_bitmap_create(super_ro->ivarLayout,
+ super_ro->instanceSize,
+ super_ro->instanceSize, NO);
+ layoutsChanged |= layout_bitmap_splat(ivarBitmap, superBitmap,
+ ro->instanceStart);
+ layout_bitmap_free(superBitmap);
- if (mergeLayouts) {
- // Check superclass's layout against this class's layout.
- // This needs to be done even if the superclass is not bigger.
- layout_bitmap superBitmap = layout_bitmap_create(super_ro->ivarLayout,
- super_ro->instanceSize,
- super_ro->instanceSize, NO);
- layoutsChanged |= layout_bitmap_splat(ivarBitmap, superBitmap,
- ro->instanceStart);
- layout_bitmap_free(superBitmap);
-
- // check the superclass' weak layout.
- superBitmap = layout_bitmap_create(super_ro->weakIvarLayout,
- super_ro->instanceSize,
- super_ro->instanceSize, YES);
- layoutsChanged |= layout_bitmap_splat(weakBitmap, superBitmap,
- ro->instanceStart);
- layout_bitmap_free(superBitmap);
- }
+ // check the superclass' weak layout.
+ superBitmap = layout_bitmap_create(super_ro->weakIvarLayout,
+ super_ro->instanceSize,
+ super_ro->instanceSize, YES);
+ layoutsChanged |= layout_bitmap_splat(weakBitmap, superBitmap,
+ ro->instanceStart);
+ layout_bitmap_free(superBitmap);
+ // Rebuild layout strings if necessary.
if (layoutsChanged) {
- // Rebuild layout strings.
if (PrintIvars) {
- _objc_inform("IVARS: gc layout changed for class %s",
- ro->name);
+ _objc_inform("IVARS: gc layout changed for class %s",
+ cls->nameForLogging());
}
class_ro_t *ro_w = make_ro_writeable(rw);
ro = rw->ro;
}
}
+
/***********************************************************************
* realizeClass
* Performs first-time initialization on class cls,
* Returns the real class structure for the class.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
-static class_t *realizeClass(class_t *cls)
+static Class realizeClass(Class cls)
{
rwlock_assert_writing(&runtimeLock);
const class_ro_t *ro;
class_rw_t *rw;
- class_t *supercls;
- class_t *metacls;
+ Class supercls;
+ Class metacls;
BOOL isMeta;
- if (!cls) return NULL;
- if (isRealized(cls)) return cls;
+ if (!cls) return nil;
+ if (cls->isRealized()) return cls;
assert(cls == remapClass(cls));
+ // fixme verify class is not in an un-dlopened part of the shared cache?
+
ro = (const class_ro_t *)cls->data();
if (ro->flags & RO_FUTURE) {
// This was a future class. rw data is already allocated.
rw = cls->data();
ro = cls->data()->ro;
- changeInfo(cls, RW_REALIZED, RW_FUTURE);
+ cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
} else {
// Normal class. Allocate writeable class data.
rw = (class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1);
rw->ro = ro;
- rw->flags = RW_REALIZED;
+ rw->flags = RW_REALIZED|RW_REALIZING;
cls->setData(rw);
}
if (PrintConnecting) {
_objc_inform("CLASS: realizing class '%s' %s %p %p",
- ro->name, isMeta ? "(meta)" : "", cls, ro);
+ cls->nameForLogging(), isMeta ? "(meta)" : "",
+ (void*)cls, ro);
}
// Realize superclass and metaclass, if they aren't already.
// This needs to be done after RW_REALIZED is set above, for root classes.
supercls = realizeClass(remapClass(cls->superclass));
- metacls = realizeClass(remapClass(cls->isa));
-
- // Check for remapped superclass
- // fixme doesn't handle remapped metaclass
- assert(metacls == cls->isa);
- if (supercls != cls->superclass) {
- cls->superclass = supercls;
- }
-
- /* debug: print them all
- if (ro->ivars) {
- uint32_t i;
- for (i = 0; i < ro->ivars->count; i++) {
- ivar_t *ivar = ivar_list_nth(ro->ivars, i);
- if (!ivar->offset) continue; // anonymous bitfield
-
- _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
- ro->name, ivar->name,
- *ivar->offset, ivar->size, ivar_alignment(ivar));
- }
- }
- */
+ metacls = realizeClass(remapClass(cls->ISA()));
+
+ // Update superclass and metaclass in case of remapping
+ cls->superclass = supercls;
+ cls->initClassIsa(metacls);
// Reconcile instance variable offsets / layout.
- if (!isMeta) reconcileInstanceVariables(cls, supercls);
+ // This may reallocate class_ro_t, updating our ro variable.
+ if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro);
+
+ // Set fastInstanceSize if it wasn't set already.
+ cls->setInstanceSize(ro->instanceSize);
// Copy some flags from ro to rw
- if (ro->flags & RO_HAS_CXX_STRUCTORS) rw->flags |= RW_HAS_CXX_STRUCTORS;
+ if (ro->flags & RO_HAS_CXX_STRUCTORS) {
+ cls->setHasCxxDtor();
+ if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
+ cls->setHasCxxCtor();
+ }
+ }
+
+ // Disable non-pointer isa for some classes and/or platforms.
+#if SUPPORT_NONPOINTER_ISA
+ {
+ bool disable = false;
+ static bool hackedDispatch = false;
+
+ if (DisableIndexedIsa) {
+ // Non-pointer isa disabled by environment or GC or app SDK version
+ disable = true;
+ }
+ else if (!hackedDispatch && !(ro->flags & RO_META) &&
+ 0 == strcmp(ro->name, "OS_object"))
+ {
+ // hack for libdispatch et al - isa also acts as vtable pointer
+ hackedDispatch = true;
+ disable = true;
+ }
+
+ if (disable) {
+ cls->setRequiresRawIsa(false/*inherited*/);
+ }
+ }
+#endif
// Connect this class to its superclass's subclass lists
if (supercls) {
}
-/***********************************************************************
-* getClass
-* Looks up a class by name. The class MIGHT NOT be realized.
-* Locking: runtimeLock must be read- or write-locked by the caller.
-**********************************************************************/
-static class_t *getClass(const char *name)
-{
- rwlock_assert_locked(&runtimeLock);
-
- return (class_t *)NXMapGet(namedClasses(), name);
-}
-
-
/***********************************************************************
* missingWeakSuperclass
* Return YES if some superclass of cls was weak-linked and is missing.
**********************************************************************/
static BOOL
-missingWeakSuperclass(class_t *cls)
+missingWeakSuperclass(Class cls)
{
- assert(!isRealized(cls));
+ assert(!cls->isRealized());
if (!cls->superclass) {
- // superclass NULL. This is normal for root classes only.
+ // superclass nil. This is normal for root classes only.
return (!(cls->data()->flags & RO_ROOT));
} else {
- // superclass not NULL. Check if a higher superclass is missing.
- class_t *supercls = remapClass(cls->superclass);
+ // superclass not nil. Check if a higher superclass is missing.
+ Class supercls = remapClass(cls->superclass);
+ assert(cls != cls->superclass);
+ assert(cls != supercls);
if (!supercls) return YES;
- if (isRealized(supercls)) return NO;
+ if (supercls->isRealized()) return NO;
return missingWeakSuperclass(supercls);
}
}
rwlock_assert_writing(&runtimeLock);
size_t count, i;
- class_t **classlist;
+ classref_t *classlist;
if (hi->allClassesRealized) return;
* Assumes the named class doesn't exist yet.
* Locking: acquires runtimeLock
**********************************************************************/
-PRIVATE_EXTERN Class _objc_allocateFutureClass(const char *name)
+Class _objc_allocateFutureClass(const char *name)
{
rwlock_write(&runtimeLock);
- class_t *cls;
- NXMapTable *future_class_map = futureClasses();
+ Class cls;
+ NXMapTable *future_named_class_map = futureNamedClasses();
- if ((cls = (class_t *)NXMapGet(future_class_map, name))) {
+ if ((cls = (Class)NXMapGet(future_named_class_map, name))) {
// Already have a future class for this name.
rwlock_unlock_write(&runtimeLock);
- return (Class)cls;
+ return cls;
}
- cls = (class_t *)_calloc_class(sizeof(*cls));
- addFutureClass(name, cls);
+ cls = _calloc_class(sizeof(objc_class));
+ addFutureNamedClass(name, cls);
rwlock_unlock_write(&runtimeLock);
- return (Class)cls;
+ return cls;
+}
+
+
+/***********************************************************************
+* objc_getFutureClass. Return the id of the named class.
+* If the class does not exist, return an uninitialized class
+* structure that will be used for the class when and if it
+* does get loaded.
+* Not thread safe.
+**********************************************************************/
+Class objc_getFutureClass(const char *name)
+{
+ Class cls;
+
+ // YES unconnected, NO class handler
+ // (unconnected is OK because it will someday be the real class)
+ cls = look_up_class(name, YES, NO);
+ if (cls) {
+ if (PrintFuture) {
+ _objc_inform("FUTURE: found %p already in use for %s",
+ (void*)cls, name);
+ }
+
+ return cls;
+ }
+
+ // No class or future class with that name yet. Make one.
+ // fixme not thread-safe with respect to
+ // simultaneous library load or getFutureClass.
+ return _objc_allocateFutureClass(name);
}
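+// Usage sketch ("NSFoo" is a made-up name): a client such as CF can call
+// objc_getFutureClass("NSFoo") before the image defining NSFoo loads; the
+// reserved objc_class returned here is later filled in by readClass()
+// when the real class appears, so the early pointer remains valid.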
}
+BOOL _class_isFutureClass(Class cls)
+{
+ return cls && cls->isFuture();
+}
+
+
/***********************************************************************
-* flushVtables
-* Rebuilds vtables for cls and its realized subclasses.
-* If cls is Nil, all realized classes and metaclasses are touched.
-* Locking: runtimeLock must be held by the caller.
+* _objc_flush_caches
+* Flushes all caches.
+* (Historical behavior: flush caches for cls, its metaclass,
+* and subclasses thereof. Nil flushes all classes.)
+* Locking: acquires runtimeLock
**********************************************************************/
-static void flushVtables(class_t *cls)
+static void flushCaches(Class cls)
{
rwlock_assert_writing(&runtimeLock);
- if (PrintVtables && !cls) {
- _objc_inform("VTABLES: ### EXPENSIVE ### global vtable flush!");
+ mutex_lock(&cacheUpdateLock);
+
+ if (cls) {
+ foreach_realized_class_and_subclass(cls, ^(Class c){
+ cache_erase_nolock(&c->cache);
+ });
+
+ if (!cls->superclass) {
+ // root; metaclasses are subclasses and were flushed above
+ } else {
+ foreach_realized_class_and_subclass(cls->ISA(), ^(Class c){
+ cache_erase_nolock(&c->cache);
+ });
+ }
+ }
+ else {
+ Class c;
+ NXHashTable *classes = realizedClasses();
+ NXHashState state = NXInitHashState(classes);
+ while (NXNextHashState(classes, &state, (void **)&c)) {
+ cache_erase_nolock(&c->cache);
+ }
+ classes = realizedMetaclasses();
+ state = NXInitHashState(classes);
+ while (NXNextHashState(classes, &state, (void **)&c)) {
+ cache_erase_nolock(&c->cache);
+ }
}
- FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, cls, {
- updateVtable(c, NO);
- });
+ mutex_unlock(&cacheUpdateLock);
}
-/***********************************************************************
-* flushCaches
-* Flushes caches for cls and its realized subclasses.
-* Does not update vtables.
-* If cls is Nil, all realized and metaclasses classes are touched.
-* Locking: runtimeLock must be held by the caller.
-**********************************************************************/
-static void flushCaches(class_t *cls)
+static void flushImps(Class cls, SEL sel1, IMP imp1, SEL sel2, IMP imp2)
{
rwlock_assert_writing(&runtimeLock);
- FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, cls, {
- flush_cache((Class)c);
- });
+ mutex_lock(&cacheUpdateLock);
+
+ if (cls) {
+ foreach_realized_class_and_subclass(cls, ^(Class c){
+ cache_eraseImp_nolock(c, sel1, imp1);
+ if (sel2) cache_eraseImp_nolock(c, sel2, imp2);
+ });
+
+ if (!cls->superclass) {
+ // root; metaclasses are subclasses and were flushed above
+ } else {
+ foreach_realized_class_and_subclass(cls->ISA(), ^(Class c){
+ cache_eraseImp_nolock(c, sel1, imp1);
+ if (sel2) cache_eraseImp_nolock(c, sel2, imp2);
+ });
+ }
+ }
+ else {
+ Class c;
+ NXHashTable *classes = realizedClasses();
+ NXHashState state = NXInitHashState(classes);
+ while (NXNextHashState(classes, &state, (void **)&c)) {
+ cache_eraseImp_nolock(c, sel1, imp1);
+ if (sel2) cache_eraseImp_nolock(c, sel2, imp2);
+ }
+ classes = realizedMetaclasses();
+ state = NXInitHashState(classes);
+ while (NXNextHashState(classes, &state, (void **)&c)) {
+ cache_eraseImp_nolock(c, sel1, imp1);
+ if (sel2) cache_eraseImp_nolock(c, sel2, imp2);
+ }
+ }
+
+ mutex_unlock(&cacheUpdateLock);
}
-/***********************************************************************
-* flush_caches
-* Flushes caches and rebuilds vtables for cls, its subclasses,
-* and optionally its metaclass.
-* Locking: acquires runtimeLock
-**********************************************************************/
-PRIVATE_EXTERN void flush_caches(Class cls_gen, BOOL flush_meta)
+void _objc_flush_caches(Class cls)
{
- class_t *cls = newcls(cls_gen);
rwlock_write(&runtimeLock);
- // fixme optimize vtable flushing? (only needed for vtable'd selectors)
flushCaches(cls);
- flushVtables(cls);
- // don't flush root class's metaclass twice (it's a subclass of the root)
- if (flush_meta && getSuperclass(cls)) {
- flushCaches(cls->isa);
- flushVtables(cls->isa);
- }
rwlock_unlock_write(&runtimeLock);
+
+ if (!cls) {
+ // collectALot if cls==nil
+ mutex_lock(&cacheUpdateLock);
+ cache_collect(true);
+ mutex_unlock(&cacheUpdateLock);
+ }
}
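+// For example, _objc_flush_caches(cls) empties the method caches of cls,
+// its metaclass, and their realized subclasses; _objc_flush_caches(nil)
+// empties every realized class's cache and then forces collection of the
+// old cache garbage via cache_collect(true).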
*
* Locking: write-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN const char *
+const char *
map_images(enum dyld_image_states state, uint32_t infoCount,
const struct dyld_image_info infoList[])
{
*
* Locking: write-locks runtimeLock and loadMethodLock
**********************************************************************/
-PRIVATE_EXTERN const char *
+const char *
load_images(enum dyld_image_states state, uint32_t infoCount,
const struct dyld_image_info infoList[])
{
recursive_mutex_unlock(&loadMethodLock);
- return NULL;
+ return nil;
}
*
* Locking: write-locks runtimeLock and loadMethodLock
**********************************************************************/
-PRIVATE_EXTERN void
+void
unmap_image(const struct mach_header *mh, intptr_t vmaddr_slide)
{
recursive_mutex_lock(&loadMethodLock);
+
+/***********************************************************************
+* readClass
+* Read a class and metaclass as written by a compiler.
+* Returns the new class pointer. This could be:
+* - cls
+* - nil (cls has a missing weak-linked superclass)
+* - something else (space for this class was reserved by a future class)
+*
+* Locking: runtimeLock acquired by map_images or objc_readClassPair
+**********************************************************************/
+static unsigned int PreoptTotalMethodLists;
+static unsigned int PreoptOptimizedMethodLists;
+static unsigned int PreoptTotalClasses;
+static unsigned int PreoptOptimizedClasses;
+
+Class readClass(Class cls, bool headerIsBundle, bool headerInSharedCache)
+{
+ const char *mangledName = cls->mangledName();
+
+ if (missingWeakSuperclass(cls)) {
+ // No superclass (probably weak-linked).
+ // Disavow any knowledge of this subclass.
+ if (PrintConnecting) {
+ _objc_inform("CLASS: IGNORING class '%s' with "
+ "missing weak-linked superclass",
+ cls->nameForLogging());
+ }
+ addRemappedClass(cls, nil);
+ cls->superclass = nil;
+ return nil;
+ }
+
+ // Note: Class __ARCLite__'s hack does not go through here.
+ // Class structure fixups that apply to it also need to be
+ // performed in non-lazy realization below.
+
+ // These fields should be set to zero because of the
+ // binding of _objc_empty_vtable, but OS X 10.8's dyld
+ // does not bind shared cache absolute symbols as expected.
+ // This (and the __ARCLite__ hack below) can be removed
+ // once the simulator drops 10.8 support.
+#if TARGET_IPHONE_SIMULATOR
+ if (cls->cache._mask) cls->cache._mask = 0;
+ if (cls->cache._occupied) cls->cache._occupied = 0;
+ if (cls->ISA()->cache._mask) cls->ISA()->cache._mask = 0;
+ if (cls->ISA()->cache._occupied) cls->ISA()->cache._occupied = 0;
+#endif
+
+ NXMapTable *future_named_class_map = futureNamedClasses();
+
+ if (NXCountMapTable(future_named_class_map) > 0) {
+ Class newCls = nil;
+ newCls = (Class)NXMapGet(future_named_class_map, mangledName);
+ removeFutureNamedClass(mangledName);
+
+ if (newCls) {
+ // Copy objc_class to future class's struct.
+ // Preserve future's rw data block.
+
+ if (newCls->isSwift()) {
+ _objc_fatal("Can't complete future class request for '%s' "
+ "because the real class is too big.",
+ cls->nameForLogging());
+ }
+
+ class_rw_t *rw = newCls->data();
+ const class_ro_t *old_ro = rw->ro;
+ memcpy(newCls, cls, sizeof(objc_class));
+ rw->ro = (class_ro_t *)newCls->data();
+ newCls->setData(rw);
+ _free_internal((void *)old_ro->name);
+ _free_internal((void *)old_ro);
+
+ addRemappedClass(cls, newCls);
+
+ cls = newCls;
+ }
+ }
+
+ PreoptTotalClasses++;
+ if (headerInSharedCache && isPreoptimized()) {
+ // class list built in shared cache
+ // fixme strict assert doesn't work because of duplicates
+ // assert(cls == getClass(name));
+ assert(getClass(mangledName));
+ PreoptOptimizedClasses++;
+ } else {
+ addNamedClass(cls, mangledName);
+ }
+
+ // for future reference: shared cache never contains MH_BUNDLEs
+ if (headerIsBundle) {
+ cls->data()->flags |= RO_FROM_BUNDLE;
+ cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
+ }
+
+ if (PrintPreopt) {
+ const method_list_t *mlist;
+ if ((mlist = ((class_ro_t *)cls->data())->baseMethods)) {
+ PreoptTotalMethodLists++;
+ if (isMethodListFixedUp(mlist)) PreoptOptimizedMethodLists++;
+ }
+ if ((mlist = ((class_ro_t *)cls->ISA()->data())->baseMethods)) {
+ PreoptTotalMethodLists++;
+ if (isMethodListFixedUp(mlist)) PreoptOptimizedMethodLists++;
+ }
+ }
+
+ return cls;
+}
+
+
/***********************************************************************
* _read_images
* Perform initial processing of the headers in the linked
*
* Locking: runtimeLock acquired by map_images
**********************************************************************/
-PRIVATE_EXTERN void _read_images(header_info **hList, uint32_t hCount)
+void _read_images(header_info **hList, uint32_t hCount)
{
header_info *hi;
uint32_t hIndex;
size_t count;
size_t i;
- class_t **resolvedFutureClasses = NULL;
+ Class *resolvedFutureClasses = nil;
size_t resolvedFutureClassCount = 0;
static BOOL doneOnce;
#define EACH_HEADER \
hIndex = 0; \
- crashlog_header_name(NULL) && hIndex < hCount && (hi = hList[hIndex]) && crashlog_header_name(hi); \
+ crashlog_header_name(nil) && hIndex < hCount && (hi = hList[hIndex]) && crashlog_header_name(hi); \
hIndex++
if (!doneOnce) {
doneOnce = YES;
- initVtables();
+
+#if SUPPORT_NONPOINTER_ISA
+
+# if TARGET_OS_MAC && !TARGET_OS_IPHONE
+ // Disable non-pointer isa if the app is too old.
+ if (AppSDKVersion < INSERT VERSION HERE) {
+ DisableIndexedIsa = true;
+ if (PrintRawIsa) {
+ _objc_inform("RAW ISA: disabling non-pointer isa because "
+ "the app is too old (SDK version %hu.%hhu.%hhu)",
+ (unsigned short)(AppSDKVersion>>16),
+ (unsigned char)(AppSDKVersion>>8),
+ (unsigned char)(AppSDKVersion));
+ }
+ }
+# endif
+
+ // Disable non-pointer isa for all GC apps.
+ if (UseGC) {
+ DisableIndexedIsa = true;
+ if (PrintRawIsa) {
+ _objc_inform("RAW ISA: disabling non-pointer isa because "
+ "the app is GC");
+ }
+ }
+
+#endif
+
+ if (DisableTaggedPointers) {
+ disableTaggedPointers();
+ }
 // Count classes. Size various tables based on the total.
- unsigned int total = 0;
+ int total = 0;
+ int unoptimizedTotal = 0;
for (EACH_HEADER) {
if (_getObjc2ClassList(hi, &count)) {
- total += (unsigned int)count;
+ total += (int)count;
+ if (!hi->inSharedCache) unoptimizedTotal += count;
}
}
if (PrintConnecting) {
- _objc_inform("CLASS: found %u classes during launch", total);
+ _objc_inform("CLASS: found %d classes during launch", total);
}
// namedClasses (NOT realizedClasses)
+ // Preoptimized classes don't go in this table.
// 4/3 is NXMapTable's load factor
+ int namedClassesSize =
+ (isPreoptimized() ? unoptimizedTotal : total) * 4 / 3;
gdb_objc_realized_classes =
- NXCreateMapTableFromZone(NXStrValueMapPrototype, total*4/3,
- _objc_internal_zone());
-
- // uninitializedClasses
- // 4/3 is NXMapTable's load factor
- uninitialized_class_map =
- NXCreateMapTableFromZone(NXPtrValueMapPrototype, total*4/3,
+ NXCreateMapTableFromZone(NXStrValueMapPrototype, namedClassesSize,
_objc_internal_zone());
// realizedClasses and realizedMetaclasses - less than the full total
realized_class_hash =
- NXCreateHashTableFromZone(NXPtrPrototype, total / 8, NULL,
+ NXCreateHashTableFromZone(NXPtrPrototype, total / 8, nil,
_objc_internal_zone());
realized_metaclass_hash =
- NXCreateHashTableFromZone(NXPtrPrototype, total / 8, NULL,
+ NXCreateHashTableFromZone(NXPtrPrototype, total / 8, nil,
_objc_internal_zone());
}
// Discover classes. Fix up unresolved future classes. Mark bundle classes.
- NXMapTable *future_class_map = futureClasses();
+
for (EACH_HEADER) {
- class_t **classlist = _getObjc2ClassList(hi, &count);
- for (i = 0; i < count; i++) {
- const char *name = getName(classlist[i]);
-
- if (missingWeakSuperclass(classlist[i])) {
- // No superclass (probably weak-linked).
- // Disavow any knowledge of this subclass.
- if (PrintConnecting) {
- _objc_inform("CLASS: IGNORING class '%s' with "
- "missing weak-linked superclass", name);
- }
- addRemappedClass(classlist[i], NULL);
- classlist[i]->superclass = NULL;
- classlist[i] = NULL;
- continue;
- }
+ bool headerIsBundle = (hi->mhdr->filetype == MH_BUNDLE);
+ bool headerInSharedCache = hi->inSharedCache;
- if (NXCountMapTable(future_class_map) > 0) {
- class_t *newCls = (class_t *)NXMapGet(future_class_map, name);
- if (newCls) {
- // Copy class_t to future class's struct.
- // Preserve future's rw data block.
- class_rw_t *rw = newCls->data();
- memcpy(newCls, classlist[i], sizeof(class_t));
- rw->ro = (class_ro_t *)newCls->data();
- newCls->setData(rw);
-
- removeFutureClass(name);
- addRemappedClass(classlist[i], newCls);
- classlist[i] = newCls;
- // Non-lazily realize the class below.
- resolvedFutureClasses = (class_t **)
- _realloc_internal(resolvedFutureClasses,
- (resolvedFutureClassCount+1)
- * sizeof(class_t *));
- resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
- }
- }
- addNamedClass(classlist[i], name);
- addUninitializedClass(classlist[i], classlist[i]->isa);
- if (hi->mhdr->filetype == MH_BUNDLE) {
- classlist[i]->data()->flags |= RO_FROM_BUNDLE;
- classlist[i]->isa->data()->flags |= RO_FROM_BUNDLE;
+ classref_t *classlist = _getObjc2ClassList(hi, &count);
+ for (i = 0; i < count; i++) {
+ Class cls = (Class)classlist[i];
+ Class newCls = readClass(cls, headerIsBundle, headerInSharedCache);
+
+ if (newCls != cls && newCls) {
+ // Class was moved but not deleted. Currently this occurs
+ // only when the new class resolved a future class.
+ // Non-lazily realize the class below.
+ resolvedFutureClasses = (Class *)
+ _realloc_internal(resolvedFutureClasses,
+ (resolvedFutureClassCount+1)
+ * sizeof(Class));
+ resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
}
}
}
+ if (PrintPreopt && PreoptTotalMethodLists) {
+ _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
+ PreoptOptimizedMethodLists, PreoptTotalMethodLists,
+ 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists);
+ }
+ if (PrintPreopt && PreoptTotalClasses) {
+ _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
+ PreoptOptimizedClasses, PreoptTotalClasses,
+ 100.0*PreoptOptimizedClasses/PreoptTotalClasses);
+ }
+
// Fix up remapped classes
- // classlist is up to date, but classrefs may not be
+ // Class list and nonlazy class list remain unremapped.
+ // Class refs and super refs are remapped for message dispatching.
if (!noClassesRemapped()) {
for (EACH_HEADER) {
- class_t **classrefs = _getObjc2ClassRefs(hi, &count);
+ Class *classrefs = _getObjc2ClassRefs(hi, &count);
for (i = 0; i < count; i++) {
remapClassRef(&classrefs[i]);
}
if (PrintPreopt) {
if (sel_preoptimizationValid(hi)) {
_objc_inform("PREOPTIMIZATION: honoring preoptimized selectors in %s",
- _nameForHeader(hi->mhdr));
+ hi->fname);
}
else if (_objcHeaderOptimizedByDyld(hi)) {
_objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors in %s",
- _nameForHeader(hi->mhdr));
+ hi->fname);
}
}
if (sel_preoptimizationValid(hi)) continue;
+ bool isBundle = hi->mhdr->filetype == MH_BUNDLE;
SEL *sels = _getObjc2SelectorRefs(hi, &count);
- BOOL isBundle = hi->mhdr->filetype == MH_BUNDLE;
for (i = 0; i < count; i++) {
- sels[i] = sel_registerNameNoLock((const char *)sels[i], isBundle);
+ const char *name = sel_cname(sels[i]);
+ sels[i] = sel_registerNameNoLock(name, isBundle);
}
}
sel_unlock();
+#if SUPPORT_FIXUP
+ // Fix up old objc_msgSend_fixup call sites
+ for (EACH_HEADER) {
+ message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
+ if (count == 0) continue;
+
+ if (PrintVtables) {
+ _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
+ "call sites in %s", count, hi->fname);
+ }
+ for (i = 0; i < count; i++) {
+ fixupMessageRef(refs+i);
+ }
+ }
+#endif
+
// Discover protocols. Fix up protocol refs.
- NXMapTable *protocol_map = protocols();
for (EACH_HEADER) {
- extern class_t OBJC_CLASS_$_Protocol;
+ extern objc_class OBJC_CLASS_$_Protocol;
Class cls = (Class)&OBJC_CLASS_$_Protocol;
assert(cls);
- protocol_t **protocols = _getObjc2ProtocolList(hi, &count);
- // fixme duplicate protocol from bundle
+ protocol_t **protolist = _getObjc2ProtocolList(hi, &count);
+ NXMapTable *protocol_map = protocols();
+ // fixme duplicate protocols from unloadable bundle
for (i = 0; i < count; i++) {
- if (!NXMapGet(protocol_map, protocols[i]->name)) {
- protocols[i]->isa = cls;
+ protocol_t *oldproto = (protocol_t *)
+ getProtocol(protolist[i]->mangledName);
+ if (!oldproto) {
+ size_t size = max(sizeof(protocol_t),
+ (size_t)protolist[i]->size);
+ protocol_t *newproto = (protocol_t *)_calloc_internal(size, 1);
+ memcpy(newproto, protolist[i], protolist[i]->size);
+ newproto->size = (typeof(newproto->size))size;
+
+ newproto->initIsa(cls); // fixme pinned
NXMapKeyCopyingInsert(protocol_map,
- protocols[i]->name, protocols[i]);
+ newproto->mangledName, newproto);
if (PrintProtocols) {
_objc_inform("PROTOCOLS: protocol at %p is %s",
- protocols[i], protocols[i]->name);
+ newproto, newproto->nameForLogging());
}
} else {
if (PrintProtocols) {
_objc_inform("PROTOCOLS: protocol at %p is %s (duplicate)",
- protocols[i], protocols[i]->name);
+ protolist[i], oldproto->nameForLogging());
}
}
}
}
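+ // Note: each newly seen protocol is copied into runtime-allocated
+ // storage and registered under its mangled name, so later lookups
+ // resolve to that copy rather than to image data; a second definition
+ // of the same protocol name is treated as a duplicate and ignored.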
for (EACH_HEADER) {
- protocol_t **protocols;
- protocols = _getObjc2ProtocolRefs(hi, &count);
+ protocol_t **protolist;
+ protolist = _getObjc2ProtocolRefs(hi, &count);
for (i = 0; i < count; i++) {
- remapProtocolRef(&protocols[i]);
+ remapProtocolRef(&protolist[i]);
}
}
// Realize non-lazy classes (for +load methods and static instances)
for (EACH_HEADER) {
- class_t **classlist =
+ classref_t *classlist =
_getObjc2NonlazyClassList(hi, &count);
for (i = 0; i < count; i++) {
- realizeClass(remapClass(classlist[i]));
+ Class cls = remapClass(classlist[i]);
+ if (!cls) continue;
+
+ // hack for class __ARCLite__, which didn't get this above
+#if TARGET_IPHONE_SIMULATOR
+ if (cls->cache._buckets == (void*)&_objc_empty_cache &&
+ (cls->cache._mask || cls->cache._occupied))
+ {
+ cls->cache._mask = 0;
+ cls->cache._occupied = 0;
+ }
+ if (cls->ISA()->cache._buckets == (void*)&_objc_empty_cache &&
+ (cls->ISA()->cache._mask || cls->ISA()->cache._occupied))
+ {
+ cls->ISA()->cache._mask = 0;
+ cls->ISA()->cache._occupied = 0;
+ }
+#endif
+
+ realizeClass(cls);
}
}
if (resolvedFutureClasses) {
for (i = 0; i < resolvedFutureClassCount; i++) {
realizeClass(resolvedFutureClasses[i]);
+ resolvedFutureClasses[i]->setRequiresRawIsa(false/*inherited*/);
}
_free_internal(resolvedFutureClasses);
}
_getObjc2CategoryList(hi, &count);
for (i = 0; i < count; i++) {
category_t *cat = catlist[i];
- // Do NOT use cat->cls! It may have been remapped.
- class_t *cls = remapClass(cat->cls);
+ Class cls = remapClass(cat->cls);
if (!cls) {
// Category's target class is missing (probably weak-linked).
// Disavow any knowledge of this category.
- catlist[i] = NULL;
+ catlist[i] = nil;
if (PrintConnecting) {
_objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
"missing weak-linked target class",
|| cat->instanceProperties)
{
addUnattachedCategoryForClass(cat, cls, hi);
- if (isRealized(cls)) {
+ if (cls->isRealized()) {
remethodizeClass(cls);
classExists = YES;
}
if (PrintConnecting) {
_objc_inform("CLASS: found category -%s(%s) %s",
- getName(cls), cat->name,
+ cls->nameForLogging(), cat->name,
classExists ? "on existing class" : "");
}
}
if (cat->classMethods || cat->protocols
/* || cat->classProperties */)
{
- addUnattachedCategoryForClass(cat, cls->isa, hi);
- if (isRealized(cls->isa)) {
- remethodizeClass(cls->isa);
+ addUnattachedCategoryForClass(cat, cls->ISA(), hi);
+ if (cls->ISA()->isRealized()) {
+ remethodizeClass(cls->ISA());
}
if (PrintConnecting) {
_objc_inform("CLASS: found category +%s(%s)",
- getName(cls), cat->name);
+ cls->nameForLogging(), cat->name);
}
}
}
**********************************************************************/
// Recursively schedule +load for cls and any un-+load-ed superclasses.
// cls must already be connected.
-static void schedule_class_load(class_t *cls)
+static void schedule_class_load(Class cls)
{
if (!cls) return;
- assert(isRealized(cls)); // _read_images should realize
+ assert(cls->isRealized()); // _read_images should realize
if (cls->data()->flags & RW_LOADED) return;
// Ensure superclass-first ordering
- schedule_class_load(getSuperclass(cls));
+ schedule_class_load(cls->superclass);
- add_class_to_loadable_list((Class)cls);
- changeInfo(cls, RW_LOADED, 0);
+ add_class_to_loadable_list(cls);
+ cls->setInfo(RW_LOADED);
}
-PRIVATE_EXTERN void prepare_load_methods(header_info *hi)
+void prepare_load_methods(header_info *hi)
{
size_t count, i;
rwlock_assert_writing(&runtimeLock);
- class_t **classlist =
+ classref_t *classlist =
_getObjc2NonlazyClassList(hi, &count);
for (i = 0; i < count; i++) {
schedule_class_load(remapClass(classlist[i]));
category_t **categorylist = _getObjc2NonlazyCategoryList(hi, &count);
for (i = 0; i < count; i++) {
category_t *cat = categorylist[i];
- // Do NOT use cat->cls! It may have been remapped.
- class_t *cls = remapClass(cat->cls);
+ Class cls = remapClass(cat->cls);
if (!cls) continue; // category for ignored weak-linked class
realizeClass(cls);
- assert(isRealized(cls->isa));
- add_category_to_loadable_list((Category)cat);
+ assert(cls->ISA()->isRealized());
+ add_category_to_loadable_list(cat);
}
}
* Only handles MH_BUNDLE for now.
* Locking: write-lock and loadMethodLock acquired by unmap_image
**********************************************************************/
-PRIVATE_EXTERN void _unload_image(header_info *hi)
+void _unload_image(header_info *hi)
{
size_t count, i;
    // Unload unattached categories and categories waiting for +load.
    category_t **catlist = _getObjc2CategoryList(hi, &count);
    for (i = 0; i < count; i++) {
category_t *cat = catlist[i];
if (!cat) continue; // category for ignored weak-linked class
- class_t *cls = remapClass(cat->cls);
+ Class cls = remapClass(cat->cls);
assert(cls); // shouldn't have live category for dead class
// fixme for MH_DYLIB cat's class may have been unloaded already
removeUnattachedCategoryForClass(cat, cls);
// +load queue
- remove_category_from_loadable_list((Category)cat);
+ remove_category_from_loadable_list(cat);
}
// Unload classes.
- class_t **classlist = _getObjc2ClassList(hi, &count);
+ classref_t *classlist = _getObjc2ClassList(hi, &count);
+
+ // First detach classes from each other. Then free each class.
+ // This avoids bugs where this loop unloads a subclass before its superclass.
+
+ for (i = 0; i < count; i++) {
+ Class cls = remapClass(classlist[i]);
+ if (cls) {
+ remove_class_from_loadable_list(cls);
+ detach_class(cls->ISA(), YES);
+ detach_class(cls, NO);
+ }
+ }
+
for (i = 0; i < count; i++) {
- class_t *cls = classlist[i];
- // fixme remapped classes?
- // fixme ignored weak-linked classes
+ Class cls = remapClass(classlist[i]);
if (cls) {
- remove_class_from_loadable_list((Class)cls);
- unload_class(cls->isa, YES);
- unload_class(cls, NO);
+ free_class(cls->ISA());
+ free_class(cls);
}
}
struct objc_method_description *
method_getDescription(Method m)
{
- if (!m) return NULL;
- return (struct objc_method_description *)newmethod(m);
+ if (!m) return nil;
+ return (struct objc_method_description *)m;
}
static IMP
_method_getImplementation(method_t *m)
{
- if (!m) return NULL;
+ if (!m) return nil;
return m->imp;
}
IMP
method_getImplementation(Method m)
{
- return _method_getImplementation(newmethod(m));
+ return _method_getImplementation(m);
}
/***********************************************************************
* method_getName
* Returns this method's selector.
-* The method must not be NULL.
+* The method must not be nil.
* The method must already have been fixed-up.
* Locking: none
**********************************************************************/
SEL
-method_getName(Method m_gen)
+method_getName(Method m)
{
- method_t *m = newmethod(m_gen);
- if (!m) return NULL;
+ if (!m) return nil;
- assert((SEL)m->name == sel_registerName((char *)m->name));
- return (SEL)m->name;
+ assert(m->name == sel_registerName(sel_getName(m->name)));
+ return m->name;
}
/***********************************************************************
* method_getTypeEncoding
* Returns this method's old-style type encoding string.
-* The method must not be NULL.
+* The method must not be nil.
* Locking: none
**********************************************************************/
const char *
method_getTypeEncoding(Method m)
{
- if (!m) return NULL;
- return newmethod(m)->types;
+ if (!m) return nil;
+ return m->types;
}
* The previous implementation is returned.
**********************************************************************/
static IMP
-_method_setImplementation(class_t *cls, method_t *m, IMP imp)
+_method_setImplementation(Class cls, method_t *m, IMP imp)
{
rwlock_assert_writing(&runtimeLock);
- if (!m) return NULL;
- if (!imp) return NULL;
+ if (!m) return nil;
+ if (!imp) return nil;
if (ignoreSelector(m->name)) {
        // Ignored methods stay ignored
        return m->imp;
    }

IMP old = _method_getImplementation(m);
m->imp = imp;
- // No cache flushing needed - cache contains Methods not IMPs.
-
- if (vtable_containsSelector(newmethod(m)->name)) {
- // Will be slow if cls is NULL (i.e. unknown)
- // fixme build list of classes whose Methods are known externally?
- flushVtables(cls);
- }
+ // Class-side cache updates are slow if cls is nil (i.e. unknown)
+ // RR/AWZ updates are slow if cls is nil (i.e. unknown)
+ // fixme build list of classes whose Methods are known externally?
- // fixme catch NSObject changing to custom RR
- // cls->setCustomRR();
+ // Scrub the old IMP from the cache.
+ // Can't simply overwrite the new IMP because the cached value could be
+ // the same IMP from a different Method.
+ flushImps(cls, m->name, old, nil, nil);
- // fixme update monomorphism if necessary
+ // Catch changes to retain/release and allocWithZone implementations
+ updateCustomRR_AWZ(cls, m);
return old;
}
IMP
method_setImplementation(Method m, IMP imp)
{
- // Don't know the class - will be slow if vtables are affected
+ // Don't know the class - will be slow if RR/AWZ are affected
// fixme build list of classes whose Methods are known externally?
IMP result;
rwlock_write(&runtimeLock);
- result = _method_setImplementation(Nil, newmethod(m), imp);
+ result = _method_setImplementation(Nil, m, imp);
rwlock_unlock_write(&runtimeLock);
return result;
}
-void method_exchangeImplementations(Method m1_gen, Method m2_gen)
+void method_exchangeImplementations(Method m1, Method m2)
{
- method_t *m1 = newmethod(m1_gen);
- method_t *m2 = newmethod(m2_gen);
if (!m1 || !m2) return;
rwlock_write(&runtimeLock);
    IMP m1_imp = m1->imp;
    m1->imp = m2->imp;
    m2->imp = m1_imp;
- if (vtable_containsSelector(m1->name) ||
- vtable_containsSelector(m2->name))
- {
- // Don't know the class - will be slow if vtables are affected
- // fixme build list of classes whose Methods are known externally?
- flushVtables(NULL);
- }
- // fixme catch NSObject changing to custom RR
- // cls->setCustomRR();
+ // RR/AWZ updates are slow because class is unknown
+ // Class-side cache updates are slow because class is unknown
+ // fixme build list of classes whose Methods are known externally?
+
+ // Scrub the old IMPs from the caches.
+ // Can't simply overwrite the new IMP because the cached value could be
+ // the same IMP from a different Method.
+ flushImps(nil, m1->name, m2->imp, m2->name, m1->imp);
- // fixme update monomorphism if necessary
+ updateCustomRR_AWZ(nil, m1);
+ updateCustomRR_AWZ(nil, m2);
rwlock_unlock_write(&runtimeLock);
}
ivar_getOffset(Ivar ivar)
{
if (!ivar) return 0;
- return *newivar(ivar)->offset;
+ return *ivar->offset;
}
const char *
ivar_getName(Ivar ivar)
{
- if (!ivar) return NULL;
- return newivar(ivar)->name;
+ if (!ivar) return nil;
+ return ivar->name;
}
const char *
ivar_getTypeEncoding(Ivar ivar)
{
- if (!ivar) return NULL;
- return newivar(ivar)->type;
+ if (!ivar) return nil;
+ return ivar->type;
+}
+
+
+
+const char *property_getName(objc_property_t prop)
+{
+ return prop->name;
+}
+
+const char *property_getAttributes(objc_property_t prop)
+{
+ return prop->attributes;
+}
+
+objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
+ unsigned int *outCount)
+{
+ if (!prop) {
+ if (outCount) *outCount = 0;
+ return nil;
+ }
+
+ objc_property_attribute_t *result;
+ rwlock_read(&runtimeLock);
+ result = copyPropertyAttributeList(prop->attributes,outCount);
+ rwlock_unlock_read(&runtimeLock);
+ return result;
+}
+
+char * property_copyAttributeValue(objc_property_t prop, const char *name)
+{
+ if (!prop || !name || *name == '\0') return nil;
+
+ char *result;
+ rwlock_read(&runtimeLock);
+ result = copyPropertyAttributeValue(prop->attributes, name);
+ rwlock_unlock_read(&runtimeLock);
+ return result;
+}
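+// Usage sketch (illustrative; `MyClass` and its `name` property are
+// hypothetical). The returned strings are heap-allocated and must be free()d:
+//
+//   objc_property_t prop = class_getProperty([MyClass class], "name");
+//   char *type = property_copyAttributeValue(prop, "T");   // e.g. @"NSString"
+//   char *getter = property_copyAttributeValue(prop, "G"); // NULL if default getter
+//   free(type);
+//   free(getter);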
+
+
+/***********************************************************************
+* getExtendedTypesIndexesForMethod
+* Sets the out-parameters a and b such that:
+* a is the count of methods in all method lists before m's method list
+* b is the index of m in m's method list
+* a+b is the index of m's extended types in the extended types array
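+* Example: in a protocol with 2 required instance methods, 1 required class
+* method, and 3 optional instance methods, the extended types of the second
+* optional instance method are at index (2+1) + 1 = 4 (a = 3, b = 1).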
+**********************************************************************/
+static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
+{
+ a = 0;
+
+ if (isRequiredMethod && isInstanceMethod) {
+ b = method_list_index(proto->instanceMethods, m);
+ return;
+ }
+ a += method_list_count(proto->instanceMethods);
+
+ if (isRequiredMethod && !isInstanceMethod) {
+ b = method_list_index(proto->classMethods, m);
+ return;
+ }
+ a += method_list_count(proto->classMethods);
+
+ if (!isRequiredMethod && isInstanceMethod) {
+ b = method_list_index(proto->optionalInstanceMethods, m);
+ return;
+ }
+ a += method_list_count(proto->optionalInstanceMethods);
+
+ if (!isRequiredMethod && !isInstanceMethod) {
+ b = method_list_index(proto->optionalClassMethods, m);
+ return;
+ }
+ a += method_list_count(proto->optionalClassMethods);
+}
+
+
+/***********************************************************************
+* getExtendedTypesIndexForMethod
+* Returns the index of m's extended types in proto's extended types array.
+**********************************************************************/
+static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
+{
+ uint32_t a;
+ uint32_t b;
+ getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
+ isInstanceMethod, a, b);
+ return a + b;
+}
+
+
+/***********************************************************************
+* fixupProtocolMethodList
+* Fixes up a single method list in a protocol.
+**********************************************************************/
+static void
+fixupProtocolMethodList(protocol_t *proto, method_list_t **mlistp,
+ bool required, bool instance)
+{
+ rwlock_assert_writing(&runtimeLock);
+
+ if (!*mlistp) return;
+ if (isMethodListFixedUp(*mlistp)) return;
+
+ bool hasExtendedMethodTypes = proto->hasExtendedMethodTypes();
+ *mlistp = fixupMethodList(*mlistp, true/*always copy for simplicity*/,
+ !hasExtendedMethodTypes/*sort if no ext*/);
+
+ method_list_t *mlist = *mlistp;
+
+ if (hasExtendedMethodTypes) {
+ // Sort method list and extended method types together.
+ // fixupMethodList() can't do this.
+ // fixme COW stomp
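+ // The method list and proto->extendedMethodTypes are parallel arrays;
+ // they must be reordered in lockstep or the a+b indexing used by
+ // getExtendedTypesIndexForMethod() would break.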
+ uint32_t count = method_list_count(mlist);
+ uint32_t prefix;
+ uint32_t junk;
+ getExtendedTypesIndexesForMethod(proto, method_list_nth(mlist, 0),
+ required, instance, prefix, junk);
+ const char **types = proto->extendedMethodTypes;
+ for (uint32_t i = 0; i < count; i++) {
+ for (uint32_t j = i+1; j < count; j++) {
+ method_t *mi = method_list_nth(mlist, i);
+ method_t *mj = method_list_nth(mlist, j);
+ if (mi->name > mj->name) {
+ method_list_swap(mlist, i, j);
+ std::swap(types[prefix+i], types[prefix+j]);
+ }
+ }
+ }
+ }
}
-
-const char *property_getName(objc_property_t prop)
-{
- return newproperty(prop)->name;
-}
-
-const char *property_getAttributes(objc_property_t prop)
+/***********************************************************************
+* fixupProtocol
+* Fixes up all of a protocol's method lists.
+**********************************************************************/
+static void
+fixupProtocol(protocol_t *proto)
{
- return newproperty(prop)->attributes;
-}
+ rwlock_assert_writing(&runtimeLock);
-objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
- unsigned int *outCount)
-{
- if (!prop) {
- if (outCount) *outCount = 0;
- return NULL;
+ if (proto->protocols) {
+ for (uintptr_t i = 0; i < proto->protocols->count; i++) {
+ protocol_t *sub = remapProtocol(proto->protocols->list[i]);
+ if (!sub->isFixedUp()) fixupProtocol(sub);
+ }
}
- objc_property_attribute_t *result;
- rwlock_read(&runtimeLock);
- result = copyPropertyAttributeList(newproperty(prop)->attributes,outCount);
- rwlock_unlock_read(&runtimeLock);
- return result;
-}
+ fixupProtocolMethodList(proto, &proto->instanceMethods, YES, YES);
+ fixupProtocolMethodList(proto, &proto->classMethods, YES, NO);
+ fixupProtocolMethodList(proto, &proto->optionalInstanceMethods, NO, YES);
+ fixupProtocolMethodList(proto, &proto->optionalClassMethods, NO, NO);
-char * property_copyAttributeValue(objc_property_t prop, const char *name)
-{
- if (!prop || !name || *name == '\0') return NULL;
-
- char *result;
- rwlock_read(&runtimeLock);
- result = copyPropertyAttributeValue(newproperty(prop)->attributes, name);
- rwlock_unlock_read(&runtimeLock);
- return result;
+ // fixme memory barrier so we can check this with no lock
+ proto->flags |= PROTOCOL_FIXED_UP;
}
/***********************************************************************
-* _protocol_getMethod_nolock
-* Locking: runtimeLock must be write-locked by the caller
+* fixupProtocolIfNeeded
+* Fixes up all of a protocol's method lists if they aren't fixed up already.
+* Locking: write-locks runtimeLock.
**********************************************************************/
-static Method
-_protocol_getMethod_nolock(protocol_t *proto, SEL sel,
- BOOL isRequiredMethod, BOOL isInstanceMethod)
+static void
+fixupProtocolIfNeeded(protocol_t *proto)
{
- rwlock_assert_writing(&runtimeLock);
+ rwlock_assert_unlocked(&runtimeLock);
+ assert(proto);
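+ // The unlocked isFixedUp() check below is an optimization. If two threads
+ // race past it, the loser re-runs fixupProtocol() under runtimeLock, and the
+ // per-list isMethodListFixedUp() checks make the extra pass harmless.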
- uint32_t i;
- if (!proto || !sel) return NULL;
+ if (!proto->isFixedUp()) {
+ rwlock_write(&runtimeLock);
+ fixupProtocol(proto);
+ rwlock_unlock_write(&runtimeLock);
+ }
+}
- method_list_t **mlistp = NULL;
- if (isRequiredMethod) {
- if (isInstanceMethod) {
+static method_list_t *
+getProtocolMethodList(protocol_t *proto, bool required, bool instance)
+{
+ method_list_t **mlistp = nil;
+ if (required) {
+ if (instance) {
mlistp = &proto->instanceMethods;
} else {
mlistp = &proto->classMethods;
}
} else {
- if (isInstanceMethod) {
+ if (instance) {
mlistp = &proto->optionalInstanceMethods;
} else {
mlistp = &proto->optionalClassMethods;
}
}
- if (*mlistp) {
- method_list_t *mlist = *mlistp;
- if (!isMethodListFixedUp(mlist)) {
- mlist = fixupMethodList(mlist, YES/*always copy for simplicity*/);
- *mlistp = mlist;
- }
- for (i = 0; i < mlist->count; i++) {
- method_t *m = method_list_nth(mlist, i);
- if (sel == m->name) return (Method)m;
- }
+ return *mlistp;
+}
+
+
+/***********************************************************************
+* protocol_getMethod_nolock
+* Locking: runtimeLock must be held by the caller
+**********************************************************************/
+static method_t *
+protocol_getMethod_nolock(protocol_t *proto, SEL sel,
+ bool isRequiredMethod, bool isInstanceMethod,
+ bool recursive)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ if (!proto || !sel) return nil;
+
+ assert(proto->isFixedUp());
+
+ method_list_t *mlist =
+ getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
+ if (mlist) {
+ method_t *m = search_method_list(mlist, sel);
+ if (m) return m;
}
- if (proto->protocols) {
- Method m;
- for (i = 0; i < proto->protocols->count; i++) {
+ if (recursive && proto->protocols) {
+ method_t *m;
+ for (uint32_t i = 0; i < proto->protocols->count; i++) {
protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
- m = _protocol_getMethod_nolock(realProto, sel,
- isRequiredMethod, isInstanceMethod);
+ m = protocol_getMethod_nolock(realProto, sel,
+ isRequiredMethod, isInstanceMethod,
+ true);
if (m) return m;
}
}
- return NULL;
+ return nil;
}
/***********************************************************************
-* _protocol_getMethod
+* protocol_getMethod
* fixme
-* Locking: write-locks runtimeLock
+* Locking: acquires runtimeLock
**********************************************************************/
-PRIVATE_EXTERN Method
-_protocol_getMethod(Protocol *p, SEL sel, BOOL isRequiredMethod, BOOL isInstanceMethod)
+Method
+protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
{
- rwlock_write(&runtimeLock);
- Method result = _protocol_getMethod_nolock(newprotocol(p), sel,
- isRequiredMethod,
- isInstanceMethod);
- rwlock_unlock_write(&runtimeLock);
+ if (!proto) return nil;
+ fixupProtocolIfNeeded(proto);
+
+ rwlock_read(&runtimeLock);
+ method_t *result = protocol_getMethod_nolock(proto, sel,
+ isRequiredMethod,
+ isInstanceMethod,
+ recursive);
+ rwlock_unlock_read(&runtimeLock);
return result;
}
+/***********************************************************************
+* protocol_getMethodTypeEncoding_nolock
+* Return the @encode string for the requested protocol method.
+* Returns nil if the compiler did not emit any extended @encode data.
+* Locking: runtimeLock must be held (read or write) by the caller
+**********************************************************************/
+const char *
+protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
+ bool isRequiredMethod,
+ bool isInstanceMethod)
+{
+ rwlock_assert_locked(&runtimeLock);
+
+ if (!proto) return nil;
+ if (!proto->hasExtendedMethodTypes()) return nil;
+
+ assert(proto->isFixedUp());
+
+ method_t *m =
+ protocol_getMethod_nolock(proto, sel,
+ isRequiredMethod, isInstanceMethod, false);
+ if (m) {
+ uint32_t i = getExtendedTypesIndexForMethod(proto, m,
+ isRequiredMethod,
+ isInstanceMethod);
+ return proto->extendedMethodTypes[i];
+ }
+
+ // No method with that name. Search incorporated protocols.
+ if (proto->protocols) {
+ for (uintptr_t i = 0; i < proto->protocols->count; i++) {
+ const char *enc =
+ protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
+ if (enc) return enc;
+ }
+ }
+
+ return nil;
+}
+
+/***********************************************************************
+* _protocol_getMethodTypeEncoding
+* Return the @encode string for the requested protocol method.
+* Returns nil if the compiler did not emit any extended @encode data.
+* Locking: acquires runtimeLock
+**********************************************************************/
+const char *
+_protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
+ BOOL isRequiredMethod, BOOL isInstanceMethod)
+{
+ protocol_t *proto = newprotocol(proto_gen);
+
+ if (!proto) return nil;
+ fixupProtocolIfNeeded(proto);
+
+ const char *enc;
+ rwlock_read(&runtimeLock);
+ enc = protocol_getMethodTypeEncoding_nolock(proto, sel,
+ isRequiredMethod,
+ isInstanceMethod);
+ rwlock_unlock_read(&runtimeLock);
+ return enc;
+}
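+// Note: where the plain encoding for a method might be "@24@0:8@16", the
+// extended form can also carry class names for object parameters, e.g.
+// something like "@24@0:8@\"NSString\"16" (offsets assume an LP64 target).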
+
+
+/***********************************************************************
+* protocol_t::demangledName
+* Returns the (Swift-demangled) name of the given protocol.
+* Locking: none
+**********************************************************************/
+const char *
+protocol_t::demangledName()
+{
+ assert(size >= offsetof(protocol_t, _demangledName)+sizeof(_demangledName));
+
+ if (! _demangledName) {
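+ // Demangle outside any lock, then publish the result with a compare-and-
+ // swap; if another thread installed a name first, free our copy instead.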
+ char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
+ if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
+ (void**)&_demangledName))
+ {
+ if (de) free(de);
+ }
+ }
+ return _demangledName;
+}
+
/***********************************************************************
* protocol_getName
-* Returns the name of the given protocol.
+* Returns the (Swift-demangled) name of the given protocol.
* Locking: runtimeLock must not be held by the caller
**********************************************************************/
const char *
protocol_getName(Protocol *proto)
{
- return newprotocol(proto)->name;
+ if (!proto) return "nil";
+ else return newprotocol(proto)->demangledName();
}
BOOL isRequiredMethod, BOOL isInstanceMethod)
{
Method m =
- _protocol_getMethod(p, aSel, isRequiredMethod, isInstanceMethod);
+ protocol_getMethod(newprotocol(p), aSel,
+ isRequiredMethod, isInstanceMethod, true);
if (m) return *method_getDescription(m);
- else return (struct objc_method_description){NULL, NULL};
+ else return (struct objc_method_description){nil, nil};
}
/***********************************************************************
-* _protocol_conformsToProtocol_nolock
+* protocol_conformsToProtocol_nolock
* Returns YES if self conforms to other.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
-static BOOL _protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
+static bool
+protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
{
+ rwlock_assert_locked(&runtimeLock);
+
if (!self || !other) {
return NO;
}
- if (0 == strcmp(self->name, other->name)) {
+ // protocols need not be fixed up
+
+ if (0 == strcmp(self->mangledName, other->mangledName)) {
return YES;
}
uintptr_t i;
for (i = 0; i < self->protocols->count; i++) {
protocol_t *proto = remapProtocol(self->protocols->list[i]);
- if (0 == strcmp(other->name, proto->name)) {
+ if (0 == strcmp(other->mangledName, proto->mangledName)) {
return YES;
}
- if (_protocol_conformsToProtocol_nolock(proto, other)) {
+ if (protocol_conformsToProtocol_nolock(proto, other)) {
return YES;
}
}
{
BOOL result;
rwlock_read(&runtimeLock);
- result = _protocol_conformsToProtocol_nolock(newprotocol(self),
- newprotocol(other));
+ result = protocol_conformsToProtocol_nolock(newprotocol(self),
+ newprotocol(other));
rwlock_unlock_read(&runtimeLock);
return result;
}
unsigned int *outCount)
{
protocol_t *proto = newprotocol(p);
- struct objc_method_description *result = NULL;
+ struct objc_method_description *result = nil;
unsigned int count = 0;
if (!proto) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
- rwlock_read(&runtimeLock);
+ fixupProtocolIfNeeded(proto);
- method_list_t *mlist = NULL;
+ rwlock_read(&runtimeLock);
- if (isRequiredMethod) {
- if (isInstanceMethod) {
- mlist = proto->instanceMethods;
- } else {
- mlist = proto->classMethods;
- }
- } else {
- if (isInstanceMethod) {
- mlist = proto->optionalInstanceMethods;
- } else {
- mlist = proto->optionalClassMethods;
- }
- }
+ method_list_t *mlist =
+ getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
if (mlist) {
unsigned int i;
        count = mlist->count;
        result = (struct objc_method_description *)
            calloc(count + 1, sizeof(struct objc_method_description));
for (i = 0; i < count; i++) {
method_t *m = method_list_nth(mlist, i);
- result[i].name = sel_registerName((const char *)m->name);
+ result[i].name = m->name;
result[i].types = (char *)m->types;
}
}
/***********************************************************************
* protocol_getProperty
* fixme
-* Locking: acquires runtimeLock
+* Locking: runtimeLock must be held by the caller
**********************************************************************/
static property_t *
-_protocol_getProperty_nolock(protocol_t *proto, const char *name,
- BOOL isRequiredProperty, BOOL isInstanceProperty)
+protocol_getProperty_nolock(protocol_t *proto, const char *name,
+ bool isRequiredProperty, bool isInstanceProperty)
{
+ rwlock_assert_locked(&runtimeLock);
+
if (!isRequiredProperty || !isInstanceProperty) {
// Only required instance properties are currently supported
- return NULL;
+ return nil;
}
property_list_t *plist;
for (i = 0; i < proto->protocols->count; i++) {
protocol_t *p = remapProtocol(proto->protocols->list[i]);
property_t *prop =
- _protocol_getProperty_nolock(p, name,
- isRequiredProperty,
- isInstanceProperty);
+ protocol_getProperty_nolock(p, name,
+ isRequiredProperty,
+ isInstanceProperty);
if (prop) return prop;
}
}
- return NULL;
+ return nil;
}
objc_property_t protocol_getProperty(Protocol *p, const char *name,
{
property_t *result;
- if (!p || !name) return NULL;
+ if (!p || !name) return nil;
rwlock_read(&runtimeLock);
- result = _protocol_getProperty_nolock(newprotocol(p), name,
- isRequiredProperty,
- isInstanceProperty);
+ result = protocol_getProperty_nolock(newprotocol(p), name,
+ isRequiredProperty,
+ isInstanceProperty);
rwlock_unlock_read(&runtimeLock);
return (objc_property_t)result;
static property_t **
copyPropertyList(property_list_t *plist, unsigned int *outCount)
{
- property_t **result = NULL;
+ property_t **result = nil;
unsigned int count = 0;
if (plist) {
for (i = 0; i < count; i++) {
result[i] = property_list_nth(plist, i);
}
- result[i] = NULL;
+ result[i] = nil;
}
if (outCount) *outCount = count;
objc_property_t *protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
{
- property_t **result = NULL;
+ property_t **result = nil;
if (!proto) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
{
unsigned int count = 0;
- Protocol **result = NULL;
+ Protocol **result = nil;
protocol_t *proto = newprotocol(p);
if (!proto) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
for (i = 0; i < count; i++) {
result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
}
- result[i] = NULL;
+ result[i] = nil;
}
rwlock_unlock_read(&runtimeLock);
* objc_allocateProtocol
* Creates a new protocol. The protocol may not be used until
* objc_registerProtocol() is called.
-* Returns NULL if a protocol with the same name already exists.
+* Returns nil if a protocol with the same name already exists.
* Locking: acquires runtimeLock
**********************************************************************/
Protocol *
{
rwlock_write(&runtimeLock);
- if (NXMapGet(protocols(), name)) {
+ if (getProtocol(name)) {
rwlock_unlock_write(&runtimeLock);
- return NULL;
+ return nil;
}
protocol_t *result = (protocol_t *)_calloc_internal(sizeof(protocol_t), 1);
- extern class_t OBJC_CLASS_$___IncompleteProtocol;
+ extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
- result->isa = cls;
- result->name = _strdup_internal(name);
+ result->initProtocolIsa(cls);
+ result->size = sizeof(protocol_t);
+ // fixme mangle the name if it looks swift-y?
+ result->mangledName = _strdup_internal(name);
// fixme reserve name without installing
rwlock_write(&runtimeLock);
- extern class_t OBJC_CLASS_$___IncompleteProtocol;
+ extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
- extern class_t OBJC_CLASS_$_Protocol;
+ extern objc_class OBJC_CLASS_$_Protocol;
Class cls = (Class)&OBJC_CLASS_$_Protocol;
- if (proto->isa == cls) {
+ if (proto->ISA() == cls) {
_objc_inform("objc_registerProtocol: protocol '%s' was already "
- "registered!", proto->name);
+ "registered!", proto->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
- if (proto->isa != oldcls) {
+ if (proto->ISA() != oldcls) {
_objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
- "with objc_allocateProtocol!", proto->name);
+ "with objc_allocateProtocol!", proto->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
- proto->isa = cls;
+ proto->initProtocolIsa(cls);
- NXMapKeyCopyingInsert(protocols(), proto->name, proto);
+ NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
rwlock_unlock_write(&runtimeLock);
}
protocol_t *proto = newprotocol(proto_gen);
protocol_t *addition = newprotocol(addition_gen);
- extern class_t OBJC_CLASS_$___IncompleteProtocol;
+ extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
if (!proto_gen) return;
rwlock_write(&runtimeLock);
- if (proto->isa != cls) {
+ if (proto->ISA() != cls) {
_objc_inform("protocol_addProtocol: modified protocol '%s' is not "
- "under construction!", proto->name);
+ "under construction!", proto->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
- if (addition->isa == cls) {
+ if (addition->ISA() == cls) {
_objc_inform("protocol_addProtocol: added protocol '%s' is still "
- "under construction!", addition->name);
+ "under construction!", addition->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
* Locking: acquires runtimeLock
**********************************************************************/
static void
-_protocol_addMethod(method_list_t **list, SEL name, const char *types)
+protocol_addMethod_nolock(method_list_t **list, SEL name, const char *types)
{
if (!*list) {
*list = (method_list_t *)
method_t *meth = method_list_nth(*list, (*list)->count++);
meth->name = name;
meth->types = _strdup_internal(types ? types : "");
- meth->imp = NULL;
+ meth->imp = nil;
}
void
{
protocol_t *proto = newprotocol(proto_gen);
- extern class_t OBJC_CLASS_$___IncompleteProtocol;
+ extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
if (!proto_gen) return;
rwlock_write(&runtimeLock);
- if (proto->isa != cls) {
+ if (proto->ISA() != cls) {
_objc_inform("protocol_addMethodDescription: protocol '%s' is not "
- "under construction!", proto->name);
+ "under construction!", proto->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
if (isRequiredMethod && isInstanceMethod) {
- _protocol_addMethod(&proto->instanceMethods, name, types);
+ protocol_addMethod_nolock(&proto->instanceMethods, name, types);
} else if (isRequiredMethod && !isInstanceMethod) {
- _protocol_addMethod(&proto->classMethods, name, types);
+ protocol_addMethod_nolock(&proto->classMethods, name, types);
} else if (!isRequiredMethod && isInstanceMethod) {
- _protocol_addMethod(&proto->optionalInstanceMethods, name, types);
+ protocol_addMethod_nolock(&proto->optionalInstanceMethods, name,types);
} else /* !isRequiredMethod && !isInstanceMethod) */ {
- _protocol_addMethod(&proto->optionalClassMethods, name, types);
+ protocol_addMethod_nolock(&proto->optionalClassMethods, name, types);
}
rwlock_unlock_write(&runtimeLock);
* Locking: acquires runtimeLock
**********************************************************************/
static void
-_protocol_addProperty(property_list_t **plist, const char *name,
- const objc_property_attribute_t *attrs,
- unsigned int count)
+protocol_addProperty_nolock(property_list_t **plist, const char *name,
+ const objc_property_attribute_t *attrs,
+ unsigned int count)
{
if (!*plist) {
*plist = (property_list_t *)
{
protocol_t *proto = newprotocol(proto_gen);
- extern class_t OBJC_CLASS_$___IncompleteProtocol;
+ extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
if (!proto) return;
rwlock_write(&runtimeLock);
- if (proto->isa != cls) {
+ if (proto->ISA() != cls) {
_objc_inform("protocol_addProperty: protocol '%s' is not "
- "under construction!", proto->name);
+ "under construction!", proto->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
if (isRequiredProperty && isInstanceProperty) {
- _protocol_addProperty(&proto->instanceProperties, name, attrs, count);
+ protocol_addProperty_nolock(&proto->instanceProperties, name, attrs, count);
}
//else if (isRequiredProperty && !isInstanceProperty) {
- // _protocol_addProperty(&proto->classProperties, name, attrs, count);
+ // protocol_addProperty_nolock(&proto->classProperties, name, attrs, count);
//} else if (!isRequiredProperty && isInstanceProperty) {
- // _protocol_addProperty(&proto->optionalInstanceProperties, name, attrs, count);
+ // protocol_addProperty_nolock(&proto->optionalInstanceProperties, name, attrs, count);
//} else /* !isRequiredProperty && !isInstanceProperty) */ {
- // _protocol_addProperty(&proto->optionalClassProperties, name, attrs, count);
+ // protocol_addProperty_nolock(&proto->optionalClassProperties, name, attrs, count);
//}
rwlock_unlock_write(&runtimeLock);
realizeAllClasses();
int count;
- class_t *cls;
+ Class cls;
NXHashState state;
NXHashTable *classes = realizedClasses();
int allCount = NXCountHashTable(classes);
while (count < bufferLen &&
NXNextHashState(classes, &state, (void **)&cls))
{
- buffer[count++] = (Class)cls;
+ buffer[count++] = cls;
}
rwlock_unlock_write(&runtimeLock);
* Returns pointers to all classes.
* This requires all classes be realized, which is regretfully non-lazy.
*
-* outCount may be NULL. *outCount is the number of classes returned.
-* If the returned array is not NULL, it is NULL-terminated and must be
+* outCount may be nil. *outCount is the number of classes returned.
+* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks runtimeLock
**********************************************************************/
realizeAllClasses();
- Class *result = NULL;
+ Class *result = nil;
NXHashTable *classes = realizedClasses();
unsigned int count = NXCountHashTable(classes);
if (count > 0) {
- class_t *cls;
+ Class cls;
NXHashState state = NXInitHashState(classes);
result = (Class *)malloc((1+count) * sizeof(Class));
count = 0;
while (NXNextHashState(classes, &state, (void **)&cls)) {
- result[count++] = (Class)cls;
+ result[count++] = cls;
}
- result[count] = NULL;
+ result[count] = nil;
}
rwlock_unlock_write(&runtimeLock);
if (count == 0) {
rwlock_unlock_read(&runtimeLock);
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
result = (Protocol **)calloc(1 + count, sizeof(Protocol *));
result[i++] = proto;
}
- result[i++] = NULL;
+ result[i++] = nil;
assert(i == count+1);
rwlock_unlock_read(&runtimeLock);
/***********************************************************************
* objc_getProtocol
-* Get a protocol by name, or return NULL
+* Get a protocol by name, or return nil
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol *objc_getProtocol(const char *name)
{
rwlock_read(&runtimeLock);
- Protocol *result = (Protocol *)NXMapGet(protocols(), name);
+ Protocol *result = getProtocol(name);
rwlock_unlock_read(&runtimeLock);
return result;
}
* Locking: read-locks runtimeLock
**********************************************************************/
Method *
-class_copyMethodList(Class cls_gen, unsigned int *outCount)
+class_copyMethodList(Class cls, unsigned int *outCount)
{
- class_t *cls = newcls(cls_gen);
unsigned int count = 0;
- Method *result = NULL;
+ Method *result = nil;
if (!cls) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
FOREACH_METHOD_LIST(mlist, cls, {
count += mlist->count;
FOREACH_METHOD_LIST(mlist, cls, {
unsigned int i;
for (i = 0; i < mlist->count; i++) {
- Method aMethod = (Method)method_list_nth(mlist, i);
+ method_t *aMethod = method_list_nth(mlist, i);
if (ignoreSelector(method_getName(aMethod))) {
count--;
continue;
result[m++] = aMethod;
}
});
- result[m] = NULL;
+ result[m] = nil;
}
rwlock_unlock_read(&runtimeLock);
* Locking: read-locks runtimeLock
**********************************************************************/
Ivar *
-class_copyIvarList(Class cls_gen, unsigned int *outCount)
+class_copyIvarList(Class cls, unsigned int *outCount)
{
- class_t *cls = newcls(cls_gen);
const ivar_list_t *ivars;
- Ivar *result = NULL;
+ Ivar *result = nil;
unsigned int count = 0;
unsigned int i;
if (!cls) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
if ((ivars = cls->data()->ro->ivars) && ivars->count) {
result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
for (i = 0; i < ivars->count; i++) {
ivar_t *ivar = ivar_list_nth(ivars, i);
if (!ivar->offset) continue; // anonymous bitfield
- result[count++] = (Ivar)ivar;
+ result[count++] = ivar;
}
- result[count] = NULL;
+ result[count] = nil;
}
rwlock_unlock_read(&runtimeLock);
/***********************************************************************
* class_copyPropertyList. Returns a heap block containing the
-* properties declared in the class, or NULL if the class
+* properties declared in the class, or nil if the class
* declares no properties. Caller must free the block.
* Does not copy any superclass's properties.
* Locking: read-locks runtimeLock
**********************************************************************/
objc_property_t *
-class_copyPropertyList(Class cls_gen, unsigned int *outCount)
+class_copyPropertyList(Class cls, unsigned int *outCount)
{
- class_t *cls = newcls(cls_gen);
chained_property_list *plist;
unsigned int count = 0;
- property_t **result = NULL;
+ property_t **result = nil;
if (!cls) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
for (plist = cls->data()->properties; plist; plist = plist->next) {
count += plist->count;
result[p++] = &plist->list[i];
}
}
- result[p] = NULL;
+ result[p] = nil;
}
rwlock_unlock_read(&runtimeLock);
/***********************************************************************
-* _class_getLoadMethod
+* objc_class::getLoadMethod
* fixme
* Called only from add_class_to_loadable_list.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
-PRIVATE_EXTERN IMP
-_class_getLoadMethod(Class cls_gen)
+IMP
+objc_class::getLoadMethod()
{
rwlock_assert_locked(&runtimeLock);
- class_t *cls = newcls(cls_gen);
const method_list_t *mlist;
uint32_t i;
- assert(isRealized(cls));
- assert(isRealized(cls->isa));
- assert(!isMetaClass(cls));
- assert(isMetaClass(cls->isa));
+ assert(isRealized());
+ assert(ISA()->isRealized());
+ assert(!isMetaClass());
+ assert(ISA()->isMetaClass());
- mlist = cls->isa->data()->ro->baseMethods;
- if (mlist) for (i = 0; i < mlist->count; i++) {
- method_t *m = method_list_nth(mlist, i);
- if (0 == strcmp((const char *)m->name, "load")) {
- return m->imp;
+ mlist = ISA()->data()->ro->baseMethods;
+ if (mlist) {
+ for (i = 0; i < mlist->count; i++) {
+ method_t *m = method_list_nth(mlist, i);
+ const char *name = sel_cname(m->name);
+ if (0 == strcmp(name, "load")) {
+ return m->imp;
+ }
}
}
- return NULL;
+ return nil;
}
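+// Note: +load is read directly from the metaclass's base method list rather
+// than through normal dispatch, so a category's +load does not replace the
+// class's own +load (category +load methods go through _category_getLoadMethod).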
* Returns a category's name.
* Locking: none
**********************************************************************/
-PRIVATE_EXTERN const char *
+const char *
_category_getName(Category cat)
{
- return newcategory(cat)->name;
+ return cat->name;
}
* _category_getClassName
* Returns a category's class's name
* Called only from add_category_to_loadable_list and
-* remove_category_from_loadable_list.
+* remove_category_from_loadable_list for logging purposes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-PRIVATE_EXTERN const char *
+const char *
_category_getClassName(Category cat)
{
rwlock_assert_locked(&runtimeLock);
- // cat->cls may have been remapped
- return getName(remapClass(newcategory(cat)->cls));
+ return remapClass(cat->cls)->nameForLogging();
}
* Called only by call_category_loads.
* Locking: read-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN Class
+Class
_category_getClass(Category cat)
{
rwlock_read(&runtimeLock);
- // cat->cls may have been remapped
- class_t *result = remapClass(newcategory(cat)->cls);
- assert(isRealized(result)); // ok for call_category_loads' usage
+ Class result = remapClass(cat->cls);
+ assert(result->isRealized()); // ok for call_category_loads' usage
rwlock_unlock_read(&runtimeLock);
- return (Class)result;
+ return result;
}
* Called only from add_category_to_loadable_list
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
-PRIVATE_EXTERN IMP
+IMP
_category_getLoadMethod(Category cat)
{
rwlock_assert_locked(&runtimeLock);
const method_list_t *mlist;
uint32_t i;
- mlist = newcategory(cat)->classMethods;
- if (mlist) for (i = 0; i < mlist->count; i++) {
- method_t *m = method_list_nth(mlist, i);
- if (0 == strcmp((const char *)m->name, "load")) {
- return m->imp;
+ mlist = cat->classMethods;
+ if (mlist) {
+ for (i = 0; i < mlist->count; i++) {
+ method_t *m = method_list_nth(mlist, i);
+ const char *name = sel_cname(m->name);
+ if (0 == strcmp(name, "load")) {
+ return m->imp;
+ }
}
}
- return NULL;
+ return nil;
}
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
-class_copyProtocolList(Class cls_gen, unsigned int *outCount)
+class_copyProtocolList(Class cls, unsigned int *outCount)
{
- class_t *cls = newcls(cls_gen);
Protocol **r;
const protocol_list_t **p;
unsigned int count = 0;
unsigned int i;
- Protocol **result = NULL;
+ Protocol **result = nil;
if (!cls) {
if (outCount) *outCount = 0;
- return NULL;
+ return nil;
}
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
for (p = cls->data()->protocols; p && *p; p++) {
count += (uint32_t)(*p)->count;
*r++ = (Protocol *)remapProtocol((*p)->list[i]);
}
}
- *r++ = NULL;
+ *r++ = nil;
}
rwlock_unlock_read(&runtimeLock);
/***********************************************************************
* _objc_copyClassNamesForImage
* fixme
-* Locking: read-locks runtimeLock
+* Locking: write-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN const char **
+const char **
_objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
{
size_t count, i, shift;
- class_t **classlist;
+ classref_t *classlist;
const char **names;
- rwlock_read(&runtimeLock);
+ // Need to write-lock in case demangledName() needs to realize a class.
+ rwlock_write(&runtimeLock);
classlist = _getObjc2ClassList(hi, &count);
names = (const char **)malloc((count+1) * sizeof(const char *));
shift = 0;
for (i = 0; i < count; i++) {
- class_t *cls = remapClass(classlist[i]);
+ Class cls = remapClass(classlist[i]);
if (cls) {
- names[i-shift] = getName(classlist[i]);
+ names[i-shift] = cls->demangledName(true/*realize*/);
} else {
shift++; // ignored weak-linked class
}
}
count -= shift;
- names[count] = NULL;
+ names[count] = nil;
- rwlock_unlock_read(&runtimeLock);
+ rwlock_unlock_write(&runtimeLock);
if (outCount) *outCount = (unsigned int)count;
return names;
}
-/***********************************************************************
-* _class_getCache
-* fixme
-* Locking: none
-**********************************************************************/
-PRIVATE_EXTERN Cache
-_class_getCache(Class cls)
-{
- return newcls(cls)->cache;
-}
-
-
-/***********************************************************************
-* _class_getInstanceSize
-* Uses alignedInstanceSize() to ensure that
-* obj + class_getInstanceSize(obj->isa) == object_getIndexedIvars(obj)
-* Locking: none
-**********************************************************************/
-PRIVATE_EXTERN size_t
-_class_getInstanceSize(Class cls)
-{
- if (!cls) return 0;
- return alignedInstanceSize(newcls(cls));
-}
-
-static uint32_t
-unalignedInstanceSize(class_t *cls)
-{
- assert(cls);
- assert(isRealized(cls));
- return (uint32_t)cls->data()->ro->instanceSize;
-}
-
-static uint32_t
-alignedInstanceSize(class_t *cls)
-{
- assert(cls);
- assert(isRealized(cls));
- // fixme rdar://5278267
- return (uint32_t)((unalignedInstanceSize(cls) + WORD_MASK) & ~WORD_MASK);
-}
-
/***********************************************************************
* _class_getInstanceStart
* Uses alignedInstanceStart() to ensure that ARR layout strings are
* interpreted relative to the class's word-aligned instance start.
**********************************************************************/
static uint32_t
-alignedInstanceStart(class_t *cls)
+alignedInstanceStart(Class cls)
{
assert(cls);
- assert(isRealized(cls));
- return (uint32_t)((cls->data()->ro->instanceStart + WORD_MASK) & ~WORD_MASK);
+ assert(cls->isRealized());
+ return (uint32_t)word_align(cls->data()->ro->instanceStart);
}
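+// word_align() rounds up to the pointer-size boundary, e.g. an instanceStart
+// of 13 becomes 16 on LP64 targets.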
-PRIVATE_EXTERN
-uint32_t _class_getInstanceStart(Class cls_gen) {
- class_t *cls = newcls(cls_gen);
+uint32_t _class_getInstanceStart(Class cls) {
return alignedInstanceStart(cls);
}
/***********************************************************************
-* class_getVersion
-* fixme
-* Locking: none
+* saveTemporaryString
+* Save a string in a thread-local FIFO buffer.
+* This is suitable for temporary strings generated for logging purposes.
**********************************************************************/
-int
-class_getVersion(Class cls)
+static void
+saveTemporaryString(char *str)
{
- if (!cls) return 0;
- assert(isRealized(newcls(cls)));
- return newcls(cls)->data()->version;
+ // Fixed-size FIFO. We free the first string, shift
+ // the rest, and add the new string to the end.
+ _objc_pthread_data *data = _objc_fetch_pthread_data(true);
+ if (data->printableNames[0]) {
+ free(data->printableNames[0]);
+ }
+ int last = countof(data->printableNames) - 1;
+ for (int i = 0; i < last; i++) {
+ data->printableNames[i] = data->printableNames[i+1];
+ }
+ data->printableNames[last] = str;
}
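+// A string saved here stays valid only until countof(data->printableNames)
+// more strings are saved on the same thread, so callers of nameForLogging()
+// should print or copy the result promptly.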
/***********************************************************************
-* _class_setCache
-* fixme
+* objc_class::nameForLogging
+* Returns the class's name, suitable for display.
+* The returned memory is TEMPORARY. Print it or copy it immediately.
* Locking: none
**********************************************************************/
-PRIVATE_EXTERN void
-_class_setCache(Class cls, Cache cache)
+const char *
+objc_class::nameForLogging()
{
- newcls(cls)->cache = cache;
+ // Handle the easy case directly.
+ if (isRealized() || isFuture()) {
+ if (data()->demangledName) return data()->demangledName;
+ }
+
+ char *result;
+
+ const char *name = mangledName();
+ char *de = copySwiftV1DemangledName(name);
+ if (de) result = de;
+ else result = strdup(name);
+
+ saveTemporaryString(result);
+ return result;
}
/***********************************************************************
-* class_setVersion
-* fixme
-* Locking: none
+* objc_class::demangledName
+* If realize=false, the class must already be realized or future.
+* Locking: If realize=true, runtimeLock must be held for writing by the caller.
**********************************************************************/
-void
-class_setVersion(Class cls, int version)
-{
- if (!cls) return;
- assert(isRealized(newcls(cls)));
- newcls(cls)->data()->version = version;
+const char *
+objc_class::demangledName(bool realize)
+{
+ // Return previously demangled name if available.
+ if (isRealized() || isFuture()) {
+ if (data()->demangledName) return data()->demangledName;
+ }
+
+ // Try demangling the mangled name.
+ const char *mangled = mangledName();
+ char *de = copySwiftV1DemangledName(mangled);
+ if (isRealized() || isFuture()) {
+ // Class is already realized or future.
+ // Save demangling result in rw data.
+ // We may not own rwlock for writing so use an atomic operation instead.
+ if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
+ (void**)&data()->demangledName))
+ {
+ if (de) free(de);
+ }
+ return data()->demangledName;
+ }
+
+ // Class is not yet realized.
+ if (!de) {
+ // Name is not mangled. Return it without caching.
+ return mangled;
+ }
+
+ // Class is not yet realized and name is mangled. Realize the class.
+ // Only objc_copyClassNamesForImage() should get here.
+ rwlock_assert_writing(&runtimeLock);
+ assert(realize);
+ if (realize) {
+ realizeClass((Class)this);
+ data()->demangledName = de;
+ return de;
+ } else {
+ return de; // bug - just leak
+ }
}
/***********************************************************************
-* _class_getName
+* class_getName
* fixme
* Locking: acquires runtimeLock
**********************************************************************/
-PRIVATE_EXTERN const char *_class_getName(Class cls)
+const char *class_getName(Class cls)
{
if (!cls) return "nil";
- // fixme hack rwlock_write(&runtimeLock);
- const char *name = getName(newcls(cls));
- // rwlock_unlock_write(&runtimeLock);
- return name;
+ assert(cls->isRealized() || cls->isFuture());
+ return cls->demangledName();
}
/***********************************************************************
-* getName
+* class_getVersion
* fixme
-* Locking: runtimeLock must be held by the caller
+* Locking: none
**********************************************************************/
-static const char *
-getName(class_t *cls)
+int
+class_getVersion(Class cls)
{
- // fixme hack rwlock_assert_writing(&runtimeLock);
- assert(cls);
+ if (!cls) return 0;
+ assert(cls->isRealized());
+ return cls->data()->version;
+}
- if (isRealized(cls)) {
- return cls->data()->ro->name;
- } else {
- return ((const class_ro_t *)cls->data())->name;
- }
+
+/***********************************************************************
+* class_setVersion
+* fixme
+* Locking: none
+**********************************************************************/
+void
+class_setVersion(Class cls, int version)
+{
+ if (!cls) return;
+ assert(cls->isRealized());
+ cls->data()->version = version;
}
+
static method_t *findMethodInSortedMethodList(SEL key, const method_list_t *list)
{
const method_t * const first = &list->first;
}
}
- return NULL;
+ return nil;
}
/***********************************************************************
}
}
#endif
-
- return NULL;
-}
-
-static method_t *
-getMethodNoSuper_nolock(class_t *cls, SEL sel)
-{
- rwlock_assert_locked(&runtimeLock);
-
- assert(isRealized(cls));
- // fixme nil cls?
- // fixme NULL sel?
-
- FOREACH_METHOD_LIST(mlist, cls, {
- method_t *m = search_method_list(mlist, sel);
- if (m) return m;
- });
-
- return NULL;
-}
-
-
-/***********************************************************************
-* _class_getMethodNoSuper
-* fixme
-* Locking: read-locks runtimeLock
-**********************************************************************/
-PRIVATE_EXTERN Method
-_class_getMethodNoSuper(Class cls, SEL sel)
-{
- rwlock_read(&runtimeLock);
- Method result = (Method)getMethodNoSuper_nolock(newcls(cls), sel);
- rwlock_unlock_read(&runtimeLock);
- return result;
+
+ return nil;
}
-/***********************************************************************
-* _class_getMethodNoSuper
-* For use inside lockForMethodLookup() only.
-* Locking: read-locks runtimeLock
-**********************************************************************/
-PRIVATE_EXTERN Method
-_class_getMethodNoSuper_nolock(Class cls, SEL sel)
+static method_t *
+getMethodNoSuper_nolock(Class cls, SEL sel)
{
- return (Method)getMethodNoSuper_nolock(newcls(cls), sel);
+ rwlock_assert_locked(&runtimeLock);
+
+ assert(cls->isRealized());
+ // fixme nil cls?
+ // fixme nil sel?
+
+ FOREACH_METHOD_LIST(mlist, cls, {
+ method_t *m = search_method_list(mlist, sel);
+ if (m) return m;
+ });
+
+ return nil;
}
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static method_t *
-getMethod_nolock(class_t *cls, SEL sel)
+getMethod_nolock(Class cls, SEL sel)
{
- method_t *m = NULL;
+ method_t *m = nil;
rwlock_assert_locked(&runtimeLock);
// fixme nil cls?
- // fixme NULL sel?
+ // fixme nil sel?
- assert(isRealized(cls));
+ assert(cls->isRealized());
- while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == NULL) {
- cls = getSuperclass(cls);
+ while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) {
+ cls = cls->superclass;
}
return m;
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN Method _class_getMethod(Class cls, SEL sel)
+static Method _class_getMethod(Class cls, SEL sel)
{
- Method m;
+ method_t *m;
rwlock_read(&runtimeLock);
- m = (Method)getMethod_nolock(newcls(cls), sel);
+ m = getMethod_nolock(cls, sel);
rwlock_unlock_read(&runtimeLock);
return m;
}
/***********************************************************************
-* ABI-specific lookUpMethod helpers.
-* Locking: read- and write-locks runtimeLock.
+* class_getInstanceMethod. Return the instance method for the
+* specified class and selector.
**********************************************************************/
-PRIVATE_EXTERN void lockForMethodLookup(void)
+Method class_getInstanceMethod(Class cls, SEL sel)
{
- rwlock_read(&runtimeLock);
+ if (!cls || !sel) return nil;
+
+ // This deliberately avoids +initialize because it historically did so.
+
+ // This implementation is a bit weird because it's the only place that
+ // wants a Method instead of an IMP.
+
+#warning fixme build and search caches
+
+ // Search method lists, try method resolver, etc.
+ lookUpImpOrNil(cls, sel, nil,
+ NO/*initialize*/, NO/*cache*/, YES/*resolver*/);
+
+#warning fixme build and search caches
+
+ return _class_getMethod(cls, sel);
}
-PRIVATE_EXTERN void unlockForMethodLookup(void)
+
+
+/***********************************************************************
+* log_and_fill_cache
+* Log this method call. If the logger permits it, fill the method cache.
+* cls is the class whose cache should be filled.
+* implementer is the class that owns the implementation in question.
+**********************************************************************/
+static void
+log_and_fill_cache(Class cls, Class implementer, IMP imp, SEL sel)
{
- rwlock_unlock_read(&runtimeLock);
+#if SUPPORT_MESSAGE_LOGGING
+ if (objcMsgLogEnabled) {
+ bool cacheIt = logMessageSend(implementer->isMetaClass(),
+ cls->nameForLogging(),
+ implementer->nameForLogging(),
+ sel);
+ if (!cacheIt) return;
+ }
+#endif
+ cache_fill (cls, sel, imp);
}
-PRIVATE_EXTERN IMP prepareForMethodLookup(Class cls, SEL sel, BOOL init)
+
+/***********************************************************************
+* _class_lookupMethodAndLoadCache.
+* Method lookup for dispatchers ONLY. OTHER CODE SHOULD USE
+* lookUpImpOrForward() or lookUpImpOrNil().
+* This lookup avoids optimistic cache scan because the dispatcher
+* already tried that.
+**********************************************************************/
+IMP _class_lookupMethodAndLoadCache3(id obj, SEL sel, Class cls)
+{
+ return lookUpImpOrForward(cls, sel, obj,
+ YES/*initialize*/, NO/*cache*/, YES/*resolver*/);
+}
+
+
+/***********************************************************************
+* lookUpImpOrForward.
+* The standard IMP lookup.
+* initialize==NO tries to avoid +initialize (but sometimes fails)
+* cache==NO skips optimistic unlocked lookup (but uses cache elsewhere)
+* Most callers should use initialize==YES and cache==YES.
+* inst is an instance of cls or a subclass thereof, or nil if none is known.
+* If cls is an un-initialized metaclass then a non-nil inst is faster.
+* May return _objc_msgForward_impcache. IMPs destined for external use
+* must be converted to _objc_msgForward or _objc_msgForward_stret.
+* If you don't want forwarding at all, use lookUpImpOrNil() instead.
+**********************************************************************/
+IMP lookUpImpOrForward(Class cls, SEL sel, id inst,
+ bool initialize, bool cache, bool resolver)
{
+ Class curClass;
+ IMP imp = nil;
+ Method meth;
+ bool triedResolver = NO;
+
rwlock_assert_unlocked(&runtimeLock);
- if (!isRealized(newcls(cls))) {
+ // Optimistic cache lookup
+ if (cache) {
+ imp = cache_getImp(cls, sel);
+ if (imp) return imp;
+ }
+
+ if (!cls->isRealized()) {
rwlock_write(&runtimeLock);
- realizeClass(newcls(cls));
+ realizeClass(cls);
rwlock_unlock_write(&runtimeLock);
}
- if (init && !_class_isInitialized(cls)) {
- _class_initialize (cls);
+ if (initialize && !cls->isInitialized()) {
+ _class_initialize (_class_getNonMetaClass(cls, inst));
// If sel == initialize, _class_initialize will send +initialize and
// then the messenger will send +initialize again after this
// procedure finishes. Of course, if this is not being called
// from the messenger then it won't happen. 2778172
}
- return NULL;
+ // The lock is held to make method-lookup + cache-fill atomic
+ // with respect to method addition. Otherwise, a category could
+ // be added but ignored indefinitely because the cache was re-filled
+ // with the old value after the cache flush on behalf of the category.
+ retry:
+ rwlock_read(&runtimeLock);
+
+ // Ignore GC selectors
+ if (ignoreSelector(sel)) {
+ imp = _objc_ignored_method;
+ cache_fill(cls, sel, imp);
+ goto done;
+ }
+
+ // Try this class's cache.
+
+ imp = cache_getImp(cls, sel);
+ if (imp) goto done;
+
+ // Try this class's method lists.
+
+ meth = getMethodNoSuper_nolock(cls, sel);
+ if (meth) {
+ log_and_fill_cache(cls, cls, meth->imp, sel);
+ imp = meth->imp;
+ goto done;
+ }
+
+ // Try superclass caches and method lists.
+
+ curClass = cls;
+ while ((curClass = curClass->superclass)) {
+ // Superclass cache.
+ imp = cache_getImp(curClass, sel);
+ if (imp) {
+ if (imp != (IMP)_objc_msgForward_impcache) {
+ // Found the method in a superclass. Cache it in this class.
+ log_and_fill_cache(cls, curClass, imp, sel);
+ goto done;
+ }
+ else {
+ // Found a forward:: entry in a superclass.
+ // Stop searching, but don't cache yet; call method
+ // resolver for this class first.
+ break;
+ }
+ }
+
+ // Superclass method list.
+ meth = getMethodNoSuper_nolock(curClass, sel);
+ if (meth) {
+ log_and_fill_cache(cls, curClass, meth->imp, sel);
+ imp = meth->imp;
+ goto done;
+ }
+ }
+
+ // No implementation found. Try method resolver once.
+
+ if (resolver && !triedResolver) {
+ rwlock_unlock_read(&runtimeLock);
+ _class_resolveMethod(cls, sel, inst);
+ // Don't cache the result; we don't hold the lock so it may have
+ // changed already. Re-do the search from scratch instead.
+ triedResolver = YES;
+ goto retry;
+ }
+
+ // No implementation found, and method resolver didn't help.
+ // Use forwarding.
+
+ imp = (IMP)_objc_msgForward_impcache;
+ cache_fill(cls, sel, imp);
+
+ done:
+ rwlock_unlock_read(&runtimeLock);
+
+ // paranoia: look for ignored selectors with non-ignored implementations
+ assert(!(ignoreSelector(sel) && imp != (IMP)&_objc_ignored_method));
+
+ // paranoia: never let uncached leak out
+ assert(imp != _objc_msgSend_uncached_impcache);
+
+ return imp;
+}
+
+
+/***********************************************************************
+* lookUpImpOrNil.
+* Like lookUpImpOrForward, but returns nil instead of _objc_msgForward_impcache
+**********************************************************************/
+IMP lookUpImpOrNil(Class cls, SEL sel, id inst,
+ bool initialize, bool cache, bool resolver)
+{
+ IMP imp = lookUpImpOrForward(cls, sel, inst, initialize, cache, resolver);
+ if (imp == _objc_msgForward_impcache) return nil;
+ else return imp;
+}
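/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): how other
* runtime code might use lookUpImpOrNil() to test for an implementation
* without triggering forwarding. The helper name classImplementsSelector
* is hypothetical.
**********************************************************************/
static bool classImplementsSelector(Class cls, SEL sel)
{
    // initialize==NO avoids forcing +initialize just to answer the query.
    // cache==YES allows the optimistic unlocked cache probe.
    // resolver==YES gives +resolveInstanceMethod: a chance to add an IMP.
    return lookUpImpOrNil(cls, sel, nil, NO, YES, YES) != nil;
}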
+
+
+/***********************************************************************
+* lookupMethodInClassAndLoadCache.
+* Like _class_lookupMethodAndLoadCache, but does not search superclasses.
+* Caches and returns _objc_msgForward_impcache if the method is not found in the class.
+**********************************************************************/
+IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
+{
+ Method meth;
+ IMP imp;
+
+ // fixme this is incomplete - no resolver, +initialize, GC -
+ // but it's only used for .cxx_construct/destruct so we don't care
+ assert(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);
+
+ // Search cache first.
+ imp = cache_getImp(cls, sel);
+ if (imp) return imp;
+
+ // Cache miss. Search method list.
+
+ rwlock_read(&runtimeLock);
+
+ meth = getMethodNoSuper_nolock(cls, sel);
+
+ if (meth) {
+ // Hit in method list. Cache it.
+ cache_fill(cls, sel, meth->imp);
+ rwlock_unlock_read(&runtimeLock);
+ return meth->imp;
+ } else {
+ // Miss in method list. Cache objc_msgForward.
+ cache_fill(cls, sel, _objc_msgForward_impcache);
+ rwlock_unlock_read(&runtimeLock);
+ return _objc_msgForward_impcache;
+ }
}
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
-objc_property_t class_getProperty(Class cls_gen, const char *name)
+objc_property_t class_getProperty(Class cls, const char *name)
{
- property_t *result = NULL;
+ property_t *result = nil;
chained_property_list *plist;
- class_t *cls = newcls(cls_gen);
- if (!cls || !name) return NULL;
+ if (!cls || !name) return nil;
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
- for ( ; cls; cls = getSuperclass(cls)) {
+ for ( ; cls; cls = cls->superclass) {
for (plist = cls->data()->properties; plist; plist = plist->next) {
uint32_t i;
for (i = 0; i < plist->count; i++) {
/***********************************************************************
* Locking: fixme
**********************************************************************/
-PRIVATE_EXTERN BOOL _class_isMetaClass(Class cls)
-{
- if (!cls) return NO;
- return isMetaClass(newcls(cls));
-}
-
-static BOOL
-isMetaClass(class_t *cls)
-{
- assert(cls);
- assert(isRealized(cls));
- return (cls->data()->ro->flags & RO_META) ? YES : NO;
-}
-
-
-PRIVATE_EXTERN Class _class_getMeta(Class cls)
-{
- assert(cls);
- if (isMetaClass(newcls(cls))) return cls;
- else return ((id)cls)->isa;
-}
Class gdb_class_getClass(Class cls)
{
- const char *className = getName(newcls(cls));
+ const char *className = cls->mangledName();
if(!className || !strlen(className)) return Nil;
Class rCls = look_up_class(className, NO, NO);
return rCls;
Class gdb_object_getClass(id obj)
{
- Class cls = _object_getClass(obj);
- return gdb_class_getClass(cls);
+ if (!obj) return nil;
+ return gdb_class_getClass(obj->getIsa());
}
-BOOL gdb_objc_isRuntimeLocked()
-{
- if (rwlock_try_write(&runtimeLock)) {
- rwlock_unlock_write(&runtimeLock);
- } else
- return YES;
-
- if (mutex_try_lock(&cacheUpdateLock)) {
- mutex_unlock(&cacheUpdateLock);
- } else
- return YES;
-
- return NO;
-}
/***********************************************************************
-* Locking: fixme
+* Locking: write-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_isInitializing(Class cls_gen)
+void
+objc_class::setInitialized()
{
- class_t *cls = newcls(_class_getMeta(cls_gen));
- return (cls->data()->flags & RW_INITIALIZING) ? YES : NO;
-}
+ Class metacls;
+ Class cls;
+ assert(!isMetaClass());
-/***********************************************************************
-* Locking: fixme
-**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_isInitialized(Class cls_gen)
-{
- class_t *cls = newcls(_class_getMeta(cls_gen));
- return (cls->data()->flags & RW_INITIALIZED) ? YES : NO;
-}
-
+ cls = (Class)this;
+ metacls = cls->ISA();
-/***********************************************************************
-* Locking: fixme
-**********************************************************************/
-PRIVATE_EXTERN void
-_class_setInitializing(Class cls_gen)
-{
- class_t *cls = newcls(_class_getMeta(cls_gen));
- changeInfo(cls, RW_INITIALIZING, 0);
-}
+ rwlock_read(&runtimeLock);
+ // Scan metaclass for custom AWZ.
+ // Scan metaclass for custom RR.
+ // Scan class for custom RR.
+ // Also print custom RR/AWZ because we probably haven't done it yet.
-/***********************************************************************
-* Locking: write-locks runtimeLock
-**********************************************************************/
-PRIVATE_EXTERN void
-_class_setInitialized(Class cls_gen)
-{
+ // Special cases:
+ // GC's RR and AWZ are never default.
+ // NSObject AWZ class methods are default.
+ // NSObject RR instance methods are default.
+ // updateCustomRR_AWZ() also knows these special cases.
+ // attachMethodLists() also knows these special cases.
- class_t *metacls;
- class_t *cls;
+ bool inherited;
+ bool metaCustomAWZ = NO;
+ if (UseGC) {
+ // GC is always custom AWZ
+ metaCustomAWZ = YES;
+ inherited = NO;
+ }
+ else if (MetaclassNSObjectAWZSwizzled) {
+ // Somebody already swizzled NSObject's methods
+ metaCustomAWZ = YES;
+ inherited = NO;
+ }
+ else if (metacls == classNSObject()->ISA()) {
+ // NSObject's metaclass AWZ is default, but we still need to check categories
+ FOREACH_CATEGORY_METHOD_LIST(mlist, metacls, {
+ if (methodListImplementsAWZ(mlist)) {
+ metaCustomAWZ = YES;
+ inherited = NO;
+ break;
+ }
+ });
+ }
+ else if (metacls->superclass->hasCustomAWZ()) {
+ // Superclass is custom AWZ, therefore we are too.
+ metaCustomAWZ = YES;
+ inherited = YES;
+ }
+ else {
+ // Not metaclass NSObject.
+ FOREACH_METHOD_LIST(mlist, metacls, {
+ if (methodListImplementsAWZ(mlist)) {
+ metaCustomAWZ = YES;
+ inherited = NO;
+ break;
+ }
+ });
+ }
+ if (!metaCustomAWZ) metacls->setHasDefaultAWZ();
- rwlock_write(&runtimeLock);
- metacls = newcls(_class_getMeta(cls_gen));
- cls = getNonMetaClass(metacls);
+ if (PrintCustomAWZ && metaCustomAWZ) metacls->printCustomAWZ(inherited);
+ // metacls->printCustomRR();
- // Update vtables (initially postponed pending +initialize completion)
- // Do cls first because root metacls is a subclass of root cls
- updateVtable(cls, YES);
- updateVtable(metacls, YES);
- rwlock_unlock_write(&runtimeLock);
+ bool clsCustomRR = NO;
+ if (UseGC) {
+ // GC is always custom RR
+ clsCustomRR = YES;
+ inherited = NO;
+ }
+ else if (ClassNSObjectRRSwizzled) {
+ // Somebody already swizzled NSObject's methods
+ clsCustomRR = YES;
+ inherited = NO;
+ }
+ else if (cls == classNSObject()) {
+ // NSObject's RR is default, but we still need to check categories
+ FOREACH_CATEGORY_METHOD_LIST(mlist, cls, {
+ if (methodListImplementsRR(mlist)) {
+ clsCustomRR = YES;
+ inherited = NO;
+ break;
+ }
+ });
+ }
+ else if (!cls->superclass) {
+ // Custom root class
+ clsCustomRR = YES;
+ inherited = NO;
+ }
+ else if (cls->superclass->hasCustomRR()) {
+ // Superclass is custom RR, therefore we are too.
+ clsCustomRR = YES;
+ inherited = YES;
+ }
+ else {
+ // Not class NSObject.
+ FOREACH_METHOD_LIST(mlist, cls, {
+ if (methodListImplementsRR(mlist)) {
+ clsCustomRR = YES;
+ inherited = NO;
+ break;
+ }
+ });
+ }
+ if (!clsCustomRR) cls->setHasDefaultRR();
- changeInfo(metacls, RW_INITIALIZED, RW_INITIALIZING);
-}
+ // cls->printCustomAWZ();
+ if (PrintCustomRR && clsCustomRR) cls->printCustomRR(inherited);
+ // Update the +initialize flags.
+ // Do this last.
+ metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
-/***********************************************************************
-* Locking: fixme
-**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_shouldGrowCache(Class cls)
-{
- return YES; // fixme good or bad for memory use?
+ rwlock_unlock_read(&runtimeLock);
}
/***********************************************************************
-* Locking: fixme
-**********************************************************************/
-PRIVATE_EXTERN void
-_class_setGrowCache(Class cls, BOOL grow)
+ * _class_usesAutomaticRetainRelease
+ * Returns YES if class was compiled with -fobjc-arc
+ **********************************************************************/
+BOOL _class_usesAutomaticRetainRelease(Class cls)
{
- // fixme good or bad for memory use?
+ return (cls->data()->ro->flags & RO_IS_ARR) ? YES : NO;
}
/***********************************************************************
-* _class_isLoadable
-* fixme
-* Locking: none
+* Return YES if sel is used by retain/release implementors
**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_isLoadable(Class cls)
+static bool
+isRRSelector(SEL sel)
{
- assert(isRealized(newcls(cls)));
- return YES; // any class registered for +load is definitely loadable
+ return (sel == SEL_retain || sel == SEL_release ||
+ sel == SEL_autorelease || sel == SEL_retainCount ||
+ sel == SEL_tryRetain || sel == SEL_retainWeakReference ||
+ sel == SEL_isDeallocating || sel == SEL_allowsWeakReference);
}
/***********************************************************************
-* Locking: fixme
+* Return YES if mlist implements one of the isRRSelector() methods
**********************************************************************/
-static BOOL
-hasCxxStructors(class_t *cls)
-{
- // this DOES check superclasses too, because addSubclass()
- // propagates the flag from the superclass.
- assert(isRealized(cls));
- return (cls->data()->flags & RW_HAS_CXX_STRUCTORS) ? YES : NO;
-}
-
-PRIVATE_EXTERN BOOL
-_class_hasCxxStructors(Class cls)
+static bool
+methodListImplementsRR(const method_list_t *mlist)
{
- return hasCxxStructors(newcls(cls));
+ return (search_method_list(mlist, SEL_retain) ||
+ search_method_list(mlist, SEL_release) ||
+ search_method_list(mlist, SEL_autorelease) ||
+ search_method_list(mlist, SEL_retainCount) ||
+ search_method_list(mlist, SEL_tryRetain) ||
+ search_method_list(mlist, SEL_isDeallocating) ||
+ search_method_list(mlist, SEL_retainWeakReference) ||
+ search_method_list(mlist, SEL_allowsWeakReference));
}
/***********************************************************************
-* Locking: fixme
+* Return YES if sel is used by alloc or allocWithZone implementors
**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_shouldFinalizeOnMainThread(Class cls)
+static bool
+isAWZSelector(SEL sel)
{
- assert(isRealized(newcls(cls)));
- return (newcls(cls)->data()->flags & RW_FINALIZE_ON_MAIN_THREAD) ? YES : NO;
+ return (sel == SEL_allocWithZone || sel == SEL_alloc);
}
/***********************************************************************
-* Locking: fixme
+* Return YES if mlist implements one of the isAWZSelector() methods
**********************************************************************/
-PRIVATE_EXTERN void
-_class_setFinalizeOnMainThread(Class cls)
+static bool
+methodListImplementsAWZ(const method_list_t *mlist)
{
- assert(isRealized(newcls(cls)));
- changeInfo(newcls(cls), RW_FINALIZE_ON_MAIN_THREAD, 0);
+ return (search_method_list(mlist, SEL_allocWithZone) ||
+ search_method_list(mlist, SEL_alloc));
}
-/***********************************************************************
-* _class_instancesHaveAssociatedObjects
-* May manipulate unrealized future classes in the CF-bridged case.
-**********************************************************************/
-PRIVATE_EXTERN BOOL
-_class_instancesHaveAssociatedObjects(Class cls_gen)
+void
+objc_class::printCustomRR(bool inherited)
{
- class_t *cls = newcls(cls_gen);
- assert(isFuture(cls) || isRealized(cls));
- return (cls->data()->flags & RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS) ? YES : NO;
+ assert(PrintCustomRR);
+ assert(hasCustomRR());
+ _objc_inform("CUSTOM RR: %s%s%s", nameForLogging(),
+ isMetaClass() ? " (meta)" : "",
+ inherited ? " (inherited)" : "");
}
-
-/***********************************************************************
-* _class_setInstancesHaveAssociatedObjects
-* May manipulate unrealized future classes in the CF-bridged case.
-**********************************************************************/
-PRIVATE_EXTERN void
-_class_setInstancesHaveAssociatedObjects(Class cls_gen)
+void
+objc_class::printCustomAWZ(bool inherited)
{
- class_t *cls = newcls(cls_gen);
- assert(isFuture(cls) || isRealized(cls));
- changeInfo(cls, RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS, 0);
+ assert(PrintCustomAWZ);
+ assert(hasCustomAWZ());
+ _objc_inform("CUSTOM AWZ: %s%s%s", nameForLogging(),
+ isMetaClass() ? " (meta)" : "",
+ inherited ? " (inherited)" : "");
}
-
-/***********************************************************************
- * _class_usesAutomaticRetainRelease
- * Returns YES if class was compiled with -fobjc-arr
- **********************************************************************/
-BOOL _class_usesAutomaticRetainRelease(Class cls_gen)
+void
+objc_class::printRequiresRawIsa(bool inherited)
{
- class_t *cls = newcls(cls_gen);
- return (cls->data()->ro->flags & RO_IS_ARR) ? YES : NO;
+ assert(PrintRawIsa);
+ assert(requiresRawIsa());
+ _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
+ isMetaClass() ? " (meta)" : "",
+ inherited ? " (inherited)" : "");
}
/***********************************************************************
-* Return YES if sel is used by retain/release implementors
+* Mark this class and all of its subclasses as implementors or
+* inheritors of custom RR (retain/release/autorelease/retainCount)
**********************************************************************/
-static BOOL isRRSelector(SEL sel)
+void objc_class::setHasCustomRR(bool inherited)
{
- return (sel == SEL_retain || sel == SEL_release ||
- sel == SEL_autorelease || sel == SEL_retainCount) ? YES : NO;
-}
+ Class cls = (Class)this;
+ rwlock_assert_writing(&runtimeLock);
+
+ if (hasCustomRR()) return;
+
+ foreach_realized_class_and_subclass(cls, ^(Class c){
+ if (c != cls && !c->isInitialized()) {
+ // Subclass not yet initialized. Wait for setInitialized() to do it
+ // fixme short circuit recursion?
+ return;
+ }
+ if (c->hasCustomRR()) {
+ // fixme short circuit recursion?
+ return;
+ }
+ c->bits.setHasCustomRR();
+
+ if (PrintCustomRR) c->printCustomRR(inherited || c != cls);
+ });
+}
/***********************************************************************
* Mark this class and all of its subclasses as implementors or
-* inheritors of custom RR (retain/release/autorelease/retainCount)
+* inheritors of custom alloc/allocWithZone:
**********************************************************************/
-void class_t::setHasCustomRR()
+void objc_class::setHasCustomAWZ(bool inherited)
{
+ Class cls = (Class)this;
rwlock_assert_writing(&runtimeLock);
- if (hasCustomRR()) return;
+ if (hasCustomAWZ()) return;
- FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, this, {
- // rdar://8955342 c->data_NEVER_USE |= 1UL;
- c->data()->flags |= RW_HAS_CUSTOM_RR;
+ foreach_realized_class_and_subclass(cls, ^(Class c){
+ if (c != cls && !c->isInitialized()) {
+ // Subclass not yet initialized. Wait for setInitialized() to do it
+ // fixme short circuit recursion?
+ return;
+ }
+ if (c->hasCustomAWZ()) {
+ // fixme short circuit recursion?
+ return;
+ }
+
+ c->bits.setHasCustomAWZ();
+
+ if (PrintCustomAWZ) c->printCustomAWZ(inherited || c != cls);
});
}
/***********************************************************************
-* Unmark custom RR. Not recursive. Almost never used.
+* Mark this class and all of its subclasses as requiring raw isa pointers
**********************************************************************/
-void class_t::unsetHasCustomRR()
+void objc_class::setRequiresRawIsa(bool inherited)
{
+ Class cls = (Class)this;
rwlock_assert_writing(&runtimeLock);
- this->data_NEVER_USE &= ~1UL;
+ if (requiresRawIsa()) return;
+
+ foreach_realized_class_and_subclass(cls, ^(Class c){
+ if (c->isInitialized()) {
+ _objc_fatal("too late to require raw isa");
+ return;
+ }
+ if (c->requiresRawIsa()) {
+ // fixme short circuit recursion?
+ return;
+ }
+
+ c->bits.setRequiresRawIsa();
+
+ if (PrintRawIsa) c->printRequiresRawIsa(inherited || c != cls);
+ });
}
/***********************************************************************
-* Locking: none
-* fixme assert realized to get superclass remapping?
+* Update custom RR and AWZ when a method changes its IMP
**********************************************************************/
-PRIVATE_EXTERN Class
-_class_getSuperclass(Class cls)
-{
- return (Class)getSuperclass(newcls(cls));
-}
+static void
+updateCustomRR_AWZ(Class cls, method_t *meth)
+{
+ // In almost all cases, IMP swizzling does not affect custom RR/AWZ bits.
+ // Custom RR/AWZ search will already find the method whether or not
+ // it is swizzled, so it does not transition from non-custom to custom.
+ //
+ // The only case where IMP swizzling can affect the RR/AWZ bits is
+ // when the swizzled method is one of the methods that is assumed to be
+ // non-custom. These special cases are listed in setInitialized().
+ // We look for such cases here.
+
+ if (isRRSelector(meth->name)) {
+ // already custom, nothing would change
+ if (classNSObject()->hasCustomRR()) return;
+
+ bool swizzlingNSObject = NO;
+ if (cls == classNSObject()) {
+ swizzlingNSObject = YES;
+ } else {
+ // Don't know the class.
+ // The only special case is class NSObject.
+ FOREACH_METHOD_LIST(mlist, classNSObject(), {
+ for (uint32_t i = 0; i < mlist->count; i++) {
+ if (meth == method_list_nth(mlist, i)) {
+ swizzlingNSObject = YES;
+ break;
+ }
+ }
+ if (swizzlingNSObject) break;
+ });
+ }
+ if (swizzlingNSObject) {
+ if (classNSObject()->isInitialized()) {
+ classNSObject()->setHasCustomRR();
+ } else {
+ // NSObject not yet +initialized, so custom RR has not yet
+ // been checked, and setInitialized() will not notice the
+ // swizzle.
+ ClassNSObjectRRSwizzled = YES;
+ }
+ }
+ }
+ else if (isAWZSelector(meth->name)) {
+ // already custom, nothing would change
+ if (classNSObject()->ISA()->hasCustomAWZ()) return;
-static class_t *
-getSuperclass(class_t *cls)
-{
- if (!cls) return NULL;
- return cls->superclass;
+ bool swizzlingNSObject = NO;
+ if (cls == classNSObject()->ISA()) {
+ swizzlingNSObject = YES;
+ } else {
+ // Don't know the class.
+ // The only special case is metaclass NSObject.
+ FOREACH_METHOD_LIST(mlist, classNSObject()->ISA(), {
+ for (uint32_t i = 0; i < mlist->count; i++) {
+ if (meth == method_list_nth(mlist, i)) {
+ swizzlingNSObject = YES;
+ break;
+ }
+ }
+ if (swizzlingNSObject) break;
+ });
+ }
+ if (swizzlingNSObject) {
+ if (classNSObject()->ISA()->isInitialized()) {
+ classNSObject()->ISA()->setHasCustomAWZ();
+ } else {
+ // NSObject not yet +initialized, so custom RR has not yet
+ // been checked, and setInitialized() will not notice the
+ // swizzle.
+ MetaclassNSObjectAWZSwizzled = YES;
+ }
+ }
+ }
}
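/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): the kind of
* IMP change updateCustomRR_AWZ() above is concerned with ("when a
* method changes its IMP"). Swizzling NSObject's -retain must either
* mark NSObject as custom RR or, if NSObject is not yet initialized,
* set ClassNSObjectRRSwizzled so setInitialized() notices later.
* swizzled_retain and SwizzleNSObjectRetain are hypothetical names.
**********************************************************************/
static IMP original_retain_imp = nil;

static id swizzled_retain(id self, SEL _cmd)
{
    // Bookkeeping would go here; then fall through to the original IMP.
    return ((id (*)(id, SEL))original_retain_imp)(self, _cmd);
}

static void SwizzleNSObjectRetain(void)
{
    Method m = class_getInstanceMethod(objc_getClass("NSObject"),
                                       sel_registerName("retain"));
    if (m) original_retain_imp = method_setImplementation(m, (IMP)swizzled_retain);
}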
/***********************************************************************
* class_getIvarLayout
* Called by the garbage collector.
-* The class must be NULL or already realized.
+* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
-class_getIvarLayout(Class cls_gen)
+class_getIvarLayout(Class cls)
{
- class_t *cls = newcls(cls_gen);
if (cls) return cls->data()->ro->ivarLayout;
- else return NULL;
+ else return nil;
}
/***********************************************************************
* class_getWeakIvarLayout
* Called by the garbage collector.
-* The class must be NULL or already realized.
+* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
-class_getWeakIvarLayout(Class cls_gen)
+class_getWeakIvarLayout(Class cls)
{
- class_t *cls = newcls(cls_gen);
if (cls) return cls->data()->ro->weakIvarLayout;
- else return NULL;
+ else return nil;
}
/***********************************************************************
* class_setIvarLayout
* Changes the class's GC scan layout.
-* NULL layout means no unscanned ivars
+* nil layout means no unscanned ivars
* The class must be under construction.
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
-class_setIvarLayout(Class cls_gen, const uint8_t *layout)
+class_setIvarLayout(Class cls, const uint8_t *layout)
{
- class_t *cls = newcls(cls_gen);
if (!cls) return;
rwlock_write(&runtimeLock);
// allowed, there would be a race below (us vs. concurrent GC scan)
if (!(cls->data()->flags & RW_CONSTRUCTING)) {
_objc_inform("*** Can't set ivar layout for already-registered "
- "class '%s'", getName(cls));
+ "class '%s'", cls->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
// SPI: Instance-specific object layout.
void
-_class_setIvarLayoutAccessor(Class cls_gen, const uint8_t* (*accessor) (id object)) {
- class_t *cls = newcls(cls_gen);
+_class_setIvarLayoutAccessor(Class cls, const uint8_t* (*accessor) (id object)) {
if (!cls) return;
rwlock_write(&runtimeLock);
// FIXME: this really isn't safe to free if there are instances of this class already.
if (!(cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT)) try_free(ro_w->ivarLayout);
ro_w->ivarLayout = (uint8_t *)accessor;
- changeInfo(cls, RW_HAS_INSTANCE_SPECIFIC_LAYOUT, 0);
+ cls->setInfo(RW_HAS_INSTANCE_SPECIFIC_LAYOUT);
rwlock_unlock_write(&runtimeLock);
}
const uint8_t *
-_object_getIvarLayout(Class cls_gen, id object) {
- class_t *cls = newcls(cls_gen);
+_object_getIvarLayout(Class cls, id object)
+{
if (cls) {
const uint8_t* layout = cls->data()->ro->ivarLayout;
if (cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT) {
}
return layout;
}
- return NULL;
+ return nil;
}
/***********************************************************************
* class_setWeakIvarLayout
* Changes the class's GC weak layout.
-* NULL layout means no weak ivars
+* nil layout means no weak ivars
* The class must be under construction.
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
-class_setWeakIvarLayout(Class cls_gen, const uint8_t *layout)
+class_setWeakIvarLayout(Class cls, const uint8_t *layout)
{
- class_t *cls = newcls(cls_gen);
if (!cls) return;
rwlock_write(&runtimeLock);
// allowed, there would be a race below (us vs. concurrent GC scan)
if (!(cls->data()->flags & RW_CONSTRUCTING)) {
_objc_inform("*** Can't set weak ivar layout for already-registered "
- "class '%s'", getName(cls));
+ "class '%s'", cls->nameForLogging());
rwlock_unlock_write(&runtimeLock);
return;
}
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
-PRIVATE_EXTERN Ivar
+Ivar
_class_getVariable(Class cls, const char *name, Class *memberOf)
{
rwlock_read(&runtimeLock);
- for ( ; cls != Nil; cls = class_getSuperclass(cls)) {
- ivar_t *ivar = getIvar(newcls(cls), name);
+ for ( ; cls; cls = cls->superclass) {
+ ivar_t *ivar = getIvar(cls, name);
if (ivar) {
rwlock_unlock_read(&runtimeLock);
if (memberOf) *memberOf = cls;
- return (Ivar)ivar;
+ return ivar;
}
}
rwlock_unlock_read(&runtimeLock);
- return NULL;
+ return nil;
}
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
-BOOL class_conformsToProtocol(Class cls_gen, Protocol *proto_gen)
+BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
{
- class_t *cls = newcls(cls_gen);
protocol_t *proto = newprotocol(proto_gen);
const protocol_list_t **plist;
unsigned int i;
BOOL result = NO;
- if (!cls_gen) return NO;
+ if (!cls) return NO;
if (!proto_gen) return NO;
rwlock_read(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
for (plist = cls->data()->protocols; plist && *plist; plist++) {
for (i = 0; i < (*plist)->count; i++) {
protocol_t *p = remapProtocol((*plist)->list[i]);
- if (p == proto || _protocol_conformsToProtocol_nolock(p, proto)) {
+ if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
result = YES;
goto done;
}
}
-/***********************************************************************
+/**********************************************************************
* addMethod
* fixme
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static IMP
-addMethod(class_t *cls, SEL name, IMP imp, const char *types, BOOL replace)
+addMethod(Class cls, SEL name, IMP imp, const char *types, BOOL replace)
{
- IMP result = NULL;
+ IMP result = nil;
rwlock_assert_writing(&runtimeLock);
assert(types);
- assert(isRealized(cls));
+ assert(cls->isRealized());
method_t *m;
if ((m = getMethodNoSuper_nolock(cls, name))) {
newlist->first.imp = (IMP)&_objc_ignored_method;
}
- BOOL vtablesAffected = NO;
- attachMethodLists(cls, &newlist, 1, NO, &vtablesAffected);
- flushCaches(cls);
- if (vtablesAffected) flushVtables(cls);
+ attachMethodLists(cls, &newlist, 1, NO, NO, YES);
- result = NULL;
+ result = nil;
}
return result;
if (!cls) return NO;
rwlock_write(&runtimeLock);
- IMP old = addMethod(newcls(cls), name, imp, types ?: "", NO);
+ IMP old = addMethod(cls, name, imp, types ?: "", NO);
rwlock_unlock_write(&runtimeLock);
return old ? NO : YES;
}
IMP
class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
{
- if (!cls) return NULL;
+ if (!cls) return nil;
rwlock_write(&runtimeLock);
- IMP old = addMethod(newcls(cls), name, imp, types ?: "", YES);
+ IMP old = addMethod(cls, name, imp, types ?: "", YES);
rwlock_unlock_write(&runtimeLock);
return old;
}
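/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): client-side
* use of class_addMethod() and class_replaceMethod(), which both funnel
* into addMethod() above. hypothetical_description is a hypothetical
* replacement IMP; "@@:" is the type encoding for a method returning id
* and taking the implicit (id, SEL) arguments.
**********************************************************************/
static id hypothetical_description(id self, SEL _cmd __unused)
{
    // A stand-in implementation; a real override would build a string.
    return self;
}

static void InstallDescriptionOverride(Class cls)
{
    SEL sel = sel_registerName("description");

    // class_addMethod() refuses to stomp an existing implementation ...
    if (!class_addMethod(cls, sel, (IMP)hypothetical_description, "@@:")) {
        // ... while class_replaceMethod() overwrites it and hands back
        // the previous IMP (nil if the method was only inherited).
        IMP previous = class_replaceMethod(cls, sel,
                                           (IMP)hypothetical_description, "@@:");
        (void)previous;
    }
}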
* Locking: acquires runtimeLock
**********************************************************************/
BOOL
-class_addIvar(Class cls_gen, const char *name, size_t size,
+class_addIvar(Class cls, const char *name, size_t size,
uint8_t alignment, const char *type)
{
- class_t *cls = newcls(cls_gen);
-
if (!cls) return NO;
if (!type) type = "";
- if (name && 0 == strcmp(name, "")) name = NULL;
+ if (name && 0 == strcmp(name, "")) name = nil;
rwlock_write(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
// No class variables
- if (isMetaClass(cls)) {
+ if (cls->isMetaClass()) {
rwlock_unlock_write(&runtimeLock);
return NO;
}
newlist->entsize = (uint32_t)sizeof(ivar_t);
}
- uint32_t offset = unalignedInstanceSize(cls);
+ uint32_t offset = cls->unalignedInstanceSize();
uint32_t alignMask = (1<<alignment)-1;
offset = (offset + alignMask) & ~alignMask;
ivar_t *ivar = ivar_list_nth(newlist, newlist->count++);
- ivar->offset = (uintptr_t *)_malloc_internal(sizeof(*ivar->offset));
+#if __x86_64__
+ // Deliberately over-allocate the ivar offset variable.
+ // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
+ ivar->offset = (int32_t *)(int64_t *)_calloc_internal(sizeof(int64_t), 1);
+#else
+ ivar->offset = (int32_t *)_malloc_internal(sizeof(int32_t));
+#endif
*ivar->offset = offset;
- ivar->name = name ? _strdup_internal(name) : NULL;
+ ivar->name = name ? _strdup_internal(name) : nil;
ivar->type = _strdup_internal(type);
- ivar->alignment = alignment;
+ ivar->alignment_raw = alignment;
ivar->size = (uint32_t)size;
ro_w->ivars = newlist;
- ro_w->instanceSize = (uint32_t)(offset + size);
+ cls->setInstanceSize((uint32_t)(offset + size));
// Ivar layout updated in registerClass.
* Adds a protocol to a class.
* Locking: acquires runtimeLock
**********************************************************************/
-BOOL class_addProtocol(Class cls_gen, Protocol *protocol_gen)
+BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
{
- class_t *cls = newcls(cls_gen);
protocol_t *protocol = newprotocol(protocol_gen);
protocol_list_t *plist;
const protocol_list_t **plistp;
if (!cls) return NO;
- if (class_conformsToProtocol(cls_gen, protocol_gen)) return NO;
+ if (class_conformsToProtocol(cls, protocol_gen)) return NO;
rwlock_write(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
// fixme optimize
plist = (protocol_list_t *)
_realloc_internal(cls->data()->protocols,
(count+2) * sizeof(protocol_list_t *));
cls->data()->protocols[count] = plist;
- cls->data()->protocols[count+1] = NULL;
+ cls->data()->protocols[count+1] = nil;
// fixme metaclass?
* Locking: acquires runtimeLock
**********************************************************************/
static BOOL
-_class_addProperty(Class cls_gen, const char *name,
+_class_addProperty(Class cls, const char *name,
const objc_property_attribute_t *attrs, unsigned int count,
BOOL replace)
{
- class_t *cls = newcls(cls_gen);
chained_property_list *plist;
if (!cls) return NO;
if (!name) return NO;
- property_t *prop = class_getProperty(cls_gen, name);
+ property_t *prop = class_getProperty(cls, name);
if (prop && !replace) {
// already exists, refuse to replace
return NO;
else {
rwlock_write(&runtimeLock);
- assert(isRealized(cls));
+ assert(cls->isRealized());
plist = (chained_property_list *)
_malloc_internal(sizeof(*plist) + sizeof(plist->list[0]));
}
BOOL
-class_addProperty(Class cls_gen, const char *name,
+class_addProperty(Class cls, const char *name,
const objc_property_attribute_t *attrs, unsigned int n)
{
- return _class_addProperty(cls_gen, name, attrs, n, NO);
+ return _class_addProperty(cls, name, attrs, n, NO);
}
void
-class_replaceProperty(Class cls_gen, const char *name,
+class_replaceProperty(Class cls, const char *name,
const objc_property_attribute_t *attrs, unsigned int n)
{
- _class_addProperty(cls_gen, name, attrs, n, YES);
+ _class_addProperty(cls, name, attrs, n, YES);
}
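/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): declaring a
* property at runtime through class_addProperty(), which calls
* _class_addProperty() above with replace=NO. The property name "value",
* backing ivar "_value", and attribute list are hypothetical; attribute
* strings follow the property type-encoding format.
**********************************************************************/
static void AddValueProperty(Class cls)
{
    objc_property_attribute_t attrs[] = {
        { "T", "i" },       // type encoding: int
        { "N", "" },        // nonatomic
        { "V", "_value" },  // name of the backing ivar
    };

    if (!class_addProperty(cls, "value", attrs, 3)) {
        // Property already declared: overwrite its attribute list instead.
        class_replaceProperty(cls, "value", attrs, 3);
    }
}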
* Look up a class by name, and realize it.
* Locking: acquires runtimeLock
**********************************************************************/
-PRIVATE_EXTERN id
+Class
look_up_class(const char *name,
BOOL includeUnconnected __attribute__((unused)),
BOOL includeClassHandler __attribute__((unused)))
if (!name) return nil;
rwlock_read(&runtimeLock);
- class_t *result = getClass(name);
- BOOL unrealized = result && !isRealized(result);
+ Class result = getClass(name);
+ BOOL unrealized = result && !result->isRealized();
rwlock_unlock_read(&runtimeLock);
if (unrealized) {
rwlock_write(&runtimeLock);
realizeClass(result);
rwlock_unlock_write(&runtimeLock);
}
- return (id)result;
+ return result;
}
* Locking: acquires runtimeLock
**********************************************************************/
Class
-objc_duplicateClass(Class original_gen, const char *name,
+objc_duplicateClass(Class original, const char *name,
size_t extraBytes)
{
- class_t *original = newcls(original_gen);
- class_t *duplicate;
+ Class duplicate;
rwlock_write(&runtimeLock);
- assert(isRealized(original));
- assert(!isMetaClass(original));
-
- duplicate = (class_t *)
- _calloc_class(alignedInstanceSize(original->isa) + extraBytes);
- if (unalignedInstanceSize(original->isa) < sizeof(class_t)) {
- _objc_inform("busted! %s\n", original->data()->ro->name);
- }
+ assert(original->isRealized());
+ assert(!original->isMetaClass());
+ duplicate = alloc_class_for_subclass(original, extraBytes);
- duplicate->isa = original->isa;
+ duplicate->initClassIsa(original->ISA());
duplicate->superclass = original->superclass;
- duplicate->cache = (Cache)&_objc_empty_cache;
- duplicate->vtable = &_objc_empty_vtable;
- duplicate->setData((class_rw_t *)_calloc_internal(sizeof(*original->data()), 1));
- duplicate->data()->flags = (original->data()->flags | RW_COPIED_RO) & ~RW_SPECIALIZED_VTABLE;
- duplicate->data()->version = original->data()->version;
- duplicate->data()->firstSubclass = NULL;
- duplicate->data()->nextSiblingClass = NULL;
+ duplicate->cache.setEmpty();
- duplicate->data()->ro = (class_ro_t *)
+ class_rw_t *rw = (class_rw_t *)_calloc_internal(sizeof(*original->data()), 1);
+ rw->flags = (original->data()->flags | RW_COPIED_RO | RW_REALIZING);
+ rw->version = original->data()->version;
+ rw->firstSubclass = nil;
+ rw->nextSiblingClass = nil;
+
+ duplicate->bits = original->bits;
+ duplicate->setData(rw);
+
+ rw->ro = (class_ro_t *)
_memdup_internal(original->data()->ro, sizeof(*original->data()->ro));
- *(char **)&duplicate->data()->ro->name = _strdup_internal(name);
+ *(char **)&rw->ro->name = _strdup_internal(name);
- if (original->data()->methods) {
- duplicate->data()->methods = (method_list_t **)
- _memdup_internal(original->data()->methods,
- malloc_size(original->data()->methods));
+ if (original->data()->flags & RW_METHOD_ARRAY) {
+ rw->method_lists = (method_list_t **)
+ _memdup_internal(original->data()->method_lists,
+ malloc_size(original->data()->method_lists));
method_list_t **mlistp;
- for (mlistp = duplicate->data()->methods; *mlistp; mlistp++) {
+ for (mlistp = rw->method_lists; *mlistp; mlistp++) {
*mlistp = (method_list_t *)
_memdup_internal(*mlistp, method_list_size(*mlistp));
}
+ } else {
+ if (original->data()->method_list) {
+ rw->method_list = (method_list_t *)
+ _memdup_internal(original->data()->method_list,
+ method_list_size(original->data()->method_list));
+ }
}
// fixme dies when categories are added to the base
- duplicate->data()->properties = original->data()->properties;
- duplicate->data()->protocols = original->data()->protocols;
+ rw->properties = original->data()->properties;
+ rw->protocols = original->data()->protocols;
if (duplicate->superclass) {
addSubclass(duplicate->superclass, duplicate);
addNamedClass(duplicate, duplicate->data()->ro->name);
addRealizedClass(duplicate);
- // no: duplicate->isa == original->isa
- // addRealizedMetaclass(duplicate->isa);
+ // no: duplicate->ISA == original->ISA
+ // addRealizedMetaclass(duplicate->ISA);
if (PrintConnecting) {
_objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
- name, original->data()->ro->name,
- duplicate, duplicate->data()->ro);
+ name, original->nameForLogging(),
+ (void*)duplicate, duplicate->data()->ro);
}
+ duplicate->clearInfo(RW_REALIZING);
+
rwlock_unlock_write(&runtimeLock);
- return (Class)duplicate;
+ return duplicate;
}
/***********************************************************************
// &UnsetLayout is the default ivar layout during class construction
static const uint8_t UnsetLayout = 0;
-static void objc_initializeClassPair_internal(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
+static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
{
rwlock_assert_writing(&runtimeLock);
- class_t *superclass = newcls(superclass_gen);
- class_t *cls = newcls(cls_gen);
- class_t *meta = newcls(meta_gen);
class_ro_t *cls_ro_w, *meta_ro_w;
+
+ cls->cache.setEmpty();
+ meta->cache.setEmpty();
cls->setData((class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1));
meta->setData((class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1));
meta->data()->ro = meta_ro_w;
// Set basic info
- cls->cache = (Cache)&_objc_empty_cache;
- meta->cache = (Cache)&_objc_empty_cache;
- cls->vtable = &_objc_empty_vtable;
- meta->vtable = &_objc_empty_vtable;
- cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
- meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
+ cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
+ meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
cls->data()->version = 0;
meta->data()->version = 7;
meta_ro_w->flags |= RO_ROOT;
}
if (superclass) {
- cls_ro_w->instanceStart = unalignedInstanceSize(superclass);
- meta_ro_w->instanceStart = unalignedInstanceSize(superclass->isa);
- cls_ro_w->instanceSize = cls_ro_w->instanceStart;
- meta_ro_w->instanceSize = meta_ro_w->instanceStart;
+ cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
+ meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
+ cls->setInstanceSize(cls_ro_w->instanceStart);
+ meta->setInstanceSize(meta_ro_w->instanceStart);
} else {
cls_ro_w->instanceStart = 0;
- meta_ro_w->instanceStart = (uint32_t)sizeof(class_t);
- cls_ro_w->instanceSize = (uint32_t)sizeof(id); // just an isa
- meta_ro_w->instanceSize = meta_ro_w->instanceStart;
+ meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
+ cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
+ meta->setInstanceSize(meta_ro_w->instanceStart);
}
cls_ro_w->name = _strdup_internal(name);
cls_ro_w->weakIvarLayout = &UnsetLayout;
// Connect to superclasses and metaclasses
- cls->isa = meta;
+ cls->initClassIsa(meta);
if (superclass) {
- meta->isa = superclass->isa->isa;
+ meta->initClassIsa(superclass->ISA()->ISA());
cls->superclass = superclass;
- meta->superclass = superclass->isa;
+ meta->superclass = superclass->ISA();
addSubclass(superclass, cls);
- addSubclass(superclass->isa, meta);
+ addSubclass(superclass->ISA(), meta);
} else {
- meta->isa = meta;
+ meta->initClassIsa(meta);
cls->superclass = Nil;
meta->superclass = cls;
addSubclass(cls, meta);
}
}
+
/***********************************************************************
-* objc_initializeClassPair
+* verifySuperclass
+* Sanity-check the superclass provided to
+* objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
**********************************************************************/
-Class objc_initializeClassPair(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
+bool
+verifySuperclass(Class superclass, bool rootOK)
{
- class_t *superclass = newcls(superclass_gen);
-
- rwlock_write(&runtimeLock);
-
- //
- // Common superclass integrity checks with objc_allocateClassPair
- //
- if (getClass(name)) {
- rwlock_unlock_write(&runtimeLock);
- return Nil;
+ if (!superclass) {
+ // Superclass does not exist.
+ // If subclass may be a root class, this is OK.
+ // If subclass must not be a root class, this is bad.
+ return rootOK;
}
- // fixme reserve class against simultaneous allocation
- if (superclass) assert(isRealized(superclass));
+ // Superclass must be realized.
+ if (! superclass->isRealized()) return false;
+
+ // Superclass must not be under construction.
+ if (superclass->data()->flags & RW_CONSTRUCTING) return false;
+
+ return true;
+}
+
+
+/***********************************************************************
+* objc_initializeClassPair
+**********************************************************************/
+Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
+{
+ rwlock_write(&runtimeLock);
- if (superclass && superclass->data()->flags & RW_CONSTRUCTING) {
- // Can't make subclass of an in-construction class
+ // Fail if the class name is in use.
+ // Fail if the superclass isn't kosher.
+ if (getClass(name) || !verifySuperclass(superclass, true/*rootOK*/)) {
rwlock_unlock_write(&runtimeLock);
- return Nil;
+ return nil;
}
-
- // just initialize what was supplied
- objc_initializeClassPair_internal(superclass_gen, name, cls_gen, meta_gen);
+ objc_initializeClassPair_internal(superclass, name, cls, meta);
rwlock_unlock_write(&runtimeLock);
- return cls_gen;
+ return cls;
}
+
/***********************************************************************
* objc_allocateClassPair
* fixme
* Locking: acquires runtimeLock
**********************************************************************/
-Class objc_allocateClassPair(Class superclass_gen, const char *name,
+Class objc_allocateClassPair(Class superclass, const char *name,
size_t extraBytes)
{
- class_t *superclass = newcls(superclass_gen);
Class cls, meta;
rwlock_write(&runtimeLock);
- //
- // Common superclass integrity checks with objc_initializeClassPair
- //
- if (getClass(name)) {
- rwlock_unlock_write(&runtimeLock);
- return Nil;
- }
- // fixme reserve class against simmultaneous allocation
-
- if (superclass) assert(isRealized(superclass));
-
- if (superclass && superclass->data()->flags & RW_CONSTRUCTING) {
- // Can't make subclass of an in-construction class
+ // Fail if the class name is in use.
+ // Fail if the superclass isn't kosher.
+ if (getClass(name) || !verifySuperclass(superclass, true/*rootOK*/)) {
rwlock_unlock_write(&runtimeLock);
- return Nil;
+ return nil;
}
-
-
// Allocate new classes.
- size_t size = sizeof(class_t);
- size_t metasize = sizeof(class_t);
- if (superclass) {
- size = alignedInstanceSize(superclass->isa);
- metasize = alignedInstanceSize(superclass->isa->isa);
- }
- cls = _calloc_class(size + extraBytes);
- meta = _calloc_class(metasize + extraBytes);
+ cls = alloc_class_for_subclass(superclass, extraBytes);
+ meta = alloc_class_for_subclass(superclass, extraBytes);
- objc_initializeClassPair_internal(superclass_gen, name, cls, meta);
+ // fixme mangle the name if it looks swift-y?
+ objc_initializeClassPair_internal(superclass, name, cls, meta);
rwlock_unlock_write(&runtimeLock);
- return (Class)cls;
+ return cls;
}
* fixme
* Locking: acquires runtimeLock
**********************************************************************/
-void objc_registerClassPair(Class cls_gen)
+void objc_registerClassPair(Class cls)
{
- class_t *cls = newcls(cls_gen);
-
rwlock_write(&runtimeLock);
if ((cls->data()->flags & RW_CONSTRUCTED) ||
- (cls->isa->data()->flags & RW_CONSTRUCTED))
+ (cls->ISA()->data()->flags & RW_CONSTRUCTED))
{
_objc_inform("objc_registerClassPair: class '%s' was already "
"registered!", cls->data()->ro->name);
}
if (!(cls->data()->flags & RW_CONSTRUCTING) ||
- !(cls->isa->data()->flags & RW_CONSTRUCTING))
+ !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
{
_objc_inform("objc_registerClassPair: class '%s' was not "
"allocated with objc_allocateClassPair!",
// Build ivar layouts
if (UseGC) {
- class_t *supercls = getSuperclass(cls);
+ Class supercls = cls->superclass;
class_ro_t *ro_w = (class_ro_t *)cls->data()->ro;
if (ro_w->ivarLayout != &UnsetLayout) {
}
else if (!supercls) {
// Root class. Scan conservatively (should be isa ivar only).
- ro_w->ivarLayout = NULL;
+ ro_w->ivarLayout = nil;
}
- else if (ro_w->ivars == NULL) {
+ else if (ro_w->ivars == nil) {
// No local ivars. Use superclass's layouts.
ro_w->ivarLayout =
_ustrdup_internal(supercls->data()->ro->ivarLayout);
// Has local ivars. Build layouts based on superclass.
layout_bitmap bitmap =
layout_bitmap_create(supercls->data()->ro->ivarLayout,
- unalignedInstanceSize(supercls),
- unalignedInstanceSize(cls), NO);
+ supercls->unalignedInstanceSize(),
+ cls->unalignedInstanceSize(), NO);
uint32_t i;
for (i = 0; i < ro_w->ivars->count; i++) {
ivar_t *ivar = ivar_list_nth(ro_w->ivars, i);
}
else if (!supercls) {
// Root class. No weak ivars (should be isa ivar only).
- ro_w->weakIvarLayout = NULL;
+ ro_w->weakIvarLayout = nil;
}
- else if (ro_w->ivars == NULL) {
+ else if (ro_w->ivars == nil) {
// No local ivars. Use superclass's layout.
ro_w->weakIvarLayout =
_ustrdup_internal(supercls->data()->ro->weakIvarLayout);
}
// Clear "under construction" bit, set "done constructing" bit
- cls->data()->flags &= ~RW_CONSTRUCTING;
- cls->isa->data()->flags &= ~RW_CONSTRUCTING;
- cls->data()->flags |= RW_CONSTRUCTED;
- cls->isa->data()->flags |= RW_CONSTRUCTED;
+ cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
+ cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
- // Add to realized and uninitialized classes
+ // Add to named and realized classes
addNamedClass(cls, cls->data()->ro->name);
addRealizedClass(cls);
- addRealizedMetaclass(cls->isa);
- addUninitializedClass(cls, cls->isa);
+ addRealizedMetaclass(cls->ISA());
+
+ rwlock_unlock_write(&runtimeLock);
+}
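/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): the
* allocate -> customize -> register sequence implemented above. Ivars
* can only be added while the class pair is still RW_CONSTRUCTING, i.e.
* after objc_allocateClassPair() and before objc_registerClassPair().
* The class name "MyDynamicClass" and ivar "_value" are hypothetical.
**********************************************************************/
static Class BuildDynamicClass(void)
{
    Class superclass = objc_getClass("NSObject");
    Class cls = objc_allocateClassPair(superclass, "MyDynamicClass", 0);
    if (!cls) return Nil;   // name already in use, or superclass not kosher

    // The alignment argument is log2 of the byte alignment: 2 for a 4-byte int.
    class_addIvar(cls, "_value", sizeof(int), 2, "i");

    // Clears RW_CONSTRUCTING, sets RW_CONSTRUCTED, and adds the class
    // to the named and realized class tables.
    objc_registerClassPair(cls);
    return cls;
}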
+
+
+/***********************************************************************
+* objc_readClassPair()
+* Read a class and metaclass as written by a compiler.
+* Assumes the class and metaclass are not referenced by other things
+* that might need to be fixed up (such as categories and subclasses).
+* Does not call +load.
+* Returns the class pointer, or nil.
+*
+* Locking: runtimeLock acquired by map_images
+**********************************************************************/
+Class objc_readClassPair(Class bits, const struct objc_image_info *info)
+{
+ rwlock_write(&runtimeLock);
+
+ // No info bits are significant yet.
+ (void)info;
+ // Fail if the class name is in use.
+ // Fail if the superclass isn't kosher.
+ const char *name = bits->mangledName();
+ bool rootOK = bits->data()->flags & RO_ROOT;
+ if (getClass(name) || !verifySuperclass(bits->superclass, rootOK)){
+ rwlock_unlock_write(&runtimeLock);
+ return nil;
+ }
+
+ Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
+ if (cls != bits) {
+ // This function isn't allowed to remap anything.
+ _objc_fatal("objc_readClassPair for class %s changed %p to %p",
+ cls->nameForLogging(), bits, cls);
+ }
+ realizeClass(cls);
+
rwlock_unlock_write(&runtimeLock);
+
+ return cls;
}
-static void unload_class(class_t *cls, BOOL isMeta)
+/***********************************************************************
+* detach_class
+* Disconnect a class from other data structures.
+* Exception: does not remove the class from the +load list
+* Call this before free_class.
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+static void detach_class(Class cls, BOOL isMeta)
{
- // Detach class from various lists
+ rwlock_assert_writing(&runtimeLock);
// categories not yet attached to this class
category_list *cats;
cats = unattachedCategoriesForClass(cls);
if (cats) free(cats);
+ // superclass's subclass list
+ if (cls->isRealized()) {
+ Class supercls = cls->superclass;
+ if (supercls) {
+ removeSubclass(supercls, cls);
+ }
+ }
+
// class tables and +load queue
if (!isMeta) {
- removeNamedClass(cls, getName(cls));
+ removeNamedClass(cls, cls->mangledName());
removeRealizedClass(cls);
- removeUninitializedClass(cls);
} else {
removeRealizedMetaclass(cls);
}
+}
- // superclass's subclass list
- if (isRealized(cls)) {
- class_t *supercls = getSuperclass(cls);
- if (supercls) removeSubclass(supercls, cls);
- }
+/***********************************************************************
+* free_class
+* Frees a class's data structures.
+* Call this after detach_class.
+* Locking: runtimeLock must be held by the caller
+**********************************************************************/
+static void free_class(Class cls)
+{
+ rwlock_assert_writing(&runtimeLock);
- // Dispose the class's own data structures
+ if (! cls->isRealized()) return;
- if (isRealized(cls)) {
- uint32_t i;
+ uint32_t i;
+
+ if (cls->cache.canBeFreed()) {
+ free(cls->cache.buckets());
+ }
- // Dereferences the cache contents; do this before freeing methods
- if (cls->cache != (Cache)&_objc_empty_cache) _cache_free(cls->cache);
-
- if (cls->data()->methods) {
- method_list_t **mlistp;
- for (mlistp = cls->data()->methods; *mlistp; mlistp++) {
- for (i = 0; i < (**mlistp).count; i++) {
- method_t *m = method_list_nth(*mlistp, i);
- try_free(m->types);
- }
- try_free(*mlistp);
- }
- try_free(cls->data()->methods);
+ FOREACH_METHOD_LIST(mlist, cls, {
+ for (i = 0; i < mlist->count; i++) {
+ method_t *m = method_list_nth(mlist, i);
+ try_free(m->types);
}
-
- const ivar_list_t *ilist = cls->data()->ro->ivars;
- if (ilist) {
- for (i = 0; i < ilist->count; i++) {
- const ivar_t *ivar = ivar_list_nth(ilist, i);
- try_free(ivar->offset);
- try_free(ivar->name);
- try_free(ivar->type);
- }
- try_free(ilist);
+ try_free(mlist);
+ });
+ if (cls->data()->flags & RW_METHOD_ARRAY) {
+ try_free(cls->data()->method_lists);
+ }
+
+ const ivar_list_t *ilist = cls->data()->ro->ivars;
+ if (ilist) {
+ for (i = 0; i < ilist->count; i++) {
+ const ivar_t *ivar = ivar_list_nth(ilist, i);
+ try_free(ivar->offset);
+ try_free(ivar->name);
+ try_free(ivar->type);
}
-
- const protocol_list_t **plistp;
- for (plistp = cls->data()->protocols; plistp && *plistp; plistp++) {
- try_free(*plistp);
+ try_free(ilist);
+ }
+
+ const protocol_list_t **plistp;
+ for (plistp = cls->data()->protocols; plistp && *plistp; plistp++) {
+ try_free(*plistp);
+ }
+ try_free(cls->data()->protocols);
+
+ const chained_property_list *proplist = cls->data()->properties;
+ while (proplist) {
+ for (i = 0; i < proplist->count; i++) {
+ const property_t *prop = proplist->list+i;
+ try_free(prop->name);
+ try_free(prop->attributes);
}
- try_free(cls->data()->protocols);
-
- const chained_property_list *proplist = cls->data()->properties;
- while (proplist) {
- for (uint32_t i = 0; i < proplist->count; i++) {
- const property_t *prop = proplist->list+i;
- try_free(prop->name);
- try_free(prop->attributes);
- }
- {
- const chained_property_list *temp = proplist;
- proplist = proplist->next;
- try_free(temp);
- }
+ {
+ const chained_property_list *temp = proplist;
+ proplist = proplist->next;
+ try_free(temp);
}
-
-
- if (cls->vtable != &_objc_empty_vtable &&
- cls->data()->flags & RW_SPECIALIZED_VTABLE) try_free(cls->vtable);
- try_free(cls->data()->ro->ivarLayout);
- try_free(cls->data()->ro->weakIvarLayout);
- try_free(cls->data()->ro->name);
- try_free(cls->data()->ro);
- try_free(cls->data());
- try_free(cls);
}
+
+ try_free(cls->data()->ro->ivarLayout);
+ try_free(cls->data()->ro->weakIvarLayout);
+ try_free(cls->data()->ro->name);
+ try_free(cls->data()->ro);
+ try_free(cls->data());
+ try_free(cls);
}
-void objc_disposeClassPair(Class cls_gen)
-{
- class_t *cls = newcls(cls_gen);
+void objc_disposeClassPair(Class cls)
+{
rwlock_write(&runtimeLock);
if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
- !(cls->isa->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
+ !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
{
// class not allocated with objc_allocateClassPair
// disposing still-unregistered class is OK!
return;
}
- if (isMetaClass(cls)) {
+ if (cls->isMetaClass()) {
_objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
"not a class!", cls->data()->ro->name);
rwlock_unlock_write(&runtimeLock);
if (cls->data()->firstSubclass) {
_objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
"including '%s'!", cls->data()->ro->name,
- getName(cls->data()->firstSubclass));
+ cls->data()->firstSubclass->nameForLogging());
}
- if (cls->isa->data()->firstSubclass) {
+ if (cls->ISA()->data()->firstSubclass) {
_objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
"including '%s'!", cls->data()->ro->name,
- getName(cls->isa->data()->firstSubclass));
+ cls->ISA()->data()->firstSubclass->nameForLogging());
}
// don't remove_class_from_loadable_list()
// - it's not there and we don't have the lock
- unload_class(cls->isa, YES);
- unload_class(cls, NO);
+ detach_class(cls->ISA(), YES);
+ detach_class(cls, NO);
+ free_class(cls->ISA());
+ free_class(cls);
rwlock_unlock_write(&runtimeLock);
}
+/***********************************************************************
+* objc_constructInstance
+* Creates an instance of `cls` at the location pointed to by `bytes`.
+* `bytes` must point to at least class_getInstanceSize(cls) bytes of
+* well-aligned zero-filled memory.
+* The new object's isa is set. Any C++ constructors are called.
+* Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
+* nil, or if C++ constructors fail.
+* Note: class_createInstance() and class_createInstances() preflight this.
+**********************************************************************/
+id
+objc_constructInstance(Class cls, void *bytes)
+{
+ if (!cls || !bytes) return nil;
+
+ id obj = (id)bytes;
+
+ // Read class's info bits all at once for performance
+ bool hasCxxCtor = cls->hasCxxCtor();
+ bool hasCxxDtor = cls->hasCxxDtor();
+ bool fast = cls->canAllocIndexed();
+
+ if (!UseGC && fast) {
+ obj->initInstanceIsa(cls, hasCxxDtor);
+ } else {
+ obj->initIsa(cls);
+ }
+
+ if (hasCxxCtor) {
+ return object_cxxConstructFromClass(obj, cls);
+ } else {
+ return obj;
+ }
+}
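/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): placing an
* object in caller-managed memory with objc_constructInstance() and
* tearing it down with objc_destructInstance(). Per the contract above,
* the buffer must be zero-filled, well-aligned, and at least
* class_getInstanceSize(cls) bytes.
**********************************************************************/
static void ConstructInPlaceExample(Class cls)
{
    size_t size = class_getInstanceSize(cls);
    void *bytes = calloc(1, size);      // zero-filled and malloc-aligned
    if (!bytes) return;

    id obj = objc_constructInstance(cls, bytes);
    if (obj) {
        // ... use obj ...
        objc_destructInstance(obj);     // C++ dtors, associations, weak refs
    }
    free(bytes);
}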
+
+
/***********************************************************************
* class_createInstance
* fixme
* Locking: none
**********************************************************************/
-static id
-_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
- __attribute__((always_inline));
-static id
+static __attribute__((always_inline))
+id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
if (!cls) return nil;
- assert(isRealized(newcls(cls)));
+ assert(cls->isRealized());
- size_t size = alignedInstanceSize(newcls(cls)) + extraBytes;
+ // Read class's info bits all at once for performance
+ bool hasCxxCtor = cls->hasCxxCtor();
+ bool hasCxxDtor = cls->hasCxxDtor();
+ bool fast = cls->canAllocIndexed();
- // CF requires all object be at least 16 bytes.
- if (size < 16) size = 16;
+ size_t size = cls->instanceSize(extraBytes);
id obj;
+ if (!UseGC && !zone && fast) {
+ obj = (id)calloc(1, size);
+ if (!obj) return nil;
+ obj->initInstanceIsa(cls, hasCxxDtor);
+ }
+ else {
#if SUPPORT_GC
- if (UseGC) {
- obj = (id)auto_zone_allocate_object(gc_zone, size,
- AUTO_OBJECT_SCANNED, 0, 1);
- } else
+ if (UseGC) {
+ obj = (id)auto_zone_allocate_object(gc_zone, size,
+ AUTO_OBJECT_SCANNED, 0, 1);
+ } else
#endif
- if (zone) {
- obj = (id)malloc_zone_calloc ((malloc_zone_t *)zone, 1, size);
+ if (zone) {
+ obj = (id)malloc_zone_calloc ((malloc_zone_t *)zone, 1, size);
} else {
- obj = (id)calloc(1, size);
- }
- if (!obj) return nil;
+ obj = (id)calloc(1, size);
+ }
+ if (!obj) return nil;
- obj->isa = cls; // need not be object_setClass
+ // Use non-indexed isa on the assumption that they might be
+ // doing something weird with the zone or RR.
+ obj->initIsa(cls);
+ }
- if (_class_hasCxxStructors(cls)) {
- obj = _objc_constructOrFree(cls, obj);
+ if (hasCxxCtor) {
+ obj = _objc_constructOrFree(obj, cls);
}
return obj;
id
class_createInstance(Class cls, size_t extraBytes)
{
- return _class_createInstanceFromZone(cls, extraBytes, NULL);
+ return _class_createInstanceFromZone(cls, extraBytes, nil);
}
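/***********************************************************************
* Illustrative sketch (editorial; not part of this patch): the
* extraBytes parameter reserves storage past the declared ivars, which
* callers can reach with object_getIndexedIvars(). The trailing-buffer
* use shown here is hypothetical.
**********************************************************************/
static id CreateWithTrailingBuffer(Class cls, size_t payloadSize)
{
    // The allocation is zero-filled, including the extra payload bytes.
    id obj = class_createInstance(cls, payloadSize);
    if (!obj) return nil;

    // object_getIndexedIvars() points just past the declared ivars,
    // i.e. at the start of the payloadSize-byte trailing buffer.
    void *payload = object_getIndexedIvars(obj);
    (void)payload;  // hand this to whatever manages the trailing storage
    return obj;
}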
+
/***********************************************************************
* class_createInstances
* fixme
* Locking: none
**********************************************************************/
+#if SUPPORT_NONPOINTER_ISA
+#warning fixme optimize class_createInstances
+#endif
unsigned
class_createInstances(Class cls, size_t extraBytes,
id *results, unsigned num_requested)
{
- return _class_createInstancesFromZone(cls, extraBytes, NULL,
+ return _class_createInstancesFromZone(cls, extraBytes, nil,
results, num_requested);
}
static BOOL classOrSuperClassesUseARR(Class cls) {
while (cls) {
if (_class_usesAutomaticRetainRelease(cls)) return true;
- cls = class_getSuperclass(cls);
+ cls = cls->superclass;
}
return false;
}
static void arr_fixup_copied_references(id newObject, id oldObject)
{
// use ARR layouts to correctly copy the references from old object to new, both strong and weak.
- Class cls = oldObject->isa;
- while (cls) {
+ Class cls = oldObject->ISA();
+ for ( ; cls; cls = cls->superclass) {
if (_class_usesAutomaticRetainRelease(cls)) {
// FIXME: align the instance start to the nearest id boundary. This currently handles the case where
// the compiler folds a leading BOOL (char, short, etc.) into the alignment slop of a superclass.
}
}
}
- cls = class_getSuperclass(cls);
}
}
size_t size;
if (!oldObj) return nil;
- if (OBJC_IS_TAGGED_PTR(oldObj)) return oldObj;
+ if (oldObj->isTaggedPointer()) return oldObj;
- size = _class_getInstanceSize(oldObj->isa) + extraBytes;
+ size = oldObj->ISA()->instanceSize(extraBytes);
#if SUPPORT_GC
if (UseGC) {
obj = (id) auto_zone_allocate_object(gc_zone, size,
#if SUPPORT_GC
if (UseGC)
gc_fixup_weakreferences(obj, oldObj);
- else if (classOrSuperClassesUseARR(obj->isa))
- arr_fixup_copied_references(obj, oldObj);
-#else
- if (classOrSuperClassesUseARR(obj->isa))
- arr_fixup_copied_references(obj, oldObj);
+ else
#endif
+ if (classOrSuperClassesUseARR(obj->ISA()))
+ arr_fixup_copied_references(obj, oldObj);
return obj;
}
void *objc_destructInstance(id obj)
{
if (obj) {
- Class isa_gen = _object_getClass(obj);
- class_t *isa = newcls(isa_gen);
-
// Read all of the flags at once for performance.
- bool cxx = hasCxxStructors(isa);
- bool assoc = !UseGC && _class_instancesHaveAssociatedObjects(isa_gen);
+ bool cxx = obj->hasCxxDtor();
+ bool assoc = !UseGC && obj->hasAssociatedObjects();
+ bool dealloc = !UseGC;
// This order is important.
if (cxx) object_cxxDestruct(obj);
if (assoc) _object_remove_assocations(obj);
-
- if (!UseGC) objc_clear_deallocating(obj);
+ if (dealloc) obj->clearDeallocating();
}
return obj;
}
return nil;
}
-#if SUPPORT_FIXUP
-OBJC_EXTERN id objc_msgSend_fixedup(id, SEL, ...);
-OBJC_EXTERN id objc_msgSendSuper2_fixedup(id, SEL, ...);
-OBJC_EXTERN id objc_msgSend_stret_fixedup(id, SEL, ...);
-OBJC_EXTERN id objc_msgSendSuper2_stret_fixedup(id, SEL, ...);
-#if defined(__i386__) || defined(__x86_64__)
-OBJC_EXTERN id objc_msgSend_fpret_fixedup(id, SEL, ...);
-#endif
-#if defined(__x86_64__)
-OBJC_EXTERN id objc_msgSend_fp2ret_fixedup(id, SEL, ...);
-#endif
/***********************************************************************
-* _objc_fixupMessageRef
-* Fixes up message ref *msg.
-* obj is the receiver. supr is NULL for non-super messages
-* Locking: acquires runtimeLock
+* Tagged pointer objects.
+*
+* Tagged pointer objects store the class and the object value in the
+* object pointer; the "pointer" does not actually point to anything.
+*
+* Tagged pointer objects currently use this representation:
+* (LSB)
+* 1 bit set if tagged, clear if ordinary object pointer
+* 3 bits tag index
+* 60 bits payload
+* (MSB)
+* The tag index defines the object's class.
+* The payload format is defined by the object's class.
+*
+* This representation is subject to change. Representation-agnostic SPI is:
+* objc-internal.h for class implementers.
+* objc-gdb.h for debuggers.
**********************************************************************/
-OBJC_EXTERN PRIVATE_EXTERN IMP
-_objc_fixupMessageRef(id obj, struct objc_super2 *supr, message_ref_t *msg)
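// Illustrative sketch (for exposition only): decoding a pointer under the
// LSB layout described above. The helper names are hypothetical, and the
// layout is explicitly subject to change; real code should use the
// objc-internal.h / objc-gdb.h SPI rather than open-coding this.
__unused static bool      ex_isTaggedPointer(uintptr_t ptr) { return ptr & 1; }          // bit 0
__unused static unsigned  ex_tagIndex(uintptr_t ptr)        { return (ptr >> 1) & 0x7; } // bits 1-3
__unused static uintptr_t ex_payload(uintptr_t ptr)         { return ptr >> 4; }         // bits 4-63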
+#if !SUPPORT_TAGGED_POINTERS
+
+// These variables are always provided for debuggers.
+uintptr_t objc_debug_taggedpointer_mask = 0;
+unsigned objc_debug_taggedpointer_slot_shift = 0;
+uintptr_t objc_debug_taggedpointer_slot_mask = 0;
+unsigned objc_debug_taggedpointer_payload_lshift = 0;
+unsigned objc_debug_taggedpointer_payload_rshift = 0;
+Class objc_debug_taggedpointer_classes[1] = { nil };
+
+static void
+disableTaggedPointers() { }
+
+#else
+
+// The "slot" used in the class table and given to the debugger
+// includes the is-tagged bit. This makes objc_msgSend faster.
+
+uintptr_t objc_debug_taggedpointer_mask = TAG_MASK;
+unsigned objc_debug_taggedpointer_slot_shift = TAG_SLOT_SHIFT;
+uintptr_t objc_debug_taggedpointer_slot_mask = TAG_SLOT_MASK;
+unsigned objc_debug_taggedpointer_payload_lshift = TAG_PAYLOAD_LSHIFT;
+unsigned objc_debug_taggedpointer_payload_rshift = TAG_PAYLOAD_RSHIFT;
+// objc_debug_taggedpointer_classes is defined in objc-msg-*.s
+
+static void
+disableTaggedPointers()
{
- IMP imp;
- class_t *isa;
-#if SUPPORT_VTABLE
- int vtableIndex;
+ objc_debug_taggedpointer_mask = 0;
+ objc_debug_taggedpointer_slot_shift = 0;
+ objc_debug_taggedpointer_slot_mask = 0;
+ objc_debug_taggedpointer_payload_lshift = 0;
+ objc_debug_taggedpointer_payload_rshift = 0;
+}
+
+static int
+tagSlotForTagIndex(objc_tag_index_t tag)
+{
+#if SUPPORT_MSB_TAGGED_POINTERS
+ return 0x8 | tag;
+#else
+ return (tag << 1) | 1;
#endif
+}
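// Worked example (for exposition only), LSB layout: tag index 3 yields slot
// (3 << 1) | 1 == 7, which is exactly the low nibble of every tagged pointer
// carrying that tag. Assuming TAG_SLOT_SHIFT is 0 in this configuration, the
// dispatcher can recover the slot as (ptr >> TAG_SLOT_SHIFT) & TAG_SLOT_MASK
// and index the tagged-class table directly.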
- rwlock_assert_unlocked(&runtimeLock);
- if (!supr) {
- // normal message - search obj->isa for the method implementation
- isa = (class_t *) _object_getClass(obj);
-
- if (!isRealized(isa)) {
- // obj is a class object, isa is its metaclass
- class_t *cls;
- rwlock_write(&runtimeLock);
- cls = realizeClass((class_t *)obj);
- rwlock_unlock_write(&runtimeLock);
-
- // shouldn't have instances of unrealized classes!
- assert(isMetaClass(isa));
- // shouldn't be relocating classes here!
- assert(cls == (class_t *)obj);
- }
+/***********************************************************************
+* _objc_registerTaggedPointerClass
+* Set the class to use for the given tagged pointer index.
+* Aborts if the tag is out of range, or if the tag is already
+* used by some other class.
+**********************************************************************/
+void
+_objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
+{
+ if (objc_debug_taggedpointer_mask == 0) {
+ _objc_fatal("tagged pointers are disabled");
}
- else {
- // this is objc_msgSend_super, and supr->current_class->superclass
- // is the class to search for the method implementation
- assert(isRealized((class_t *)supr->current_class));
- isa = getSuperclass((class_t *)supr->current_class);
+
+ if ((unsigned int)tag >= TAG_COUNT) {
+ _objc_fatal("tag index %u is too large.", tag);
+ }
+
+ int slot = tagSlotForTagIndex(tag);
+ Class oldCls = objc_tag_classes[slot];
+
+ if (cls && oldCls && cls != oldCls) {
+ _objc_fatal("tag index %u used for two different classes "
+ "(was %p %s, now %p %s)", tag,
+ oldCls, oldCls->nameForLogging(),
+ cls, cls->nameForLogging());
}
+ objc_tag_classes[slot] = cls;
+}
+
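// Illustrative usage sketch (for exposition only): a class implementer
// claiming a tag slot through this SPI and reading it back. The tag value
// and class name are hypothetical; real tag assignments belong in
// objc-internal.h.
__unused static void example_registerTag(void)
{
    Class cls = objc_getClass("MyTaggedClass");   // hypothetical class
    _objc_registerTaggedPointerClass((objc_tag_index_t)3, cls);
    assert(_objc_getClassForTag((objc_tag_index_t)3) == cls);
}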
+
+// Deprecated name.
+void _objc_insert_tagged_isa(unsigned char slotNumber, Class isa)
+{
+ return _objc_registerTaggedPointerClass((objc_tag_index_t)slotNumber, isa);
+}
+
+
+/***********************************************************************
+* _objc_getClassForTag
+* Returns the class that is using the given tagged pointer tag.
+* Returns nil if no class is using that tag or the tag is out of range.
+**********************************************************************/
+Class
+_objc_getClassForTag(objc_tag_index_t tag)
+{
+ if ((unsigned int)tag >= TAG_COUNT) return nil;
+ return objc_tag_classes[tagSlotForTagIndex(tag)];
+}
+
+#endif
+
+
+#if SUPPORT_FIXUP
+
+OBJC_EXTERN void objc_msgSend_fixup(void);
+OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
+OBJC_EXTERN void objc_msgSend_stret_fixup(void);
+OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
+#if defined(__i386__) || defined(__x86_64__)
+OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
+#endif
+#if defined(__x86_64__)
+OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
+#endif
+
+OBJC_EXTERN void objc_msgSend_fixedup(void);
+OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
+OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
+OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
+#if defined(__i386__) || defined(__x86_64__)
+OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
+#endif
+#if defined(__x86_64__)
+OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
+#endif
+
+/***********************************************************************
+* fixupMessageRef
+* Repairs an old vtable dispatch call site.
+* vtable dispatch itself is not supported.
+**********************************************************************/
+static void
+fixupMessageRef(message_ref_t *msg)
+{
msg->sel = sel_registerName((const char *)msg->sel);
if (ignoreSelector(msg->sel)) {
// ignored selector - bypass dispatcher
- msg->imp = (IMP)&vtable_ignored;
- imp = (IMP)&_objc_ignored_method;
- }
-#if SUPPORT_VTABLE
- else if (msg->imp == (IMP)&objc_msgSend_fixup &&
- (vtableIndex = vtable_getIndex(msg->sel)) >= 0)
- {
- // vtable dispatch
- msg->imp = vtableTrampolines[vtableIndex];
- imp = isa->vtable[vtableIndex];
- }
-#endif
- else {
- // ordinary dispatch
- imp = lookUpMethod((Class)isa, msg->sel, YES/*initialize*/, YES/*cache*/);
-
- if (msg->imp == (IMP)&objc_msgSend_fixup) {
- msg->imp = (IMP)&objc_msgSend_fixedup;
- }
- else if (msg->imp == (IMP)&objc_msgSendSuper2_fixup) {
- msg->imp = (IMP)&objc_msgSendSuper2_fixedup;
- }
- else if (msg->imp == (IMP)&objc_msgSend_stret_fixup) {
- msg->imp = (IMP)&objc_msgSend_stret_fixedup;
- }
- else if (msg->imp == (IMP)&objc_msgSendSuper2_stret_fixup) {
- msg->imp = (IMP)&objc_msgSendSuper2_stret_fixedup;
- }
+ msg->imp = (IMP)&_objc_ignored_method;
+ }
+ else if (msg->imp == &objc_msgSend_fixup) {
+ if (msg->sel == SEL_alloc) {
+ msg->imp = (IMP)&objc_alloc;
+ } else if (msg->sel == SEL_allocWithZone) {
+ msg->imp = (IMP)&objc_allocWithZone;
+ } else if (msg->sel == SEL_retain) {
+ msg->imp = (IMP)&objc_retain;
+ } else if (msg->sel == SEL_release) {
+ msg->imp = (IMP)&objc_release;
+ } else if (msg->sel == SEL_autorelease) {
+ msg->imp = (IMP)&objc_autorelease;
+ } else {
+ msg->imp = &objc_msgSend_fixedup;
+ }
+ }
+ else if (msg->imp == &objc_msgSendSuper2_fixup) {
+ msg->imp = &objc_msgSendSuper2_fixedup;
+ }
+ else if (msg->imp == &objc_msgSend_stret_fixup) {
+ msg->imp = &objc_msgSend_stret_fixedup;
+ }
+ else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
+ msg->imp = &objc_msgSendSuper2_stret_fixedup;
+ }
#if defined(__i386__) || defined(__x86_64__)
- else if (msg->imp == (IMP)&objc_msgSend_fpret_fixup) {
- msg->imp = (IMP)&objc_msgSend_fpret_fixedup;
- }
+ else if (msg->imp == &objc_msgSend_fpret_fixup) {
+ msg->imp = &objc_msgSend_fpret_fixedup;
+ }
#endif
#if defined(__x86_64__)
- else if (msg->imp == (IMP)&objc_msgSend_fp2ret_fixup) {
- msg->imp = (IMP)&objc_msgSend_fp2ret_fixedup;
- }
+ else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
+ msg->imp = &objc_msgSend_fp2ret_fixedup;
+ }
#endif
- else {
- // The ref may already have been fixed up, either by another thread
- // or by +initialize via lookUpMethod above.
- }
- }
-
- return imp;
}
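// Worked example (for exposition only): a call site whose imp is
// objc_msgSend_fixup and whose sel is the string "retain" is rewritten so
// that sel becomes the registered SEL_retain and imp becomes objc_retain;
// later calls through the message ref jump straight to the runtime's retain
// entry point instead of going through the dispatcher.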
// SUPPORT_FIXUP
#endif

// ProKit SPI
-static class_t *setSuperclass(class_t *cls, class_t *newSuper)
+static Class setSuperclass(Class cls, Class newSuper)
{
- class_t *oldSuper;
+ Class oldSuper;
rwlock_assert_writing(&runtimeLock);
+ assert(cls->isRealized());
+ assert(newSuper->isRealized());
+
oldSuper = cls->superclass;
removeSubclass(oldSuper, cls);
- removeSubclass(oldSuper->isa, cls->isa);
+ removeSubclass(oldSuper->ISA(), cls->ISA());
cls->superclass = newSuper;
- cls->isa->superclass = newSuper->isa;
+ cls->ISA()->superclass = newSuper->ISA();
addSubclass(newSuper, cls);
- addSubclass(newSuper->isa, cls->isa);
-
- flushCaches(cls);
- flushCaches(cls->isa);
- flushVtables(cls);
- flushVtables(cls->isa);
+ addSubclass(newSuper->ISA(), cls->ISA());
+ // Flush subclass's method caches.
+ // If subclass is not yet +initialized then its cache will be empty.
+ // Otherwise this is very slow for sel-side caches.
+ if (cls->isInitialized() || cls->ISA()->isInitialized()) {
+ flushCaches(cls);
+ }
+
return oldSuper;
}
-Class class_setSuperclass(Class cls_gen, Class newSuper_gen)
+Class class_setSuperclass(Class cls, Class newSuper)
{
- class_t *cls = newcls(cls_gen);
- class_t *newSuper = newcls(newSuper_gen);
- class_t *oldSuper;
+ Class oldSuper;
rwlock_write(&runtimeLock);
oldSuper = setSuperclass(cls, newSuper);
rwlock_unlock_write(&runtimeLock);
- return (Class)oldSuper;
+ return oldSuper;
}
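// Illustrative note (for exposition only, class names hypothetical):
// reparenting a class at runtime, e.g.
//   class_setSuperclass(objc_getClass("MyView"), objc_getClass("OtherBase"));
// requires both classes to be realized (see the asserts above), and subclass
// method caches are flushed only if the class or its metaclass has already
// been +initialized.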
+
+// __OBJC2__
#endif