From 34d5b5e889e3923108b39f500cb2351aca8497fa Mon Sep 17 00:00:00 2001 From: Apple Date: Mon, 21 Dec 2020 23:08:51 +0000 Subject: [PATCH] objc4-818.2.tar.gz --- markgc.cpp | 8 + objc.xcodeproj/project.pbxproj | 58 +- objc4.plist | 11 + objcdt/objcdt.1 | 1 - objcdt/objcdt.mm | 2 + runtime/Messengers.subproj/objc-msg-arm.s | 2 +- runtime/Messengers.subproj/objc-msg-arm64.s | 458 +++++--- .../objc-msg-simulator-x86_64.s | 48 +- runtime/Messengers.subproj/objc-msg-x86_64.s | 48 +- runtime/NSObject-internal.h | 10 + runtime/NSObject.mm | 321 +++++- runtime/Protocol.mm | 2 +- runtime/arm64-asm.h | 37 +- runtime/dummy-library-mac-i386.c | 356 +++++++ runtime/isa.h | 55 +- runtime/objc-abi.h | 13 +- runtime/objc-api.h | 6 + runtime/objc-block-trampolines.mm | 49 +- runtime/objc-blocktramps-i386.s | 8 +- runtime/objc-blocktramps-x86_64.s | 74 +- runtime/objc-cache-old.mm | 4 - runtime/objc-cache.h | 23 - runtime/objc-cache.mm | 652 ++++++++---- runtime/objc-class-old.mm | 13 +- runtime/objc-class.mm | 48 +- runtime/objc-config.h | 96 +- runtime/objc-env.h | 5 + runtime/objc-exception.mm | 2 +- runtime/objc-file.h | 10 +- runtime/objc-file.mm | 6 + runtime/objc-gdb.h | 10 + runtime/objc-initialize.mm | 8 +- runtime/objc-internal.h | 217 +++- runtime/objc-lockdebug.h | 36 +- runtime/objc-lockdebug.mm | 8 + runtime/objc-object.h | 554 +++++++--- runtime/objc-opt.mm | 2 +- runtime/objc-os.h | 104 +- runtime/objc-os.mm | 16 +- runtime/objc-private.h | 100 +- runtime/objc-ptrauth.h | 22 +- runtime/objc-references.h | 2 +- runtime/objc-references.mm | 43 +- runtime/objc-runtime-new.h | 550 ++++++++-- runtime/objc-runtime-new.mm | 985 +++++++++++++----- runtime/objc-runtime.mm | 56 +- runtime/objc-sel-set.mm | 2 +- runtime/objc-sel-table.s | 5 + runtime/objc-sel.mm | 12 +- runtime/objc-weak.h | 8 +- runtime/objc-weak.mm | 51 +- runtime/objc.h | 2 +- runtime/runtime.h | 71 +- test/association.m | 104 ++ test/badPoolCompat-ios-tvos.m | 14 - test/badPoolCompat-ios.m | 18 + 
test/badPoolCompat-macos.m | 8 +- test/badPoolCompat-tvos.m | 18 + test/badPoolCompat-watchos.m | 6 +- test/badSuperclass.m | 2 +- test/bigrc.m | 41 +- test/bool.c | 6 +- test/cacheflush-constant.m | 44 + test/category.m | 22 +- test/consolidatePoolPointers.m | 142 +++ test/customrr-nsobject.m | 20 +- test/customrr.m | 53 +- test/evil-class-def.m | 82 +- test/exchangeImp.m | 107 ++ test/fakeRealizedClass.m | 74 ++ test/fakeRealizedClass2.m | 74 ++ test/forward.m | 2 +- test/gc-main.m | 10 - test/gc.c | 1 - test/gc.m | 8 - test/gcenforcer-app-aso.m | 12 - test/gcenforcer-app-gc.m | 14 - test/gcenforcer-app-gcaso.m | 14 - test/gcenforcer-app-gcaso2.m | 14 - test/gcenforcer-app-gconly.m | 14 - test/gcenforcer-app-nogc.m | 12 - test/gcenforcer-app-noobjc.m | 12 - test/gcenforcer-dylib-nogc.m | 11 - test/gcenforcer-dylib-noobjc.m | 9 - test/gcenforcer-dylib-requiresgc.m | 22 - test/gcenforcer-dylib-supportsgc.m | 9 - test/gcenforcer-preflight.m | 88 -- test/gcfiles/evil1 | Bin 441 -> 0 bytes test/gcfiles/i386-aso | Bin 12624 -> 0 bytes test/gcfiles/i386-aso--x86_64-aso | Bin 29060 -> 0 bytes test/gcfiles/i386-broken | Bin 12608 -> 0 bytes test/gcfiles/i386-broken--x86_64-gc | Bin 29056 -> 0 bytes test/gcfiles/i386-broken--x86_64-nogc | Bin 29056 -> 0 bytes test/gcfiles/i386-gc | Bin 12608 -> 0 bytes test/gcfiles/i386-gc--x86_64-broken | Bin 29056 -> 0 bytes test/gcfiles/i386-gc--x86_64-gc | Bin 29056 -> 0 bytes test/gcfiles/i386-gc--x86_64-nogc | Bin 29056 -> 0 bytes test/gcfiles/i386-gcaso | Bin 12716 -> 0 bytes test/gcfiles/i386-gcaso2 | Bin 12644 -> 0 bytes test/gcfiles/i386-gconly | Bin 12608 -> 0 bytes test/gcfiles/i386-nogc | Bin 12608 -> 0 bytes test/gcfiles/i386-nogc--x86_64-broken | Bin 29056 -> 0 bytes test/gcfiles/i386-nogc--x86_64-gc | Bin 29056 -> 0 bytes test/gcfiles/i386-nogc--x86_64-nogc | Bin 29056 -> 0 bytes test/gcfiles/i386-noobjc | Bin 4228 -> 0 bytes test/gcfiles/libnogc.dylib | Bin 74696 -> 0 bytes test/gcfiles/libnoobjc.dylib | Bin 41640 -> 0 
bytes test/gcfiles/librequiresgc.dylib | Bin 74696 -> 0 bytes test/gcfiles/librequiresgc.fake.dylib | Bin 74696 -> 0 bytes test/gcfiles/libsupportsgc.dylib | Bin 74696 -> 0 bytes test/gcfiles/x86_64-aso | Bin 8580 -> 0 bytes test/gcfiles/x86_64-broken | Bin 8576 -> 0 bytes test/gcfiles/x86_64-gc | Bin 8576 -> 0 bytes test/gcfiles/x86_64-gcaso | Bin 8920 -> 0 bytes test/gcfiles/x86_64-gcaso2 | Bin 8640 -> 0 bytes test/gcfiles/x86_64-gconly | Bin 8576 -> 0 bytes test/gcfiles/x86_64-nogc | Bin 8576 -> 0 bytes test/gcfiles/x86_64-noobjc | Bin 4248 -> 0 bytes test/isaValidation.m | 6 +- test/ivarSlide.m | 26 +- test/lazyClassName.m | 136 +++ test/libraryPath.c | 17 +- test/methodCacheLeaks.m | 7 +- test/methodListSmall.h | 18 +- test/methodListSmall.mm | 6 + test/methodListSmallMutableMemory.mm | 18 - test/nonpointerisa.m | 49 +- test/preopt-caches.entitlements | 12 + test/preopt-caches.mm | 380 +++++++ test/protocolSmall.m | 91 ++ test/readClassPair.m | 8 +- test/rr-sidetable.m | 2 +- test/runtime.m | 7 + test/setAssociatedObjectHook.m | 45 +- test/swift-class-def.m | 18 +- test/swiftMetadataInitializerRealloc.m | 3 + test/taggedPointers.m | 20 + test/taggedPointersTagObfuscationDisabled.m | 8 +- test/test-defines.h | 1 + test/test.h | 58 +- test/test.pl | 221 +++- test/unload.m | 16 +- test/weakReferenceHook.m | 49 + 143 files changed, 5716 insertions(+), 1883 deletions(-) create mode 100644 objc4.plist create mode 100644 runtime/dummy-library-mac-i386.c delete mode 100644 runtime/objc-cache.h delete mode 100644 test/badPoolCompat-ios-tvos.m create mode 100644 test/badPoolCompat-ios.m create mode 100644 test/badPoolCompat-tvos.m create mode 100644 test/cacheflush-constant.m create mode 100644 test/consolidatePoolPointers.m create mode 100644 test/fakeRealizedClass.m create mode 100644 test/fakeRealizedClass2.m delete mode 100644 test/gc-main.m delete mode 100644 test/gc.c delete mode 100644 test/gc.m delete mode 100644 test/gcenforcer-app-aso.m delete mode 100644 
test/gcenforcer-app-gc.m delete mode 100644 test/gcenforcer-app-gcaso.m delete mode 100644 test/gcenforcer-app-gcaso2.m delete mode 100644 test/gcenforcer-app-gconly.m delete mode 100644 test/gcenforcer-app-nogc.m delete mode 100644 test/gcenforcer-app-noobjc.m delete mode 100644 test/gcenforcer-dylib-nogc.m delete mode 100644 test/gcenforcer-dylib-noobjc.m delete mode 100644 test/gcenforcer-dylib-requiresgc.m delete mode 100644 test/gcenforcer-dylib-supportsgc.m delete mode 100644 test/gcenforcer-preflight.m delete mode 100644 test/gcfiles/evil1 delete mode 100755 test/gcfiles/i386-aso delete mode 100755 test/gcfiles/i386-aso--x86_64-aso delete mode 100755 test/gcfiles/i386-broken delete mode 100755 test/gcfiles/i386-broken--x86_64-gc delete mode 100755 test/gcfiles/i386-broken--x86_64-nogc delete mode 100755 test/gcfiles/i386-gc delete mode 100755 test/gcfiles/i386-gc--x86_64-broken delete mode 100755 test/gcfiles/i386-gc--x86_64-gc delete mode 100755 test/gcfiles/i386-gc--x86_64-nogc delete mode 100755 test/gcfiles/i386-gcaso delete mode 100755 test/gcfiles/i386-gcaso2 delete mode 100755 test/gcfiles/i386-gconly delete mode 100755 test/gcfiles/i386-nogc delete mode 100755 test/gcfiles/i386-nogc--x86_64-broken delete mode 100755 test/gcfiles/i386-nogc--x86_64-gc delete mode 100755 test/gcfiles/i386-nogc--x86_64-nogc delete mode 100755 test/gcfiles/i386-noobjc delete mode 100755 test/gcfiles/libnogc.dylib delete mode 100755 test/gcfiles/libnoobjc.dylib delete mode 100755 test/gcfiles/librequiresgc.dylib delete mode 100755 test/gcfiles/librequiresgc.fake.dylib delete mode 100755 test/gcfiles/libsupportsgc.dylib delete mode 100755 test/gcfiles/x86_64-aso delete mode 100755 test/gcfiles/x86_64-broken delete mode 100755 test/gcfiles/x86_64-gc delete mode 100755 test/gcfiles/x86_64-gcaso delete mode 100755 test/gcfiles/x86_64-gcaso2 delete mode 100755 test/gcfiles/x86_64-gconly delete mode 100755 test/gcfiles/x86_64-nogc delete mode 100755 test/gcfiles/x86_64-noobjc 
create mode 100644 test/lazyClassName.m delete mode 100644 test/methodListSmallMutableMemory.mm create mode 100644 test/preopt-caches.entitlements create mode 100644 test/preopt-caches.mm create mode 100644 test/protocolSmall.m create mode 100644 test/test-defines.h create mode 100644 test/weakReferenceHook.m diff --git a/markgc.cpp b/markgc.cpp index 4543ad6..bed92dd 100644 --- a/markgc.cpp +++ b/markgc.cpp @@ -391,6 +391,14 @@ void dosect(uint8_t *start, macho_section

*sect) sect->set_sectname("__objc_init_func"); if (debug) printf("disabled __mod_init_func section\n"); } + if (segnameStartsWith(sect->segname(), "__TEXT") && + sectnameEquals(sect->sectname(), "__init_offsets")) + { + // section type 0 is S_REGULAR + sect->set_flags(sect->flags() & ~SECTION_TYPE); + sect->set_sectname("__objc_init_offs"); + if (debug) printf("disabled __mod_init_func section\n"); + } if (segnameStartsWith(sect->segname(), "__DATA") && sectnameEquals(sect->sectname(), "__mod_term_func")) { diff --git a/objc.xcodeproj/project.pbxproj b/objc.xcodeproj/project.pbxproj index 5587470..9f3248f 100644 --- a/objc.xcodeproj/project.pbxproj +++ b/objc.xcodeproj/project.pbxproj @@ -62,7 +62,6 @@ 6EF877E22325D93200963DBB /* Symbolication.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6EF877E12325D93200963DBB /* Symbolication.framework */; }; 6EF877E52325FAC400963DBB /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6EF877E42325FAC400963DBB /* Foundation.framework */; }; 6EF877E82326184000963DBB /* json.mm in Sources */ = {isa = PBXBuildFile; fileRef = 6EF877E72326184000963DBB /* json.mm */; }; - 6EF877E923261D3E00963DBB /* objc-cache.mm in Sources */ = {isa = PBXBuildFile; fileRef = 838485CB0D6D68A200CEA253 /* objc-cache.mm */; }; 6EF877EC232635A700963DBB /* objcdt.1 in Install Manpages */ = {isa = PBXBuildFile; fileRef = 6EF877EA232633CC00963DBB /* objcdt.1 */; }; 7213C36321FA7C730090A271 /* NSObject-internal.h in Headers */ = {isa = PBXBuildFile; fileRef = 7213C36221FA7C730090A271 /* NSObject-internal.h */; settings = {ATTRIBUTES = (Private, ); }; }; 7593EC58202248E50046AB96 /* objc-object.h in Headers */ = {isa = PBXBuildFile; fileRef = 7593EC57202248DF0046AB96 /* objc-object.h */; }; @@ -149,7 +148,9 @@ 9672F7EE14D5F488007CEC96 /* NSObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = 9672F7ED14D5F488007CEC96 /* NSObject.mm */; }; C22F5208230EF38B001BFE14 /* objc-ptrauth.h in Headers */ = {isa = PBXBuildFile; 
fileRef = C22F5207230EF38B001BFE14 /* objc-ptrauth.h */; }; C2E6D3FC2225DCF00059DFAA /* DenseMapExtras.h in Headers */ = {isa = PBXBuildFile; fileRef = C2E6D3FB2225DCF00059DFAA /* DenseMapExtras.h */; }; + C2EB731D23D8A38A0040672B /* dummy-library-mac-i386.c in Sources */ = {isa = PBXBuildFile; fileRef = C2EB731C23D8A38A0040672B /* dummy-library-mac-i386.c */; }; E8923DA5116AB2820071B552 /* objc-block-trampolines.mm in Sources */ = {isa = PBXBuildFile; fileRef = E8923DA0116AB2820071B552 /* objc-block-trampolines.mm */; }; + E934A9F123E996D00088F26F /* objc4.plist in CopyFiles */ = {isa = PBXBuildFile; fileRef = E934A9EF23E9967D0088F26F /* objc4.plist */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; }; F9BCC71B205C68E800DD9AFC /* objc-blocktramps-arm64.s in Sources */ = {isa = PBXBuildFile; fileRef = 8379996D13CBAF6F007C2B5F /* objc-blocktramps-arm64.s */; }; /* End PBXBuildFile section */ @@ -189,6 +190,16 @@ name = "Install Manpages"; runOnlyForDeploymentPostprocessing = 1; }; + E934A9F023E996CC0088F26F /* CopyFiles */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 8; + dstPath = /System/Library/FeatureFlags/Domain; + dstSubfolderSpec = 0; + files = ( + E934A9F123E996D00088F26F /* objc4.plist in CopyFiles */, + ); + runOnlyForDeploymentPostprocessing = 1; + }; /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ @@ -303,12 +314,17 @@ 9672F7ED14D5F488007CEC96 /* NSObject.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = NSObject.mm; path = runtime/NSObject.mm; sourceTree = ""; }; BC8B5D1212D3D48100C78A5B /* libauto.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libauto.dylib; path = /usr/lib/libauto.dylib; sourceTree = ""; }; C217B55222DE556D004369BA /* objc-env.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "objc-env.h"; path = "runtime/objc-env.h"; sourceTree = ""; }; + C2296C682457336C003FAE61 /* 
objc-bp-assist.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "objc-bp-assist.h"; path = "runtime/objc-bp-assist.h"; sourceTree = ""; }; C22F5207230EF38B001BFE14 /* objc-ptrauth.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-ptrauth.h"; path = "runtime/objc-ptrauth.h"; sourceTree = ""; }; C2E6D3FB2225DCF00059DFAA /* DenseMapExtras.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DenseMapExtras.h; path = runtime/DenseMapExtras.h; sourceTree = ""; }; + C2EB731C23D8A38A0040672B /* dummy-library-mac-i386.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "dummy-library-mac-i386.c"; path = "runtime/dummy-library-mac-i386.c"; sourceTree = ""; }; D2AAC0630554660B00DB518D /* libobjc.A.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; includeInIndex = 0; path = libobjc.A.dylib; sourceTree = BUILT_PRODUCTS_DIR; }; E8923D9C116AB2820071B552 /* objc-blocktramps-i386.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-i386.s"; path = "runtime/objc-blocktramps-i386.s"; sourceTree = ""; }; E8923D9D116AB2820071B552 /* objc-blocktramps-x86_64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-x86_64.s"; path = "runtime/objc-blocktramps-x86_64.s"; sourceTree = ""; }; E8923DA0116AB2820071B552 /* objc-block-trampolines.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "objc-block-trampolines.mm"; path = "runtime/objc-block-trampolines.mm"; sourceTree = ""; }; + E934A9EF23E9967D0088F26F /* objc4.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = objc4.plist; sourceTree = ""; }; + E97047552497CC5300781D29 /* check_preopt_caches.entitlements */ = {isa = PBXFileReference; lastKnownFileType = 
text.plist.entitlements; path = check_preopt_caches.entitlements; sourceTree = ""; }; + E9AD465924925261002AF1DB /* check_preopt_caches.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = check_preopt_caches.mm; sourceTree = ""; }; F9BCC727205C68E800DD9AFC /* libobjc-trampolines.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; includeInIndex = 0; path = "libobjc-trampolines.dylib"; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ @@ -352,6 +368,7 @@ 08FB7795FE84155DC02AAC07 /* Source */, 838485B20D6D67F900CEA253 /* Other */, 6EF877D82325D62600963DBB /* objcdt */, + E9AD465824925261002AF1DB /* check-preopt-caches */, 1AB674ADFE9D54B511CA2CBB /* Products */, F9BCC72A205C6A1600DD9AFC /* Frameworks */, ); @@ -361,6 +378,7 @@ 08FB7795FE84155DC02AAC07 /* Source */ = { isa = PBXGroup; children = ( + C2EB731C23D8A38A0040672B /* dummy-library-mac-i386.c */, 838485B80D6D687300CEA253 /* hashtable2.mm */, 838485BC0D6D687300CEA253 /* maptable.mm */, 9672F7ED14D5F488007CEC96 /* NSObject.mm */, @@ -439,6 +457,7 @@ 838485B40D6D683300CEA253 /* APPLE_LICENSE */, 838485B50D6D683300CEA253 /* ReleaseNotes.rtf */, 83CE671D1E6E76B60095A33E /* interposable.txt */, + E934A9EF23E9967D0088F26F /* objc4.plist */, 838485B30D6D682B00CEA253 /* libobjc.order */, ); name = Other; @@ -514,6 +533,7 @@ 83D9269721225A7400299F69 /* arm64-asm.h */, 83D92695212254CF00299F69 /* isa.h */, 838485CF0D6D68A200CEA253 /* objc-config.h */, + C2296C682457336C003FAE61 /* objc-bp-assist.h */, C217B55222DE556D004369BA /* objc-env.h */, 83BE02E50FCCB24D00661494 /* objc-file-old.h */, 83BE02E60FCCB24D00661494 /* objc-file.h */, @@ -535,6 +555,15 @@ name = "Project Headers"; sourceTree = ""; }; + E9AD465824925261002AF1DB /* check-preopt-caches */ = { + isa = PBXGroup; + children = ( + E97047552497CC5300781D29 /* check_preopt_caches.entitlements */, + E9AD465924925261002AF1DB /* check_preopt_caches.mm */, + ); + path = 
"check-preopt-caches"; + sourceTree = ""; + }; F9BCC72A205C6A1600DD9AFC /* Frameworks */ = { isa = PBXGroup; children = ( @@ -644,6 +673,7 @@ D289988505E68E00004EDB86 /* Frameworks */, 830F2AB60D739AB600392440 /* Run Script (markgc) */, 830F2AFA0D73BC5800392440 /* Run Script (symlink) */, + E934A9F023E996CC0088F26F /* CopyFiles */, ); buildRules = ( ); @@ -678,7 +708,6 @@ 08FB7793FE84155DC02AAC07 /* Project object */ = { isa = PBXProject; attributes = { - BuildIndependentTargetsInParallel = NO; LastUpgradeCheck = 0440; TargetAttributes = { 6EF877D62325D62600963DBB = { @@ -777,7 +806,6 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - 6EF877E923261D3E00963DBB /* objc-cache.mm in Sources */, 6EF877E82326184000963DBB /* json.mm in Sources */, 6EF877DA2325D62600963DBB /* objcdt.mm in Sources */, 6EF877DE2325D79000963DBB /* objc-probes.d in Sources */, @@ -824,6 +852,7 @@ 83B1A8BE0FF1AC0D0019EA5B /* objc-msg-simulator-i386.s in Sources */, 83EB007B121C9EC200B92C16 /* objc-sel-table.s in Sources */, 39ABD72412F0B61800D1054C /* objc-weak.mm in Sources */, + C2EB731D23D8A38A0040672B /* dummy-library-mac-i386.c in Sources */, 83D49E4F13C7C84F0057F1DD /* objc-msg-arm64.s in Sources */, 9672F7EE14D5F488007CEC96 /* NSObject.mm in Sources */, 83725F4A14CA5BFA0014370E /* objc-opt.mm in Sources */, @@ -875,6 +904,8 @@ COPY_PHASE_STRIP = NO; DEPLOYMENT_LOCATION = YES; DYLIB_CURRENT_VERSION = 228; + EXCLUDED_SOURCE_FILE_NAMES = "dummy-library-mac-i386.c"; + "EXCLUDED_SOURCE_FILE_NAMES[sdk=macosx*][arch=i386]" = "*"; EXECUTABLE_PREFIX = lib; GCC_CW_ASM_SYNTAX = NO; GCC_OPTIMIZATION_LEVEL = 0; @@ -886,6 +917,7 @@ "$(CONFIGURATION_BUILD_DIR)/usr/local/include/**", /System/Library/Frameworks/System.framework/PrivateHeaders, ); + "INCLUDED_SOURCE_FILE_NAMES[sdk=macosx*][arch=i386]" = "dummy-library-mac-i386.c"; INSTALL_PATH = /usr/lib; IS_ZIPPERED = YES; LLVM_LTO = NO; @@ -911,6 +943,10 @@ "-interposable_list", "-Xlinker", interposable.txt, + "-Xlinker", + 
"-headerpad", + "-Xlinker", + 0x100, ); "OTHER_LDFLAGS[sdk=iphonesimulator*][arch=*]" = ( "-lc++abi", @@ -934,7 +970,9 @@ "-interposable_list", "-Xlinker", interposable.txt, + "-loah", ); + "OTHER_LDFLAGS[sdk=macosx*][arch=i386]" = "-nodefaultlibs"; OTHER_TAPI_FLAGS = "-exclude-public-header $(DSTROOT)/usr/include/objc/ObjectiveC.apinotes -exclude-public-header $(DSTROOT)/usr/include/objc/module.modulemap -Xparser -Wno-deprecated-declarations -Xparser -Wno-unavailable-declarations -Xparser -D_OBJC_PRIVATE_H_=1 -DOBJC_DECLARE_SYMBOLS=1"; PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc; PRODUCT_NAME = objc.A; @@ -953,6 +991,8 @@ "COPY_HEADERS_UNIFDEF_FLAGS[sdk=macosx*]" = "-DBUILD_FOR_OSX"; DEPLOYMENT_LOCATION = YES; DYLIB_CURRENT_VERSION = 228; + EXCLUDED_SOURCE_FILE_NAMES = "dummy-library-mac-i386.c"; + "EXCLUDED_SOURCE_FILE_NAMES[sdk=macosx*][arch=i386]" = "*"; EXECUTABLE_PREFIX = lib; GCC_CW_ASM_SYNTAX = NO; GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = NO; @@ -963,6 +1003,7 @@ "$(CONFIGURATION_BUILD_DIR)/usr/local/include/**", /System/Library/Frameworks/System.framework/PrivateHeaders, ); + "INCLUDED_SOURCE_FILE_NAMES[sdk=macosx*][arch=i386]" = "dummy-library-mac-i386.c"; INSTALL_PATH = /usr/lib; IS_ZIPPERED = YES; ORDER_FILE = "$(SDKROOT)/AppleInternal/OrderFiles/libobjc.order"; @@ -987,6 +1028,10 @@ "-interposable_list", "-Xlinker", interposable.txt, + "-Xlinker", + "-headerpad", + "-Xlinker", + 0x100, ); "OTHER_LDFLAGS[sdk=iphonesimulator*][arch=*]" = ( "-lc++abi", @@ -1010,7 +1055,9 @@ "-interposable_list", "-Xlinker", interposable.txt, + "-loah", ); + "OTHER_LDFLAGS[sdk=macosx*][arch=i386]" = "-nodefaultlibs"; OTHER_TAPI_FLAGS = "-exclude-public-header $(DSTROOT)/usr/include/objc/ObjectiveC.apinotes -exclude-public-header $(DSTROOT)/usr/include/objc/module.modulemap -Xparser -Wno-deprecated-declarations -Xparser -Wno-unavailable-declarations -Xparser -D_OBJC_PRIVATE_H_=1 -DOBJC_DECLARE_SYMBOLS=1"; PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc; 
PRODUCT_NAME = objc.A; @@ -1032,6 +1079,7 @@ CLANG_CXX_LIBRARY = "libc++"; CLANG_LINK_OBJC_RUNTIME = NO; CLANG_OBJC_RUNTIME = NO; + CODE_SIGN_IDENTITY = "-"; DEBUG_INFORMATION_FORMAT = dwarf; GCC_ENABLE_CPP_EXCEPTIONS = NO; GCC_ENABLE_CPP_RTTI = NO; @@ -1078,6 +1126,7 @@ CLANG_CXX_LIBRARY = "libc++"; CLANG_LINK_OBJC_RUNTIME = NO; CLANG_OBJC_RUNTIME = NO; + CODE_SIGN_IDENTITY = "-"; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; GCC_ENABLE_CPP_EXCEPTIONS = NO; GCC_ENABLE_CPP_RTTI = NO; @@ -1124,6 +1173,7 @@ buildSettings = { CODE_SIGN_ENTITLEMENTS = "objcdt/objcdt-entitlements.plist"; CODE_SIGN_IDENTITY = "-"; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; GCC_PREPROCESSOR_DEFINITIONS = ( "__BUILDING_OBJCDT__=1", "$(inherited)", @@ -1232,6 +1282,7 @@ OTHER_LDFLAGS = ( "-Xlinker", "-not_for_dyld_shared_cache", + "-nodefaultlibs", ); PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -1268,6 +1319,7 @@ OTHER_LDFLAGS = ( "-Xlinker", "-not_for_dyld_shared_cache", + "-nodefaultlibs", ); PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc; PRODUCT_NAME = "$(TARGET_NAME)"; diff --git a/objc4.plist b/objc4.plist new file mode 100644 index 0000000..157aea8 --- /dev/null +++ b/objc4.plist @@ -0,0 +1,11 @@ + + + + + preoptimizedCaches + + Enabled + + + + diff --git a/objcdt/objcdt.1 b/objcdt/objcdt.1 index 5522491..999a155 100644 --- a/objcdt/objcdt.1 +++ b/objcdt/objcdt.1 @@ -17,4 +17,3 @@ the Objective-C runtime state in live processes. 
Help can be obtained using .Nm .Ar help -.Ed diff --git a/objcdt/objcdt.mm b/objcdt/objcdt.mm index 81a5a49..eca5fa2 100644 --- a/objcdt/objcdt.mm +++ b/objcdt/objcdt.mm @@ -27,6 +27,8 @@ #include #include #include +#include +#include int main(int argc, const char *argv[]) { diff --git a/runtime/Messengers.subproj/objc-msg-arm.s b/runtime/Messengers.subproj/objc-msg-arm.s index 67317c7..4947209 100644 --- a/runtime/Messengers.subproj/objc-msg-arm.s +++ b/runtime/Messengers.subproj/objc-msg-arm.s @@ -711,7 +711,7 @@ LNilReceiver: mov r1, r2 // selector .endif mov r2, r9 // class to search - mov r3, #3 // LOOKUP_INITIALIZE | LOOKUP_INITIALIZE + mov r3, #3 // LOOKUP_INITIALIZE | LOOKUP_RESOLVER blx _lookUpImpOrForward mov r12, r0 // r12 = IMP diff --git a/runtime/Messengers.subproj/objc-msg-arm64.s b/runtime/Messengers.subproj/objc-msg-arm64.s index 595b03e..7794ad5 100755 --- a/runtime/Messengers.subproj/objc-msg-arm64.s +++ b/runtime/Messengers.subproj/objc-msg-arm64.s @@ -30,8 +30,19 @@ #include #include "isa.h" -#include "arm64-asm.h" #include "objc-config.h" +#include "arm64-asm.h" + +#if TARGET_OS_IPHONE && __LP64__ + .section __TEXT,__objc_methname,cstring_literals +l_MagicSelector: /* the shared cache builder knows about this value */ + .byte 0xf0, 0x9f, 0xa4, 0xaf, 0 + + .section __DATA,__objc_selrefs,literal_pointers,no_dead_strip + .p2align 3 +_MagicSelRef: + .quad l_MagicSelector +#endif .data @@ -57,7 +68,6 @@ _objc_restartableRanges: RestartableEntry _cache_getImp RestartableEntry _objc_msgSend - RestartableEntry _objc_msgSendSuper RestartableEntry _objc_msgSendSuper2 RestartableEntry _objc_msgLookup RestartableEntry _objc_msgLookupSuper2 @@ -81,13 +91,13 @@ _objc_restartableRanges: /******************************************************************** - * GetClassFromIsa_p16 src + * GetClassFromIsa_p16 src, needs_auth, auth_address * src is a raw isa field. Sets p16 to the corresponding class pointer. 
* The raw isa might be an indexed isa to be decoded, or a * packed isa that needs to be masked. * * On exit: - * $0 is unchanged + * src is unchanged * p16 is a class pointer * x10 is clobbered ********************************************************************/ @@ -99,11 +109,11 @@ _objc_indexed_classes: .fill ISA_INDEX_COUNT, PTRSIZE, 0 #endif -.macro GetClassFromIsa_p16 /* src */ +.macro GetClassFromIsa_p16 src, needs_auth, auth_address /* note: auth_address is not required if !needs_auth */ #if SUPPORT_INDEXED_ISA // Indexed isa - mov p16, $0 // optimistically set dst = src + mov p16, \src // optimistically set dst = src tbz p16, #ISA_INDEX_IS_NPI_BIT, 1f // done if not non-pointer isa // isa in p16 is indexed adrp x10, _objc_indexed_classes@PAGE @@ -113,12 +123,15 @@ _objc_indexed_classes: 1: #elif __LP64__ +.if \needs_auth == 0 // _cache_getImp takes an authed class already + mov p16, \src +.else // 64-bit packed isa - and p16, $0, #ISA_MASK - + ExtractISA p16, \src, \auth_address +.endif #else // 32-bit raw isa - mov p16, $0 + mov p16, \src #endif @@ -169,6 +182,9 @@ LExit$0: #define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves +#define MSGSEND 100 +#define METHOD_INVOKE 101 + ////////////////////////////////////////////////////////////////////// // // SAVE_REGS @@ -177,7 +193,7 @@ LExit$0: // for a function call. 
////////////////////////////////////////////////////////////////////// -.macro SAVE_REGS +.macro SAVE_REGS kind // push frame SignLR @@ -185,16 +201,23 @@ LExit$0: mov fp, sp // save parameter registers: x0..x8, q0..q7 - sub sp, sp, #(10*8 + 8*16) - stp q0, q1, [sp, #(0*16)] - stp q2, q3, [sp, #(2*16)] - stp q4, q5, [sp, #(4*16)] - stp q6, q7, [sp, #(6*16)] - stp x0, x1, [sp, #(8*16+0*8)] - stp x2, x3, [sp, #(8*16+2*8)] - stp x4, x5, [sp, #(8*16+4*8)] - stp x6, x7, [sp, #(8*16+6*8)] - str x8, [sp, #(8*16+8*8)] + sub sp, sp, #(10*8 + 8*16) + stp q0, q1, [sp, #(0*16)] + stp q2, q3, [sp, #(2*16)] + stp q4, q5, [sp, #(4*16)] + stp q6, q7, [sp, #(6*16)] + stp x0, x1, [sp, #(8*16+0*8)] + stp x2, x3, [sp, #(8*16+2*8)] + stp x4, x5, [sp, #(8*16+4*8)] + stp x6, x7, [sp, #(8*16+6*8)] +.if \kind == MSGSEND + stp x8, x15, [sp, #(8*16+8*8)] + mov x16, x15 // stashed by CacheLookup, restore to x16 +.elseif \kind == METHOD_INVOKE + str x8, [sp, #(8*16+8*8)] +.else +.abort Unknown kind. +.endif .endmacro @@ -207,17 +230,24 @@ LExit$0: // SAVE_REGS. ////////////////////////////////////////////////////////////////////// -.macro RESTORE_REGS - - ldp q0, q1, [sp, #(0*16)] - ldp q2, q3, [sp, #(2*16)] - ldp q4, q5, [sp, #(4*16)] - ldp q6, q7, [sp, #(6*16)] - ldp x0, x1, [sp, #(8*16+0*8)] - ldp x2, x3, [sp, #(8*16+2*8)] - ldp x4, x5, [sp, #(8*16+4*8)] - ldp x6, x7, [sp, #(8*16+6*8)] - ldr x8, [sp, #(8*16+8*8)] +.macro RESTORE_REGS kind + + ldp q0, q1, [sp, #(0*16)] + ldp q2, q3, [sp, #(2*16)] + ldp q4, q5, [sp, #(4*16)] + ldp q6, q7, [sp, #(6*16)] + ldp x0, x1, [sp, #(8*16+0*8)] + ldp x2, x3, [sp, #(8*16+2*8)] + ldp x4, x5, [sp, #(8*16+4*8)] + ldp x6, x7, [sp, #(8*16+6*8)] +.if \kind == MSGSEND + ldp x8, x16, [sp, #(8*16+8*8)] + orr x16, x16, #2 // for the sake of instrumentations, remember it was the slowpath +.elseif \kind == METHOD_INVOKE + ldr x8, [sp, #(8*16+8*8)] +.else +.abort Unknown kind. 
+.endif mov sp, fp ldp fp, lr, [sp], #16 @@ -228,7 +258,9 @@ LExit$0: /******************************************************************** * - * CacheLookup NORMAL|GETIMP|LOOKUP + * CacheLookup NORMAL|GETIMP|LOOKUP MissLabelDynamic MissLabelConstant + * + * MissLabelConstant is only used for the GETIMP variant. * * Locate the implementation for a selector in a class method cache. * @@ -242,11 +274,27 @@ LExit$0: * x16 = class to be searched * * Kills: - * x9,x10,x11,x12, x17 + * x9,x10,x11,x12,x13,x15,x17 + * + * Untouched: + * x14 * * On exit: (found) calls or returns IMP * with x16 = class, x17 = IMP + * In LOOKUP mode, the two low bits are set to 0x3 + * if we hit a constant cache (used in objc_trace) * (not found) jumps to LCacheMiss + * with x15 = class + * For constant caches in LOOKUP mode, the low bit + * of x16 is set to 0x1 to indicate we had to fallback. + * In addition, when LCacheMiss is __objc_msgSend_uncached or + * __objc_msgLookup_uncached, 0x2 will be set in x16 + * to remember we took the slowpath. 
+ * So the two low bits of x16 on exit mean: + * 0: dynamic hit + * 1: fallback to the parent class, when there is a preoptimized cache + * 2: slowpath + * 3: preoptimized cache hit * ********************************************************************/ @@ -254,60 +302,37 @@ LExit$0: #define GETIMP 1 #define LOOKUP 2 -// CacheHit: x17 = cached IMP, x12 = address of cached IMP, x1 = SEL, x16 = isa +// CacheHit: x17 = cached IMP, x10 = address of buckets, x1 = SEL, x16 = isa .macro CacheHit .if $0 == NORMAL - TailCallCachedImp x17, x12, x1, x16 // authenticate and call imp + TailCallCachedImp x17, x10, x1, x16 // authenticate and call imp .elseif $0 == GETIMP mov p0, p17 cbz p0, 9f // don't ptrauth a nil imp - AuthAndResignAsIMP x0, x12, x1, x16 // authenticate imp and re-sign as IMP + AuthAndResignAsIMP x0, x10, x1, x16 // authenticate imp and re-sign as IMP 9: ret // return IMP .elseif $0 == LOOKUP // No nil check for ptrauth: the caller would crash anyway when they // jump to a nil IMP. We don't care if that jump also fails ptrauth. 
- AuthAndResignAsIMP x17, x12, x1, x16 // authenticate imp and re-sign as IMP + AuthAndResignAsIMP x17, x10, x1, x16 // authenticate imp and re-sign as IMP + cmp x16, x15 + cinc x16, x16, ne // x16 += 1 when x15 != x16 (for instrumentation ; fallback to the parent class) ret // return imp via x17 .else .abort oops .endif .endmacro -.macro CheckMiss - // miss if bucket->sel == 0 -.if $0 == GETIMP - cbz p9, LGetImpMiss -.elseif $0 == NORMAL - cbz p9, __objc_msgSend_uncached -.elseif $0 == LOOKUP - cbz p9, __objc_msgLookup_uncached -.else -.abort oops -.endif -.endmacro - -.macro JumpMiss -.if $0 == GETIMP - b LGetImpMiss -.elseif $0 == NORMAL - b __objc_msgSend_uncached -.elseif $0 == LOOKUP - b __objc_msgLookup_uncached -.else -.abort oops -.endif -.endmacro - -.macro CacheLookup +.macro CacheLookup Mode, Function, MissLabelDynamic, MissLabelConstant // // Restart protocol: // - // As soon as we're past the LLookupStart$1 label we may have loaded - // an invalid cache pointer or mask. + // As soon as we're past the LLookupStart\Function label we may have + // loaded an invalid cache pointer or mask. 
// // When task_restartable_ranges_synchronize() is called, - // (or when a signal hits us) before we're past LLookupEnd$1, - // then our PC will be reset to LLookupRecover$1 which forcefully + // (or when a signal hits us) before we're past LLookupEnd\Function, + // then our PC will be reset to LLookupRecover\Function which forcefully // jumps to the cache-miss codepath which have the following // requirements: // @@ -320,70 +345,158 @@ LExit$0: // - x16 contains the isa // - other registers are set as per calling conventions // -LLookupStart$1: + mov x15, x16 // stash the original isa +LLookupStart\Function: // p1 = SEL, p16 = isa - ldr p11, [x16, #CACHE] // p11 = mask|buckets - -#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 +#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS + ldr p10, [x16, #CACHE] // p10 = mask|buckets + lsr p11, p10, #48 // p11 = mask + and p10, p10, #0xffffffffffff // p10 = buckets + and w12, w1, w11 // x12 = _cmd & mask +#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 + ldr p11, [x16, #CACHE] // p11 = mask|buckets +#if CONFIG_USE_PREOPT_CACHES +#if __has_feature(ptrauth_calls) + tbnz p11, #0, LLookupPreopt\Function + and p10, p11, #0x0000ffffffffffff // p10 = buckets +#else + and p10, p11, #0x0000fffffffffffe // p10 = buckets + tbnz p11, #0, LLookupPreopt\Function +#endif + eor p12, p1, p1, LSR #7 + and p12, p12, p11, LSR #48 // x12 = (_cmd ^ (_cmd >> 7)) & mask +#else and p10, p11, #0x0000ffffffffffff // p10 = buckets and p12, p1, p11, LSR #48 // x12 = _cmd & mask +#endif // CONFIG_USE_PREOPT_CACHES #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4 + ldr p11, [x16, #CACHE] // p11 = mask|buckets and p10, p11, #~0xf // p10 = buckets and p11, p11, #0xf // p11 = maskShift mov p12, #0xffff - lsr p11, p12, p11 // p11 = mask = 0xffff >> p11 - and p12, p1, p11 // x12 = _cmd & mask + lsr p11, p12, p11 // p11 = mask = 0xffff >> p11 + and p12, p1, p11 // x12 = _cmd & mask #else #error Unsupported cache mask storage for 
ARM64. #endif + add p13, p10, p12, LSL #(1+PTRSHIFT) + // p13 = buckets + ((_cmd & mask) << (1+PTRSHIFT)) + + // do { +1: ldp p17, p9, [x13], #-BUCKET_SIZE // {imp, sel} = *bucket-- + cmp p9, p1 // if (sel != _cmd) { + b.ne 3f // scan more + // } else { +2: CacheHit \Mode // hit: call or return imp + // } +3: cbz p9, \MissLabelDynamic // if (sel == 0) goto Miss; + cmp p13, p10 // } while (bucket >= buckets) + b.hs 1b + + // wrap-around: + // p10 = first bucket + // p11 = mask (and maybe other bits on LP64) + // p12 = _cmd & mask + // + // A full cache can happen with CACHE_ALLOW_FULL_UTILIZATION. + // So stop when we circle back to the first probed bucket + // rather than when hitting the first bucket again. + // + // Note that we might probe the initial bucket twice + // when the first probed slot is the last entry. - add p12, p10, p12, LSL #(1+PTRSHIFT) - // p12 = buckets + ((_cmd & mask) << (1+PTRSHIFT)) - ldp p17, p9, [x12] // {imp, sel} = *bucket -1: cmp p9, p1 // if (bucket->sel != _cmd) - b.ne 2f // scan more - CacheHit $0 // call or return imp - -2: // not hit: p12 = not-hit bucket - CheckMiss $0 // miss if bucket->sel == 0 - cmp p12, p10 // wrap if bucket == buckets - b.eq 3f - ldp p17, p9, [x12, #-BUCKET_SIZE]! 
// {imp, sel} = *--bucket - b 1b // loop - -3: // wrap: p12 = first bucket, w11 = mask -#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 - add p12, p12, p11, LSR #(48 - (1+PTRSHIFT)) - // p12 = buckets + (mask << 1+PTRSHIFT) +#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS + add p13, p10, w11, UXTW #(1+PTRSHIFT) + // p13 = buckets + (mask << 1+PTRSHIFT) +#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 + add p13, p10, p11, LSR #(48 - (1+PTRSHIFT)) + // p13 = buckets + (mask << 1+PTRSHIFT) + // see comment about maskZeroBits #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4 - add p12, p12, p11, LSL #(1+PTRSHIFT) - // p12 = buckets + (mask << 1+PTRSHIFT) + add p13, p10, p11, LSL #(1+PTRSHIFT) + // p13 = buckets + (mask << 1+PTRSHIFT) #else #error Unsupported cache mask storage for ARM64. +#endif + add p12, p10, p12, LSL #(1+PTRSHIFT) + // p12 = first probed bucket + + // do { +4: ldp p17, p9, [x13], #-BUCKET_SIZE // {imp, sel} = *bucket-- + cmp p9, p1 // if (sel == _cmd) + b.eq 2b // goto hit + cmp p9, #0 // } while (sel != 0 && + ccmp p13, p12, #0, ne // bucket > first_probed) + b.hi 4b + +LLookupEnd\Function: +LLookupRecover\Function: + b \MissLabelDynamic + +#if CONFIG_USE_PREOPT_CACHES +#if CACHE_MASK_STORAGE != CACHE_MASK_STORAGE_HIGH_16 +#error config unsupported +#endif +LLookupPreopt\Function: +#if __has_feature(ptrauth_calls) + and p10, p11, #0x007ffffffffffffe // p10 = buckets + autdb x10, x16 // auth as early as possible #endif - // Clone scanning loop to miss instead of hang when cache is corrupt. - // The slow path may detect any corruption and halt later. 
+ // x12 = (_cmd - first_shared_cache_sel) + adrp x9, _MagicSelRef@PAGE + ldr p9, [x9, _MagicSelRef@PAGEOFF] + sub p12, p1, p9 - ldp p17, p9, [x12] // {imp, sel} = *bucket -1: cmp p9, p1 // if (bucket->sel != _cmd) - b.ne 2f // scan more - CacheHit $0 // call or return imp - -2: // not hit: p12 = not-hit bucket - CheckMiss $0 // miss if bucket->sel == 0 - cmp p12, p10 // wrap if bucket == buckets - b.eq 3f - ldp p17, p9, [x12, #-BUCKET_SIZE]! // {imp, sel} = *--bucket - b 1b // loop - -LLookupEnd$1: -LLookupRecover$1: -3: // double wrap - JumpMiss $0 + // w9 = ((_cmd - first_shared_cache_sel) >> hash_shift & hash_mask) +#if __has_feature(ptrauth_calls) + // bits 63..60 of x11 are the number of bits in hash_mask + // bits 59..55 of x11 is hash_shift + + lsr x17, x11, #55 // w17 = (hash_shift, ...) + lsr w9, w12, w17 // >>= shift + + lsr x17, x11, #60 // w17 = mask_bits + mov x11, #0x7fff + lsr x11, x11, x17 // p11 = mask (0x7fff >> mask_bits) + and x9, x9, x11 // &= mask +#else + // bits 63..53 of x11 is hash_mask + // bits 52..48 of x11 is hash_shift + lsr x17, x11, #48 // w17 = (hash_shift, hash_mask) + lsr w9, w12, w17 // >>= shift + and x9, x9, x11, LSR #53 // &= mask +#endif + + ldr x17, [x10, x9, LSL #3] // x17 == sel_offs | (imp_offs << 32) + cmp x12, w17, uxtw + +.if \Mode == GETIMP + b.ne \MissLabelConstant // cache miss + sub x0, x16, x17, LSR #32 // imp = isa - imp_offs + SignAsImp x0 + ret +.else + b.ne 5f // cache miss + sub x17, x16, x17, LSR #32 // imp = isa - imp_offs +.if \Mode == NORMAL + br x17 +.elseif \Mode == LOOKUP + orr x16, x16, #3 // for instrumentation, note that we hit a constant cache + SignAsImp x17 + ret +.else +.abort unhandled mode \Mode +.endif + +5: ldursw x9, [x10, #-8] // offset -8 is the fallback offset + add x16, x16, x9 // compute the fallback isa + b LLookupStart\Function // lookup again with a new isa +.endif +#endif // CONFIG_USE_PREOPT_CACHES .endmacro @@ -402,12 +515,37 @@ LLookupRecover$1: #if SUPPORT_TAGGED_POINTERS 
.data .align 3 - .globl _objc_debug_taggedpointer_classes -_objc_debug_taggedpointer_classes: - .fill 16, 8, 0 .globl _objc_debug_taggedpointer_ext_classes _objc_debug_taggedpointer_ext_classes: .fill 256, 8, 0 + +// Dispatch for split tagged pointers take advantage of the fact that +// the extended tag classes array immediately precedes the standard +// tag array. The .alt_entry directive ensures that the two stay +// together. This is harmless when using non-split tagged pointers. + .globl _objc_debug_taggedpointer_classes + .alt_entry _objc_debug_taggedpointer_classes +_objc_debug_taggedpointer_classes: + .fill 16, 8, 0 + +// Look up the class for a tagged pointer in x0, placing it in x16. +.macro GetTaggedClass + + and x10, x0, #0x7 // x10 = small tag + asr x11, x0, #55 // x11 = large tag with 1s filling the top (because bit 63 is 1 on a tagged pointer) + cmp x10, #7 // tag == 7? + csel x12, x11, x10, eq // x12 = index in tagged pointer classes array, negative for extended tags. + // The extended tag array is placed immediately before the basic tag array + // so this looks into the right place either way. The sign extension done + // by the asr instruction produces the value extended_tag - 256, which produces + // the correct index in the extended tagged pointer classes array. 
+ + // x16 = _objc_debug_taggedpointer_classes[x12] + adrp x10, _objc_debug_taggedpointer_classes@PAGE + add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF + ldr x16, [x10, x12, LSL #3] + +.endmacro #endif ENTRY _objc_msgSend @@ -420,30 +558,15 @@ _objc_debug_taggedpointer_ext_classes: b.eq LReturnZero #endif ldr p13, [x0] // p13 = isa - GetClassFromIsa_p16 p13 // p16 = class + GetClassFromIsa_p16 p13, 1, x0 // p16 = class LGetIsaDone: // calls imp or objc_msgSend_uncached - CacheLookup NORMAL, _objc_msgSend + CacheLookup NORMAL, _objc_msgSend, __objc_msgSend_uncached #if SUPPORT_TAGGED_POINTERS LNilOrTagged: b.eq LReturnZero // nil check - - // tagged - adrp x10, _objc_debug_taggedpointer_classes@PAGE - add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF - ubfx x11, x0, #60, #4 - ldr x16, [x10, x11, LSL #3] - adrp x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE - add x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF - cmp x10, x16 - b.ne LGetIsaDone - - // ext tagged - adrp x10, _objc_debug_taggedpointer_ext_classes@PAGE - add x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF - ubfx x11, x0, #52, #8 - ldr x16, [x10, x11, LSL #3] + GetTaggedClass b LGetIsaDone // SUPPORT_TAGGED_POINTERS #endif @@ -469,37 +592,22 @@ LReturnZero: b.eq LLookup_Nil #endif ldr p13, [x0] // p13 = isa - GetClassFromIsa_p16 p13 // p16 = class + GetClassFromIsa_p16 p13, 1, x0 // p16 = class LLookup_GetIsaDone: // returns imp - CacheLookup LOOKUP, _objc_msgLookup + CacheLookup LOOKUP, _objc_msgLookup, __objc_msgLookup_uncached #if SUPPORT_TAGGED_POINTERS LLookup_NilOrTagged: b.eq LLookup_Nil // nil check - - // tagged - adrp x10, _objc_debug_taggedpointer_classes@PAGE - add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF - ubfx x11, x0, #60, #4 - ldr x16, [x10, x11, LSL #3] - adrp x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE - add x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF - cmp x10, x16 - b.ne LLookup_GetIsaDone - 
-LLookup_ExtTag: - adrp x10, _objc_debug_taggedpointer_ext_classes@PAGE - add x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF - ubfx x11, x0, #52, #8 - ldr x16, [x10, x11, LSL #3] + GetTaggedClass b LLookup_GetIsaDone // SUPPORT_TAGGED_POINTERS #endif LLookup_Nil: - adrp x17, __objc_msgNil@PAGE - add x17, x17, __objc_msgNil@PAGEOFF + adr x17, __objc_msgNil + SignAsImp x17 ret END_ENTRY _objc_msgLookup @@ -522,8 +630,7 @@ LLookup_Nil: UNWIND _objc_msgSendSuper, NoFrame ldp p0, p16, [x0] // p0 = real receiver, p16 = class - // calls imp or objc_msgSend_uncached - CacheLookup NORMAL, _objc_msgSendSuper + b L_objc_msgSendSuper2_body END_ENTRY _objc_msgSendSuper @@ -532,9 +639,18 @@ LLookup_Nil: ENTRY _objc_msgSendSuper2 UNWIND _objc_msgSendSuper2, NoFrame +#if __has_feature(ptrauth_calls) + ldp x0, x17, [x0] // x0 = real receiver, x17 = class + add x17, x17, #SUPERCLASS // x17 = &class->superclass + ldr x16, [x17] // x16 = class->superclass + AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS +LMsgSendSuperResume: +#else ldp p0, p16, [x0] // p0 = real receiver, p16 = class ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass - CacheLookup NORMAL, _objc_msgSendSuper2 +#endif +L_objc_msgSendSuper2_body: + CacheLookup NORMAL, _objc_msgSendSuper2, __objc_msgSend_uncached END_ENTRY _objc_msgSendSuper2 @@ -542,16 +658,24 @@ LLookup_Nil: ENTRY _objc_msgLookupSuper2 UNWIND _objc_msgLookupSuper2, NoFrame +#if __has_feature(ptrauth_calls) + ldp x0, x17, [x0] // x0 = real receiver, x17 = class + add x17, x17, #SUPERCLASS // x17 = &class->superclass + ldr x16, [x17] // x16 = class->superclass + AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS +LMsgLookupSuperResume: +#else ldp p0, p16, [x0] // p0 = real receiver, p16 = class ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass - CacheLookup LOOKUP, _objc_msgLookupSuper2 +#endif + CacheLookup LOOKUP, _objc_msgLookupSuper2, __objc_msgLookup_uncached END_ENTRY _objc_msgLookupSuper2 .macro 
MethodTableLookup - SAVE_REGS + SAVE_REGS MSGSEND // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER) // receiver and selector already in x0 and x1 @@ -562,7 +686,7 @@ LLookup_Nil: // IMP in x0 mov x17, x0 - RESTORE_REGS + RESTORE_REGS MSGSEND .endmacro @@ -570,7 +694,7 @@ LLookup_Nil: UNWIND __objc_msgSend_uncached, FrameWithNoSaves // THIS IS NOT A CALLABLE C FUNCTION - // Out-of-band p16 is the class to search + // Out-of-band p15 is the class to search MethodTableLookup TailCallFunctionPointer x17 @@ -582,7 +706,7 @@ LLookup_Nil: UNWIND __objc_msgLookup_uncached, FrameWithNoSaves // THIS IS NOT A CALLABLE C FUNCTION - // Out-of-band p16 is the class to search + // Out-of-band p15 is the class to search MethodTableLookup ret @@ -592,13 +716,17 @@ LLookup_Nil: STATIC_ENTRY _cache_getImp - GetClassFromIsa_p16 p0 - CacheLookup GETIMP, _cache_getImp + GetClassFromIsa_p16 p0, 0 + CacheLookup GETIMP, _cache_getImp, LGetImpMissDynamic, LGetImpMissConstant -LGetImpMiss: +LGetImpMissDynamic: mov p0, #0 ret +LGetImpMissConstant: + mov p0, p2 + ret + END_ENTRY _cache_getImp @@ -657,7 +785,7 @@ LGetImpMiss: L_method_invoke_small: // Small methods require a call to handle swizzling. - SAVE_REGS + SAVE_REGS METHOD_INVOKE mov p0, p1 bl __method_getImplementationAndName // ARM64_32 packs both return values into x0, with SEL in the high bits and IMP in the low. 
@@ -666,7 +794,7 @@ L_method_invoke_small: #if __LP64__ mov x16, x1 #endif - RESTORE_REGS + RESTORE_REGS METHOD_INVOKE #if __LP64__ mov x1, x16 #else diff --git a/runtime/Messengers.subproj/objc-msg-simulator-x86_64.s b/runtime/Messengers.subproj/objc-msg-simulator-x86_64.s index 9186278..402b97d 100644 --- a/runtime/Messengers.subproj/objc-msg-simulator-x86_64.s +++ b/runtime/Messengers.subproj/objc-msg-simulator-x86_64.s @@ -22,7 +22,7 @@ */ #include -#if __x86_64__ && TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC +#if __x86_64__ && TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST /******************************************************************** ******************************************************************** @@ -133,6 +133,10 @@ _objc_restartableRanges: #define GETIMP 101 #define LOOKUP 102 +#define MSGSEND 200 +#define METHOD_INVOKE 201 +#define METHOD_INVOKE_STRET 202 + /******************************************************************** * @@ -221,21 +225,28 @@ LExit$0: // for a function call. ////////////////////////////////////////////////////////////////////// -.macro SAVE_REGS +.macro SAVE_REGS kind +.if \kind != MSGSEND && \kind != METHOD_INVOKE && \kind != METHOD_INVOKE_STRET +.abort Unknown kind. +.endif push %rbp mov %rsp, %rbp - sub $$0x80+8, %rsp // +8 for alignment + sub $0x80, %rsp movdqa %xmm0, -0x80(%rbp) push %rax // might be xmm parameter count movdqa %xmm1, -0x70(%rbp) push %a1 movdqa %xmm2, -0x60(%rbp) +.if \kind == MSGSEND || \kind == METHOD_INVOKE_STRET push %a2 +.endif movdqa %xmm3, -0x50(%rbp) +.if \kind == MSGSEND || \kind == METHOD_INVOKE push %a3 +.endif movdqa %xmm4, -0x40(%rbp) push %a4 movdqa %xmm5, -0x30(%rbp) @@ -243,6 +254,9 @@ LExit$0: movdqa %xmm6, -0x20(%rbp) push %a6 movdqa %xmm7, -0x10(%rbp) +.if \kind == MSGSEND + push %r10 +.endif .endmacro @@ -255,8 +269,12 @@ LExit$0: // SAVE_REGS. 
////////////////////////////////////////////////////////////////////// -.macro RESTORE_REGS +.macro RESTORE_REGS kind +.if \kind == MSGSEND + pop %r10 + orq $2, %r10 // for the sake of instrumentations, remember it was the slowpath +.endif movdqa -0x80(%rbp), %xmm0 pop %a6 movdqa -0x70(%rbp), %xmm1 @@ -264,9 +282,13 @@ LExit$0: movdqa -0x60(%rbp), %xmm2 pop %a4 movdqa -0x50(%rbp), %xmm3 +.if \kind == MSGSEND || \kind == METHOD_INVOKE pop %a3 +.endif movdqa -0x40(%rbp), %xmm4 +.if \kind == MSGSEND || \kind == METHOD_INVOKE_STRET pop %a2 +.endif movdqa -0x30(%rbp), %xmm5 pop %a1 movdqa -0x20(%rbp), %xmm6 @@ -412,7 +434,7 @@ LExit$0: .macro MethodTableLookup - SAVE_REGS + SAVE_REGS MSGSEND // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER) .if $0 == NORMAL @@ -429,7 +451,7 @@ LExit$0: // IMP is now in %rax movq %rax, %r11 - RESTORE_REGS + RESTORE_REGS MSGSEND .if $0 == NORMAL test %r11, %r11 // set ne for stret forwarding @@ -1145,13 +1167,12 @@ LCacheMiss: L_method_invoke_small: // Small methods require a call to handle swizzling. - SAVE_REGS + SAVE_REGS METHOD_INVOKE movq %a2, %a1 call __method_getImplementationAndName - movq %rdx, %r10 + movq %rdx, %a2 movq %rax, %r11 - RESTORE_REGS - movq %r10, %a2 + RESTORE_REGS METHOD_INVOKE jmp *%r11 END_ENTRY _method_invoke @@ -1170,13 +1191,12 @@ L_method_invoke_small: L_method_invoke_stret_small: // Small methods require a call to handle swizzling. 
- SAVE_REGS + SAVE_REGS METHOD_INVOKE_STRET movq %a3, %a1 call __method_getImplementationAndName - movq %rdx, %r10 + movq %rdx, %a3 movq %rax, %r11 - RESTORE_REGS - movq %r10, %a3 + RESTORE_REGS METHOD_INVOKE_STRET jmp *%r11 END_ENTRY _method_invoke_stret diff --git a/runtime/Messengers.subproj/objc-msg-x86_64.s b/runtime/Messengers.subproj/objc-msg-x86_64.s index d090995..0b8eff7 100644 --- a/runtime/Messengers.subproj/objc-msg-x86_64.s +++ b/runtime/Messengers.subproj/objc-msg-x86_64.s @@ -22,7 +22,7 @@ */ #include -#if __x86_64__ && !(TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC) +#if __x86_64__ && !(TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST) #include "isa.h" @@ -139,6 +139,10 @@ _objc_restartableRanges: #define GETIMP 101 #define LOOKUP 102 +#define MSGSEND 200 +#define METHOD_INVOKE 201 +#define METHOD_INVOKE_STRET 202 + /******************************************************************** * @@ -227,21 +231,28 @@ LExit$0: // for a function call. ////////////////////////////////////////////////////////////////////// -.macro SAVE_REGS +.macro SAVE_REGS kind +.if \kind != MSGSEND && \kind != METHOD_INVOKE && \kind != METHOD_INVOKE_STRET +.abort Unknown kind. +.endif push %rbp mov %rsp, %rbp - sub $$0x80+8, %rsp // +8 for alignment + sub $0x80, %rsp movdqa %xmm0, -0x80(%rbp) push %rax // might be xmm parameter count movdqa %xmm1, -0x70(%rbp) push %a1 movdqa %xmm2, -0x60(%rbp) +.if \kind == MSGSEND || \kind == METHOD_INVOKE_STRET push %a2 +.endif movdqa %xmm3, -0x50(%rbp) +.if \kind == MSGSEND || \kind == METHOD_INVOKE push %a3 +.endif movdqa %xmm4, -0x40(%rbp) push %a4 movdqa %xmm5, -0x30(%rbp) @@ -249,6 +260,9 @@ LExit$0: movdqa %xmm6, -0x20(%rbp) push %a6 movdqa %xmm7, -0x10(%rbp) +.if \kind == MSGSEND + push %r10 +.endif .endmacro @@ -261,8 +275,12 @@ LExit$0: // SAVE_REGS. 
////////////////////////////////////////////////////////////////////// -.macro RESTORE_REGS +.macro RESTORE_REGS kind +.if \kind == MSGSEND + pop %r10 + orq $2, %r10 // for the sake of instrumentations, remember it was the slowpath +.endif movdqa -0x80(%rbp), %xmm0 pop %a6 movdqa -0x70(%rbp), %xmm1 @@ -270,9 +288,13 @@ LExit$0: movdqa -0x60(%rbp), %xmm2 pop %a4 movdqa -0x50(%rbp), %xmm3 +.if \kind == MSGSEND || \kind == METHOD_INVOKE pop %a3 +.endif movdqa -0x40(%rbp), %xmm4 +.if \kind == MSGSEND || \kind == METHOD_INVOKE_STRET pop %a2 +.endif movdqa -0x30(%rbp), %xmm5 pop %a1 movdqa -0x20(%rbp), %xmm6 @@ -447,7 +469,7 @@ LLookupEnd$2: .macro MethodTableLookup - SAVE_REGS + SAVE_REGS MSGSEND // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER) .if $0 == NORMAL @@ -464,7 +486,7 @@ LLookupEnd$2: // IMP is now in %rax movq %rax, %r11 - RESTORE_REGS + RESTORE_REGS MSGSEND .if $0 == NORMAL test %r11, %r11 // set ne for nonstret forwarding @@ -1257,13 +1279,12 @@ LCacheMiss_objc_msgLookupSuper2_stret: L_method_invoke_small: // Small methods require a call to handle swizzling. - SAVE_REGS + SAVE_REGS METHOD_INVOKE movq %a2, %a1 call __method_getImplementationAndName - movq %rdx, %r10 + movq %rdx, %a2 movq %rax, %r11 - RESTORE_REGS - movq %r10, %a2 + RESTORE_REGS METHOD_INVOKE jmp *%r11 END_ENTRY _method_invoke @@ -1282,13 +1303,12 @@ L_method_invoke_small: L_method_invoke_stret_small: // Small methods require a call to handle swizzling. 
- SAVE_REGS + SAVE_REGS METHOD_INVOKE_STRET movq %a3, %a1 call __method_getImplementationAndName - movq %rdx, %r10 + movq %rdx, %a3 movq %rax, %r11 - RESTORE_REGS - movq %r10, %a3 + RESTORE_REGS METHOD_INVOKE_STRET jmp *%r11 END_ENTRY _method_invoke_stret diff --git a/runtime/NSObject-internal.h b/runtime/NSObject-internal.h index c23fbc2..978799a 100644 --- a/runtime/NSObject-internal.h +++ b/runtime/NSObject-internal.h @@ -123,6 +123,16 @@ struct magic_t { class AutoreleasePoolPage; struct AutoreleasePoolPageData { +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + struct AutoreleasePoolEntry { + uintptr_t ptr: 48; + uintptr_t count: 16; + + static const uintptr_t maxCount = 65535; // 2^16 - 1 + }; + static_assert((AutoreleasePoolEntry){ .ptr = MACH_VM_MAX_ADDRESS }.ptr == MACH_VM_MAX_ADDRESS, "MACH_VM_MAX_ADDRESS doesn't fit into AutoreleasePoolEntry::ptr!"); +#endif + magic_t const magic; __unsafe_unretained id *next; pthread_t const thread; diff --git a/runtime/NSObject.mm b/runtime/NSObject.mm index f672f4c..6d2e14f 100644 --- a/runtime/NSObject.mm +++ b/runtime/NSObject.mm @@ -39,6 +39,12 @@ #include #include #include "NSObject-internal.h" +#include + +extern "C" { +#include +#include +} @interface NSInvocation - (SEL)selector; @@ -51,7 +57,13 @@ OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_parent_offset = __buil OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_child_offset = __builtin_offsetof(AutoreleasePoolPageData, child); OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_depth_offset = __builtin_offsetof(AutoreleasePoolPageData, depth); OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_hiwat_offset = __builtin_offsetof(AutoreleasePoolPageData, hiwat); +OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_begin_offset = sizeof(AutoreleasePoolPageData); #if __OBJC2__ +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS +OBJC_EXTERN const uintptr_t objc_debug_autoreleasepoolpage_ptr_mask = 
(AutoreleasePoolPageData::AutoreleasePoolEntry){ .ptr = ~(uintptr_t)0 }.ptr; +#else +OBJC_EXTERN const uintptr_t objc_debug_autoreleasepoolpage_ptr_mask = ~(uintptr_t)0; +#endif OBJC_EXTERN const uint32_t objc_class_abi_version = OBJC_CLASS_ABI_VERSION_MAX; #endif @@ -79,8 +91,42 @@ void _objc_setBadAllocHandler(id(*newHandler)(Class)) } +static id _initializeSwiftRefcountingThenCallRetain(id objc); +static void _initializeSwiftRefcountingThenCallRelease(id objc); + +explicit_atomic swiftRetain{&_initializeSwiftRefcountingThenCallRetain}; +explicit_atomic swiftRelease{&_initializeSwiftRefcountingThenCallRelease}; + +static void _initializeSwiftRefcounting() { + void *const token = dlopen("/usr/lib/swift/libswiftCore.dylib", RTLD_LAZY | RTLD_LOCAL); + ASSERT(token); + swiftRetain.store((id(*)(id))dlsym(token, "swift_retain"), memory_order_relaxed); + ASSERT(swiftRetain.load(memory_order_relaxed)); + swiftRelease.store((void(*)(id))dlsym(token, "swift_release"), memory_order_relaxed); + ASSERT(swiftRelease.load(memory_order_relaxed)); + dlclose(token); +} + +static id _initializeSwiftRefcountingThenCallRetain(id objc) { + _initializeSwiftRefcounting(); + return swiftRetain.load(memory_order_relaxed)(objc); +} + +static void _initializeSwiftRefcountingThenCallRelease(id objc) { + _initializeSwiftRefcounting(); + swiftRelease.load(memory_order_relaxed)(objc); +} + +namespace objc { + extern int PageCountWarning; +} + namespace { +#if TARGET_OS_IPHONE && !TARGET_OS_SIMULATOR +uint32_t numFaults = 0; +#endif + // The order of these bits is important. #define SIDE_TABLE_WEAKLY_REFERENCED (1UL<<0) #define SIDE_TABLE_DEALLOCATING (1UL<<1) // MSB-ward of weak bit @@ -221,6 +267,23 @@ void SideTableLocksSucceedLocks(StripedMap& oldlocks) { } } +// Call out to the _setWeaklyReferenced method on obj, if implemented. 
+static void callSetWeaklyReferenced(id obj) { + if (!obj) + return; + + Class cls = obj->getIsa(); + + if (slowpath(cls->hasCustomRR() && !object_isClass(obj))) { + ASSERT(((objc_class *)cls)->isInitializing() || ((objc_class *)cls)->isInitialized()); + void (*setWeaklyReferenced)(id, SEL) = (void(*)(id, SEL)) + class_getMethodImplementation(cls, @selector(_setWeaklyReferenced)); + if ((IMP)setWeaklyReferenced != _objc_msgForward) { + (*setWeaklyReferenced)(obj, @selector(_setWeaklyReferenced)); + } + } +} + // // The -fobjc-arc flag causes the compiler to issue calls to objc_{retain/release/autorelease/retain_block} // @@ -269,7 +332,7 @@ enum CrashIfDeallocating { DontCrashIfDeallocating = false, DoCrashIfDeallocating = true }; template + enum CrashIfDeallocating crashIfDeallocating> static id storeWeak(id *location, objc_object *newObj) { @@ -336,11 +399,11 @@ storeWeak(id *location, objc_object *newObj) if (haveNew) { newObj = (objc_object *) weak_register_no_lock(&newTable->weak_table, (id)newObj, location, - crashIfDeallocating); + crashIfDeallocating ? CrashIfDeallocating : ReturnNilIfDeallocating); // weak_register_no_lock returns nil if weak store should be rejected // Set is-weakly-referenced bit in refcount table. - if (newObj && !newObj->isTaggedPointer()) { + if (!newObj->isTaggedPointerOrNil()) { newObj->setWeaklyReferenced_nolock(); } @@ -353,6 +416,12 @@ storeWeak(id *location, objc_object *newObj) SideTable::unlockTwo(oldTable, newTable); + // This must be called without the locks held, as it can invoke + // arbitrary code. In particular, even if _setWeaklyReferenced + // is not implemented, resolveInstanceMethod: may be, and may + // call back into the weak reference machinery. 
+ callSetWeaklyReferenced((id)newObj); + return (id)newObj; } @@ -474,8 +543,7 @@ objc_loadWeakRetained(id *location) retry: // fixme std::atomic this load obj = *location; - if (!obj) return nil; - if (obj->isTaggedPointer()) return obj; + if (obj->isTaggedPointerOrNil()) return obj; table = &SideTables()[obj]; @@ -499,9 +567,12 @@ objc_loadWeakRetained(id *location) else { // Slow case. We must check for +initialize and call it outside // the lock if necessary in order to avoid deadlocks. + // Use lookUpImpOrForward so we can avoid the assert in + // class_getInstanceMethod, since we intentionally make this + // callout with the lock held. if (cls->isInitialized() || _thisThreadIsInitializingClass(cls)) { BOOL (*tryRetain)(id, SEL) = (BOOL(*)(id, SEL)) - class_getMethodImplementation(cls, @selector(retainWeakReference)); + lookUpImpOrForwardTryCache(obj, @selector(retainWeakReference), cls); if ((IMP)tryRetain == _objc_msgForward) { result = nil; } @@ -572,9 +643,28 @@ objc_copyWeak(id *dst, id *src) void objc_moveWeak(id *dst, id *src) { - objc_copyWeak(dst, src); - objc_destroyWeak(src); + id obj; + SideTable *table; + +retry: + obj = *src; + if (obj == nil) { + *dst = nil; + return; + } + + table = &SideTables()[obj]; + table->lock(); + if (*src != obj) { + table->unlock(); + goto retry; + } + + weak_unregister_no_lock(&table->weak_table, obj, src); + weak_register_no_lock(&table->weak_table, obj, dst, DontCheckDeallocating); + *dst = obj; *src = nil; + table->unlock(); } @@ -611,6 +701,7 @@ private: static pthread_key_t const key = AUTORELEASE_POOL_KEY; static uint8_t const SCRIBBLE = 0xA3; // 0xA3A3A3A3 after releasing static size_t const COUNT = SIZE / sizeof(id); + static size_t const MAX_FAULTS = 2; // EMPTY_POOL_PLACEHOLDER is stored in TLS when exactly one pool is // pushed and it has never contained any objects. 
This saves memory @@ -643,13 +734,33 @@ private: #endif } + void checkTooMuchAutorelease() + { +#if TARGET_OS_IPHONE && !TARGET_OS_SIMULATOR + bool objcModeNoFaults = DisableFaults || getpid() == 1 || + !os_variant_has_internal_diagnostics("com.apple.obj-c"); + if (!objcModeNoFaults) { + if (depth+1 >= (uint32_t)objc::PageCountWarning && numFaults < MAX_FAULTS) { //depth is 0 when first page is allocated + os_fault_with_payload(OS_REASON_LIBSYSTEM, + OS_REASON_LIBSYSTEM_CODE_FAULT, + NULL, 0, "Large Autorelease Pool", 0); + numFaults++; + } + } +#endif + } + AutoreleasePoolPage(AutoreleasePoolPage *newParent) : AutoreleasePoolPageData(begin(), objc_thread_self(), newParent, newParent ? 1+newParent->depth : 0, newParent ? newParent->hiwat : 0) - { + { + if (objc::PageCountWarning != -1) { + checkTooMuchAutorelease(); + } + if (parent) { parent->check(); ASSERT(!parent->child); @@ -744,8 +855,49 @@ private: { ASSERT(!full()); unprotect(); - id *ret = next; // faster than `return next-1` because of aliasing + id *ret; + +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + if (!DisableAutoreleaseCoalescing || !DisableAutoreleaseCoalescingLRU) { + if (!DisableAutoreleaseCoalescingLRU) { + if (!empty() && (obj != POOL_BOUNDARY)) { + AutoreleasePoolEntry *topEntry = (AutoreleasePoolEntry *)next - 1; + for (uintptr_t offset = 0; offset < 4; offset++) { + AutoreleasePoolEntry *offsetEntry = topEntry - offset; + if (offsetEntry <= (AutoreleasePoolEntry*)begin() || *(id *)offsetEntry == POOL_BOUNDARY) { + break; + } + if (offsetEntry->ptr == (uintptr_t)obj && offsetEntry->count < AutoreleasePoolEntry::maxCount) { + if (offset > 0) { + AutoreleasePoolEntry found = *offsetEntry; + memmove(offsetEntry, offsetEntry + 1, offset * sizeof(*offsetEntry)); + *topEntry = found; + } + topEntry->count++; + ret = (id *)topEntry; // need to reset ret + goto done; + } + } + } + } else { + if (!empty() && (obj != POOL_BOUNDARY)) { + AutoreleasePoolEntry *prevEntry = (AutoreleasePoolEntry *)next - 1; + 
if (prevEntry->ptr == (uintptr_t)obj && prevEntry->count < AutoreleasePoolEntry::maxCount) { + prevEntry->count++; + ret = (id *)prevEntry; // need to reset ret + goto done; + } + } + } + } +#endif + ret = next; // faster than `return next-1` because of aliasing *next++ = obj; +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + // Make sure obj fits in the bits available for it + ASSERT(((AutoreleasePoolEntry *)ret)->ptr == (uintptr_t)obj); +#endif + done: protect(); return ret; } @@ -772,12 +924,28 @@ private: } page->unprotect(); +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + AutoreleasePoolEntry* entry = (AutoreleasePoolEntry*) --page->next; + + // create an obj with the zeroed out top byte and release that + id obj = (id)entry->ptr; + int count = (int)entry->count; // grab these before memset +#else id obj = *--page->next; +#endif memset((void*)page->next, SCRIBBLE, sizeof(*page->next)); page->protect(); if (obj != POOL_BOUNDARY) { +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + // release count+1 times since it is count of the additional + // autoreleases beyond the first one + for (int i = 0; i < count + 1; i++) { + objc_release(obj); + } +#else objc_release(obj); +#endif } } @@ -984,10 +1152,13 @@ private: public: static inline id autorelease(id obj) { - ASSERT(obj); - ASSERT(!obj->isTaggedPointer()); + ASSERT(!obj->isTaggedPointerOrNil()); id *dest __unused = autoreleaseFast(obj); +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + ASSERT(!dest || dest == EMPTY_POOL_PLACEHOLDER || (id)((AutoreleasePoolEntry *)dest)->ptr == obj); +#else ASSERT(!dest || dest == EMPTY_POOL_PLACEHOLDER || *dest == obj); +#endif return obj; } @@ -1024,9 +1195,9 @@ public: _objc_inform_now_and_on_crash ("Invalid or prematurely-freed autorelease pool %p. " "Set a breakpoint on objc_autoreleasePoolInvalid to debug. " - "Proceeding anyway because the app is old " - "(SDK version " SDK_FORMAT "). Memory errors are likely.", - token, FORMAT_SDK(sdkVersion())); + "Proceeding anyway because the app is old. 
Memory errors " + "are likely.", + token); } objc_autoreleasePoolInvalid(token); } @@ -1127,8 +1298,19 @@ public: if (*p == POOL_BOUNDARY) { _objc_inform("[%p] ################ POOL %p", p, p); } else { - _objc_inform("[%p] %#16lx %s", +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + AutoreleasePoolEntry *entry = (AutoreleasePoolEntry *)p; + if (entry->count > 0) { + id obj = (id)entry->ptr; + _objc_inform("[%p] %#16lx %s autorelease count %u", + p, (unsigned long)obj, object_getClassName(obj), + entry->count + 1); + goto done; + } +#endif + _objc_inform("[%p] %#16lx %s", p, (unsigned long)*p, object_getClassName(*p)); + done:; } } } @@ -1161,6 +1343,20 @@ public: _objc_inform("##############"); } +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + __attribute__((noinline, cold)) + unsigned sumOfExtraReleases() + { + unsigned sumOfExtraReleases = 0; + for (id *p = begin(); p < next; p++) { + if (*p != POOL_BOUNDARY) { + sumOfExtraReleases += ((AutoreleasePoolEntry *)p)->count; + } + } + return sumOfExtraReleases; + } +#endif + __attribute__((noinline, cold)) static void printHiwat() { @@ -1168,16 +1364,29 @@ public: // Ignore high water marks under 256 to suppress noise. 
AutoreleasePoolPage *p = hotPage(); uint32_t mark = p->depth*COUNT + (uint32_t)(p->next - p->begin()); - if (mark > p->hiwat && mark > 256) { + if (mark > p->hiwat + 256) { +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + unsigned sumOfExtraReleases = 0; +#endif for( ; p; p = p->parent) { p->unprotect(); p->hiwat = mark; p->protect(); + +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + sumOfExtraReleases += p->sumOfExtraReleases(); +#endif } _objc_inform("POOL HIGHWATER: new high water mark of %u " "pending releases for thread %p:", mark, objc_thread_self()); +#if SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS + if (sumOfExtraReleases > 0) { + _objc_inform("POOL HIGHWATER: extra sequential autoreleases of objects: %u", + sumOfExtraReleases); + } +#endif void *stack[128]; int count = backtrace(stack, sizeof(stack)/sizeof(stack[0])); @@ -1201,14 +1410,14 @@ public: NEVER_INLINE id objc_object::rootRetain_overflow(bool tryRetain) { - return rootRetain(tryRetain, true); + return rootRetain(tryRetain, RRVariant::Full); } NEVER_INLINE uintptr_t objc_object::rootRelease_underflow(bool performDealloc) { - return rootRelease(performDealloc, true); + return rootRelease(performDealloc, RRVariant::Full); } @@ -1317,7 +1526,7 @@ objc_object::sidetable_moveExtraRC_nolock(size_t extra_rc, ASSERT((oldRefcnt & SIDE_TABLE_WEAKLY_REFERENCED) == 0); uintptr_t carry; - size_t refcnt = addc(oldRefcnt, extra_rc << SIDE_TABLE_RC_SHIFT, 0, &carry); + size_t refcnt = addc(oldRefcnt, (extra_rc - 1) << SIDE_TABLE_RC_SHIFT, 0, &carry); if (carry) refcnt = SIDE_TABLE_RC_PINNED; if (isDeallocating) refcnt |= SIDE_TABLE_DEALLOCATING; if (weaklyReferenced) refcnt |= SIDE_TABLE_WEAKLY_REFERENCED; @@ -1359,7 +1568,7 @@ objc_object::sidetable_addExtraRC_nolock(size_t delta_rc) // Move some retain counts from the side table to the isa field. // Returns the actual count subtracted, which may be less than the request. 
-size_t +objc_object::SidetableBorrow objc_object::sidetable_subExtraRC_nolock(size_t delta_rc) { ASSERT(isa.nonpointer); @@ -1368,7 +1577,7 @@ objc_object::sidetable_subExtraRC_nolock(size_t delta_rc) RefcountMap::iterator it = table.refcnts.find(this); if (it == table.refcnts.end() || it->second == 0) { // Side table retain count is zero. Can't borrow. - return 0; + return { 0, 0 }; } size_t oldRefcnt = it->second; @@ -1379,7 +1588,7 @@ objc_object::sidetable_subExtraRC_nolock(size_t delta_rc) size_t newRefcnt = oldRefcnt - (delta_rc << SIDE_TABLE_RC_SHIFT); ASSERT(oldRefcnt > newRefcnt); // shouldn't underflow it->second = newRefcnt; - return delta_rc; + return { delta_rc, newRefcnt >> SIDE_TABLE_RC_SHIFT }; } @@ -1394,19 +1603,29 @@ objc_object::sidetable_getExtraRC_nolock() } +void +objc_object::sidetable_clearExtraRC_nolock() +{ + ASSERT(isa.nonpointer); + SideTable& table = SideTables()[this]; + RefcountMap::iterator it = table.refcnts.find(this); + table.refcnts.erase(it); +} + + // SUPPORT_NONPOINTER_ISA #endif id -objc_object::sidetable_retain() +objc_object::sidetable_retain(bool locked) { #if SUPPORT_NONPOINTER_ISA ASSERT(!isa.nonpointer); #endif SideTable& table = SideTables()[this]; - table.lock(); + if (!locked) table.lock(); size_t& refcntStorage = table.refcnts[this]; if (! 
(refcntStorage & SIDE_TABLE_RC_PINNED)) { refcntStorage += SIDE_TABLE_RC_ONE; @@ -1505,6 +1724,14 @@ objc_object::sidetable_isWeaklyReferenced() return result; } +#if OBJC_WEAK_FORMATION_CALLOUT_DEFINED +//Clients can dlsym() for this symbol to see if an ObjC supporting +//-_setWeaklyReferenced is present +OBJC_EXPORT const uintptr_t _objc_has_weak_formation_callout = 0; +static_assert(SUPPORT_NONPOINTER_ISA, "Weak formation callout must only be defined when nonpointer isa is supported."); +#else +static_assert(!SUPPORT_NONPOINTER_ISA, "If weak callout is not present then we must not support nonpointer isas."); +#endif void objc_object::sidetable_setWeaklyReferenced_nolock() @@ -1512,9 +1739,9 @@ objc_object::sidetable_setWeaklyReferenced_nolock() #if SUPPORT_NONPOINTER_ISA ASSERT(!isa.nonpointer); #endif - + SideTable& table = SideTables()[this]; - + table.refcnts[this] |= SIDE_TABLE_WEAKLY_REFERENCED; } @@ -1523,7 +1750,7 @@ objc_object::sidetable_setWeaklyReferenced_nolock() // return uintptr_t instead of bool so that the various raw-isa // -release paths all return zero in eax uintptr_t -objc_object::sidetable_release(bool performDealloc) +objc_object::sidetable_release(bool locked, bool performDealloc) { #if SUPPORT_NONPOINTER_ISA ASSERT(!isa.nonpointer); @@ -1532,7 +1759,7 @@ objc_object::sidetable_release(bool performDealloc) bool do_dealloc = false; - table.lock(); + if (!locked) table.lock(); auto it = table.refcnts.try_emplace(this, SIDE_TABLE_DEALLOCATING); auto &refcnt = it.first->second; if (it.second) { @@ -1583,8 +1810,7 @@ __attribute__((aligned(16), flatten, noinline)) id objc_retain(id obj) { - if (!obj) return obj; - if (obj->isTaggedPointer()) return obj; + if (obj->isTaggedPointerOrNil()) return obj; return obj->retain(); } @@ -1593,8 +1819,7 @@ __attribute__((aligned(16), flatten, noinline)) void objc_release(id obj) { - if (!obj) return; - if (obj->isTaggedPointer()) return; + if (obj->isTaggedPointerOrNil()) return; return obj->release(); } 
@@ -1603,8 +1828,7 @@ __attribute__((aligned(16), flatten, noinline)) id objc_autorelease(id obj) { - if (!obj) return obj; - if (obj->isTaggedPointer()) return obj; + if (obj->isTaggedPointerOrNil()) return obj; return obj->autorelease(); } @@ -1694,8 +1918,7 @@ _objc_rootRelease(id obj) obj->rootRelease(); } - -// Call [cls alloc] or [cls allocWithZone:nil], with appropriate +// Call [cls alloc] or [cls allocWithZone:nil], with appropriate // shortcutting optimizations. static ALWAYS_INLINE id callAlloc(Class cls, bool checkNil, bool allocWithZone=false) @@ -1731,7 +1954,7 @@ objc_alloc(Class cls) } // Calls [cls allocWithZone:nil]. -id +id objc_allocWithZone(Class cls) { return callAlloc(cls, true/*checkNil*/, true/*allocWithZone*/); @@ -1750,7 +1973,7 @@ objc_opt_new(Class cls) { #if __OBJC2__ if (fastpath(cls && !cls->ISA()->hasCustomCore())) { - return [callAlloc(cls, false/*checkNil*/, true/*allocWithZone*/) init]; + return [callAlloc(cls, false/*checkNil*/) init]; } #endif return ((id(*)(id, SEL))objc_msgSend)(cls, @selector(new)); @@ -1761,7 +1984,7 @@ id objc_opt_self(id obj) { #if __OBJC2__ - if (fastpath(!obj || obj->isTaggedPointer() || !obj->ISA()->hasCustomCore())) { + if (fastpath(obj->isTaggedPointerOrNil() || !obj->ISA()->hasCustomCore())) { return obj; } #endif @@ -1790,7 +2013,7 @@ objc_opt_isKindOfClass(id obj, Class otherClass) if (slowpath(!obj)) return NO; Class cls = obj->getIsa(); if (fastpath(!cls->hasCustomCore())) { - for (Class tcls = cls; tcls; tcls = tcls->superclass) { + for (Class tcls = cls; tcls; tcls = tcls->getSuperclass()) { if (tcls == otherClass) return YES; } return NO; @@ -2019,11 +2242,11 @@ __attribute__((objc_nonlazy_class)) } + (Class)superclass { - return self->superclass; + return self->getSuperclass(); } - (Class)superclass { - return [self class]->superclass; + return [self class]->getSuperclass(); } + (BOOL)isMemberOfClass:(Class)cls { @@ -2035,28 +2258,28 @@ __attribute__((objc_nonlazy_class)) } + 
(BOOL)isKindOfClass:(Class)cls { - for (Class tcls = self->ISA(); tcls; tcls = tcls->superclass) { + for (Class tcls = self->ISA(); tcls; tcls = tcls->getSuperclass()) { if (tcls == cls) return YES; } return NO; } - (BOOL)isKindOfClass:(Class)cls { - for (Class tcls = [self class]; tcls; tcls = tcls->superclass) { + for (Class tcls = [self class]; tcls; tcls = tcls->getSuperclass()) { if (tcls == cls) return YES; } return NO; } + (BOOL)isSubclassOfClass:(Class)cls { - for (Class tcls = self; tcls; tcls = tcls->superclass) { + for (Class tcls = self; tcls; tcls = tcls->getSuperclass()) { if (tcls == cls) return YES; } return NO; } + (BOOL)isAncestorOfObject:(NSObject *)obj { - for (Class tcls = [obj class]; tcls; tcls = tcls->superclass) { + for (Class tcls = [obj class]; tcls; tcls = tcls->getSuperclass()) { if (tcls == self) return YES; } return NO; @@ -2076,7 +2299,7 @@ __attribute__((objc_nonlazy_class)) + (BOOL)conformsToProtocol:(Protocol *)protocol { if (!protocol) return NO; - for (Class tcls = self; tcls; tcls = tcls->superclass) { + for (Class tcls = self; tcls; tcls = tcls->getSuperclass()) { if (class_conformsToProtocol(tcls, protocol)) return YES; } return NO; @@ -2084,7 +2307,7 @@ __attribute__((objc_nonlazy_class)) - (BOOL)conformsToProtocol:(Protocol *)protocol { if (!protocol) return NO; - for (Class tcls = [self class]; tcls; tcls = tcls->superclass) { + for (Class tcls = [self class]; tcls; tcls = tcls->getSuperclass()) { if (class_conformsToProtocol(tcls, protocol)) return YES; } return NO; diff --git a/runtime/Protocol.mm b/runtime/Protocol.mm index 9e97419..9432267 100644 --- a/runtime/Protocol.mm +++ b/runtime/Protocol.mm @@ -100,7 +100,7 @@ __attribute__((objc_nonlazy_class)) // check isKindOf: Class cls; Class protoClass = objc_getClass("Protocol"); - for (cls = object_getClass(other); cls; cls = cls->superclass) { + for (cls = object_getClass(other); cls; cls = cls->getSuperclass()) { if (cls == protoClass) break; } if (!cls) return NO; 
diff --git a/runtime/arm64-asm.h b/runtime/arm64-asm.h index fb15e5e..a6f7d38 100644 --- a/runtime/arm64-asm.h +++ b/runtime/arm64-asm.h @@ -28,6 +28,8 @@ #if __arm64__ +#include "objc-config.h" + #if __LP64__ // true arm64 @@ -129,11 +131,35 @@ // note: assumes the imp is not nil eor $1, $1, $2 // mix SEL into ptrauth modifier eor $1, $1, $3 // mix isa into ptrauth modifier - autib $0, $1 // authenticate cached imp + autib $0, $1 // authenticate cached imp ldr xzr, [$0] // crash if authentication failed paciza $0 // resign cached imp as IMP .endmacro +.macro ExtractISA + and $0, $1, #ISA_MASK +#if ISA_SIGNING_AUTH_MODE == ISA_SIGNING_STRIP + xpacd $0 +#elif ISA_SIGNING_AUTH_MODE == ISA_SIGNING_AUTH + mov x10, $2 + movk x10, #ISA_SIGNING_DISCRIMINATOR, LSL #48 + autda $0, x10 +#endif +.endmacro + +.macro AuthISASuper dst, addr_mutable, discriminator +#if ISA_SIGNING_AUTH_MODE == ISA_SIGNING_AUTH + movk \addr_mutable, #\discriminator, LSL #48 + autda \dst, \addr_mutable +#elif ISA_SIGNING_AUTH_MODE == ISA_SIGNING_STRIP + xpacd \dst +#endif +.endmacro + +.macro SignAsImp + paciza $0 +.endmacro + // JOP #else // not JOP @@ -162,7 +188,14 @@ .macro AuthAndResignAsIMP // $0 = cached imp, $1 = address of cached imp, $2 = SEL eor $0, $0, $3 -.endmacro +.endmacro + +.macro SignAsImp +.endmacro + +.macro ExtractISA + and $0, $1, #ISA_MASK +.endmacro // not JOP #endif diff --git a/runtime/dummy-library-mac-i386.c b/runtime/dummy-library-mac-i386.c new file mode 100644 index 0000000..a8cd20f --- /dev/null +++ b/runtime/dummy-library-mac-i386.c @@ -0,0 +1,356 @@ +// This file contains stubs matching the sybols previously exported by libobjc +// when i386 Mac was actually supported. These stubs allow us to tease apart the +// dependencies to prepare for removing i386 Mac libobjc entirely. +// +// This file is not built when building for any other arch/OS combination. When +// building for i386 Mac, no other source files are built, just this one. 
This +// is handled using the Included/Excluded Source File Names settings in Xcode, +// with arch/OS-specific overrides. +// +// rdar://problem/58541885 + +#pragma GCC visibility push(default) +const char ___ld_hide_os10_5__objc_class_name_NSObject __asm__("$ld$hide$os10.5$.objc_class_name_NSObject"); +const char ___ld_hide_os10_6__objc_class_name_NSObject __asm__("$ld$hide$os10.6$.objc_class_name_NSObject"); +const char ___ld_hide_os10_7__objc_class_name_NSObject __asm__("$ld$hide$os10.7$.objc_class_name_NSObject"); +const char ___objc_class_name_List __asm__(".objc_class_name_List"); +const char ___objc_class_name_NSObject __asm__(".objc_class_name_NSObject"); +const char ___objc_class_name_Object __asm__(".objc_class_name_Object"); +const char ___objc_class_name_Protocol __asm__(".objc_class_name_Protocol"); +void NXCompareHashTables(void) {} +void NXCompareMapTables(void) {} +void NXCopyHashTable(void) {} +void NXCopyStringBuffer(void) {} +void NXCopyStringBufferFromZone(void) {} +void NXCountHashTable(void) {} +void NXCountMapTable(void) {} +void NXCreateHashTable(void) {} +void NXCreateHashTableFromZone(void) {} +void NXCreateMapTable(void) {} +void NXCreateMapTableFromZone(void) {} +void NXEmptyHashTable(void) {} +void NXFreeHashTable(void) {} +void NXFreeMapTable(void) {} +void NXHashGet(void) {} +void NXHashInsert(void) {} +void NXHashInsertIfAbsent(void) {} +void NXHashMember(void) {} +void NXHashRemove(void) {} +void NXInitHashState(void) {} +void NXInitMapState(void) {} +void NXMapGet(void) {} +void NXMapInsert(void) {} +void NXMapMember(void) {} +void NXMapRemove(void) {} +void NXNextHashState(void) {} +void NXNextMapState(void) {} +void NXNoEffectFree(void) {} +const char NXObjectMapPrototype; +void NXPtrHash(void) {} +void NXPtrIsEqual(void) {} +const char NXPtrPrototype; +const char NXPtrStructKeyPrototype; +const char NXPtrValueMapPrototype; +void NXReallyFree(void) {} +void NXResetHashTable(void) {} +void NXResetMapTable(void) {} +void 
NXStrHash(void) {} +void NXStrIsEqual(void) {} +const char NXStrPrototype; +const char NXStrStructKeyPrototype; +const char NXStrValueMapPrototype; +void NXUniqueString(void) {} +void NXUniqueStringNoCopy(void) {} +void NXUniqueStringWithLength(void) {} +char _alloc; +void _class_getIvarMemoryManagement(void) {} +void _class_isFutureClass(void) {} +void _class_isSwift(void) {} +char _copy; +char _dealloc; +char _error; +void _objcInit(void) {} +void _objc_addWillInitializeClassFunc(void) {} +void _objc_atfork_child(void) {} +void _objc_atfork_parent(void) {} +void _objc_atfork_prepare(void) {} +void _objc_autoreleasePoolPop(void) {} +void _objc_autoreleasePoolPrint(void) {} +void _objc_autoreleasePoolPush(void) {} +void _objc_deallocOnMainThreadHelper(void) {} +const char _objc_debug_class_hash; +const char _objc_empty_cache; +void _objc_error(void) {} +void _objc_flush_caches(void) {} +void _objc_getFreedObjectClass(void) {} +void _objc_init(void) {} +void _objc_msgForward(void) {} +void _objc_msgForward_stret(void) {} +void _objc_resolve_categories_for_class(void) {} +void _objc_rootAlloc(void) {} +void _objc_rootAllocWithZone(void) {} +void _objc_rootAutorelease(void) {} +void _objc_rootDealloc(void) {} +void _objc_rootFinalize(void) {} +void _objc_rootHash(void) {} +void _objc_rootInit(void) {} +void _objc_rootIsDeallocating(void) {} +void _objc_rootRelease(void) {} +void _objc_rootReleaseWasZero(void) {} +void _objc_rootRetain(void) {} +void _objc_rootRetainCount(void) {} +void _objc_rootTryRetain(void) {} +void _objc_rootZone(void) {} +void _objc_setBadAllocHandler(void) {} +void _objc_setClassLoader(void) {} +void _protocol_getMethodTypeEncoding(void) {} +char _realloc; +char _zoneAlloc; +char _zoneCopy; +char _zoneRealloc; +void class_addIvar(void) {} +void class_addMethod(void) {} +void class_addMethods(void) {} +void class_addProperty(void) {} +void class_addProtocol(void) {} +void class_conformsToProtocol(void) {} +void class_copyIvarList(void) {} +void 
class_copyMethodList(void) {} +void class_copyPropertyList(void) {} +void class_copyProtocolList(void) {} +void class_createInstance(void) {} +void class_createInstanceFromZone(void) {} +void class_createInstances(void) {} +void class_getClassMethod(void) {} +void class_getClassVariable(void) {} +void class_getImageName(void) {} +void class_getInstanceMethod(void) {} +void class_getInstanceSize(void) {} +void class_getInstanceVariable(void) {} +void class_getIvarLayout(void) {} +void class_getMethodImplementation(void) {} +void class_getMethodImplementation_stret(void) {} +void class_getName(void) {} +void class_getProperty(void) {} +void class_getSuperclass(void) {} +void class_getVersion(void) {} +void class_getWeakIvarLayout(void) {} +void class_isMetaClass(void) {} +void class_lookupMethod(void) {} +void class_nextMethodList(void) {} +void class_poseAs(void) {} +void class_removeMethods(void) {} +void class_replaceMethod(void) {} +void class_replaceProperty(void) {} +void class_respondsToMethod(void) {} +void class_respondsToSelector(void) {} +void class_setIvarLayout(void) {} +void class_setSuperclass(void) {} +void class_setVersion(void) {} +void class_setWeakIvarLayout(void) {} +void gdb_class_getClass(void) {} +void gdb_object_getClass(void) {} +void imp_getBlock(void) {} +void imp_implementationWithBlock(void) {} +void imp_removeBlock(void) {} +void instrumentObjcMessageSends(void) {} +void ivar_getName(void) {} +void ivar_getOffset(void) {} +void ivar_getTypeEncoding(void) {} +void method_copyArgumentType(void) {} +void method_copyReturnType(void) {} +void method_exchangeImplementations(void) {} +void method_getArgumentType(void) {} +void method_getDescription(void) {} +void method_getImplementation(void) {} +void method_getName(void) {} +void method_getNumberOfArguments(void) {} +void method_getReturnType(void) {} +void method_getSizeOfArguments(void) {} +void method_getTypeEncoding(void) {} +void method_invoke(void) {} +void method_invoke_stret(void) {} 
+void method_setImplementation(void) {} +void objc_addClass(void) {} +void objc_addLoadImageFunc(void) {} +void objc_alloc(void) {} +void objc_allocWithZone(void) {} +void objc_alloc_init(void) {} +void objc_allocateClassPair(void) {} +void objc_allocateProtocol(void) {} +void objc_allocate_object(void) {} +void objc_appRequiresGC(void) {} +void objc_assertRegisteredThreadWithCollector(void) {} +void objc_assign_global(void) {} +void objc_assign_ivar(void) {} +void objc_assign_strongCast(void) {} +void objc_assign_threadlocal(void) {} +void objc_assign_weak(void) {} +void objc_atomicCompareAndSwapGlobal(void) {} +void objc_atomicCompareAndSwapGlobalBarrier(void) {} +void objc_atomicCompareAndSwapInstanceVariable(void) {} +void objc_atomicCompareAndSwapInstanceVariableBarrier(void) {} +void objc_atomicCompareAndSwapPtr(void) {} +void objc_atomicCompareAndSwapPtrBarrier(void) {} +void objc_autorelease(void) {} +void objc_autoreleasePoolPop(void) {} +void objc_autoreleasePoolPush(void) {} +void objc_autoreleaseReturnValue(void) {} +void objc_clear_deallocating(void) {} +void objc_clear_stack(void) {} +void objc_collect(void) {} +void objc_collect_init(void) {} +void objc_collectableZone(void) {} +void objc_collectingEnabled(void) {} +void objc_collecting_enabled(void) {} +void objc_constructInstance(void) {} +void objc_copyClassList(void) {} +void objc_copyClassNamesForImage(void) {} +void objc_copyClassNamesForImageHeader(void) {} +void objc_copyCppObjectAtomic(void) {} +void objc_copyImageNames(void) {} +void objc_copyProtocolList(void) {} +void objc_copyStruct(void) {} +void objc_copyWeak(void) {} +const char objc_debug_autoreleasepoolpage_child_offset; +const char objc_debug_autoreleasepoolpage_depth_offset; +const char objc_debug_autoreleasepoolpage_hiwat_offset; +const char objc_debug_autoreleasepoolpage_magic_offset; +const char objc_debug_autoreleasepoolpage_next_offset; +const char objc_debug_autoreleasepoolpage_parent_offset; +const char 
objc_debug_autoreleasepoolpage_thread_offset; +void objc_destroyWeak(void) {} +void objc_destructInstance(void) {} +void objc_disposeClassPair(void) {} +void objc_dumpHeap(void) {} +void objc_duplicateClass(void) {} +void objc_enumerationMutation(void) {} +void objc_exception_extract(void) {} +void objc_exception_get_functions(void) {} +void objc_exception_match(void) {} +void objc_exception_set_functions(void) {} +void objc_exception_throw(void) {} +void objc_exception_try_enter(void) {} +void objc_exception_try_exit(void) {} +void objc_finalizeOnMainThread(void) {} +void objc_getAssociatedObject(void) {} +void objc_getClass(void) {} +void objc_getClassList(void) {} +void objc_getClasses(void) {} +void objc_getFutureClass(void) {} +void objc_getMetaClass(void) {} +void objc_getOrigClass(void) {} +void objc_getProperty(void) {} +void objc_getProtocol(void) {} +void objc_getRequiredClass(void) {} +void objc_initWeak(void) {} +void objc_initWeakOrNil(void) {} +void objc_initializeClassPair(void) {} +void objc_isAuto(void) {} +void objc_is_finalized(void) {} +void objc_loadModule(void) {} +void objc_loadModules(void) {} +void objc_loadWeak(void) {} +void objc_loadWeakRetained(void) {} +void objc_lookUpClass(void) {} +void objc_memmove_collectable(void) {} +void objc_moveWeak(void) {} +void objc_msgSend(void) {} +void objc_msgSendSuper(void) {} +void objc_msgSendSuper_stret(void) {} +void objc_msgSend_fpret(void) {} +void objc_msgSend_stret(void) {} +void objc_msgSendv(void) {} +void objc_msgSendv_fpret(void) {} +void objc_msgSendv_stret(void) {} +void objc_opt_class(void) {} +void objc_opt_isKindOfClass(void) {} +void objc_opt_new(void) {} +void objc_opt_respondsToSelector(void) {} +void objc_opt_self(void) {} +void objc_read_weak(void) {} +void objc_registerClassPair(void) {} +void objc_registerProtocol(void) {} +void objc_registerThreadWithCollector(void) {} +void objc_release(void) {} +void objc_removeAssociatedObjects(void) {} +void objc_retain(void) {} +void 
objc_retainAutorelease(void) {} +void objc_retainAutoreleaseReturnValue(void) {} +void objc_retainAutoreleasedReturnValue(void) {} +void objc_retainBlock(void) {} +void objc_retain_autorelease(void) {} +void objc_retainedObject(void) {} +void objc_setAssociatedObject(void) {} +void objc_setClassHandler(void) {} +void objc_setCollectionRatio(void) {} +void objc_setCollectionThreshold(void) {} +void objc_setEnumerationMutationHandler(void) {} +void objc_setForwardHandler(void) {} +void objc_setHook_getImageName(void) {} +void objc_setMultithreaded(void) {} +void objc_setProperty(void) {} +void objc_setProperty_atomic(void) {} +void objc_setProperty_atomic_copy(void) {} +void objc_setProperty_nonatomic(void) {} +void objc_setProperty_nonatomic_copy(void) {} +void objc_set_collection_ratio(void) {} +void objc_set_collection_threshold(void) {} +void objc_should_deallocate(void) {} +void objc_startCollectorThread(void) {} +void objc_start_collector_thread(void) {} +void objc_storeStrong(void) {} +void objc_storeWeak(void) {} +void objc_storeWeakOrNil(void) {} +void objc_sync_enter(void) {} +void objc_sync_exit(void) {} +void objc_sync_try_enter(void) {} +void objc_unloadModules(void) {} +void objc_unregisterThreadWithCollector(void) {} +void objc_unretainedObject(void) {} +void objc_unretainedPointer(void) {} +void objc_unsafeClaimAutoreleasedReturnValue(void) {} +void object_copy(void) {} +void object_copyFromZone(void) {} +void object_dispose(void) {} +void object_getClass(void) {} +void object_getClassName(void) {} +void object_getIndexedIvars(void) {} +void object_getInstanceVariable(void) {} +void object_getIvar(void) {} +void object_getMethodImplementation(void) {} +void object_getMethodImplementation_stret(void) {} +void object_isClass(void) {} +void object_realloc(void) {} +void object_reallocFromZone(void) {} +void object_setClass(void) {} +void object_setInstanceVariable(void) {} +void object_setInstanceVariableWithStrongDefault(void) {} +void 
object_setIvar(void) {} +void object_setIvarWithStrongDefault(void) {} +void property_copyAttributeList(void) {} +void property_copyAttributeValue(void) {} +void property_getAttributes(void) {} +void property_getName(void) {} +void protocol_addMethodDescription(void) {} +void protocol_addProperty(void) {} +void protocol_addProtocol(void) {} +void protocol_conformsToProtocol(void) {} +void protocol_copyMethodDescriptionList(void) {} +void protocol_copyPropertyList(void) {} +void protocol_copyPropertyList2(void) {} +void protocol_copyProtocolList(void) {} +void protocol_getMethodDescription(void) {} +void protocol_getName(void) {} +void protocol_getProperty(void) {} +void protocol_isEqual(void) {} +void sel_getName(void) {} +void sel_getUid(void) {} +void sel_isEqual(void) {} +void sel_isMapped(void) {} +void sel_registerName(void) {} +void objc_cache_buckets(void) {} +void objc_cache_bytesForCapacity(void) {} +void objc_cache_capacity(void) {} +void objc_cache_occupied(void) {} +void objc_copyClassesForImage(void) {} diff --git a/runtime/isa.h b/runtime/isa.h index b4741cb..8b552c2 100644 --- a/runtime/isa.h +++ b/runtime/isa.h @@ -55,26 +55,46 @@ // uintptr_t extraBytes : 1; // allocated with extra bytes # if __arm64__ -# define ISA_MASK 0x0000000ffffffff8ULL -# define ISA_MAGIC_MASK 0x000003f000000001ULL -# define ISA_MAGIC_VALUE 0x000001a000000001ULL -# define ISA_BITFIELD \ - uintptr_t nonpointer : 1; \ - uintptr_t has_assoc : 1; \ - uintptr_t has_cxx_dtor : 1; \ - uintptr_t shiftcls : 33; /*MACH_VM_MAX_ADDRESS 0x1000000000*/ \ - uintptr_t magic : 6; \ - uintptr_t weakly_referenced : 1; \ - uintptr_t deallocating : 1; \ - uintptr_t has_sidetable_rc : 1; \ - uintptr_t extra_rc : 19 -# define RC_ONE (1ULL<<45) -# define RC_HALF (1ULL<<18) +// ARM64 simulators have a larger address space, so use the ARM64e +// scheme even when simulators build for ARM64-not-e. 
+# if __has_feature(ptrauth_calls) || TARGET_OS_SIMULATOR +# define ISA_MASK 0x007ffffffffffff8ULL +# define ISA_MAGIC_MASK 0x0000000000000001ULL +# define ISA_MAGIC_VALUE 0x0000000000000001ULL +# define ISA_HAS_CXX_DTOR_BIT 0 +# define ISA_BITFIELD \ + uintptr_t nonpointer : 1; \ + uintptr_t has_assoc : 1; \ + uintptr_t weakly_referenced : 1; \ + uintptr_t shiftcls_and_sig : 52; \ + uintptr_t has_sidetable_rc : 1; \ + uintptr_t extra_rc : 8 +# define RC_ONE (1ULL<<56) +# define RC_HALF (1ULL<<7) +# else +# define ISA_MASK 0x0000000ffffffff8ULL +# define ISA_MAGIC_MASK 0x000003f000000001ULL +# define ISA_MAGIC_VALUE 0x000001a000000001ULL +# define ISA_HAS_CXX_DTOR_BIT 1 +# define ISA_BITFIELD \ + uintptr_t nonpointer : 1; \ + uintptr_t has_assoc : 1; \ + uintptr_t has_cxx_dtor : 1; \ + uintptr_t shiftcls : 33; /*MACH_VM_MAX_ADDRESS 0x1000000000*/ \ + uintptr_t magic : 6; \ + uintptr_t weakly_referenced : 1; \ + uintptr_t unused : 1; \ + uintptr_t has_sidetable_rc : 1; \ + uintptr_t extra_rc : 19 +# define RC_ONE (1ULL<<45) +# define RC_HALF (1ULL<<18) +# endif # elif __x86_64__ # define ISA_MASK 0x00007ffffffffff8ULL # define ISA_MAGIC_MASK 0x001f800000000001ULL # define ISA_MAGIC_VALUE 0x001d800000000001ULL +# define ISA_HAS_CXX_DTOR_BIT 1 # define ISA_BITFIELD \ uintptr_t nonpointer : 1; \ uintptr_t has_assoc : 1; \ @@ -82,7 +102,7 @@ uintptr_t shiftcls : 44; /*MACH_VM_MAX_ADDRESS 0x7fffffe00000*/ \ uintptr_t magic : 6; \ uintptr_t weakly_referenced : 1; \ - uintptr_t deallocating : 1; \ + uintptr_t unused : 1; \ uintptr_t has_sidetable_rc : 1; \ uintptr_t extra_rc : 8 # define RC_ONE (1ULL<<56) @@ -109,6 +129,7 @@ # define ISA_INDEX_COUNT (1 << ISA_INDEX_BITS) # define ISA_INDEX_MAGIC_MASK 0x001E0001 # define ISA_INDEX_MAGIC_VALUE 0x001C0001 +# define ISA_HAS_CXX_DTOR_BIT 1 # define ISA_BITFIELD \ uintptr_t nonpointer : 1; \ uintptr_t has_assoc : 1; \ @@ -116,7 +137,7 @@ uintptr_t magic : 4; \ uintptr_t has_cxx_dtor : 1; \ uintptr_t weakly_referenced : 1; \ - 
uintptr_t deallocating : 1; \ + uintptr_t unused : 1; \ uintptr_t has_sidetable_rc : 1; \ uintptr_t extra_rc : 7 # define RC_ONE (1ULL<<25) diff --git a/runtime/objc-abi.h b/runtime/objc-abi.h index 18430df..937a4be 100644 --- a/runtime/objc-abi.h +++ b/runtime/objc-abi.h @@ -46,7 +46,7 @@ /* Linker metadata symbols */ // NSObject was in Foundation/CF on macOS < 10.8. -#if TARGET_OS_OSX +#if TARGET_OS_OSX && (__x86_64__ || __i386__) #if __OBJC2__ OBJC_EXPORT const char __objc_nsobject_class_10_5 @@ -171,6 +171,15 @@ HasClassProperties: Old ABI: Set by some compilers. Not used by the runtime. */ +// Description of an expected duplicate class name. +// __DATA,__objc_dupclass stores one of these. Only the main image is +// consulted for these purposes. +typedef struct _objc_duplicate_class { + uint32_t version; + uint32_t flags; + const char name[64]; +} objc_duplicate_class; +#define OBJC_HAS_DUPLICATE_CLASS 1 /* Properties */ @@ -412,7 +421,7 @@ objc_retainBlock(id _Nullable) // Extract class pointer from an isa field. -#if TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC +#if TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST && !__arm64__ // No simulators use nonpointer isa yet. #elif __LP64__ diff --git a/runtime/objc-api.h b/runtime/objc-api.h index 284f24f..26b30bf 100644 --- a/runtime/objc-api.h +++ b/runtime/objc-api.h @@ -118,6 +118,12 @@ # define NS_ENFORCE_NSOBJECT_DESIGNATED_INITIALIZER 1 #endif +/* The arm64 ABI requires proper casting to ensure arguments are passed + * * correctly. */ +#if defined(__arm64__) && !__swift__ +# undef OBJC_OLD_DISPATCH_PROTOTYPES +# define OBJC_OLD_DISPATCH_PROTOTYPES 0 +#endif /* OBJC_OLD_DISPATCH_PROTOTYPES == 0 enforces the rule that the dispatch * functions must be cast to an appropriate function pointer type. 
*/ diff --git a/runtime/objc-block-trampolines.mm b/runtime/objc-block-trampolines.mm index 9dea652..f905d35 100644 --- a/runtime/objc-block-trampolines.mm +++ b/runtime/objc-block-trampolines.mm @@ -57,6 +57,16 @@ # define TrampolinePtrauth #endif +// A page of trampolines is as big as the maximum supported page size +// everywhere except i386. i386 only exists for the watch simulator +// now, and we know it really only has 4kB pages. Also see comments +// below about PAGE_SIZE and PAGE_MAX_SIZE. +#ifdef __i386__ +#define TRAMPOLINE_PAGE_SIZE PAGE_MIN_SIZE +#else +#define TRAMPOLINE_PAGE_SIZE PAGE_MAX_SIZE +#endif + class TrampolinePointerWrapper { struct TrampolinePointers { class TrampolineAddress { @@ -103,22 +113,22 @@ class TrampolinePointerWrapper { void check() { #if DEBUG - ASSERT(impl.address() == textSegment + PAGE_MAX_SIZE); - ASSERT(impl.address() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE - assert(impl.address() + PAGE_MAX_SIZE == + ASSERT(impl.address() == textSegment + TRAMPOLINE_PAGE_SIZE); + ASSERT(impl.address() % PAGE_SIZE == 0); // not TRAMPOLINE_PAGE_SIZE + ASSERT(impl.address() + TRAMPOLINE_PAGE_SIZE == last.address() + SLOT_SIZE); ASSERT(last.address()+8 < textSegment + textSegmentSize); ASSERT((last.address() - start.address()) % SLOT_SIZE == 0); # if SUPPORT_STRET - ASSERT(impl_stret.address() == textSegment + 2*PAGE_MAX_SIZE); - ASSERT(impl_stret.address() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE - assert(impl_stret.address() + PAGE_MAX_SIZE == + ASSERT(impl_stret.address() == textSegment + 2*TRAMPOLINE_PAGE_SIZE); + ASSERT(impl_stret.address() % PAGE_SIZE == 0); // not TRAMPOLINE_PAGE_SIZE + ASSERT(impl_stret.address() + TRAMPOLINE_PAGE_SIZE == last_stret.address() + SLOT_SIZE); - assert(start.address() - impl.address() == + ASSERT(start.address() - impl.address() == start_stret.address() - impl_stret.address()); - assert(last_stret.address() + SLOT_SIZE < + ASSERT(last_stret.address() + SLOT_SIZE < textSegment + textSegmentSize); - 
assert((last_stret.address() - start_stret.address()) + ASSERT((last_stret.address() - start_stret.address()) % SLOT_SIZE == 0); # endif #endif @@ -178,8 +188,7 @@ public: uintptr_t textSegment() { return get()->textSegment; } uintptr_t textSegmentSize() { return get()->textSegmentSize; } - // See comments below about PAGE_SIZE and PAGE_MAX_SIZE. - uintptr_t dataSize() { return PAGE_MAX_SIZE; } + uintptr_t dataSize() { return TRAMPOLINE_PAGE_SIZE; } uintptr_t impl() { return get()->impl.address(); } uintptr_t start() { return get()->start.address(); } @@ -202,11 +211,13 @@ typedef enum { // We must take care with our data layout on architectures that support // multiple page sizes. // -// The trampoline template in __TEXT is sized and aligned with PAGE_MAX_SIZE. -// On some platforms this requires additional linker flags. +// The trampoline template in __TEXT is sized and aligned with PAGE_MAX_SIZE, +// except on i386 which is a weird special case that uses PAGE_MIN_SIZE. +// The TRAMPOLINE_PAGE_SIZE macro handles this difference. On some platforms, +// aligning to PAGE_MAX_SIZE requires additional linker flags. // -// When we allocate a page group, we use PAGE_MAX_SIZE size. -// This allows trampoline code to find its data by subtracting PAGE_MAX_SIZE. +// When we allocate a page group, we use TRAMPOLINE_PAGE_SIZE size. +// This allows trampoline code to find its data by subtracting TRAMPOLINE_PAGE_SIZE. // // When we allocate a page group, we use the process's page alignment. // This simplifies allocation because we don't need to force greater than @@ -231,14 +242,14 @@ struct TrampolineBlockPageGroup // Payload data: block pointers and free list. // Bytes parallel with trampoline header code are the fields above or unused - // uint8_t payloads[PAGE_MAX_SIZE - sizeof(TrampolineBlockPageGroup)] + // uint8_t payloads[TRAMPOLINE_PAGE_SIZE - sizeof(TrampolineBlockPageGroup)] // Code: Mach-O header, then trampoline header followed by trampolines. 
// On platforms with struct return we have non-stret trampolines and // stret trampolines. The stret and non-stret trampolines at a given // index share the same data page. - // uint8_t macho[PAGE_MAX_SIZE]; - // uint8_t trampolines[ArgumentModeCount][PAGE_MAX_SIZE]; + // uint8_t macho[TRAMPOLINE_PAGE_SIZE]; + // uint8_t trampolines[ArgumentModeCount][TRAMPOLINE_PAGE_SIZE]; // Per-trampoline block data format: // initial value is 0 while page data is filled sequentially @@ -280,7 +291,7 @@ struct TrampolineBlockPageGroup // Skip over the data area, one page of Mach-O headers, // and one text page for each mode before this one. return (uintptr_t)this + Trampolines.dataSize() + - PAGE_MAX_SIZE * (1 + aMode); + TRAMPOLINE_PAGE_SIZE * (1 + aMode); } IMP trampoline(int aMode, uintptr_t index) { diff --git a/runtime/objc-blocktramps-i386.s b/runtime/objc-blocktramps-i386.s index d4f1eb8..cd7c9ce 100755 --- a/runtime/objc-blocktramps-i386.s +++ b/runtime/objc-blocktramps-i386.s @@ -30,13 +30,13 @@ .globl __objc_blockTrampolineStart .globl __objc_blockTrampolineLast -.align PAGE_SHIFT +.align 12 /* PAGE_SHIFT */ __objc_blockTrampolineImpl: movl (%esp), %eax // return address pushed by trampoline // 4(%esp) is return address pushed by the call site movl 8(%esp), %ecx // self -> ecx movl %ecx, 12(%esp) // ecx -> _cmd - movl -2*PAGE_SIZE-5(%eax), %ecx // block object pointer -> ecx + movl -2*4096/*PAGE_SIZE */-5(%eax), %ecx // block object pointer -> ecx // trampoline is -5 bytes from the return address // data is -2 pages from the trampoline movl %ecx, 8(%esp) // ecx -> self @@ -567,14 +567,14 @@ __objc_blockTrampolineLast: .globl __objc_blockTrampolineStart_stret .globl __objc_blockTrampolineLast_stret -.align PAGE_SHIFT +.align 12 /* PAGE_SHIFT */ __objc_blockTrampolineImpl_stret: movl (%esp), %eax // return address pushed by trampoline // 4(%esp) is return address pushed by the call site // 8(%esp) is struct-return address movl 12(%esp), %ecx // self -> ecx movl %ecx, 
16(%esp) // ecx -> _cmd - movl -3*PAGE_SIZE-5(%eax), %ecx // block object pointer -> ecx + movl -3*4096/*PAGE_SIZE*/-5(%eax), %ecx // block object pointer -> ecx // trampoline is -5 bytes from the return address // data is -3 pages from the trampoline movl %ecx, 12(%esp) // ecx -> self diff --git a/runtime/objc-blocktramps-x86_64.s b/runtime/objc-blocktramps-x86_64.s index 5f377f0..618e0f1 100755 --- a/runtime/objc-blocktramps-x86_64.s +++ b/runtime/objc-blocktramps-x86_64.s @@ -30,22 +30,37 @@ .globl __objc_blockTrampolineStart .globl __objc_blockTrampolineLast -.align PAGE_SHIFT +.align PAGE_MAX_SHIFT __objc_blockTrampolineImpl: movq (%rsp), %r10 // read return address pushed by TrampolineEntry's callq movq %rdi, %rsi // arg1 -> arg2 - movq -2*PAGE_SIZE-5(%r10), %rdi // block object pointer -> arg1 + movq -2*PAGE_MAX_SIZE-5(%r10), %rdi // block object pointer -> arg1 // trampoline is -5 bytes from the return address // data is -2 pages from the trampoline ret // back to TrampolineEntry to preserve CPU's return stack -.macro TrampolineEntry +.macro TrampolineEntry1 // This trampoline is 8 bytes long. // This callq is 5 bytes long. callq __objc_blockTrampolineImpl jmp *16(%rdi) .endmacro +.macro TrampolineEntry4 + TrampolineEntry1 + TrampolineEntry1 + TrampolineEntry1 + TrampolineEntry1 +.endmacro + +#if PAGE_MAX_SHIFT == 12 +#define TrampolineEntry TrampolineEntry1 +#elif PAGE_MAX_SHIFT == 14 +#define TrampolineEntry TrampolineEntry4 +#else +#error "unknown PAGE_MAX_SHIFT value" +#endif + .align 5 __objc_blockTrampolineStart: TrampolineEntry @@ -555,8 +570,26 @@ __objc_blockTrampolineStart: TrampolineEntry TrampolineEntry TrampolineEntry + +// The above is 507 entries. +#if PAGE_MAX_SHIFT == 14 +// With 16kB pages, we need (4096*4-32)/8 = 2044 single entries, or +// 511 "quad" entries as above. We need 3 more regular entries, then +// 3 more singular entries, and finally a singular entry labeled Last. 
+ TrampolineEntry + TrampolineEntry + TrampolineEntry + TrampolineEntry1 + TrampolineEntry1 + TrampolineEntry1 +__objc_blockTrampolineLast: + TrampolineEntry1 +#else +// With 4kB pages, we need (4096-32)/8 = 508 entries. We have one +// more at the end with the Last label for a total of 508. __objc_blockTrampolineLast: TrampolineEntry +#endif .text @@ -564,24 +597,39 @@ __objc_blockTrampolineLast: .globl __objc_blockTrampolineStart_stret .globl __objc_blockTrampolineLast_stret -.align PAGE_SHIFT +.align PAGE_MAX_SHIFT __objc_blockTrampolineImpl_stret: // %rdi -- arg1 -- is address of return value's space. Don't mess with it. movq (%rsp), %r10 // read return address pushed by TrampolineEntry's callq movq %rsi, %rdx // arg2 -> arg3 - movq -3*PAGE_SIZE-5(%r10), %rsi // block object pointer -> arg2 + movq -3*PAGE_MAX_SIZE-5(%r10), %rsi // block object pointer -> arg2 // trampoline is -5 bytes from the return address // data is -3 pages from the trampoline ret // back to TrampolineEntry to preserve CPU's return stack -.macro TrampolineEntry_stret +.macro TrampolineEntry_stret1 // This trampoline is 8 bytes long. // This callq is 5 bytes long. callq __objc_blockTrampolineImpl_stret jmp *16(%rsi) .endmacro +.macro TrampolineEntry_stret4 + TrampolineEntry_stret1 + TrampolineEntry_stret1 + TrampolineEntry_stret1 + TrampolineEntry_stret1 +.endmacro + +#if PAGE_MAX_SHIFT == 12 +#define TrampolineEntry_stret TrampolineEntry_stret1 +#elif PAGE_MAX_SHIFT == 14 +#define TrampolineEntry_stret TrampolineEntry_stret4 +#else +#error "unknown PAGE_MAX_SHIFT value" +#endif + .align 5 __objc_blockTrampolineStart_stret: TrampolineEntry_stret @@ -1091,7 +1139,21 @@ __objc_blockTrampolineStart_stret: TrampolineEntry_stret TrampolineEntry_stret TrampolineEntry_stret + +// See the comment on non-stret's Last for why we have additional +// entries here. 
+#if PAGE_MAX_SHIFT == 14 + TrampolineEntry_stret + TrampolineEntry_stret + TrampolineEntry_stret + TrampolineEntry_stret1 + TrampolineEntry_stret1 + TrampolineEntry_stret1 +__objc_blockTrampolineLast_stret: + TrampolineEntry_stret1 +#else __objc_blockTrampolineLast_stret: TrampolineEntry_stret +#endif #endif diff --git a/runtime/objc-cache-old.mm b/runtime/objc-cache-old.mm index fed884a..50fbab0 100644 --- a/runtime/objc-cache-old.mm +++ b/runtime/objc-cache-old.mm @@ -1795,9 +1795,5 @@ void _class_printMethodCacheStatistics(void) #endif -void cache_init() -{ -} - // !__OBJC2__ #endif diff --git a/runtime/objc-cache.h b/runtime/objc-cache.h deleted file mode 100644 index e0448e7..0000000 --- a/runtime/objc-cache.h +++ /dev/null @@ -1,23 +0,0 @@ - -#ifndef _OBJC_CACHE_H -#define _OBJC_CACHE_H - -#include "objc-private.h" - -__BEGIN_DECLS - -extern void cache_init(void); - -extern IMP cache_getImp(Class cls, SEL sel); - -extern void cache_fill(Class cls, SEL sel, IMP imp, id receiver); - -extern void cache_erase_nolock(Class cls); - -extern void cache_delete(Class cls); - -extern void cache_collect(bool collectALot); - -__END_DECLS - -#endif diff --git a/runtime/objc-cache.mm b/runtime/objc-cache.mm index 7656391..213d147 100644 --- a/runtime/objc-cache.mm +++ b/runtime/objc-cache.mm @@ -63,14 +63,12 @@ * objc_msgSend* * cache_getImp * - * Cache writers (hold cacheUpdateLock while reading or writing; not PC-checked) - * cache_fill (acquires lock) - * cache_expand (only called from cache_fill) - * cache_create (only called from cache_expand) - * bcopy (only called from instrumented cache_expand) - * flush_caches (acquires lock) - * cache_flush (only called from cache_fill and flush_caches) - * cache_collect_free (only called from cache_expand and cache_flush) + * Cache readers/writers (hold cacheUpdateLock during access; not PC-checked) + * cache_t::copyCacheNolock (caller must hold the lock) + * cache_t::eraseNolock (caller must hold the lock) + * 
cache_t::collectNolock (caller must hold the lock) + * cache_t::insert (acquires lock) + * cache_t::destroy (acquires lock) * * UNPROTECTED cache readers (NOT thread-safe; used for debug info only) * cache_print @@ -84,18 +82,81 @@ #if __OBJC2__ #include "objc-private.h" -#include "objc-cache.h" +#if TARGET_OS_OSX +#include +#include +#endif + +#if __arm__ || __x86_64__ || __i386__ + +// objc_msgSend has few registers available. +// Cache scan increments and wraps at special end-marking bucket. +#define CACHE_END_MARKER 1 + +// Historical fill ratio of 75% (since the new objc runtime was introduced). +static inline mask_t cache_fill_ratio(mask_t capacity) { + return capacity * 3 / 4; +} + +#elif __arm64__ && !__LP64__ + +// objc_msgSend has lots of registers available. +// Cache scan decrements. No end marker needed. +#define CACHE_END_MARKER 0 + +// Historical fill ratio of 75% (since the new objc runtime was introduced). +static inline mask_t cache_fill_ratio(mask_t capacity) { + return capacity * 3 / 4; +} + +#elif __arm64__ && __LP64__ + +// objc_msgSend has lots of registers available. +// Cache scan decrements. No end marker needed. +#define CACHE_END_MARKER 0 + +// Allow 87.5% fill ratio in the fast path for all cache sizes. +// Increasing the cache fill ratio reduces the fragmentation and wasted space +// in imp-caches at the cost of potentially increasing the average lookup of +// a selector in imp-caches by increasing collision chains. Another potential +// change is that cache table resizes / resets happen at different moments. +static inline mask_t cache_fill_ratio(mask_t capacity) { + return capacity * 7 / 8; +} + +// Allow 100% cache utilization for smaller cache sizes. This has the same +// advantages and disadvantages as the fill ratio. A very large percentage +// of caches end up with very few entries and the worst case of collision +// chains in small tables is relatively small. 
+// NOTE: objc_msgSend properly handles a cache lookup with a full cache. +#define CACHE_ALLOW_FULL_UTILIZATION 1 + +#else +#error unknown architecture +#endif /* Initial cache bucket count. INIT_CACHE_SIZE must be a power of two. */ enum { +#if CACHE_END_MARKER || (__arm64__ && !__LP64__) + // When we have a cache end marker it fills a bucket slot, so having a + // initial cache size of 2 buckets would not be efficient when one of the + // slots is always filled with the end marker. So start with a cache size + // 4 buckets. INIT_CACHE_SIZE_LOG2 = 2, +#else + // Allow an initial bucket size of 2 buckets, since a large number of + // classes, especially metaclasses, have very few imps, and we support + // the ability to fill 100% of the cache before resizing. + INIT_CACHE_SIZE_LOG2 = 1, +#endif INIT_CACHE_SIZE = (1 << INIT_CACHE_SIZE_LOG2), MAX_CACHE_SIZE_LOG2 = 16, MAX_CACHE_SIZE = (1 << MAX_CACHE_SIZE_LOG2), + FULL_UTILIZATION_CACHE_SIZE_LOG2 = 3, + FULL_UTILIZATION_CACHE_SIZE = (1 << FULL_UTILIZATION_CACHE_SIZE_LOG2), }; -static void cache_collect_free(struct bucket_t *data, mask_t capacity); static int _collecting_in_critical(void); static void _garbage_make_room(void); @@ -171,25 +232,21 @@ asm("\n .section __TEXT,__const" #endif ); +#if CONFIG_USE_PREOPT_CACHES +__attribute__((used, section("__DATA_CONST,__objc_scoffs"))) +uintptr_t objc_opt_offsets[__OBJC_OPT_OFFSETS_COUNT]; +#endif -#if __arm__ || __x86_64__ || __i386__ -// objc_msgSend has few registers available. -// Cache scan increments and wraps at special end-marking bucket. -#define CACHE_END_MARKER 1 +#if CACHE_END_MARKER static inline mask_t cache_next(mask_t i, mask_t mask) { return (i+1) & mask; } - #elif __arm64__ -// objc_msgSend has lots of registers available. -// Cache scan decrements. No end marker needed. -#define CACHE_END_MARKER 0 static inline mask_t cache_next(mask_t i, mask_t mask) { return i ? 
i-1 : mask; } - #else -#error unknown architecture +#error unexpected configuration #endif @@ -249,29 +306,27 @@ ldp(uintptr_t& onep, uintptr_t& twop, const void *srcp) static inline mask_t cache_hash(SEL sel, mask_t mask) { - return (mask_t)(uintptr_t)sel & mask; -} - -cache_t *getCache(Class cls) -{ - ASSERT(cls); - return &cls->cache; + uintptr_t value = (uintptr_t)sel; +#if CONFIG_USE_PREOPT_CACHES + value ^= value >> 7; +#endif + return (mask_t)(value & mask); } #if __arm64__ template -void bucket_t::set(SEL newSel, IMP newImp, Class cls) +void bucket_t::set(bucket_t *base, SEL newSel, IMP newImp, Class cls) { - ASSERT(_sel.load(memory_order::memory_order_relaxed) == 0 || - _sel.load(memory_order::memory_order_relaxed) == newSel); + ASSERT(_sel.load(memory_order_relaxed) == 0 || + _sel.load(memory_order_relaxed) == newSel); static_assert(offsetof(bucket_t,_imp) == 0 && offsetof(bucket_t,_sel) == sizeof(void *), "bucket_t layout doesn't match arm64 bucket_t::set()"); uintptr_t encodedImp = (impEncoding == Encoded - ? encodeImp(newImp, newSel, cls) + ? encodeImp(base, newImp, newSel, cls) : (uintptr_t)newImp); // LDP/STP guarantees that all observers get @@ -282,10 +337,10 @@ void bucket_t::set(SEL newSel, IMP newImp, Class cls) #else template -void bucket_t::set(SEL newSel, IMP newImp, Class cls) +void bucket_t::set(bucket_t *base, SEL newSel, IMP newImp, Class cls) { - ASSERT(_sel.load(memory_order::memory_order_relaxed) == 0 || - _sel.load(memory_order::memory_order_relaxed) == newSel); + ASSERT(_sel.load(memory_order_relaxed) == 0 || + _sel.load(memory_order_relaxed) == newSel); // objc_msgSend uses sel and imp with no locks. // It is safe for objc_msgSend to see new imp but NULL sel @@ -294,29 +349,195 @@ void bucket_t::set(SEL newSel, IMP newImp, Class cls) // Therefore we write new imp, wait a lot, then write new sel. uintptr_t newIMP = (impEncoding == Encoded - ? encodeImp(newImp, newSel, cls) + ? 
encodeImp(base, newImp, newSel, cls) : (uintptr_t)newImp); if (atomicity == Atomic) { - _imp.store(newIMP, memory_order::memory_order_relaxed); + _imp.store(newIMP, memory_order_relaxed); - if (_sel.load(memory_order::memory_order_relaxed) != newSel) { + if (_sel.load(memory_order_relaxed) != newSel) { #ifdef __arm__ mega_barrier(); - _sel.store(newSel, memory_order::memory_order_relaxed); + _sel.store(newSel, memory_order_relaxed); #elif __x86_64__ || __i386__ - _sel.store(newSel, memory_order::memory_order_release); + _sel.store(newSel, memory_order_release); #else #error Don't know how to do bucket_t::set on this architecture. #endif } } else { - _imp.store(newIMP, memory_order::memory_order_relaxed); - _sel.store(newSel, memory_order::memory_order_relaxed); + _imp.store(newIMP, memory_order_relaxed); + _sel.store(newSel, memory_order_relaxed); + } +} + +#endif + +void cache_t::initializeToEmpty() +{ + _bucketsAndMaybeMask.store((uintptr_t)&_objc_empty_cache, std::memory_order_relaxed); + _originalPreoptCache.store(nullptr, std::memory_order_relaxed); +} + +#if CONFIG_USE_PREOPT_CACHES +/* + * The shared cache builder will sometimes have prebuilt an IMP cache + * for the class and left a `preopt_cache_t` pointer in _originalPreoptCache. + * + * However we have this tension: + * - when the class is realized it has to have a cache that can't resolve any + * selector until the class is properly initialized so that every + * caller falls in the slowpath and synchronizes with the class initializing, + * - we need to remember that cache pointer and we have no space for that. + * + * The caches are designed so that preopt_cache::bit_one is set to 1, + * so we "disguise" the pointer so that it looks like a cache of capacity 1 + * where that bit one aliases with where the top bit of a SEL in the bucket_t + * would live: + * + * +----------------+----------------+ + * | IMP | SEL | << a bucket_t + * +----------------+----------------+--------------... 
+ * preopt_cache_t >>| 1| ... + * +----------------+--------------... + * + * The shared cache guarantees that there's valid memory to read under "IMP" + * + * This lets us encode the original preoptimized cache pointer during + * initialization, and we can reconstruct its original address and install + * it back later. + */ +void cache_t::initializeToPreoptCacheInDisguise(const preopt_cache_t *cache) +{ + // preopt_cache_t::bit_one is 1 which sets the top bit + // and is never set on any valid selector + + uintptr_t value = (uintptr_t)cache + sizeof(preopt_cache_t) - + (bucket_t::offsetOfSel() + sizeof(SEL)); + + _originalPreoptCache.store(nullptr, std::memory_order_relaxed); + setBucketsAndMask((bucket_t *)value, 0); + _occupied = cache->occupied; +} + +void cache_t::maybeConvertToPreoptimized() +{ + const preopt_cache_t *cache = disguised_preopt_cache(); + + if (cache == nil) { + return; } + + if (!cls()->allowsPreoptCaches() || + (cache->has_inlines && !cls()->allowsPreoptInlinedSels())) { + if (PrintCaches) { + _objc_inform("CACHES: %sclass %s: dropping cache (from %s)", + cls()->isMetaClass() ? "meta" : "", + cls()->nameForLogging(), "setInitialized"); + } + return setBucketsAndMask(emptyBuckets(), 0); + } + + uintptr_t value = (uintptr_t)&cache->entries; +#if __has_feature(ptrauth_calls) + value = (uintptr_t)ptrauth_sign_unauthenticated((void *)value, + ptrauth_key_process_dependent_data, (uintptr_t)cls()); +#endif + value |= preoptBucketsHashParams(cache) | preoptBucketsMarker; + _bucketsAndMaybeMask.store(value, memory_order_relaxed); + _occupied = cache->occupied; +} + +void cache_t::initializeToEmptyOrPreoptimizedInDisguise() +{ + if (os_fastpath(!DisablePreoptCaches)) { + if (!objc::dataSegmentsRanges.inSharedCache((uintptr_t)this)) { + if (dyld_shared_cache_some_image_overridden()) { + // If the system has roots, then we must disable preoptimized + // caches completely. 
If a class in another image has a + // superclass in the root, the offset to the superclass will + // be wrong. rdar://problem/61601961 + cls()->setDisallowPreoptCachesRecursively("roots"); + } + return initializeToEmpty(); + } + + auto cache = _originalPreoptCache.load(memory_order_relaxed); + if (cache) { + return initializeToPreoptCacheInDisguise(cache); + } + } + + return initializeToEmpty(); } +const preopt_cache_t *cache_t::preopt_cache() const +{ + auto addr = _bucketsAndMaybeMask.load(memory_order_relaxed); + addr &= preoptBucketsMask; +#if __has_feature(ptrauth_calls) +#if __BUILDING_OBJCDT__ + addr = (uintptr_t)ptrauth_strip((preopt_cache_entry_t *)addr, + ptrauth_key_process_dependent_data); +#else + addr = (uintptr_t)ptrauth_auth_data((preopt_cache_entry_t *)addr, + ptrauth_key_process_dependent_data, (uintptr_t)cls()); #endif +#endif + return (preopt_cache_t *)(addr - sizeof(preopt_cache_t)); +} + +const preopt_cache_t *cache_t::disguised_preopt_cache() const +{ + bucket_t *b = buckets(); + if ((intptr_t)b->sel() >= 0) return nil; + + uintptr_t value = (uintptr_t)b + bucket_t::offsetOfSel() + sizeof(SEL); + return (preopt_cache_t *)(value - sizeof(preopt_cache_t)); +} + +Class cache_t::preoptFallbackClass() const +{ + return (Class)((uintptr_t)cls() + preopt_cache()->fallback_class_offset); +} + +bool cache_t::isConstantOptimizedCache(bool strict, uintptr_t empty_addr) const +{ + uintptr_t addr = _bucketsAndMaybeMask.load(memory_order_relaxed); + if (addr & preoptBucketsMarker) { + return true; + } + if (strict) { + return false; + } + return mask() == 0 && addr != empty_addr; +} + +bool cache_t::shouldFlush(SEL sel, IMP imp) const +{ + // This test isn't backwards: disguised caches aren't "strict" + // constant optimized caches + if (!isConstantOptimizedCache(/*strict*/true)) { + const preopt_cache_t *cache = disguised_preopt_cache(); + if (cache) { + uintptr_t offs = (uintptr_t)sel - (uintptr_t)@selector(🤯); + uintptr_t slot = ((offs >> cache->shift) 
& cache->mask); + auto &entry = cache->entries[slot]; + + return entry.sel_offs == offs && + (uintptr_t)cls() - entry.imp_offs == + (uintptr_t)ptrauth_strip(imp, ptrauth_key_function_pointer); + } + } + + return cache_getImp(cls(), sel) == imp; +} + +bool cache_t::isConstantOptimizedCacheWithInlinedSels() const +{ + return isConstantOptimizedCache(/* strict */true) && preopt_cache()->has_inlines; +} +#endif // CONFIG_USE_PREOPT_CACHES #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_OUTLINED @@ -333,135 +554,85 @@ void cache_t::setBucketsAndMask(struct bucket_t *newBuckets, mask_t newMask) // ensure other threads see buckets contents before buckets pointer mega_barrier(); - _buckets.store(newBuckets, memory_order::memory_order_relaxed); - + _bucketsAndMaybeMask.store((uintptr_t)newBuckets, memory_order_relaxed); + // ensure other threads see new buckets before new mask mega_barrier(); - - _mask.store(newMask, memory_order::memory_order_relaxed); + + _maybeMask.store(newMask, memory_order_relaxed); _occupied = 0; #elif __x86_64__ || i386 // ensure other threads see buckets contents before buckets pointer - _buckets.store(newBuckets, memory_order::memory_order_release); - + _bucketsAndMaybeMask.store((uintptr_t)newBuckets, memory_order_release); + // ensure other threads see new buckets before new mask - _mask.store(newMask, memory_order::memory_order_release); + _maybeMask.store(newMask, memory_order_release); _occupied = 0; #else #error Don't know how to do setBucketsAndMask on this architecture. 
#endif } -struct bucket_t *cache_t::emptyBuckets() -{ - return (bucket_t *)&_objc_empty_cache; -} - -struct bucket_t *cache_t::buckets() +mask_t cache_t::mask() const { - return _buckets.load(memory_order::memory_order_relaxed); + return _maybeMask.load(memory_order_relaxed); } -mask_t cache_t::mask() -{ - return _mask.load(memory_order::memory_order_relaxed); -} - -void cache_t::initializeToEmpty() -{ - bzero(this, sizeof(*this)); - _buckets.store((bucket_t *)&_objc_empty_cache, memory_order::memory_order_relaxed); -} - -#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 +#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 || CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS void cache_t::setBucketsAndMask(struct bucket_t *newBuckets, mask_t newMask) { uintptr_t buckets = (uintptr_t)newBuckets; uintptr_t mask = (uintptr_t)newMask; - + ASSERT(buckets <= bucketsMask); ASSERT(mask <= maxMask); - - _maskAndBuckets.store(((uintptr_t)newMask << maskShift) | (uintptr_t)newBuckets, std::memory_order_relaxed); - _occupied = 0; -} -struct bucket_t *cache_t::emptyBuckets() -{ - return (bucket_t *)&_objc_empty_cache; -} - -struct bucket_t *cache_t::buckets() -{ - uintptr_t maskAndBuckets = _maskAndBuckets.load(memory_order::memory_order_relaxed); - return (bucket_t *)(maskAndBuckets & bucketsMask); + _bucketsAndMaybeMask.store(((uintptr_t)newMask << maskShift) | (uintptr_t)newBuckets, memory_order_relaxed); + _occupied = 0; } -mask_t cache_t::mask() +mask_t cache_t::mask() const { - uintptr_t maskAndBuckets = _maskAndBuckets.load(memory_order::memory_order_relaxed); + uintptr_t maskAndBuckets = _bucketsAndMaybeMask.load(memory_order_relaxed); return maskAndBuckets >> maskShift; } -void cache_t::initializeToEmpty() -{ - bzero(this, sizeof(*this)); - _maskAndBuckets.store((uintptr_t)&_objc_empty_cache, std::memory_order_relaxed); -} - #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4 void cache_t::setBucketsAndMask(struct bucket_t *newBuckets, mask_t newMask) 
{ uintptr_t buckets = (uintptr_t)newBuckets; unsigned mask = (unsigned)newMask; - + ASSERT(buckets == (buckets & bucketsMask)); ASSERT(mask <= 0xffff); - - // The shift amount is equal to the number of leading zeroes in - // the last 16 bits of mask. Count all the leading zeroes, then - // subtract to ignore the top half. - uintptr_t maskShift = __builtin_clz(mask) - (sizeof(mask) * CHAR_BIT - 16); - ASSERT(mask == (0xffff >> maskShift)); - - _maskAndBuckets.store(buckets | maskShift, memory_order::memory_order_relaxed); + + _bucketsAndMaybeMask.store(buckets | objc::mask16ShiftBits(mask), memory_order_relaxed); _occupied = 0; - + ASSERT(this->buckets() == newBuckets); ASSERT(this->mask() == newMask); } -struct bucket_t *cache_t::emptyBuckets() +mask_t cache_t::mask() const { - return (bucket_t *)((uintptr_t)&_objc_empty_cache & bucketsMask); -} - -struct bucket_t *cache_t::buckets() -{ - uintptr_t maskAndBuckets = _maskAndBuckets.load(memory_order::memory_order_relaxed); - return (bucket_t *)(maskAndBuckets & bucketsMask); -} - -mask_t cache_t::mask() -{ - uintptr_t maskAndBuckets = _maskAndBuckets.load(memory_order::memory_order_relaxed); + uintptr_t maskAndBuckets = _bucketsAndMaybeMask.load(memory_order_relaxed); uintptr_t maskShift = (maskAndBuckets & maskMask); return 0xffff >> maskShift; } -void cache_t::initializeToEmpty() -{ - bzero(this, sizeof(*this)); - _maskAndBuckets.store((uintptr_t)&_objc_empty_cache, std::memory_order_relaxed); -} - #else #error Unknown cache mask storage type. #endif -mask_t cache_t::occupied() +struct bucket_t *cache_t::buckets() const +{ + uintptr_t addr = _bucketsAndMaybeMask.load(memory_order_relaxed); + return (bucket_t *)(addr & bucketsMask); +} + +mask_t cache_t::occupied() const { return _occupied; } @@ -471,11 +642,15 @@ void cache_t::incrementOccupied() _occupied++; } -unsigned cache_t::capacity() +unsigned cache_t::capacity() const { return mask() ? 
mask()+1 : 0; } +Class cache_t::cls() const +{ + return (Class)((uintptr_t)this - offsetof(objc_class, cache)); +} size_t cache_t::bytesForCapacity(uint32_t cap) { @@ -489,22 +664,21 @@ bucket_t *cache_t::endMarker(struct bucket_t *b, uint32_t cap) return (bucket_t *)((uintptr_t)b + bytesForCapacity(cap)) - 1; } -bucket_t *allocateBuckets(mask_t newCapacity) +bucket_t *cache_t::allocateBuckets(mask_t newCapacity) { // Allocate one extra bucket to mark the end of the list. // This can't overflow mask_t because newCapacity is a power of 2. - bucket_t *newBuckets = (bucket_t *) - calloc(cache_t::bytesForCapacity(newCapacity), 1); + bucket_t *newBuckets = (bucket_t *)calloc(bytesForCapacity(newCapacity), 1); - bucket_t *end = cache_t::endMarker(newBuckets, newCapacity); + bucket_t *end = endMarker(newBuckets, newCapacity); #if __arm__ // End marker's sel is 1 and imp points BEFORE the first bucket. // This saves an instruction in objc_msgSend. - end->set((SEL)(uintptr_t)1, (IMP)(newBuckets - 1), nil); + end->set(newBuckets, (SEL)(uintptr_t)1, (IMP)(newBuckets - 1), nil); #else // End marker's sel is 1 and imp points to the first bucket. 
- end->set((SEL)(uintptr_t)1, (IMP)newBuckets, nil); + end->set(newBuckets, (SEL)(uintptr_t)1, (IMP)newBuckets, nil); #endif if (PrintCaches) recordNewCache(newCapacity); @@ -514,17 +688,21 @@ bucket_t *allocateBuckets(mask_t newCapacity) #else -bucket_t *allocateBuckets(mask_t newCapacity) +bucket_t *cache_t::allocateBuckets(mask_t newCapacity) { if (PrintCaches) recordNewCache(newCapacity); - return (bucket_t *)calloc(cache_t::bytesForCapacity(newCapacity), 1); + return (bucket_t *)calloc(bytesForCapacity(newCapacity), 1); } #endif +struct bucket_t *cache_t::emptyBuckets() +{ + return (bucket_t *)((uintptr_t)&_objc_empty_cache & bucketsMask); +} -bucket_t *emptyBucketsForCapacity(mask_t capacity, bool allocate = true) +bucket_t *cache_t::emptyBucketsForCapacity(mask_t capacity, bool allocate) { #if CONFIG_USE_CACHE_LOCK cacheUpdateLock.assertLocked(); @@ -532,11 +710,11 @@ bucket_t *emptyBucketsForCapacity(mask_t capacity, bool allocate = true) runtimeLock.assertLocked(); #endif - size_t bytes = cache_t::bytesForCapacity(capacity); + size_t bytes = bytesForCapacity(capacity); // Use _objc_empty_cache if the buckets is small enough. if (bytes <= EMPTY_BYTES) { - return cache_t::emptyBuckets(); + return emptyBuckets(); } // Use shared empty buckets allocated on the heap. 
@@ -568,17 +746,16 @@ bucket_t *emptyBucketsForCapacity(mask_t capacity, bool allocate = true) return emptyBucketsList[index]; } - -bool cache_t::isConstantEmptyCache() +bool cache_t::isConstantEmptyCache() const { - return - occupied() == 0 && + return + occupied() == 0 && buckets() == emptyBucketsForCapacity(capacity(), false); } -bool cache_t::canBeFreed() +bool cache_t::canBeFreed() const { - return !isConstantEmptyCache(); + return !isConstantEmptyCache() && !isConstantOptimizedCache(); } ALWAYS_INLINE @@ -597,68 +774,79 @@ void cache_t::reallocate(mask_t oldCapacity, mask_t newCapacity, bool freeOld) setBucketsAndMask(newBuckets, newCapacity - 1); if (freeOld) { - cache_collect_free(oldBuckets, oldCapacity); + collect_free(oldBuckets, oldCapacity); } } -void cache_t::bad_cache(id receiver, SEL sel, Class isa) +void cache_t::bad_cache(id receiver, SEL sel) { // Log in separate steps in case the logging itself causes a crash. _objc_inform_now_and_on_crash ("Method cache corrupted. This may be a message to an " "invalid object, or a memory error somewhere else."); - cache_t *cache = &isa->cache; #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_OUTLINED - bucket_t *buckets = cache->_buckets.load(memory_order::memory_order_relaxed); + bucket_t *b = buckets(); _objc_inform_now_and_on_crash ("%s %p, SEL %p, isa %p, cache %p, buckets %p, " "mask 0x%x, occupied 0x%x", receiver ? "receiver" : "unused", receiver, - sel, isa, cache, buckets, - cache->_mask.load(memory_order::memory_order_relaxed), - cache->_occupied); + sel, cls(), this, b, + _maybeMask.load(memory_order_relaxed), + _occupied); _objc_inform_now_and_on_crash ("%s %zu bytes, buckets %zu bytes", receiver ? 
"receiver" : "unused", malloc_size(receiver), - malloc_size(buckets)); + malloc_size(b)); #elif (CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 || \ + CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS || \ CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4) - uintptr_t maskAndBuckets = cache->_maskAndBuckets.load(memory_order::memory_order_relaxed); + uintptr_t maskAndBuckets = _bucketsAndMaybeMask.load(memory_order_relaxed); _objc_inform_now_and_on_crash ("%s %p, SEL %p, isa %p, cache %p, buckets and mask 0x%lx, " "occupied 0x%x", receiver ? "receiver" : "unused", receiver, - sel, isa, cache, maskAndBuckets, - cache->_occupied); + sel, cls(), this, maskAndBuckets, _occupied); _objc_inform_now_and_on_crash ("%s %zu bytes, buckets %zu bytes", receiver ? "receiver" : "unused", malloc_size(receiver), - malloc_size(cache->buckets())); + malloc_size(buckets())); #else #error Unknown cache mask storage type. #endif _objc_inform_now_and_on_crash ("selector '%s'", sel_getName(sel)); _objc_inform_now_and_on_crash - ("isa '%s'", isa->nameForLogging()); + ("isa '%s'", cls()->nameForLogging()); _objc_fatal ("Method cache corrupted. 
This may be a message to an " "invalid object, or a memory error somewhere else."); } -ALWAYS_INLINE -void cache_t::insert(Class cls, SEL sel, IMP imp, id receiver) +void cache_t::insert(SEL sel, IMP imp, id receiver) { -#if CONFIG_USE_CACHE_LOCK - cacheUpdateLock.assertLocked(); -#else runtimeLock.assertLocked(); + + // Never cache before +initialize is done + if (slowpath(!cls()->isInitialized())) { + return; + } + + if (isConstantOptimizedCache()) { + _objc_fatal("cache_t::insert() called with a preoptimized cache for %s", + cls()->nameForLogging()); + } + +#if DEBUG_TASK_THREADS + return _collecting_in_critical(); +#else +#if CONFIG_USE_CACHE_LOCK + mutex_locker_t lock(cacheUpdateLock); #endif - ASSERT(sel != 0 && cls->isInitialized()); + ASSERT(sel != 0 && cls()->isInitialized()); - // Use the cache as-is if it is less than 3/4 full + // Use the cache as-is if until we exceed our expected fill ratio. mask_t newOccupied = occupied() + 1; unsigned oldCapacity = capacity(), capacity = oldCapacity; if (slowpath(isConstantEmptyCache())) { @@ -666,9 +854,14 @@ void cache_t::insert(Class cls, SEL sel, IMP imp, id receiver) if (!capacity) capacity = INIT_CACHE_SIZE; reallocate(oldCapacity, capacity, /* freeOld */false); } - else if (fastpath(newOccupied + CACHE_END_MARKER <= capacity / 4 * 3)) { - // Cache is less than 3/4 full. Use it as-is. + else if (fastpath(newOccupied + CACHE_END_MARKER <= cache_fill_ratio(capacity))) { + // Cache is less than 3/4 or 7/8 full. Use it as-is. + } +#if CACHE_ALLOW_FULL_UTILIZATION + else if (capacity <= FULL_UTILIZATION_CACHE_SIZE && newOccupied + CACHE_END_MARKER <= capacity) { + // Allow 100% cache utilization for small buckets. Use it as-is. } +#endif else { capacity = capacity ? capacity * 2 : INIT_CACHE_SIZE; if (capacity > MAX_CACHE_SIZE) { @@ -683,12 +876,11 @@ void cache_t::insert(Class cls, SEL sel, IMP imp, id receiver) mask_t i = begin; // Scan for the first unused slot and insert there. 
- // There is guaranteed to be an empty slot because the - // minimum size is 4 and we resized at 3/4 full. + // There is guaranteed to be an empty slot. do { if (fastpath(b[i].sel() == 0)) { incrementOccupied(); - b[i].set(sel, imp, cls); + b[i].set(b, sel, imp, cls()); return; } if (b[i].sel() == sel) { @@ -698,31 +890,54 @@ void cache_t::insert(Class cls, SEL sel, IMP imp, id receiver) } } while (fastpath((i = cache_next(i, m)) != begin)); - cache_t::bad_cache(receiver, (SEL)sel, cls); + bad_cache(receiver, (SEL)sel); +#endif // !DEBUG_TASK_THREADS } -void cache_fill(Class cls, SEL sel, IMP imp, id receiver) +void cache_t::copyCacheNolock(objc_imp_cache_entry *buffer, int len) { - runtimeLock.assertLocked(); - -#if !DEBUG_TASK_THREADS - // Never cache before +initialize is done - if (cls->isInitialized()) { - cache_t *cache = getCache(cls); #if CONFIG_USE_CACHE_LOCK - mutex_locker_t lock(cacheUpdateLock); + cacheUpdateLock.assertLocked(); +#else + runtimeLock.assertLocked(); #endif - cache->insert(cls, sel, imp, receiver); + int wpos = 0; + +#if CONFIG_USE_PREOPT_CACHES + if (isConstantOptimizedCache()) { + auto cache = preopt_cache(); + auto mask = cache->mask; + uintptr_t sel_base = objc_opt_offsets[OBJC_OPT_METHODNAME_START]; + uintptr_t imp_base = (uintptr_t)&cache->entries; + + for (uintptr_t index = 0; index <= mask && wpos < len; index++) { + auto &ent = cache->entries[index]; + if (~ent.sel_offs) { + buffer[wpos].sel = (SEL)(sel_base + ent.sel_offs); + buffer[wpos].imp = (IMP)(imp_base - ent.imp_offs); + wpos++; + } + } + return; } -#else - _collecting_in_critical(); #endif + { + bucket_t *buckets = this->buckets(); + uintptr_t count = capacity(); + + for (uintptr_t index = 0; index < count && wpos < len; index++) { + if (buckets[index].sel()) { + buffer[wpos].imp = buckets[index].imp(buckets, cls()); + buffer[wpos].sel = buckets[index].sel(); + wpos++; + } + } + } } - // Reset this entire cache to the uncached lookup by reallocating it. 
// This must not shrink the cache - that breaks the lock-free scheme. -void cache_erase_nolock(Class cls) +void cache_t::eraseNolock(const char *func) { #if CONFIG_USE_CACHE_LOCK cacheUpdateLock.assertLocked(); @@ -730,29 +945,36 @@ void cache_erase_nolock(Class cls) runtimeLock.assertLocked(); #endif - cache_t *cache = getCache(cls); - - mask_t capacity = cache->capacity(); - if (capacity > 0 && cache->occupied() > 0) { - auto oldBuckets = cache->buckets(); + if (isConstantOptimizedCache()) { + auto c = cls(); + if (PrintCaches) { + _objc_inform("CACHES: %sclass %s: dropping and disallowing preopt cache (from %s)", + c->isMetaClass() ? "meta" : "", + c->nameForLogging(), func); + } + setBucketsAndMask(emptyBuckets(), 0); + c->setDisallowPreoptCaches(); + } else if (occupied() > 0) { + auto capacity = this->capacity(); + auto oldBuckets = buckets(); auto buckets = emptyBucketsForCapacity(capacity); - cache->setBucketsAndMask(buckets, capacity - 1); // also clears occupied - cache_collect_free(oldBuckets, capacity); + setBucketsAndMask(buckets, capacity - 1); // also clears occupied + collect_free(oldBuckets, capacity); } } -void cache_delete(Class cls) +void cache_t::destroy() { #if CONFIG_USE_CACHE_LOCK mutex_locker_t lock(cacheUpdateLock); #else runtimeLock.assertLocked(); #endif - if (cls->cache.canBeFreed()) { - if (PrintCaches) recordDeadCache(cls->cache.capacity()); - free(cls->cache.buckets()); + if (canBeFreed()) { + if (PrintCaches) recordDeadCache(capacity()); + free(buckets()); } } @@ -829,7 +1051,7 @@ extern "C" task_restartable_range_t objc_restartableRanges[]; static bool shouldUseRestartableRanges = true; #endif -void cache_init() +void cache_t::init() { #if HAVE_TASK_RESTARTABLE_RANGES mach_msg_type_number_t count = 0; @@ -895,7 +1117,18 @@ static int _collecting_in_critical(void) continue; // Find out where thread is executing +#if TARGET_OS_OSX + if (oah_is_current_process_translated()) { + kern_return_t ret = objc_thread_get_rip(threads[count], 
(uint64_t*)&pc); + if (ret != KERN_SUCCESS) { + pc = PC_SENTINEL; + } + } else { + pc = _get_pc_for_thread (threads[count]); + } +#else pc = _get_pc_for_thread (threads[count]); +#endif // Check for bad status, and if so, assume the worse (can't collect) if (pc == PC_SENTINEL) @@ -980,13 +1213,13 @@ static void _garbage_make_room(void) /*********************************************************************** -* cache_collect_free. Add the specified malloc'd memory to the list +* cache_t::collect_free. Add the specified malloc'd memory to the list * of them to free at some later point. * size is used for the collection threshold. It does not have to be * precisely the block's size. * Cache locks: cacheUpdateLock must be held by the caller. **********************************************************************/ -static void cache_collect_free(bucket_t *data, mask_t capacity) +void cache_t::collect_free(bucket_t *data, mask_t capacity) { #if CONFIG_USE_CACHE_LOCK cacheUpdateLock.assertLocked(); @@ -999,7 +1232,7 @@ static void cache_collect_free(bucket_t *data, mask_t capacity) _garbage_make_room (); garbage_byte_size += cache_t::bytesForCapacity(capacity); garbage_refs[garbage_count++] = data; - cache_collect(false); + cache_t::collectNolock(false); } @@ -1008,7 +1241,7 @@ static void cache_collect_free(bucket_t *data, mask_t capacity) * collectALot tries harder to free memory. * Cache locks: cacheUpdateLock must be held by the caller. 
**********************************************************************/ -void cache_collect(bool collectALot) +void cache_t::collectNolock(bool collectALot) { #if CONFIG_USE_CACHE_LOCK cacheUpdateLock.assertLocked(); @@ -1305,6 +1538,41 @@ static kern_return_t objc_task_threads // DEBUG_TASK_THREADS #endif +OBJC_EXPORT bucket_t * objc_cache_buckets(const cache_t * cache) { + return cache->buckets(); +} + +#if CONFIG_USE_PREOPT_CACHES + +OBJC_EXPORT const preopt_cache_t * _Nonnull objc_cache_preoptCache(const cache_t * _Nonnull cache) { + return cache->preopt_cache(); +} + +OBJC_EXPORT bool objc_cache_isConstantOptimizedCache(const cache_t * _Nonnull cache, bool strict, uintptr_t empty_addr) { + return cache->isConstantOptimizedCache(strict, empty_addr); +} + +OBJC_EXPORT unsigned objc_cache_preoptCapacity(const cache_t * _Nonnull cache) { + return cache->preopt_cache()->capacity(); +} + +OBJC_EXPORT Class _Nonnull objc_cache_preoptFallbackClass(const cache_t * _Nonnull cache) { + return cache->preoptFallbackClass(); +} + +#endif + +OBJC_EXPORT size_t objc_cache_bytesForCapacity(uint32_t cap) { + return cache_t::bytesForCapacity(cap); +} + +OBJC_EXPORT uint32_t objc_cache_occupied(const cache_t * _Nonnull cache) { + return cache->occupied(); +} + +OBJC_EXPORT unsigned objc_cache_capacity(const struct cache_t * _Nonnull cache) { + return cache->capacity(); +} // __OBJC2__ #endif diff --git a/runtime/objc-class-old.mm b/runtime/objc-class-old.mm index acc269e..c0a79a7 100644 --- a/runtime/objc-class-old.mm +++ b/runtime/objc-class-old.mm @@ -336,7 +336,7 @@ static void _class_resolveClassMethod(id inst, SEL sel, Class cls) ASSERT(cls->isMetaClass()); SEL resolve_sel = @selector(resolveClassMethod:); - if (!lookUpImpOrNil(inst, resolve_sel, cls)) { + if (!lookUpImpOrNilTryCache(inst, resolve_sel, cls)) { // Resolver not implemented. 
return; } @@ -346,7 +346,7 @@ static void _class_resolveClassMethod(id inst, SEL sel, Class cls) // Cache the result (good or bad) so the resolver doesn't fire next time. // +resolveClassMethod adds to self->ISA() a.k.a. cls - IMP imp = lookUpImpOrNil(inst, sel, cls); + IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); if (resolved && PrintResolving) { if (imp) { _objc_inform("RESOLVE: method %c[%s %s] " @@ -376,7 +376,7 @@ static void _class_resolveInstanceMethod(id inst, SEL sel, Class cls) { SEL resolve_sel = @selector(resolveInstanceMethod:); - if (! lookUpImpOrNil(cls, resolve_sel, cls->ISA())) { + if (! lookUpImpOrNilTryCache(cls, resolve_sel, cls->ISA())) { // Resolver not implemented. return; } @@ -386,7 +386,7 @@ static void _class_resolveInstanceMethod(id inst, SEL sel, Class cls) // Cache the result (good or bad) so the resolver doesn't fire next time. // +resolveInstanceMethod adds to self a.k.a. cls - IMP imp = lookUpImpOrNil(inst, sel, cls); + IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); if (resolved && PrintResolving) { if (imp) { @@ -424,7 +424,7 @@ _class_resolveMethod(id inst, SEL sel, Class cls) // try [nonMetaClass resolveClassMethod:sel] // and [cls resolveInstanceMethod:sel] _class_resolveClassMethod(inst, sel, cls); - if (!lookUpImpOrNil(inst, sel, cls)) { + if (!lookUpImpOrNilTryCache(inst, sel, cls)) { _class_resolveInstanceMethod(inst, sel, cls); } } @@ -2593,8 +2593,7 @@ id object_reallocFromZone(id obj, size_t nBytes, void *z) void *object_getIndexedIvars(id obj) { // ivars are tacked onto the end of the object - if (!obj) return nil; - if (obj->isTaggedPointer()) return nil; + if (obj->isTaggedPointerOrNil()) return nil; return ((char *) obj) + obj->ISA()->alignedInstanceSize(); } diff --git a/runtime/objc-class.mm b/runtime/objc-class.mm index 776f3fa..13ea069 100644 --- a/runtime/objc-class.mm +++ b/runtime/objc-class.mm @@ -159,6 +159,9 @@ #include "objc-private.h" #include "objc-abi.h" #include +#if !TARGET_OS_WIN32 +#include 
+#endif /*********************************************************************** * Information about multi-thread support: @@ -195,9 +198,9 @@ Class object_setClass(id obj, Class cls) // weakly-referenced object has an un-+initialized isa. // Unresolved future classes are not so protected. if (!cls->isFuture() && !cls->isInitialized()) { - // use lookUpImpOrNil to indirectly provoke +initialize + // use lookUpImpOrNilTryCache to indirectly provoke +initialize // to avoid duplicating the code to actually send +initialize - lookUpImpOrNil(nil, @selector(initialize), cls, LOOKUP_INITIALIZE); + lookUpImpOrNilTryCache(nil, @selector(initialize), cls, LOOKUP_INITIALIZE); } return obj->changeIsa(cls); @@ -281,7 +284,7 @@ _class_lookUpIvar(Class cls, Ivar ivar, ptrdiff_t& ivarOffset, // Preflight the hasAutomaticIvars check // because _class_getClassForIvar() may need to take locks. bool hasAutomaticIvars = NO; - for (Class c = cls; c; c = c->superclass) { + for (Class c = cls; c; c = c->getSuperclass()) { if (c->hasAutomaticIvars()) { hasAutomaticIvars = YES; break; @@ -337,7 +340,7 @@ _class_getIvarMemoryManagement(Class cls, Ivar ivar) static ALWAYS_INLINE void _object_setIvar(id obj, Ivar ivar, id value, bool assumeStrong) { - if (!obj || !ivar || obj->isTaggedPointer()) return; + if (!ivar || obj->isTaggedPointerOrNil()) return; ptrdiff_t offset; objc_ivar_memory_management_t memoryManagement; @@ -371,7 +374,7 @@ void object_setIvarWithStrongDefault(id obj, Ivar ivar, id value) id object_getIvar(id obj, Ivar ivar) { - if (!obj || !ivar || obj->isTaggedPointer()) return nil; + if (!ivar || obj->isTaggedPointerOrNil()) return nil; ptrdiff_t offset; objc_ivar_memory_management_t memoryManagement; @@ -393,7 +396,7 @@ Ivar _object_setInstanceVariable(id obj, const char *name, void *value, { Ivar ivar = nil; - if (obj && name && !obj->isTaggedPointer()) { + if (name && !obj->isTaggedPointerOrNil()) { if ((ivar = _class_getVariable(obj->ISA(), name))) { _object_setIvar(obj, 
ivar, (id)value, assumeStrong); } @@ -415,7 +418,7 @@ Ivar object_setInstanceVariableWithStrongDefault(id obj, const char *name, Ivar object_getInstanceVariable(id obj, const char *name, void **value) { - if (obj && name && !obj->isTaggedPointer()) { + if (name && !obj->isTaggedPointerOrNil()) { Ivar ivar; if ((ivar = class_getInstanceVariable(obj->ISA(), name))) { if (value) *value = (void *)object_getIvar(obj, ivar); @@ -440,7 +443,7 @@ static void object_cxxDestructFromClass(id obj, Class cls) // Call cls's dtor first, then superclasses's dtors. - for ( ; cls; cls = cls->superclass) { + for ( ; cls; cls = cls->getSuperclass()) { if (!cls->hasCxxDtor()) return; dtor = (void(*)(id)) lookupMethodInClassAndLoadCache(cls, SEL_cxx_destruct); @@ -462,8 +465,7 @@ static void object_cxxDestructFromClass(id obj, Class cls) **********************************************************************/ void object_cxxDestruct(id obj) { - if (!obj) return; - if (obj->isTaggedPointer()) return; + if (obj->isTaggedPointerOrNil()) return; object_cxxDestructFromClass(obj, obj->ISA()); } @@ -491,7 +493,7 @@ object_cxxConstructFromClass(id obj, Class cls, int flags) id (*ctor)(id); Class supercls; - supercls = cls->superclass; + supercls = cls->getSuperclass(); // Call superclasses' ctors first, if any. if (supercls && supercls->hasCxxCtor()) { @@ -510,7 +512,7 @@ object_cxxConstructFromClass(id obj, Class cls, int flags) } if (fastpath((*ctor)(obj))) return obj; // ctor called and succeeded - ok - supercls = cls->superclass; // this reload avoids a spill on the stack + supercls = cls->getSuperclass(); // this reload avoids a spill on the stack // This class's ctor was called and failed. // Call superclasses's dtors to clean up. 
@@ -530,7 +532,7 @@ object_cxxConstructFromClass(id obj, Class cls, int flags) **********************************************************************/ void fixupCopiedIvars(id newObject, id oldObject) { - for (Class cls = oldObject->ISA(); cls; cls = cls->superclass) { + for (Class cls = oldObject->ISA(); cls; cls = cls->getSuperclass()) { if (cls->hasAutomaticIvars()) { // Use alignedInstanceStart() because unaligned bytes at the start // of this class's ivars are not represented in the layout bitmap. @@ -636,12 +638,12 @@ BOOL class_respondsToSelector(Class cls, SEL sel) // inst is an instance of cls or a subclass thereof, or nil if none is known. // Non-nil inst is faster in some cases. See lookUpImpOrForward() for details. -NEVER_INLINE BOOL +NEVER_INLINE __attribute__((flatten)) BOOL class_respondsToSelector_inst(id inst, SEL sel, Class cls) { // Avoids +initialize because it historically did so. // We're not returning a callable IMP anyway. - return sel && cls && lookUpImpOrNil(inst, sel, cls, LOOKUP_RESOLVER); + return sel && cls && lookUpImpOrNilTryCache(inst, sel, cls, LOOKUP_RESOLVER); } @@ -662,13 +664,16 @@ IMP class_lookupMethod(Class cls, SEL sel) return class_getMethodImplementation(cls, sel); } +__attribute__((flatten)) IMP class_getMethodImplementation(Class cls, SEL sel) { IMP imp; if (!cls || !sel) return nil; - imp = lookUpImpOrNil(nil, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER); + lockdebug_assert_no_locks_locked_except({ &loadMethodLock }); + + imp = lookUpImpOrNilTryCache(nil, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER); // Translate forwarding function to C-callable external version if (!imp) { @@ -775,7 +780,7 @@ Class _calloc_class(size_t size) Class class_getSuperclass(Class cls) { if (!cls) return nil; - return cls->superclass; + return cls->getSuperclass(); } BOOL class_isMetaClass(Class cls) @@ -886,6 +891,15 @@ inform_duplicate(const char *name, Class oldCls, Class newCls) const header_info *newHeader = _headerForClass(newCls); 
const char *oldName = oldHeader ? oldHeader->fname() : "??"; const char *newName = newHeader ? newHeader->fname() : "??"; + const objc_duplicate_class **_dupi = NULL; + + LINKER_SET_FOREACH(_dupi, const objc_duplicate_class **, "__objc_dupclass") { + const objc_duplicate_class *dupi = *_dupi; + + if (strcmp(dupi->name, name) == 0) { + return; + } + } (DebugDuplicateClasses ? _objc_fatal : _objc_inform) ("Class %s is implemented in both %s (%p) and %s (%p). " diff --git a/runtime/objc-config.h b/runtime/objc-config.h index 9bc9fc1..cac827e 100644 --- a/runtime/objc-config.h +++ b/runtime/objc-config.h @@ -26,15 +26,6 @@ #include -// Define __OBJC2__ for the benefit of our asm files. -#ifndef __OBJC2__ -# if TARGET_OS_OSX && !TARGET_OS_IOSMAC && __i386__ - // old ABI -# else -# define __OBJC2__ 1 -# endif -#endif - // Avoid the !NDEBUG double negative. #if !NDEBUG # define DEBUG 1 @@ -51,7 +42,7 @@ #endif // Define SUPPORT_ZONES=1 to enable malloc zone support in NXHashTable. -#if !(TARGET_OS_OSX || TARGET_OS_IOSMAC) +#if !(TARGET_OS_OSX || TARGET_OS_MACCATALYST) # define SUPPORT_ZONES 0 #else # define SUPPORT_ZONES 1 @@ -73,7 +64,7 @@ // Define SUPPORT_TAGGED_POINTERS=1 to enable tagged pointer objects // Be sure to edit tagged pointer SPI in objc-internal.h as well. -#if !(__OBJC2__ && __LP64__) +#if !__LP64__ # define SUPPORT_TAGGED_POINTERS 0 #else # define SUPPORT_TAGGED_POINTERS 1 @@ -82,7 +73,7 @@ // Define SUPPORT_MSB_TAGGED_POINTERS to use the MSB // as the tagged pointer marker instead of the LSB. // Be sure to edit tagged pointer SPI in objc-internal.h as well. -#if !SUPPORT_TAGGED_POINTERS || (TARGET_OS_OSX || TARGET_OS_IOSMAC) +#if !SUPPORT_TAGGED_POINTERS || ((TARGET_OS_OSX || TARGET_OS_MACCATALYST) && __x86_64__) # define SUPPORT_MSB_TAGGED_POINTERS 0 #else # define SUPPORT_MSB_TAGGED_POINTERS 1 @@ -101,7 +92,7 @@ // Define SUPPORT_PACKED_ISA=1 on platforms that store the class in the isa // field as a maskable pointer with other data around it. 
#if (!__LP64__ || TARGET_OS_WIN32 || \ - (TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC)) + (TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST && !__arm64__)) # define SUPPORT_PACKED_ISA 0 #else # define SUPPORT_PACKED_ISA 1 @@ -126,7 +117,7 @@ // Define SUPPORT_ZEROCOST_EXCEPTIONS to use "zero-cost" exceptions for OBJC2. // Be sure to edit objc-exception.h as well (objc_add/removeExceptionHandler) -#if !__OBJC2__ || (defined(__arm__) && __USING_SJLJ_EXCEPTIONS__) +#if defined(__arm__) && __USING_SJLJ_EXCEPTIONS__ # define SUPPORT_ZEROCOST_EXCEPTIONS 0 #else # define SUPPORT_ZEROCOST_EXCEPTIONS 1 @@ -162,6 +153,13 @@ # define SUPPORT_MESSAGE_LOGGING 1 #endif +// Define SUPPORT_AUTORELEASEPOOL_DEDDUP_PTRS to combine consecutive pointers to the same object in autorelease pools +#if !__LP64__ +# define SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS 0 +#else +# define SUPPORT_AUTORELEASEPOOL_DEDUP_PTRS 1 +#endif + // Define HAVE_TASK_RESTARTABLE_RANGES to enable usage of // task_restartable_ranges_synchronize() #if TARGET_OS_SIMULATOR || defined(__i386__) || defined(__arm__) || !TARGET_OS_MAC @@ -178,16 +176,12 @@ // because objc-class.h is public and objc-config.h is not. //#define OBJC_INSTRUMENTED -// In __OBJC2__, the runtimeLock is a mutex always held -// hence the cache lock is redundant and can be elided. +// The runtimeLock is a mutex always held hence the cache lock is +// redundant and can be elided. // // If the runtime lock ever becomes a rwlock again, // the cache lock would need to be used again -#if __OBJC2__ #define CONFIG_USE_CACHE_LOCK 0 -#else -#define CONFIG_USE_CACHE_LOCK 1 -#endif // Determine how the method cache stores IMPs. #define CACHE_IMP_ENCODING_NONE 1 // Method cache contains raw IMP. 
@@ -208,13 +202,75 @@ #define CACHE_MASK_STORAGE_OUTLINED 1 #define CACHE_MASK_STORAGE_HIGH_16 2 #define CACHE_MASK_STORAGE_LOW_4 3 +#define CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS 4 #if defined(__arm64__) && __LP64__ +#if TARGET_OS_OSX || TARGET_OS_SIMULATOR +#define CACHE_MASK_STORAGE CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS +#else #define CACHE_MASK_STORAGE CACHE_MASK_STORAGE_HIGH_16 +#endif #elif defined(__arm64__) && !__LP64__ #define CACHE_MASK_STORAGE CACHE_MASK_STORAGE_LOW_4 #else #define CACHE_MASK_STORAGE CACHE_MASK_STORAGE_OUTLINED #endif +// Constants used for signing/authing isas. This doesn't quite belong +// here, but the asm files can't import other headers. +#define ISA_SIGNING_DISCRIMINATOR 0x6AE1 +#define ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS 0xB5AB + +#define ISA_SIGNING_KEY ptrauth_key_process_independent_data + +// ISA signing authentication modes. Set ISA_SIGNING_AUTH_MODE to one +// of these to choose how ISAs are authenticated. +#define ISA_SIGNING_STRIP 1 // Strip the signature whenever reading an ISA. +#define ISA_SIGNING_AUTH 2 // Authenticate the signature on all ISAs. + + +// ISA signing modes. Set ISA_SIGNING_SIGN_MODE to one of these to +// choose how ISAs are signed. +#define ISA_SIGNING_SIGN_NONE 1 // Sign no ISAs. +#define ISA_SIGNING_SIGN_ONLY_SWIFT 2 // Only sign ISAs of Swift objects. +#define ISA_SIGNING_SIGN_ALL 3 // Sign all ISAs. 
+ +#if __has_feature(ptrauth_objc_isa_strips) || __has_feature(ptrauth_objc_isa_signs) || __has_feature(ptrauth_objc_isa_authenticates) +# if __has_feature(ptrauth_objc_isa_authenticates) +# define ISA_SIGNING_AUTH_MODE ISA_SIGNING_AUTH +# else +# define ISA_SIGNING_AUTH_MODE ISA_SIGNING_STRIP +# endif +# if __has_feature(ptrauth_objc_isa_signs) +# define ISA_SIGNING_SIGN_MODE ISA_SIGNING_SIGN_ALL +# else +# define ISA_SIGNING_SIGN_MODE ISA_SIGNING_SIGN_NONE +# endif +#else +# if __has_feature(ptrauth_objc_isa) +# define ISA_SIGNING_AUTH_MODE ISA_SIGNING_AUTH +# define ISA_SIGNING_SIGN_MODE ISA_SIGNING_SIGN_ALL +# else +# define ISA_SIGNING_AUTH_MODE ISA_SIGNING_STRIP +# define ISA_SIGNING_SIGN_MODE ISA_SIGNING_SIGN_NONE +# endif +#endif + +// When set, an unsigned superclass pointer is treated as Nil, which +// will treat the class as if its superclass was weakly linked and +// not loaded, and cause uses of the class to resolve to Nil. +#define SUPERCLASS_SIGNING_TREAT_UNSIGNED_AS_NIL 0 + +#if defined(__arm64__) && TARGET_OS_IOS && !TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST +#define CONFIG_USE_PREOPT_CACHES 1 +#else +#define CONFIG_USE_PREOPT_CACHES 0 +#endif + +// When set to 1, small methods in the shared cache have a direct +// offset to a selector. When set to 0, small methods in the shared +// cache have the same format as other small methods, with an offset +// to a selref. 
+#define CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS 1 + #endif diff --git a/runtime/objc-env.h b/runtime/objc-env.h index ccdceb6..7083b3e 100644 --- a/runtime/objc-env.h +++ b/runtime/objc-env.h @@ -36,6 +36,7 @@ OPTION( DebugMissingPools, OBJC_DEBUG_MISSING_POOLS, "warn about a OPTION( DebugPoolAllocation, OBJC_DEBUG_POOL_ALLOCATION, "halt when autorelease pools are popped out of order, and allow heap debuggers to track autorelease pools") OPTION( DebugDuplicateClasses, OBJC_DEBUG_DUPLICATE_CLASSES, "halt when multiple classes with the same name are present") OPTION( DebugDontCrash, OBJC_DEBUG_DONT_CRASH, "halt the process by exiting instead of crashing") +OPTION( DebugPoolDepth, OBJC_DEBUG_POOL_DEPTH, "log fault when at least a set number of autorelease pages has been allocated") OPTION( DisableVtables, OBJC_DISABLE_VTABLES, "disable vtable dispatch") OPTION( DisablePreopt, OBJC_DISABLE_PREOPTIMIZATION, "disable preoptimization courtesy of dyld shared cache") @@ -43,3 +44,7 @@ OPTION( DisableTaggedPointers, OBJC_DISABLE_TAGGED_POINTERS, "disable tagg OPTION( DisableTaggedPointerObfuscation, OBJC_DISABLE_TAG_OBFUSCATION, "disable obfuscation of tagged pointers") OPTION( DisableNonpointerIsa, OBJC_DISABLE_NONPOINTER_ISA, "disable non-pointer isa fields") OPTION( DisableInitializeForkSafety, OBJC_DISABLE_INITIALIZE_FORK_SAFETY, "disable safety checks for +initialize after fork") +OPTION( DisableFaults, OBJC_DISABLE_FAULTS, "disable os faults") +OPTION( DisablePreoptCaches, OBJC_DISABLE_PREOPTIMIZED_CACHES, "disable preoptimized caches") +OPTION( DisableAutoreleaseCoalescing, OBJC_DISABLE_AUTORELEASE_COALESCING, "disable coalescing of autorelease pool pointers") +OPTION( DisableAutoreleaseCoalescingLRU, OBJC_DISABLE_AUTORELEASE_COALESCING_LRU, "disable coalescing of autorelease pool pointers using look back N strategy") diff --git a/runtime/objc-exception.mm b/runtime/objc-exception.mm index 6c318c6..2b794e6 100644 --- a/runtime/objc-exception.mm +++ 
b/runtime/objc-exception.mm @@ -440,7 +440,7 @@ static int _objc_default_exception_matcher(Class catch_cls, id exception) Class cls; for (cls = exception->getIsa(); cls != nil; - cls = cls->superclass) + cls = cls->getSuperclass()) { if (cls == catch_cls) return 1; } diff --git a/runtime/objc-file.h b/runtime/objc-file.h index 3dc54c7..597fd3b 100644 --- a/runtime/objc-file.h +++ b/runtime/objc-file.h @@ -54,6 +54,10 @@ struct UnsignedInitializer { private: uintptr_t storage; public: + UnsignedInitializer(uint32_t offset) { + storage = (uintptr_t)&_mh_dylib_header + offset; + } + void operator () () const { using Initializer = void(*)(); Initializer init = @@ -70,6 +74,7 @@ extern category_t * const *_getObjc2CategoryList(const headerType *mhdr, size_t extern category_t * const *_getObjc2CategoryList2(const headerType *mhdr, size_t *count); extern category_t * const *_getObjc2NonlazyCategoryList(const headerType *mhdr, size_t *count); extern UnsignedInitializer *getLibobjcInitializers(const headerType *mhdr, size_t *count); +extern uint32_t *getLibobjcInitializerOffsets(const headerType *hi, size_t *count); static inline void foreach_data_segment(const headerType *mhdr, @@ -89,11 +94,12 @@ foreach_data_segment(const headerType *mhdr, seg = (const segmentType *)((char *)seg + seg->cmdsize); } - // enumerate __DATA* segments + // enumerate __DATA* and __AUTH* segments seg = (const segmentType *) (mhdr + 1); for (unsigned long i = 0; i < mhdr->ncmds; i++) { if (seg->cmd == SEGMENT_CMD && - segnameStartsWith(seg->segname, "__DATA")) + (segnameStartsWith(seg->segname, "__DATA") || + segnameStartsWith(seg->segname, "__AUTH"))) { code(seg, slide); } diff --git a/runtime/objc-file.mm b/runtime/objc-file.mm index ffde2fd..c7ff5ca 100644 --- a/runtime/objc-file.mm +++ b/runtime/objc-file.mm @@ -68,6 +68,12 @@ GETSECT(_getObjc2ProtocolList, protocol_t * const, "__objc_protolist") GETSECT(_getObjc2ProtocolRefs, protocol_t *, "__objc_protorefs"); GETSECT(getLibobjcInitializers, 
UnsignedInitializer, "__objc_init_func"); +uint32_t *getLibobjcInitializerOffsets(const headerType *mhdr, size_t *outCount) { + unsigned long byteCount = 0; + uint32_t *offsets = (uint32_t *)getsectiondata(mhdr, "__TEXT", "__objc_init_offs", &byteCount); + if (outCount) *outCount = byteCount / sizeof(uint32_t); + return offsets; +} objc_image_info * _getObjcImageInfo(const headerType *mhdr, size_t *outBytes) diff --git a/runtime/objc-gdb.h b/runtime/objc-gdb.h index 9cab4a3..99cff42 100644 --- a/runtime/objc-gdb.h +++ b/runtime/objc-gdb.h @@ -219,6 +219,10 @@ OBJC_EXPORT uintptr_t objc_debug_taggedpointer_mask OBJC_EXPORT uintptr_t objc_debug_taggedpointer_obfuscator OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0); +#if OBJC_SPLIT_TAGGED_POINTERS +OBJC_EXPORT uint8_t objc_debug_tag60_permutations[8]; +#endif + // tag_slot = (obj >> slot_shift) & slot_mask OBJC_EXPORT unsigned int objc_debug_taggedpointer_slot_shift @@ -266,6 +270,9 @@ OBJC_EXPORT unsigned int objc_debug_taggedpointer_ext_payload_lshift OBJC_EXPORT unsigned int objc_debug_taggedpointer_ext_payload_rshift OBJC_AVAILABLE(10.12, 10.0, 10.0, 3.0, 2.0); +OBJC_EXPORT uintptr_t objc_debug_constant_cfstring_tag_bits + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 6.0); + #endif @@ -289,6 +296,9 @@ OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_parent_offset OBJC_AVA OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_child_offset OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 5.0); OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_depth_offset OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 5.0); OBJC_EXTERN const uint32_t objc_debug_autoreleasepoolpage_hiwat_offset OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 5.0); +#if __OBJC2__ +OBJC_EXTERN const uintptr_t objc_debug_autoreleasepoolpage_ptr_mask OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 6.0); +#endif __END_DECLS diff --git a/runtime/objc-initialize.mm b/runtime/objc-initialize.mm index 4713325..8f98cbd 100644 --- a/runtime/objc-initialize.mm +++ 
b/runtime/objc-initialize.mm @@ -396,10 +396,10 @@ static bool classHasTrivialInitialize(Class cls) { if (cls->isRootClass() || cls->isRootMetaclass()) return true; - Class rootCls = cls->ISA()->ISA()->superclass; + Class rootCls = cls->ISA()->ISA()->getSuperclass(); - IMP rootImp = lookUpImpOrNil(rootCls, @selector(initialize), rootCls->ISA()); - IMP imp = lookUpImpOrNil(cls, @selector(initialize), cls->ISA()); + IMP rootImp = lookUpImpOrNilTryCache(rootCls, @selector(initialize), rootCls->ISA()); + IMP imp = lookUpImpOrNilTryCache(cls, @selector(initialize), cls->ISA()); return (imp == nil || imp == (IMP)&objc_noop_imp || imp == rootImp); } @@ -500,7 +500,7 @@ void initializeNonMetaClass(Class cls) // Make sure super is done initializing BEFORE beginning to initialize cls. // See note about deadlock above. - supercls = cls->superclass; + supercls = cls->getSuperclass(); if (supercls && !supercls->isInitialized()) { initializeNonMetaClass(supercls); } diff --git a/runtime/objc-internal.h b/runtime/objc-internal.h index 112804d..ad40a1c 100644 --- a/runtime/objc-internal.h +++ b/runtime/objc-internal.h @@ -44,6 +44,11 @@ #include #include +// Include NSObject.h only if we're ObjC. Module imports get unhappy +// otherwise. +#if __OBJC__ +#include +#endif // Termination reasons in the OS_REASON_OBJC namespace. #define OBJC_EXIT_REASON_UNSPECIFIED 1 @@ -54,6 +59,18 @@ // The runtime's class structure will never grow beyond this. #define OBJC_MAX_CLASS_SIZE (32*sizeof(void*)) +// Private objc_setAssociatedObject policy modifier. When an object is +// destroyed, associated objects attached to that object that are marked with +// this will be released after all associated objects not so marked. +// +// In addition, such associations are not removed when calling +// objc_removeAssociatedObjects. +// +// NOTE: This should be used sparingly. 
Performance will be poor when a single +// object has more than a few (deliberately vague) associated objects marked +// with this flag. If you're not sure if you should use this, you should not use +// this! +#define _OBJC_ASSOCIATION_SYSTEM_OBJECT (1 << 16) __BEGIN_DECLS @@ -160,8 +177,14 @@ OBJC_EXPORT objc_imp_cache_entry *_Nullable class_copyImpCache(Class _Nonnull cls, int * _Nullable outCount) OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 5.0); + +OBJC_EXPORT +unsigned long +sel_hash(SEL _Nullable sel) + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 6.0); #endif + // Plainly-implemented GC barriers. Rosetta used to use these. OBJC_EXPORT id _Nullable objc_assign_strongCast_generic(id _Nullable value, id _Nullable * _Nonnull dest) @@ -199,7 +222,7 @@ OBJC_EXPORT void _objc_setClassLoader(BOOL (* _Nonnull newClassLoader)(const char * _Nonnull)) OBJC2_UNAVAILABLE; -#if !(TARGET_OS_OSX && !TARGET_OS_IOSMAC && __i386__) +#if !(TARGET_OS_OSX && !TARGET_OS_MACCATALYST && __i386__) // Add a class copy fixup handler. The name is a misnomer, as // multiple calls will install multiple handlers. Older versions // of the Swift runtime call it by name, and it's only used by Swift @@ -240,6 +263,21 @@ objc_copyClassNamesForImageHeader(const struct mach_header * _Nonnull mh, unsigned int * _Nullable outCount) OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0); +/** + * Returns the all the classes within a library. + * + * @param image The mach header for library or framework you are inquiring about. + * @param outCount The number of class names returned. + * + * @return An array of Class objects + */ + +OBJC_EXPORT Class _Nonnull * _Nullable +objc_copyClassesForImage(const char * _Nonnull image, + unsigned int * _Nullable outCount) + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 4.0); + + // Tagged pointer objects. 
#if __LP64__ @@ -290,10 +328,20 @@ enum OBJC_TAG_NSMethodSignature = 20, OBJC_TAG_UTTypeRecord = 21, + // When using the split tagged pointer representation + // (OBJC_SPLIT_TAGGED_POINTERS), this is the first tag where + // the tag and payload are unobfuscated. All tags from here to + // OBJC_TAG_Last52BitPayload are unobfuscated. The shared cache + // builder is able to construct these as long as the low bit is + // not set (i.e. even-numbered tags). + OBJC_TAG_FirstUnobfuscatedSplitTag = 136, // 128 + 8, first ext tag with high bit set + + OBJC_TAG_Constant_CFString = 136, + OBJC_TAG_First60BitPayload = 0, OBJC_TAG_Last60BitPayload = 6, OBJC_TAG_First52BitPayload = 8, - OBJC_TAG_Last52BitPayload = 263, + OBJC_TAG_Last52BitPayload = 263, OBJC_TAG_RESERVED_264 = 264 }; @@ -352,7 +400,16 @@ _objc_getTaggedPointerSignedValue(const void * _Nullable ptr); // Don't use the values below. Use the declarations above. -#if (TARGET_OS_OSX || TARGET_OS_IOSMAC) && __x86_64__ +#if __arm64__ +// ARM64 uses a new tagged pointer scheme where normal tags are in +// the low bits, extended tags are in the high bits, and half of the +// extended tag space is reserved for unobfuscated payloads. 
+# define OBJC_SPLIT_TAGGED_POINTERS 1 +#else +# define OBJC_SPLIT_TAGGED_POINTERS 0 +#endif + +#if (TARGET_OS_OSX || TARGET_OS_MACCATALYST) && __x86_64__ // 64-bit Mac - tag bit is LSB # define OBJC_MSB_TAGGED_POINTERS 0 #else @@ -360,17 +417,37 @@ _objc_getTaggedPointerSignedValue(const void * _Nullable ptr); # define OBJC_MSB_TAGGED_POINTERS 1 #endif -#define _OBJC_TAG_INDEX_MASK 0x7 +#define _OBJC_TAG_INDEX_MASK 0x7UL + +#if OBJC_SPLIT_TAGGED_POINTERS +#define _OBJC_TAG_SLOT_COUNT 8 +#define _OBJC_TAG_SLOT_MASK 0x7UL +#else // array slot includes the tag bit itself #define _OBJC_TAG_SLOT_COUNT 16 -#define _OBJC_TAG_SLOT_MASK 0xf +#define _OBJC_TAG_SLOT_MASK 0xfUL +#endif #define _OBJC_TAG_EXT_INDEX_MASK 0xff // array slot has no extra bits #define _OBJC_TAG_EXT_SLOT_COUNT 256 #define _OBJC_TAG_EXT_SLOT_MASK 0xff -#if OBJC_MSB_TAGGED_POINTERS +#if OBJC_SPLIT_TAGGED_POINTERS +# define _OBJC_TAG_MASK (1UL<<63) +# define _OBJC_TAG_INDEX_SHIFT 0 +# define _OBJC_TAG_SLOT_SHIFT 0 +# define _OBJC_TAG_PAYLOAD_LSHIFT 1 +# define _OBJC_TAG_PAYLOAD_RSHIFT 4 +# define _OBJC_TAG_EXT_MASK (_OBJC_TAG_MASK | 0x7UL) +# define _OBJC_TAG_NO_OBFUSCATION_MASK ((1UL<<62) | _OBJC_TAG_EXT_MASK) +# define _OBJC_TAG_CONSTANT_POINTER_MASK \ + ~(_OBJC_TAG_EXT_MASK | ((uintptr_t)_OBJC_TAG_EXT_SLOT_MASK << _OBJC_TAG_EXT_SLOT_SHIFT)) +# define _OBJC_TAG_EXT_INDEX_SHIFT 55 +# define _OBJC_TAG_EXT_SLOT_SHIFT 55 +# define _OBJC_TAG_EXT_PAYLOAD_LSHIFT 9 +# define _OBJC_TAG_EXT_PAYLOAD_RSHIFT 12 +#elif OBJC_MSB_TAGGED_POINTERS # define _OBJC_TAG_MASK (1UL<<63) # define _OBJC_TAG_INDEX_SHIFT 60 # define _OBJC_TAG_SLOT_SHIFT 60 @@ -394,21 +471,64 @@ _objc_getTaggedPointerSignedValue(const void * _Nullable ptr); # define _OBJC_TAG_EXT_PAYLOAD_RSHIFT 12 #endif +// Map of tags to obfuscated tags. 
extern uintptr_t objc_debug_taggedpointer_obfuscator; +#if OBJC_SPLIT_TAGGED_POINTERS +extern uint8_t objc_debug_tag60_permutations[8]; + +static inline uintptr_t _objc_basicTagToObfuscatedTag(uintptr_t tag) { + return objc_debug_tag60_permutations[tag]; +} + +static inline uintptr_t _objc_obfuscatedTagToBasicTag(uintptr_t tag) { + for (unsigned i = 0; i < 7; i++) + if (objc_debug_tag60_permutations[i] == tag) + return i; + return 7; +} +#endif + static inline void * _Nonnull _objc_encodeTaggedPointer(uintptr_t ptr) { - return (void *)(objc_debug_taggedpointer_obfuscator ^ ptr); + uintptr_t value = (objc_debug_taggedpointer_obfuscator ^ ptr); +#if OBJC_SPLIT_TAGGED_POINTERS + if ((value & _OBJC_TAG_NO_OBFUSCATION_MASK) == _OBJC_TAG_NO_OBFUSCATION_MASK) + return (void *)ptr; + uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK; + uintptr_t permutedTag = _objc_basicTagToObfuscatedTag(basicTag); + value &= ~(_OBJC_TAG_INDEX_MASK << _OBJC_TAG_INDEX_SHIFT); + value |= permutedTag << _OBJC_TAG_INDEX_SHIFT; +#endif + return (void *)value; +} + +static inline uintptr_t +_objc_decodeTaggedPointer_noPermute(const void * _Nullable ptr) +{ + uintptr_t value = (uintptr_t)ptr; +#if OBJC_SPLIT_TAGGED_POINTERS + if ((value & _OBJC_TAG_NO_OBFUSCATION_MASK) == _OBJC_TAG_NO_OBFUSCATION_MASK) + return value; +#endif + return value ^ objc_debug_taggedpointer_obfuscator; } static inline uintptr_t _objc_decodeTaggedPointer(const void * _Nullable ptr) { - return (uintptr_t)ptr ^ objc_debug_taggedpointer_obfuscator; + uintptr_t value = _objc_decodeTaggedPointer_noPermute(ptr); +#if OBJC_SPLIT_TAGGED_POINTERS + uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK; + + value &= ~(_OBJC_TAG_INDEX_MASK << _OBJC_TAG_INDEX_SHIFT); + value |= _objc_obfuscatedTagToBasicTag(basicTag) << _OBJC_TAG_INDEX_SHIFT; +#endif + return value; } -static inline bool +static inline bool _objc_taggedPointersEnabled(void) { extern uintptr_t 
objc_debug_taggedpointer_mask; @@ -447,6 +567,15 @@ _objc_isTaggedPointer(const void * _Nullable ptr) return ((uintptr_t)ptr & _OBJC_TAG_MASK) == _OBJC_TAG_MASK; } +static inline bool +_objc_isTaggedPointerOrNil(const void * _Nullable ptr) +{ + // this function is here so that clang can turn this into + // a comparison with NULL when this is appropriate + // it turns out it's not able to in many cases without this + return !ptr || ((uintptr_t)ptr & _OBJC_TAG_MASK) == _OBJC_TAG_MASK; +} + static inline objc_tag_index_t _objc_getTaggedPointerTag(const void * _Nullable ptr) { @@ -465,7 +594,7 @@ static inline uintptr_t _objc_getTaggedPointerValue(const void * _Nullable ptr) { // ASSERT(_objc_isTaggedPointer(ptr)); - uintptr_t value = _objc_decodeTaggedPointer(ptr); + uintptr_t value = _objc_decodeTaggedPointer_noPermute(ptr); uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK; if (basicTag == _OBJC_TAG_INDEX_MASK) { return (value << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT; @@ -478,7 +607,7 @@ static inline intptr_t _objc_getTaggedPointerSignedValue(const void * _Nullable ptr) { // ASSERT(_objc_isTaggedPointer(ptr)); - uintptr_t value = _objc_decodeTaggedPointer(ptr); + uintptr_t value = _objc_decodeTaggedPointer_noPermute(ptr); uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK; if (basicTag == _OBJC_TAG_INDEX_MASK) { return ((intptr_t)value << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT; @@ -487,6 +616,13 @@ _objc_getTaggedPointerSignedValue(const void * _Nullable ptr) } } +# if OBJC_SPLIT_TAGGED_POINTERS +static inline void * _Nullable +_objc_getTaggedPointerRawPointerValue(const void * _Nullable ptr) { + return (void *)((uintptr_t)ptr & _OBJC_TAG_CONSTANT_POINTER_MASK); +} +# endif + // OBJC_HAVE_TAGGED_POINTERS #endif @@ -597,6 +733,11 @@ _class_getIvarMemoryManagement(Class _Nullable cls, Ivar _Nonnull ivar) OBJC_EXPORT BOOL _class_isFutureClass(Class _Nullable cls) 
OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0); +/// Returns true if the class is an ABI stable Swift class. (Despite +/// the name, this does NOT return true for Swift classes built with +/// Swift versions prior to 5.0.) +OBJC_EXPORT BOOL _class_isSwift(Class _Nullable cls) + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 5.0); // API to only be called by root classes like NSObject or NSProxy @@ -878,12 +1019,47 @@ typedef void (*_objc_func_willInitializeClass)(void * _Nullable context, Class _ OBJC_EXPORT void _objc_addWillInitializeClassFunc(_objc_func_willInitializeClass _Nonnull func, void * _Nullable context) OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 4.0); +// Replicate the conditionals in objc-config.h for packed isa, indexed isa, and preopt caches +#if __ARM_ARCH_7K__ >= 2 || (__arm64__ && !__LP64__) || \ + !(!__LP64__ || TARGET_OS_WIN32 || \ + (TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST && !__arm64__)) +OBJC_EXPORT const uintptr_t _objc_has_weak_formation_callout; +#define OBJC_WEAK_FORMATION_CALLOUT_DEFINED 1 +#else +#define OBJC_WEAK_FORMATION_CALLOUT_DEFINED 0 +#endif + +#if defined(__arm64__) && TARGET_OS_IOS && !TARGET_OS_SIMULATOR && !TARGET_OS_MACCATALYST +#define CONFIG_USE_PREOPT_CACHES 1 +#else +#define CONFIG_USE_PREOPT_CACHES 0 +#endif + + +#if __OBJC2__ +// Helper function for objc4 tests only! Do not call this yourself +// for any reason ever. +OBJC_EXPORT void _method_setImplementationRawUnsafe(Method _Nonnull m, IMP _Nonnull imp) + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 5.0); +#endif + // API to only be called by classes that provide their own reference count storage OBJC_EXPORT void _objc_deallocOnMainThreadHelper(void * _Nullable context) OBJC_AVAILABLE(10.7, 5.0, 9.0, 1.0, 2.0); +#if __OBJC__ +// Declarations for internal methods used for custom weak reference +// implementations. These declarations ensure that the compiler knows +// to exclude these methods from NS_DIRECT_MEMBERS. 
Do NOT implement +// these methods unless you really know what you're doing. +@interface NSObject () +- (BOOL)_tryRetain; +- (BOOL)_isDeallocating; +@end +#endif + // On async versus sync deallocation and the _dealloc2main flag // // Theory: @@ -983,6 +1159,25 @@ typedef enum { #define _OBJC_SUPPORTED_INLINE_REFCNT(_rc_ivar) _OBJC_SUPPORTED_INLINE_REFCNT_LOGIC(_rc_ivar, 0) #define _OBJC_SUPPORTED_INLINE_REFCNT_WITH_DEALLOC2MAIN(_rc_ivar) _OBJC_SUPPORTED_INLINE_REFCNT_LOGIC(_rc_ivar, 1) + +// C cache_t wrappers for objcdt and the IMP caches test tool +struct cache_t; +struct bucket_t; +struct preopt_cache_t; +OBJC_EXPORT struct bucket_t * _Nonnull objc_cache_buckets(const struct cache_t * _Nonnull cache); +OBJC_EXPORT size_t objc_cache_bytesForCapacity(uint32_t cap); +OBJC_EXPORT uint32_t objc_cache_occupied(const struct cache_t * _Nonnull cache); +OBJC_EXPORT unsigned objc_cache_capacity(const struct cache_t * _Nonnull cache); + +#if CONFIG_USE_PREOPT_CACHES + +OBJC_EXPORT bool objc_cache_isConstantOptimizedCache(const struct cache_t * _Nonnull cache, bool strict, uintptr_t empty_addr); +OBJC_EXPORT unsigned objc_cache_preoptCapacity(const struct cache_t * _Nonnull cache); +OBJC_EXPORT Class _Nonnull objc_cache_preoptFallbackClass(const struct cache_t * _Nonnull cache); +OBJC_EXPORT const struct preopt_cache_t * _Nonnull objc_cache_preoptCache(const struct cache_t * _Nonnull cache); + +#endif + __END_DECLS #endif diff --git a/runtime/objc-lockdebug.h b/runtime/objc-lockdebug.h index a3048b1..a69ee06 100644 --- a/runtime/objc-lockdebug.h +++ b/runtime/objc-lockdebug.h @@ -24,11 +24,13 @@ #if LOCKDEBUG extern void lockdebug_assert_all_locks_locked(); extern void lockdebug_assert_no_locks_locked(); +extern void lockdebug_assert_no_locks_locked_except(std::initializer_list canBeLocked); extern void lockdebug_setInForkPrepare(bool); extern void lockdebug_lock_precedes_lock(const void *oldlock, const void *newlock); #else static constexpr inline void 
lockdebug_assert_all_locks_locked() { } static constexpr inline void lockdebug_assert_no_locks_locked() { } +static constexpr inline void lockdebug_assert_no_locks_locked_except(std::initializer_list canBeLocked) { }; static constexpr inline void lockdebug_setInForkPrepare(bool) { } static constexpr inline void lockdebug_lock_precedes_lock(const void *, const void *) { } #endif @@ -40,12 +42,12 @@ extern void lockdebug_mutex_unlock(mutex_tt *lock); extern void lockdebug_mutex_assert_locked(mutex_tt *lock); extern void lockdebug_mutex_assert_unlocked(mutex_tt *lock); -static constexpr inline void lockdebug_remember_mutex(mutex_tt *lock) { } -static constexpr inline void lockdebug_mutex_lock(mutex_tt *lock) { } -static constexpr inline void lockdebug_mutex_try_lock(mutex_tt *lock) { } -static constexpr inline void lockdebug_mutex_unlock(mutex_tt *lock) { } -static constexpr inline void lockdebug_mutex_assert_locked(mutex_tt *lock) { } -static constexpr inline void lockdebug_mutex_assert_unlocked(mutex_tt *lock) { } +static constexpr inline void lockdebug_remember_mutex(__unused mutex_tt *lock) { } +static constexpr inline void lockdebug_mutex_lock(__unused mutex_tt *lock) { } +static constexpr inline void lockdebug_mutex_try_lock(__unused mutex_tt *lock) { } +static constexpr inline void lockdebug_mutex_unlock(__unused mutex_tt *lock) { } +static constexpr inline void lockdebug_mutex_assert_locked(__unused mutex_tt *lock) { } +static constexpr inline void lockdebug_mutex_assert_unlocked(__unused mutex_tt *lock) { } extern void lockdebug_remember_monitor(monitor_tt *lock); @@ -55,12 +57,12 @@ extern void lockdebug_monitor_wait(monitor_tt *lock); extern void lockdebug_monitor_assert_locked(monitor_tt *lock); extern void lockdebug_monitor_assert_unlocked(monitor_tt *lock); -static constexpr inline void lockdebug_remember_monitor(monitor_tt *lock) { } -static constexpr inline void lockdebug_monitor_enter(monitor_tt *lock) { } -static constexpr inline void 
lockdebug_monitor_leave(monitor_tt *lock) { } -static constexpr inline void lockdebug_monitor_wait(monitor_tt *lock) { } -static constexpr inline void lockdebug_monitor_assert_locked(monitor_tt *lock) { } -static constexpr inline void lockdebug_monitor_assert_unlocked(monitor_tt *lock) {} +static constexpr inline void lockdebug_remember_monitor(__unused monitor_tt *lock) { } +static constexpr inline void lockdebug_monitor_enter(__unused monitor_tt *lock) { } +static constexpr inline void lockdebug_monitor_leave(__unused monitor_tt *lock) { } +static constexpr inline void lockdebug_monitor_wait(__unused monitor_tt *lock) { } +static constexpr inline void lockdebug_monitor_assert_locked(__unused monitor_tt *lock) { } +static constexpr inline void lockdebug_monitor_assert_unlocked(__unused monitor_tt *lock) {} extern void @@ -75,12 +77,12 @@ extern void lockdebug_recursive_mutex_assert_unlocked(recursive_mutex_tt *lock); static constexpr inline void -lockdebug_remember_recursive_mutex(recursive_mutex_tt *lock) { } +lockdebug_remember_recursive_mutex(__unused recursive_mutex_tt *lock) { } static constexpr inline void -lockdebug_recursive_mutex_lock(recursive_mutex_tt *lock) { } +lockdebug_recursive_mutex_lock(__unused recursive_mutex_tt *lock) { } static constexpr inline void -lockdebug_recursive_mutex_unlock(recursive_mutex_tt *lock) { } +lockdebug_recursive_mutex_unlock(__unused recursive_mutex_tt *lock) { } static constexpr inline void -lockdebug_recursive_mutex_assert_locked(recursive_mutex_tt *lock) { } +lockdebug_recursive_mutex_assert_locked(__unused recursive_mutex_tt *lock) { } static constexpr inline void -lockdebug_recursive_mutex_assert_unlocked(recursive_mutex_tt *lock) { } +lockdebug_recursive_mutex_assert_unlocked(__unused recursive_mutex_tt *lock) { } diff --git a/runtime/objc-lockdebug.mm b/runtime/objc-lockdebug.mm index f182a27..1429c2d 100644 --- a/runtime/objc-lockdebug.mm +++ b/runtime/objc-lockdebug.mm @@ -321,10 +321,18 @@ 
lockdebug_assert_all_locks_locked() void lockdebug_assert_no_locks_locked() +{ + lockdebug_assert_no_locks_locked_except({}); +} + +void lockdebug_assert_no_locks_locked_except(std::initializer_list canBeLocked) { auto& owned = ownedLocks(); for (const auto& l : AllLocks()) { + if (std::find(canBeLocked.begin(), canBeLocked.end(), l.first) != canBeLocked.end()) + continue; + if (hasLock(owned, l.first, l.second.k)) { _objc_fatal("lock %p:%d is incorrectly owned", l.first, l.second.k); } diff --git a/runtime/objc-object.h b/runtime/objc-object.h index 2c17c94..d15d5a8 100644 --- a/runtime/objc-object.h +++ b/runtime/objc-object.h @@ -73,7 +73,7 @@ objc_object::isClass() #if SUPPORT_TAGGED_POINTERS -inline Class +inline Class objc_object::getIsa() { if (fastpath(!isTaggedPointer())) return ISA(); @@ -103,6 +103,12 @@ objc_object::isTaggedPointer() return _objc_isTaggedPointer(this); } +inline bool +objc_object::isTaggedPointerOrNil() +{ + return _objc_isTaggedPointerOrNil(this); +} + inline bool objc_object::isBasicTaggedPointer() { @@ -121,8 +127,7 @@ objc_object::isExtTaggedPointer() #else // not SUPPORT_TAGGED_POINTERS - -inline Class +inline Class objc_object::getIsa() { return ISA(); @@ -141,6 +146,12 @@ objc_object::isTaggedPointer() return false; } +inline bool +objc_object::isTaggedPointerOrNil() +{ + return !this; +} + inline bool objc_object::isBasicTaggedPointer() { @@ -160,21 +171,118 @@ objc_object::isExtTaggedPointer() #if SUPPORT_NONPOINTER_ISA -inline Class -objc_object::ISA() -{ - ASSERT(!isTaggedPointer()); +// Set the class field in an isa. Takes both the class to set and +// a pointer to the object where the isa will ultimately be used. +// This is necessary to get the pointer signing right. +// +// Note: this method does not support setting an indexed isa. When +// indexed isas are in use, it can only be used to set the class of a +// raw isa. 
+inline void +isa_t::setClass(Class newCls, UNUSED_WITHOUT_PTRAUTH objc_object *obj) +{ + // Match the conditional in isa.h. +#if __has_feature(ptrauth_calls) || TARGET_OS_SIMULATOR +# if ISA_SIGNING_SIGN_MODE == ISA_SIGNING_SIGN_NONE + // No signing, just use the raw pointer. + uintptr_t signedCls = (uintptr_t)newCls; + +# elif ISA_SIGNING_SIGN_MODE == ISA_SIGNING_SIGN_ONLY_SWIFT + // We're only signing Swift classes. Non-Swift classes just use + // the raw pointer + uintptr_t signedCls = (uintptr_t)newCls; + if (newCls->isSwiftStable()) + signedCls = (uintptr_t)ptrauth_sign_unauthenticated((void *)newCls, ISA_SIGNING_KEY, ptrauth_blend_discriminator(obj, ISA_SIGNING_DISCRIMINATOR)); + +# elif ISA_SIGNING_SIGN_MODE == ISA_SIGNING_SIGN_ALL + // We're signing everything + uintptr_t signedCls = (uintptr_t)ptrauth_sign_unauthenticated((void *)newCls, ISA_SIGNING_KEY, ptrauth_blend_discriminator(obj, ISA_SIGNING_DISCRIMINATOR)); + +# else +# error Unknown isa signing mode. +# endif + + shiftcls_and_sig = signedCls >> 3; + +#elif SUPPORT_INDEXED_ISA + // Indexed isa only uses this method to set a raw pointer class. + // Setting an indexed class is handled separately. + cls = newCls; + +#else // Nonpointer isa, no ptrauth + shiftcls = (uintptr_t)newCls >> 3; +#endif +} + +// Get the class pointer out of an isa. When ptrauth is supported, +// this operation is optionally authenticated. Many code paths don't +// need the authentication, so it can be skipped in those cases for +// better performance. +// +// Note: this method does not support retrieving indexed isas. When +// indexed isas are in use, it can only be used to retrieve the class +// of a raw isa. 
+#if SUPPORT_INDEXED_ISA || (ISA_SIGNING_AUTH_MODE != ISA_SIGNING_AUTH) +#define MAYBE_UNUSED_AUTHENTICATED_PARAM __attribute__((unused)) +#else +#define MAYBE_UNUSED_AUTHENTICATED_PARAM UNUSED_WITHOUT_PTRAUTH +#endif + +inline Class +isa_t::getClass(MAYBE_UNUSED_AUTHENTICATED_PARAM bool authenticated) { #if SUPPORT_INDEXED_ISA - if (isa.nonpointer) { - uintptr_t slot = isa.indexcls; - return classForIndex((unsigned)slot); + return cls; +#else + + uintptr_t clsbits = bits; + +# if __has_feature(ptrauth_calls) +# if ISA_SIGNING_AUTH_MODE == ISA_SIGNING_AUTH + // Most callers aren't security critical, so skip the + // authentication unless they ask for it. Message sending and + // cache filling are protected by the auth code in msgSend. + if (authenticated) { + // Mask off all bits besides the class pointer and signature. + clsbits &= ISA_MASK; + if (clsbits == 0) + return Nil; + clsbits = (uintptr_t)ptrauth_auth_data((void *)clsbits, ISA_SIGNING_KEY, ptrauth_blend_discriminator(this, ISA_SIGNING_DISCRIMINATOR)); + } else { + // If not authenticating, strip using the precomputed class mask. + clsbits &= objc_debug_isa_class_mask; } - return (Class)isa.bits; +# else + // If not authenticating, strip using the precomputed class mask. 
+ clsbits &= objc_debug_isa_class_mask; +# endif + +# else + clsbits &= ISA_MASK; +# endif + + return (Class)clsbits; +#endif +} + +inline Class +isa_t::getDecodedClass(bool authenticated) { +#if SUPPORT_INDEXED_ISA + if (nonpointer) { + return classForIndex(indexcls); + } + return (Class)cls; #else - return (Class)(isa.bits & ISA_MASK); + return getClass(authenticated); #endif } +inline Class +objc_object::ISA(bool authenticated) +{ + ASSERT(!isTaggedPointer()); + return isa.getDecodedClass(authenticated); +} + inline Class objc_object::rawISA() { @@ -220,18 +328,25 @@ objc_object::initInstanceIsa(Class cls, bool hasCxxDtor) initIsa(cls, true, hasCxxDtor); } +#if !SUPPORT_INDEXED_ISA && !ISA_HAS_CXX_DTOR_BIT +#define UNUSED_WITHOUT_INDEXED_ISA_AND_DTOR_BIT __attribute__((unused)) +#else +#define UNUSED_WITHOUT_INDEXED_ISA_AND_DTOR_BIT +#endif + inline void -objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor) +objc_object::initIsa(Class cls, bool nonpointer, UNUSED_WITHOUT_INDEXED_ISA_AND_DTOR_BIT bool hasCxxDtor) { ASSERT(!isTaggedPointer()); + isa_t newisa(0); + if (!nonpointer) { - isa = isa_t((uintptr_t)cls); + newisa.setClass(cls, this); } else { ASSERT(!DisableNonpointerIsa); ASSERT(!cls->instancesRequireRawIsa()); - isa_t newisa(0); #if SUPPORT_INDEXED_ISA ASSERT(cls->classArrayIndex() > 0); @@ -244,18 +359,21 @@ objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor) newisa.bits = ISA_MAGIC_VALUE; // isa.magic is part of ISA_MAGIC_VALUE // isa.nonpointer is part of ISA_MAGIC_VALUE +# if ISA_HAS_CXX_DTOR_BIT newisa.has_cxx_dtor = hasCxxDtor; - newisa.shiftcls = (uintptr_t)cls >> 3; +# endif + newisa.setClass(cls, this); #endif - - // This write must be performed in a single store in some cases - // (for example when realizing a class because other threads - // may simultaneously try to use the class). - // fixme use atomics here to guarantee single-store and to - // guarantee memory order w.r.t. 
the class index table - // ...but not too atomic because we don't want to hurt instantiation - isa = newisa; + newisa.extra_rc = 1; } + + // This write must be performed in a single store in some cases + // (for example when realizing a class because other threads + // may simultaneously try to use the class). + // fixme use atomics here to guarantee single-store and to + // guarantee memory order w.r.t. the class index table + // ...but not too atomic because we don't want to hurt instantiation + isa = newisa; } @@ -270,34 +388,46 @@ objc_object::changeIsa(Class newCls) ASSERT(!isTaggedPointer()); isa_t oldisa; - isa_t newisa; + isa_t newisa(0); bool sideTableLocked = false; bool transcribeToSideTable = false; + oldisa = LoadExclusive(&isa.bits); + do { transcribeToSideTable = false; - oldisa = LoadExclusive(&isa.bits); if ((oldisa.bits == 0 || oldisa.nonpointer) && !newCls->isFuture() && newCls->canAllocNonpointer()) { // 0 -> nonpointer // nonpointer -> nonpointer #if SUPPORT_INDEXED_ISA - if (oldisa.bits == 0) newisa.bits = ISA_INDEX_MAGIC_VALUE; - else newisa = oldisa; + if (oldisa.bits == 0) { + newisa.bits = ISA_INDEX_MAGIC_VALUE; + newisa.extra_rc = 1; + } else { + newisa = oldisa; + } // isa.magic is part of ISA_MAGIC_VALUE // isa.nonpointer is part of ISA_MAGIC_VALUE newisa.has_cxx_dtor = newCls->hasCxxDtor(); ASSERT(newCls->classArrayIndex() > 0); newisa.indexcls = (uintptr_t)newCls->classArrayIndex(); #else - if (oldisa.bits == 0) newisa.bits = ISA_MAGIC_VALUE; - else newisa = oldisa; + if (oldisa.bits == 0) { + newisa.bits = ISA_MAGIC_VALUE; + newisa.extra_rc = 1; + } + else { + newisa = oldisa; + } // isa.magic is part of ISA_MAGIC_VALUE // isa.nonpointer is part of ISA_MAGIC_VALUE +# if ISA_HAS_CXX_DTOR_BIT newisa.has_cxx_dtor = newCls->hasCxxDtor(); - newisa.shiftcls = (uintptr_t)newCls >> 3; +# endif + newisa.setClass(newCls, this); #endif } else if (oldisa.nonpointer) { @@ -308,38 +438,28 @@ objc_object::changeIsa(Class newCls) if 
(!sideTableLocked) sidetable_lock(); sideTableLocked = true; transcribeToSideTable = true; - newisa.cls = newCls; + newisa.setClass(newCls, this); } else { // raw pointer -> raw pointer - newisa.cls = newCls; + newisa.setClass(newCls, this); } - } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)); + } while (slowpath(!StoreExclusive(&isa.bits, &oldisa.bits, newisa.bits))); if (transcribeToSideTable) { // Copy oldisa's retain count et al to side table. // oldisa.has_assoc: nothing to do // oldisa.has_cxx_dtor: nothing to do sidetable_moveExtraRC_nolock(oldisa.extra_rc, - oldisa.deallocating, + oldisa.isDeallocating(), oldisa.weakly_referenced); } if (sideTableLocked) sidetable_unlock(); - if (oldisa.nonpointer) { -#if SUPPORT_INDEXED_ISA - return classForIndex(oldisa.indexcls); -#else - return (Class)((uintptr_t)oldisa.shiftcls << 3); -#endif - } - else { - return oldisa.cls; - } + return oldisa.getDecodedClass(false); } - inline bool objc_object::hasAssociatedObjects() { @@ -354,15 +474,22 @@ objc_object::setHasAssociatedObjects() { if (isTaggedPointer()) return; - retry: - isa_t oldisa = LoadExclusive(&isa.bits); - isa_t newisa = oldisa; - if (!newisa.nonpointer || newisa.has_assoc) { - ClearExclusive(&isa.bits); - return; + if (slowpath(!hasNonpointerIsa() && ISA()->hasCustomRR()) && !ISA()->isFuture() && !ISA()->isMetaClass()) { + void(*setAssoc)(id, SEL) = (void(*)(id, SEL)) object_getMethodImplementation((id)this, @selector(_noteAssociatedObjects)); + if ((IMP)setAssoc != _objc_msgForward) { + (*setAssoc)((id)this, @selector(_noteAssociatedObjects)); + } } - newisa.has_assoc = true; - if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry; + + isa_t newisa, oldisa = LoadExclusive(&isa.bits); + do { + newisa = oldisa; + if (!newisa.nonpointer || newisa.has_assoc) { + ClearExclusive(&isa.bits); + return; + } + newisa.has_assoc = true; + } while (slowpath(!StoreExclusive(&isa.bits, &oldisa.bits, newisa.bits))); } @@ -378,20 +505,20 @@ 
objc_object::isWeaklyReferenced() inline void objc_object::setWeaklyReferenced_nolock() { - retry: - isa_t oldisa = LoadExclusive(&isa.bits); - isa_t newisa = oldisa; - if (slowpath(!newisa.nonpointer)) { - ClearExclusive(&isa.bits); - sidetable_setWeaklyReferenced_nolock(); - return; - } - if (newisa.weakly_referenced) { - ClearExclusive(&isa.bits); - return; - } - newisa.weakly_referenced = true; - if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry; + isa_t newisa, oldisa = LoadExclusive(&isa.bits); + do { + newisa = oldisa; + if (slowpath(!newisa.nonpointer)) { + ClearExclusive(&isa.bits); + sidetable_setWeaklyReferenced_nolock(); + return; + } + if (newisa.weakly_referenced) { + ClearExclusive(&isa.bits); + return; + } + newisa.weakly_referenced = true; + } while (slowpath(!StoreExclusive(&isa.bits, &oldisa.bits, newisa.bits))); } @@ -399,8 +526,12 @@ inline bool objc_object::hasCxxDtor() { ASSERT(!isTaggedPointer()); - if (isa.nonpointer) return isa.has_cxx_dtor; - else return isa.cls->hasCxxDtor(); +#if ISA_HAS_CXX_DTOR_BIT + if (isa.nonpointer) + return isa.has_cxx_dtor; + else +#endif + return ISA()->hasCxxDtor(); } @@ -409,7 +540,7 @@ inline bool objc_object::rootIsDeallocating() { if (isTaggedPointer()) return false; - if (isa.nonpointer) return isa.deallocating; + if (isa.nonpointer) return isa.isDeallocating(); return sidetable_isDeallocating(); } @@ -435,10 +566,14 @@ objc_object::rootDealloc() { if (isTaggedPointer()) return; // fixme necessary? 
- if (fastpath(isa.nonpointer && - !isa.weakly_referenced && - !isa.has_assoc && - !isa.has_cxx_dtor && + if (fastpath(isa.nonpointer && + !isa.weakly_referenced && + !isa.has_assoc && +#if ISA_HAS_CXX_DTOR_BIT + !isa.has_cxx_dtor && +#else + !isa.getClass(false)->hasCxxDtor() && +#endif !isa.has_sidetable_rc)) { assert(!sidetable_present()); @@ -449,6 +584,8 @@ objc_object::rootDealloc() } } +extern explicit_atomic swiftRetain; +extern explicit_atomic swiftRelease; // Equivalent to calling [this retain], with shortcuts if there is no override inline id @@ -456,14 +593,9 @@ objc_object::retain() { ASSERT(!isTaggedPointer()); - if (fastpath(!ISA()->hasCustomRR())) { - return rootRetain(); - } - - return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain)); + return rootRetain(false, RRVariant::FastOrMsgSend); } - // Base retain implementation, ignoring overrides. // This does not check isa.fast_rr; if there is an RR override then // it was already called and it chose to call [super retain]. @@ -476,19 +608,19 @@ objc_object::retain() ALWAYS_INLINE id objc_object::rootRetain() { - return rootRetain(false, false); + return rootRetain(false, RRVariant::Fast); } ALWAYS_INLINE bool objc_object::rootTryRetain() { - return rootRetain(true, false) ? true : false; + return rootRetain(true, RRVariant::Fast) ? true : false; } -ALWAYS_INLINE id -objc_object::rootRetain(bool tryRetain, bool handleOverflow) +ALWAYS_INLINE id +objc_object::rootRetain(bool tryRetain, objc_object::RRVariant variant) { - if (isTaggedPointer()) return (id)this; + if (slowpath(isTaggedPointer())) return (id)this; bool sideTableLocked = false; bool transcribeToSideTable = false; @@ -496,29 +628,56 @@ objc_object::rootRetain(bool tryRetain, bool handleOverflow) isa_t oldisa; isa_t newisa; + oldisa = LoadExclusive(&isa.bits); + + if (variant == RRVariant::FastOrMsgSend) { + // These checks are only meaningful for objc_retain() + // They are here so that we avoid a re-load of the isa. 
+ if (slowpath(oldisa.getDecodedClass(false)->hasCustomRR())) { + ClearExclusive(&isa.bits); + if (oldisa.getDecodedClass(false)->canCallSwiftRR()) { + return swiftRetain.load(memory_order_relaxed)((id)this); + } + return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain)); + } + } + + if (slowpath(!oldisa.nonpointer)) { + // a Class is a Class forever, so we can perform this check once + // outside of the CAS loop + if (oldisa.getDecodedClass(false)->isMetaClass()) { + ClearExclusive(&isa.bits); + return (id)this; + } + } + do { transcribeToSideTable = false; - oldisa = LoadExclusive(&isa.bits); newisa = oldisa; if (slowpath(!newisa.nonpointer)) { ClearExclusive(&isa.bits); - if (rawISA()->isMetaClass()) return (id)this; - if (!tryRetain && sideTableLocked) sidetable_unlock(); if (tryRetain) return sidetable_tryRetain() ? (id)this : nil; - else return sidetable_retain(); + else return sidetable_retain(sideTableLocked); } // don't check newisa.fast_rr; we already called any RR overrides - if (slowpath(tryRetain && newisa.deallocating)) { + if (slowpath(newisa.isDeallocating())) { ClearExclusive(&isa.bits); - if (!tryRetain && sideTableLocked) sidetable_unlock(); - return nil; + if (sideTableLocked) { + ASSERT(variant == RRVariant::Full); + sidetable_unlock(); + } + if (slowpath(tryRetain)) { + return nil; + } else { + return (id)this; + } } uintptr_t carry; newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry); // extra_rc++ if (slowpath(carry)) { // newisa.extra_rc++ overflowed - if (!handleOverflow) { + if (variant != RRVariant::Full) { ClearExclusive(&isa.bits); return rootRetain_overflow(tryRetain); } @@ -530,14 +689,20 @@ objc_object::rootRetain(bool tryRetain, bool handleOverflow) newisa.extra_rc = RC_HALF; newisa.has_sidetable_rc = true; } - } while (slowpath(!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits))); + } while (slowpath(!StoreExclusive(&isa.bits, &oldisa.bits, newisa.bits))); - if (slowpath(transcribeToSideTable)) { - // Copy the 
other half of the retain counts to the side table. - sidetable_addExtraRC_nolock(RC_HALF); + if (variant == RRVariant::Full) { + if (slowpath(transcribeToSideTable)) { + // Copy the other half of the retain counts to the side table. + sidetable_addExtraRC_nolock(RC_HALF); + } + + if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock(); + } else { + ASSERT(!transcribeToSideTable); + ASSERT(!sideTableLocked); } - if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock(); return (id)this; } @@ -548,12 +713,7 @@ objc_object::release() { ASSERT(!isTaggedPointer()); - if (fastpath(!ISA()->hasCustomRR())) { - rootRelease(); - return; - } - - ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release)); + rootRelease(true, RRVariant::FastOrMsgSend); } @@ -570,35 +730,65 @@ objc_object::release() ALWAYS_INLINE bool objc_object::rootRelease() { - return rootRelease(true, false); + return rootRelease(true, RRVariant::Fast); } ALWAYS_INLINE bool objc_object::rootReleaseShouldDealloc() { - return rootRelease(false, false); + return rootRelease(false, RRVariant::Fast); } -ALWAYS_INLINE bool -objc_object::rootRelease(bool performDealloc, bool handleUnderflow) +ALWAYS_INLINE bool +objc_object::rootRelease(bool performDealloc, objc_object::RRVariant variant) { - if (isTaggedPointer()) return false; + if (slowpath(isTaggedPointer())) return false; bool sideTableLocked = false; - isa_t oldisa; - isa_t newisa; + isa_t newisa, oldisa; + + oldisa = LoadExclusive(&isa.bits); + + if (variant == RRVariant::FastOrMsgSend) { + // These checks are only meaningful for objc_release() + // They are here so that we avoid a re-load of the isa. 
+ if (slowpath(oldisa.getDecodedClass(false)->hasCustomRR())) { + ClearExclusive(&isa.bits); + if (oldisa.getDecodedClass(false)->canCallSwiftRR()) { + swiftRelease.load(memory_order_relaxed)((id)this); + return true; + } + ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release)); + return true; + } + } - retry: + if (slowpath(!oldisa.nonpointer)) { + // a Class is a Class forever, so we can perform this check once + // outside of the CAS loop + if (oldisa.getDecodedClass(false)->isMetaClass()) { + ClearExclusive(&isa.bits); + return false; + } + } + +retry: do { - oldisa = LoadExclusive(&isa.bits); newisa = oldisa; if (slowpath(!newisa.nonpointer)) { ClearExclusive(&isa.bits); - if (rawISA()->isMetaClass()) return false; - if (sideTableLocked) sidetable_unlock(); - return sidetable_release(performDealloc); + return sidetable_release(sideTableLocked, performDealloc); + } + if (slowpath(newisa.isDeallocating())) { + ClearExclusive(&isa.bits); + if (sideTableLocked) { + ASSERT(variant == RRVariant::Full); + sidetable_unlock(); + } + return false; } + // don't check newisa.fast_rr; we already called any RR overrides uintptr_t carry; newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry); // extra_rc-- @@ -606,10 +796,16 @@ objc_object::rootRelease(bool performDealloc, bool handleUnderflow) // don't ClearExclusive() goto underflow; } - } while (slowpath(!StoreReleaseExclusive(&isa.bits, - oldisa.bits, newisa.bits))); + } while (slowpath(!StoreReleaseExclusive(&isa.bits, &oldisa.bits, newisa.bits))); - if (slowpath(sideTableLocked)) sidetable_unlock(); + if (slowpath(newisa.isDeallocating())) + goto deallocate; + + if (variant == RRVariant::Full) { + if (slowpath(sideTableLocked)) sidetable_unlock(); + } else { + ASSERT(!sideTableLocked); + } return false; underflow: @@ -619,7 +815,7 @@ objc_object::rootRelease(bool performDealloc, bool handleUnderflow) newisa = oldisa; if (slowpath(newisa.has_sidetable_rc)) { - if (!handleUnderflow) { + if (variant != 
RRVariant::Full) { ClearExclusive(&isa.bits); return rootRelease_underflow(performDealloc); } @@ -632,35 +828,37 @@ objc_object::rootRelease(bool performDealloc, bool handleUnderflow) sideTableLocked = true; // Need to start over to avoid a race against // the nonpointer -> raw pointer transition. + oldisa = LoadExclusive(&isa.bits); goto retry; } // Try to remove some retain counts from the side table. - size_t borrowed = sidetable_subExtraRC_nolock(RC_HALF); + auto borrow = sidetable_subExtraRC_nolock(RC_HALF); - // To avoid races, has_sidetable_rc must remain set - // even if the side table count is now zero. + bool emptySideTable = borrow.remaining == 0; // we'll clear the side table if no refcounts remain there - if (borrowed > 0) { + if (borrow.borrowed > 0) { // Side table retain count decreased. // Try to add them to the inline count. - newisa.extra_rc = borrowed - 1; // redo the original decrement too - bool stored = StoreReleaseExclusive(&isa.bits, - oldisa.bits, newisa.bits); - if (!stored) { + bool didTransitionToDeallocating = false; + newisa.extra_rc = borrow.borrowed - 1; // redo the original decrement too + newisa.has_sidetable_rc = !emptySideTable; + + bool stored = StoreReleaseExclusive(&isa.bits, &oldisa.bits, newisa.bits); + + if (!stored && oldisa.nonpointer) { // Inline update failed. // Try it again right now. This prevents livelock on LL/SC // architectures where the side table access itself may have // dropped the reservation. 
- isa_t oldisa2 = LoadExclusive(&isa.bits); - isa_t newisa2 = oldisa2; - if (newisa2.nonpointer) { - uintptr_t overflow; - newisa2.bits = - addc(newisa2.bits, RC_ONE * (borrowed-1), 0, &overflow); - if (!overflow) { - stored = StoreReleaseExclusive(&isa.bits, oldisa2.bits, - newisa2.bits); + uintptr_t overflow; + newisa.bits = + addc(oldisa.bits, RC_ONE * (borrow.borrowed-1), 0, &overflow); + newisa.has_sidetable_rc = !emptySideTable; + if (!overflow) { + stored = StoreReleaseExclusive(&isa.bits, &oldisa.bits, newisa.bits); + if (stored) { + didTransitionToDeallocating = newisa.isDeallocating(); } } } @@ -668,32 +866,31 @@ objc_object::rootRelease(bool performDealloc, bool handleUnderflow) if (!stored) { // Inline update failed. // Put the retains back in the side table. - sidetable_addExtraRC_nolock(borrowed); + ClearExclusive(&isa.bits); + sidetable_addExtraRC_nolock(borrow.borrowed); + oldisa = LoadExclusive(&isa.bits); goto retry; } // Decrement successful after borrowing from side table. - // This decrement cannot be the deallocating decrement - the side - // table lock and has_sidetable_rc bit ensure that if everyone - // else tried to -release while we worked, the last one would block. - sidetable_unlock(); - return false; + if (emptySideTable) + sidetable_clearExtraRC_nolock(); + + if (!didTransitionToDeallocating) { + if (slowpath(sideTableLocked)) sidetable_unlock(); + return false; + } } else { // Side table is empty after all. Fall-through to the dealloc path. } } +deallocate: // Really deallocate. 
- if (slowpath(newisa.deallocating)) { - ClearExclusive(&isa.bits); - if (sideTableLocked) sidetable_unlock(); - return overrelease_error(); - // does not actually return - } - newisa.deallocating = true; - if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry; + ASSERT(newisa.isDeallocating()); + ASSERT(isa.isDeallocating()); if (slowpath(sideTableLocked)) sidetable_unlock(); @@ -736,10 +933,9 @@ objc_object::rootRetainCount() if (isTaggedPointer()) return (uintptr_t)this; sidetable_lock(); - isa_t bits = LoadExclusive(&isa.bits); - ClearExclusive(&isa.bits); + isa_t bits = __c11_atomic_load((_Atomic uintptr_t *)&isa.bits, __ATOMIC_RELAXED); if (bits.nonpointer) { - uintptr_t rc = 1 + bits.extra_rc; + uintptr_t rc = bits.extra_rc; if (bits.has_sidetable_rc) { rc += sidetable_getExtraRC_nolock(); } @@ -756,12 +952,29 @@ objc_object::rootRetainCount() #else // not SUPPORT_NONPOINTER_ISA +inline void +isa_t::setClass(Class cls, objc_object *obj) +{ + this->cls = cls; +} + +inline Class +isa_t::getClass(bool authenticated __unused) +{ + return cls; +} + +inline Class +isa_t::getDecodedClass(bool authenticated) +{ + return getClass(authenticated); +} inline Class -objc_object::ISA() +objc_object::ISA(bool authenticated __unused) { ASSERT(!isTaggedPointer()); - return isa.cls; + return isa.getClass(/*authenticated*/false); } inline Class @@ -781,7 +994,7 @@ inline void objc_object::initIsa(Class cls) { ASSERT(!isTaggedPointer()); - isa = (uintptr_t)cls; + isa.setClass(cls, this); } @@ -822,18 +1035,17 @@ objc_object::changeIsa(Class cls) // cls->isInitializing() || cls->isInitialized()); ASSERT(!isTaggedPointer()); - - isa_t oldisa, newisa; - newisa.cls = cls; - do { - oldisa = LoadExclusive(&isa.bits); - } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)); - - if (oldisa.cls && oldisa.cls->instancesHaveAssociatedObjects()) { + + isa_t newisa, oldisa; + newisa.setClass(cls, this); + oldisa.bits = __c11_atomic_exchange((_Atomic uintptr_t 
*)&isa.bits, newisa.bits, __ATOMIC_RELAXED); + + Class oldcls = oldisa.getDecodedClass(/*authenticated*/false); + if (oldcls && oldcls->instancesHaveAssociatedObjects()) { cls->setInstancesHaveAssociatedObjects(); } - - return oldisa.cls; + + return oldcls; } @@ -873,7 +1085,7 @@ inline bool objc_object::hasCxxDtor() { ASSERT(!isTaggedPointer()); - return isa.cls->hasCxxDtor(); + return isa.getClass(/*authenticated*/false)->hasCxxDtor(); } @@ -949,14 +1161,14 @@ inline bool objc_object::rootRelease() { if (isTaggedPointer()) return false; - return sidetable_release(true); + return sidetable_release(); } inline bool objc_object::rootReleaseShouldDealloc() { if (isTaggedPointer()) return false; - return sidetable_release(false); + return sidetable_release(/*locked*/false, /*performDealloc*/false); } diff --git a/runtime/objc-opt.mm b/runtime/objc-opt.mm index b21869b..44abbdf 100644 --- a/runtime/objc-opt.mm +++ b/runtime/objc-opt.mm @@ -515,7 +515,7 @@ void preopt_init(void) const uintptr_t start = (uintptr_t)_dyld_get_shared_cache_range(&length); if (start) { - objc::dataSegmentsRanges.add(start, start + length); + objc::dataSegmentsRanges.setSharedCacheRange(start, start + length); } // `opt` not set at compile time in order to detect too-early usage diff --git a/runtime/objc-os.h b/runtime/objc-os.h index 5a06252..6e38e0e 100644 --- a/runtime/objc-os.h +++ b/runtime/objc-os.h @@ -93,6 +93,16 @@ struct explicit_atomic : public std::atomic { } }; +namespace objc { +static inline uintptr_t mask16ShiftBits(uint16_t mask) +{ + // returns by how much 0xffff must be shifted "right" to return mask + uintptr_t maskShift = __builtin_clz(mask) - 16; + ASSERT((0xffff >> maskShift) == mask); + return maskShift; +} +} + #if TARGET_OS_MAC # define OS_UNFAIR_LOCK_INLINE 1 @@ -175,17 +185,25 @@ LoadExclusive(uintptr_t *src) static ALWAYS_INLINE bool -StoreExclusive(uintptr_t *dst, uintptr_t oldvalue __unused, uintptr_t value) +StoreExclusive(uintptr_t *dst, uintptr_t *oldvalue, 
uintptr_t value) { - return !__builtin_arm_strex(value, dst); + if (slowpath(__builtin_arm_strex(value, dst))) { + *oldvalue = LoadExclusive(dst); + return false; + } + return true; } static ALWAYS_INLINE bool -StoreReleaseExclusive(uintptr_t *dst, uintptr_t oldvalue __unused, uintptr_t value) +StoreReleaseExclusive(uintptr_t *dst, uintptr_t *oldvalue, uintptr_t value) { - return !__builtin_arm_stlex(value, dst); + if (slowpath(__builtin_arm_stlex(value, dst))) { + *oldvalue = LoadExclusive(dst); + return false; + } + return true; } static ALWAYS_INLINE @@ -206,17 +224,17 @@ LoadExclusive(uintptr_t *src) static ALWAYS_INLINE bool -StoreExclusive(uintptr_t *dst, uintptr_t oldvalue, uintptr_t value) +StoreExclusive(uintptr_t *dst, uintptr_t *oldvalue, uintptr_t value) { - return __c11_atomic_compare_exchange_weak((_Atomic(uintptr_t) *)dst, &oldvalue, value, __ATOMIC_RELAXED, __ATOMIC_RELAXED); + return __c11_atomic_compare_exchange_weak((_Atomic(uintptr_t) *)dst, oldvalue, value, __ATOMIC_RELAXED, __ATOMIC_RELAXED); } static ALWAYS_INLINE bool -StoreReleaseExclusive(uintptr_t *dst, uintptr_t oldvalue, uintptr_t value) +StoreReleaseExclusive(uintptr_t *dst, uintptr_t *oldvalue, uintptr_t value) { - return __c11_atomic_compare_exchange_weak((_Atomic(uintptr_t) *)dst, &oldvalue, value, __ATOMIC_RELEASE, __ATOMIC_RELAXED); + return __c11_atomic_compare_exchange_weak((_Atomic(uintptr_t) *)dst, oldvalue, value, __ATOMIC_RELEASE, __ATOMIC_RELAXED); } static ALWAYS_INLINE @@ -726,7 +744,7 @@ class mutex_tt : nocopy_t { lockdebug_remember_mutex(this); } - constexpr mutex_tt(const fork_unsafe_lock_t unsafe) : mLock(OS_UNFAIR_LOCK_INIT) { } + constexpr mutex_tt(__unused const fork_unsafe_lock_t unsafe) : mLock(OS_UNFAIR_LOCK_INIT) { } void lock() { lockdebug_mutex_lock(this); @@ -762,7 +780,7 @@ class mutex_tt : nocopy_t { // Address-ordered lock discipline for a pair of locks. 
static void lockTwo(mutex_tt *lock1, mutex_tt *lock2) { - if (lock1 < lock2) { + if ((uintptr_t)lock1 < (uintptr_t)lock2) { lock1->lock(); lock2->lock(); } else { @@ -812,7 +830,7 @@ class recursive_mutex_tt : nocopy_t { lockdebug_remember_recursive_mutex(this); } - constexpr recursive_mutex_tt(const fork_unsafe_lock_t unsafe) + constexpr recursive_mutex_tt(__unused const fork_unsafe_lock_t unsafe) : mLock(OS_UNFAIR_RECURSIVE_LOCK_INIT) { } @@ -877,7 +895,7 @@ class monitor_tt { lockdebug_remember_monitor(this); } - monitor_tt(const fork_unsafe_lock_t unsafe) + monitor_tt(__unused const fork_unsafe_lock_t unsafe) : mutex(PTHREAD_MUTEX_INITIALIZER), cond(PTHREAD_COND_INITIALIZER) { } @@ -1019,62 +1037,18 @@ ustrdupMaybeNil(const uint8_t *str) // OS version checking: // -// sdkVersion() -// DYLD_OS_VERSION(mac, ios, tv, watch, bridge) -// sdkIsOlderThan(mac, ios, tv, watch, bridge) // sdkIsAtLeast(mac, ios, tv, watch, bridge) -// +// // This version order matches OBJC_AVAILABLE. +// +// NOTE: prefer dyld_program_sdk_at_least when possible +#define sdkIsAtLeast(x, i, t, w, b) \ + (dyld_program_sdk_at_least(dyld_platform_version_macOS_ ## x) || \ + dyld_program_sdk_at_least(dyld_platform_version_iOS_ ## i) || \ + dyld_program_sdk_at_least(dyld_platform_version_tvOS_ ## t) || \ + dyld_program_sdk_at_least(dyld_platform_version_watchOS_ ## w) || \ + dyld_program_sdk_at_least(dyld_platform_version_bridgeOS_ ## b)) -#if TARGET_OS_OSX -# define DYLD_OS_VERSION(x, i, t, w, b) DYLD_MACOSX_VERSION_##x -# define sdkVersion() dyld_get_program_sdk_version() - -#elif TARGET_OS_IOS -# define DYLD_OS_VERSION(x, i, t, w, b) DYLD_IOS_VERSION_##i -# define sdkVersion() dyld_get_program_sdk_version() - -#elif TARGET_OS_TV - // dyld does not currently have distinct constants for tvOS -# define DYLD_OS_VERSION(x, i, t, w, b) DYLD_IOS_VERSION_##t -# define sdkVersion() dyld_get_program_sdk_version() - -#elif TARGET_OS_BRIDGE -# if TARGET_OS_WATCH -# error bridgeOS 1.0 not supported -# 
endif - // fixme don't need bridgeOS versioning yet -# define DYLD_OS_VERSION(x, i, t, w, b) DYLD_IOS_VERSION_##t -# define sdkVersion() dyld_get_program_sdk_bridge_os_version() - -#elif TARGET_OS_WATCH -# define DYLD_OS_VERSION(x, i, t, w, b) DYLD_WATCHOS_VERSION_##w - // watchOS has its own API for compatibility reasons -# define sdkVersion() dyld_get_program_sdk_watch_os_version() - -#else -# error unknown OS -#endif - - -#define sdkIsOlderThan(x, i, t, w, b) \ - (sdkVersion() < DYLD_OS_VERSION(x, i, t, w, b)) -#define sdkIsAtLeast(x, i, t, w, b) \ - (sdkVersion() >= DYLD_OS_VERSION(x, i, t, w, b)) - -// Allow bare 0 to be used in DYLD_OS_VERSION() and sdkIsOlderThan() -#define DYLD_MACOSX_VERSION_0 0 -#define DYLD_IOS_VERSION_0 0 -#define DYLD_TVOS_VERSION_0 0 -#define DYLD_WATCHOS_VERSION_0 0 -#define DYLD_BRIDGEOS_VERSION_0 0 - -// Pretty-print a DYLD_*_VERSION_* constant. -#define SDK_FORMAT "%hu.%hhu.%hhu" -#define FORMAT_SDK(v) \ - (unsigned short)(((uint32_t)(v))>>16), \ - (unsigned char)(((uint32_t)(v))>>8), \ - (unsigned char)(((uint32_t)(v))>>0) #ifndef __BUILDING_OBJCDT__ // fork() safety requires careful tracking of all locks. diff --git a/runtime/objc-os.mm b/runtime/objc-os.mm index db021d0..39cf2db 100644 --- a/runtime/objc-os.mm +++ b/runtime/objc-os.mm @@ -28,7 +28,7 @@ #include "objc-private.h" #include "objc-loadmethod.h" -#include "objc-cache.h" +#include "objc-bp-assist.h" #if TARGET_OS_WIN32 @@ -564,13 +564,12 @@ map_images_nolock(unsigned mhCount, const char * const mhPaths[], // Disable +initialize fork safety if the app has a // __DATA,__objc_fork_ok section. 
- if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_13) { + if (!dyld_program_sdk_at_least(dyld_platform_version_macOS_10_13)) { DisableInitializeForkSafety = true; if (PrintInitializing) { _objc_inform("INITIALIZE: disabling +initialize fork " "safety enforcement because the app is " - "too old (SDK version " SDK_FORMAT ")", - FORMAT_SDK(dyld_get_program_sdk_version())); + "too old.)"); } } @@ -662,6 +661,11 @@ static void static_init() for (size_t i = 0; i < count; i++) { inits[i](); } + auto offsets = getLibobjcInitializerOffsets(&_mh_dylib_header, &count); + for (size_t i = 0; i < count; i++) { + UnsignedInitializer init(offsets[i]); + init(); + } } @@ -927,7 +931,9 @@ void _objc_init(void) static_init(); runtime_init(); exception_init(); - cache_init(); +#if __OBJC2__ + cache_t::init(); +#endif _imp_implementationWithBlock_init(); _dyld_objc_notify_register(&map_images, load_images, unmap_image); diff --git a/runtime/objc-private.h b/runtime/objc-private.h index bf2a8de..c801ba0 100644 --- a/runtime/objc-private.h +++ b/runtime/objc-private.h @@ -53,6 +53,16 @@ #define ASSERT(x) assert(x) #endif +// `this` is never NULL in C++ unless we encounter UB, but checking for what's impossible +// is the point of these asserts, so disable the corresponding warning, and let's hope +// we will reach the assert despite the UB +#define ASSERT_THIS_NOT_NULL \ +_Pragma("clang diagnostic push") \ +_Pragma("clang diagnostic ignored \"-Wundefined-bool-conversion\"") \ +ASSERT(this) \ +_Pragma("clang diagnostic pop") + + struct objc_class; struct objc_object; struct category_t; @@ -71,13 +81,32 @@ union isa_t { isa_t() { } isa_t(uintptr_t value) : bits(value) { } - Class cls; uintptr_t bits; + +private: + // Accessing the class requires custom ptrauth operations, so + // force clients to go through setClass/getClass by making this + // private. 
+ Class cls; + +public: #if defined(ISA_BITFIELD) struct { ISA_BITFIELD; // defined in isa.h }; + + bool isDeallocating() { + return extra_rc == 0 && has_sidetable_rc == 0; + } + void setDeallocating() { + extra_rc = 0; + has_sidetable_rc = 0; + } #endif + + void setClass(Class cls, objc_object *obj); + Class getClass(bool authenticated); + Class getDecodedClass(bool authenticated); }; @@ -88,7 +117,7 @@ private: public: // ISA() assumes this is NOT a tagged pointer object - Class ISA(); + Class ISA(bool authenticated = false); // rawISA() assumes this is NOT a tagged pointer object or a non pointer ISA Class rawISA(); @@ -115,6 +144,7 @@ public: bool hasNonpointerIsa(); bool isTaggedPointer(); + bool isTaggedPointerOrNil(); bool isBasicTaggedPointer(); bool isExtTaggedPointer(); bool isClass(); @@ -156,22 +186,36 @@ private: uintptr_t overrelease_error(); #if SUPPORT_NONPOINTER_ISA + // Controls what parts of root{Retain,Release} to emit/inline + // - Full means the full (slow) implementation + // - Fast means the fastpaths only + // - FastOrMsgSend means the fastpaths but checking whether we should call + // -retain/-release or Swift, for the usage of objc_{retain,release} + enum class RRVariant { + Full, + Fast, + FastOrMsgSend, + }; + // Unified retain count manipulation for nonpointer isa - id rootRetain(bool tryRetain, bool handleOverflow); - bool rootRelease(bool performDealloc, bool handleUnderflow); + inline id rootRetain(bool tryRetain, RRVariant variant); + inline bool rootRelease(bool performDealloc, RRVariant variant); id rootRetain_overflow(bool tryRetain); uintptr_t rootRelease_underflow(bool performDealloc); void clearDeallocating_slow(); // Side table retain count overflow for nonpointer isa + struct SidetableBorrow { size_t borrowed, remaining; }; + void sidetable_lock(); void sidetable_unlock(); void sidetable_moveExtraRC_nolock(size_t extra_rc, bool isDeallocating, bool weaklyReferenced); bool sidetable_addExtraRC_nolock(size_t delta_rc); - 
size_t sidetable_subExtraRC_nolock(size_t delta_rc); + SidetableBorrow sidetable_subExtraRC_nolock(size_t delta_rc); size_t sidetable_getExtraRC_nolock(); + void sidetable_clearExtraRC_nolock(); #endif // Side-table-only retain count @@ -181,10 +225,10 @@ private: bool sidetable_isWeaklyReferenced(); void sidetable_setWeaklyReferenced_nolock(); - id sidetable_retain(); + id sidetable_retain(bool locked = false); id sidetable_retain_slow(SideTable& table); - uintptr_t sidetable_release(bool performDealloc = true); + uintptr_t sidetable_release(bool locked = false, bool performDealloc = true); uintptr_t sidetable_release_slow(SideTable& table, bool performDealloc = true); bool sidetable_tryRetain(); @@ -278,16 +322,24 @@ private: } }; + struct Range shared_cache; struct Range *ranges; uint32_t count; uint32_t size : 31; uint32_t sorted : 1; public: + inline bool inSharedCache(uintptr_t ptr) const { + return shared_cache.contains(ptr); + } inline bool contains(uint16_t witness, uintptr_t ptr) const { return witness < count && ranges[witness].contains(ptr); } + inline void setSharedCacheRange(uintptr_t start, uintptr_t end) { + shared_cache = Range{start, end}; + add(start, end); + } bool find(uintptr_t ptr, uint32_t &pos); void add(uintptr_t start, uintptr_t end); void remove(uintptr_t start, uintptr_t end); @@ -295,6 +347,10 @@ public: extern struct SafeRanges dataSegmentsRanges; +static inline bool inSharedCache(uintptr_t ptr) { + return dataSegmentsRanges.inSharedCache(ptr); +} + } // objc struct header_info; @@ -546,16 +602,12 @@ extern Class _calloc_class(size_t size); enum { LOOKUP_INITIALIZE = 1, LOOKUP_RESOLVER = 2, - LOOKUP_CACHE = 4, - LOOKUP_NIL = 8, + LOOKUP_NIL = 4, + LOOKUP_NOCACHE = 8, }; extern IMP lookUpImpOrForward(id obj, SEL, Class cls, int behavior); - -static inline IMP -lookUpImpOrNil(id obj, SEL sel, Class cls, int behavior = 0) -{ - return lookUpImpOrForward(obj, sel, cls, behavior | LOOKUP_CACHE | LOOKUP_NIL); -} +extern IMP 
lookUpImpOrForwardTryCache(id obj, SEL, Class cls, int behavior = 0); +extern IMP lookUpImpOrNilTryCache(id obj, SEL, Class cls, int behavior = 0); extern IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel); @@ -816,18 +868,18 @@ __attribute__((aligned(1))) typedef int16_t unaligned_int16_t; // Global operator new and delete. We must not use any app overrides. // This ALSO REQUIRES each of these be in libobjc's unexported symbol list. -#if __cplusplus +#if __cplusplus && !defined(TEST_OVERRIDES_NEW) #pragma clang diagnostic push #pragma clang diagnostic ignored "-Winline-new-delete" #include -inline void* operator new(std::size_t size) throw (std::bad_alloc) { return malloc(size); } -inline void* operator new[](std::size_t size) throw (std::bad_alloc) { return malloc(size); } -inline void* operator new(std::size_t size, const std::nothrow_t&) throw() { return malloc(size); } -inline void* operator new[](std::size_t size, const std::nothrow_t&) throw() { return malloc(size); } -inline void operator delete(void* p) throw() { free(p); } -inline void operator delete[](void* p) throw() { free(p); } -inline void operator delete(void* p, const std::nothrow_t&) throw() { free(p); } -inline void operator delete[](void* p, const std::nothrow_t&) throw() { free(p); } +inline void* operator new(std::size_t size) { return malloc(size); } +inline void* operator new[](std::size_t size) { return malloc(size); } +inline void* operator new(std::size_t size, const std::nothrow_t&) noexcept(true) { return malloc(size); } +inline void* operator new[](std::size_t size, const std::nothrow_t&) noexcept(true) { return malloc(size); } +inline void operator delete(void* p) noexcept(true) { free(p); } +inline void operator delete[](void* p) noexcept(true) { free(p); } +inline void operator delete(void* p, const std::nothrow_t&) noexcept(true) { free(p); } +inline void operator delete[](void* p, const std::nothrow_t&) noexcept(true) { free(p); } #pragma clang diagnostic pop #endif diff 
--git a/runtime/objc-ptrauth.h b/runtime/objc-ptrauth.h index 388abe6..8b8ed5b 100644 --- a/runtime/objc-ptrauth.h +++ b/runtime/objc-ptrauth.h @@ -60,6 +60,12 @@ #define __ptrauth_swift_value_witness_function_pointer(__key) #endif +// Workaround Definitions of ptrauth_sign_unauthenticated and friends generate unused variables warnings +#if __has_feature(ptrauth_calls) +#define UNUSED_WITHOUT_PTRAUTH +#else +#define UNUSED_WITHOUT_PTRAUTH __unused +#endif #if __has_feature(ptrauth_calls) @@ -124,12 +130,12 @@ public: // A "ptrauth" struct that just passes pointers through unchanged. struct PtrauthRaw { template - static T *sign(T *ptr, const void *address) { + static T *sign(T *ptr, __unused const void *address) { return ptr; } template - static T *auth(T *ptr, const void *address) { + static T *auth(T *ptr, __unused const void *address) { return ptr; } }; @@ -138,12 +144,12 @@ struct PtrauthRaw { // when reading. struct PtrauthStrip { template - static T *sign(T *ptr, const void *address) { + static T *sign(T *ptr, __unused const void *address) { return ptr; } template - static T *auth(T *ptr, const void *address) { + static T *auth(T *ptr, __unused const void *address) { return ptrauth_strip(ptr, ptrauth_key_process_dependent_data); } }; @@ -153,14 +159,14 @@ struct PtrauthStrip { template struct Ptrauth { template - static T *sign(T *ptr, const void *address) { + static T *sign(T *ptr, UNUSED_WITHOUT_PTRAUTH const void *address) { if (!ptr) return nullptr; return ptrauth_sign_unauthenticated(ptr, ptrauth_key_process_dependent_data, ptrauth_blend_discriminator(address, discriminator)); } template - static T *auth(T *ptr, const void *address) { + static T *auth(T *ptr, UNUSED_WITHOUT_PTRAUTH const void *address) { if (!ptr) return nullptr; return ptrauth_auth_data(ptr, ptrauth_key_process_dependent_data, ptrauth_blend_discriminator(address, discriminator)); @@ -173,7 +179,11 @@ template using RawPtr = WrappedPtr; #if __has_feature(ptrauth_calls) // Get a ptrauth 
type that uses a string discriminator. +#if __BUILDING_OBJCDT__ +#define PTRAUTH_STR(name) PtrauthStrip +#else #define PTRAUTH_STR(name) Ptrauth +#endif // When ptrauth is available, declare a template that wraps a type // in a WrappedPtr that uses an authenticated pointer using the diff --git a/runtime/objc-references.h b/runtime/objc-references.h index 8c79405..71fadae 100644 --- a/runtime/objc-references.h +++ b/runtime/objc-references.h @@ -35,7 +35,7 @@ __BEGIN_DECLS extern void _objc_associations_init(); extern void _object_set_associative_reference(id object, const void *key, id value, uintptr_t policy); extern id _object_get_associative_reference(id object, const void *key); -extern void _object_remove_assocations(id object); +extern void _object_remove_assocations(id object, bool deallocating); __END_DECLS diff --git a/runtime/objc-references.mm b/runtime/objc-references.mm index caa8910..b9ea085 100644 --- a/runtime/objc-references.mm +++ b/runtime/objc-references.mm @@ -38,7 +38,8 @@ enum { OBJC_ASSOCIATION_SETTER_COPY = 3, // NOTE: both bits are set, so we can simply test 1 bit in releaseValue below. OBJC_ASSOCIATION_GETTER_READ = (0 << 8), OBJC_ASSOCIATION_GETTER_RETAIN = (1 << 8), - OBJC_ASSOCIATION_GETTER_AUTORELEASE = (2 << 8) + OBJC_ASSOCIATION_GETTER_AUTORELEASE = (2 << 8), + OBJC_ASSOCIATION_SYSTEM_OBJECT = _OBJC_ASSOCIATION_SYSTEM_OBJECT, // 1 << 16 }; spinlock_t AssociationsManagerLock; @@ -172,6 +173,7 @@ _object_set_associative_reference(id object, const void *key, id value, uintptr_ // retain the new value (if any) outside the lock. 
association.acquireValue(); + bool isFirstAssociation = false; { AssociationsManager manager; AssociationsHashMap &associations(manager.get()); @@ -180,7 +182,7 @@ _object_set_associative_reference(id object, const void *key, id value, uintptr_ auto refs_result = associations.try_emplace(disguised, ObjectAssociationMap{}); if (refs_result.second) { /* it's the first association we make */ - object->setHasAssociatedObjects(); + isFirstAssociation = true; } /* establish or replace the association */ @@ -206,6 +208,13 @@ _object_set_associative_reference(id object, const void *key, id value, uintptr_ } } + // Call setHasAssociatedObjects outside the lock, since this + // will call the object's _noteAssociatedObjects method if it + // has one, and this may trigger +initialize which might do + // arbitrary stuff, including setting more associated objects. + if (isFirstAssociation) + object->setHasAssociatedObjects(); + // release the old value (outside of the lock). association.releaseHeldValue(); } @@ -215,7 +224,7 @@ _object_set_associative_reference(id object, const void *key, id value, uintptr_ // raw isa objects (such as OS Objects) that can't track // whether they have associated objects. void -_object_remove_assocations(id object) +_object_remove_assocations(id object, bool deallocating) { ObjectAssociationMap refs{}; @@ -225,12 +234,36 @@ _object_remove_assocations(id object) AssociationsHashMap::iterator i = associations.find((objc_object *)object); if (i != associations.end()) { refs.swap(i->second); - associations.erase(i); + + // If we are not deallocating, then SYSTEM_OBJECT associations are preserved. + bool didReInsert = false; + if (!deallocating) { + for (auto &ref: refs) { + if (ref.second.policy() & OBJC_ASSOCIATION_SYSTEM_OBJECT) { + i->second.insert(ref); + didReInsert = true; + } + } + } + if (!didReInsert) + associations.erase(i); } } + // Associations to be released after the normal ones. 
+ SmallVector laterRefs; + // release everything (outside of the lock). for (auto &i: refs) { - i.second.releaseHeldValue(); + if (i.second.policy() & OBJC_ASSOCIATION_SYSTEM_OBJECT) { + // If we are not deallocating, then RELEASE_LATER associations don't get released. + if (deallocating) + laterRefs.append(&i.second); + } else { + i.second.releaseHeldValue(); + } + } + for (auto *later: laterRefs) { + later->releaseHeldValue(); } } diff --git a/runtime/objc-runtime-new.h b/runtime/objc-runtime-new.h index d6ce37c..f44a0d0 100644 --- a/runtime/objc-runtime-new.h +++ b/runtime/objc-runtime-new.h @@ -25,6 +25,7 @@ #define _OBJC_RUNTIME_NEW_H #include "PointerUnion.h" +#include // class_data_bits_t is the class_t->data field (class_rw_t pointer plus flags) // The extra bits are optimized for the retain/release and alloc/dealloc paths. @@ -94,13 +95,19 @@ // class has started realizing but not yet completed it #define RW_REALIZING (1<<19) +#if CONFIG_USE_PREOPT_CACHES +// this class and its descendants can't have preopt caches with inlined sels +#define RW_NOPREOPT_SELS (1<<2) +// this class and its descendants can't have preopt caches +#define RW_NOPREOPT_CACHE (1<<1) +#endif + // class is a metaclass (copied from ro) #define RW_META RO_META // (1<<0) // NOTE: MORE RW_ FLAGS DEFINED BELOW - // Values for class_rw_t->flags (RW_*), cache_t->_flags (FAST_CACHE_*), // or class_t->bits (FAST_*). // @@ -215,19 +222,19 @@ private: #endif // Compute the ptrauth signing modifier from &_imp, newSel, and cls. - uintptr_t modifierForSEL(SEL newSel, Class cls) const { - return (uintptr_t)&_imp ^ (uintptr_t)newSel ^ (uintptr_t)cls; + uintptr_t modifierForSEL(bucket_t *base, SEL newSel, Class cls) const { + return (uintptr_t)base ^ (uintptr_t)newSel ^ (uintptr_t)cls; } // Sign newImp, with &_imp, newSel, and cls as modifiers. 
- uintptr_t encodeImp(IMP newImp, SEL newSel, Class cls) const { + uintptr_t encodeImp(UNUSED_WITHOUT_PTRAUTH bucket_t *base, IMP newImp, UNUSED_WITHOUT_PTRAUTH SEL newSel, Class cls) const { if (!newImp) return 0; #if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_PTRAUTH return (uintptr_t) ptrauth_auth_and_resign(newImp, ptrauth_key_function_pointer, 0, ptrauth_key_process_dependent_code, - modifierForSEL(newSel, cls)); + modifierForSEL(base, newSel, cls)); #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR return (uintptr_t)newImp ^ (uintptr_t)cls; #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_NONE @@ -238,10 +245,16 @@ private: } public: - inline SEL sel() const { return _sel.load(memory_order::memory_order_relaxed); } + static inline size_t offsetOfSel() { return offsetof(bucket_t, _sel); } + inline SEL sel() const { return _sel.load(memory_order_relaxed); } - inline IMP rawImp(objc_class *cls) const { - uintptr_t imp = _imp.load(memory_order::memory_order_relaxed); +#if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR +#define MAYBE_UNUSED_ISA +#else +#define MAYBE_UNUSED_ISA __attribute__((unused)) +#endif + inline IMP rawImp(MAYBE_UNUSED_ISA objc_class *cls) const { + uintptr_t imp = _imp.load(memory_order_relaxed); if (!imp) return nil; #if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_PTRAUTH #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR @@ -253,15 +266,15 @@ public: return (IMP)imp; } - inline IMP imp(Class cls) const { - uintptr_t imp = _imp.load(memory_order::memory_order_relaxed); + inline IMP imp(UNUSED_WITHOUT_PTRAUTH bucket_t *base, Class cls) const { + uintptr_t imp = _imp.load(memory_order_relaxed); if (!imp) return nil; #if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_PTRAUTH - SEL sel = _sel.load(memory_order::memory_order_relaxed); + SEL sel = _sel.load(memory_order_relaxed); return (IMP) ptrauth_auth_and_resign((const void *)imp, ptrauth_key_process_dependent_code, - modifierForSEL(sel, cls), + modifierForSEL(base, sel, cls), 
ptrauth_key_function_pointer, 0); #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR return (IMP)(imp ^ (uintptr_t)cls); @@ -273,26 +286,97 @@ public: } template - void set(SEL newSel, IMP newImp, Class cls); + void set(bucket_t *base, SEL newSel, IMP newImp, Class cls); }; +/* dyld_shared_cache_builder and obj-C agree on these definitions */ +enum { + OBJC_OPT_METHODNAME_START = 0, + OBJC_OPT_METHODNAME_END = 1, + OBJC_OPT_INLINED_METHODS_START = 2, + OBJC_OPT_INLINED_METHODS_END = 3, + + __OBJC_OPT_OFFSETS_COUNT, +}; + +#if CONFIG_USE_PREOPT_CACHES +extern uintptr_t objc_opt_offsets[__OBJC_OPT_OFFSETS_COUNT]; +#endif + +/* dyld_shared_cache_builder and obj-C agree on these definitions */ +struct preopt_cache_entry_t { + uint32_t sel_offs; + uint32_t imp_offs; +}; + +/* dyld_shared_cache_builder and obj-C agree on these definitions */ +struct preopt_cache_t { + int32_t fallback_class_offset; + union { + struct { + uint16_t shift : 5; + uint16_t mask : 11; + }; + uint16_t hash_params; + }; + uint16_t occupied : 14; + uint16_t has_inlines : 1; + uint16_t bit_one : 1; + preopt_cache_entry_t entries[]; + + inline int capacity() const { + return mask + 1; + } +}; + +// returns: +// - the cached IMP when one is found +// - nil if there's no cached value and the cache is dynamic +// - `value_on_constant_cache_miss` if there's no cached value and the cache is preoptimized +extern "C" IMP cache_getImp(Class cls, SEL sel, IMP value_on_constant_cache_miss = nil); struct cache_t { +private: + explicit_atomic _bucketsAndMaybeMask; + union { + struct { + explicit_atomic _maybeMask; +#if __LP64__ + uint16_t _flags; +#endif + uint16_t _occupied; + }; + explicit_atomic _originalPreoptCache; + }; + #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_OUTLINED - explicit_atomic _buckets; - explicit_atomic _mask; -#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 - explicit_atomic _maskAndBuckets; - mask_t _mask_unused; + // _bucketsAndMaybeMask is a buckets_t pointer + // 
_maybeMask is the buckets mask + + static constexpr uintptr_t bucketsMask = ~0ul; + static_assert(!CONFIG_USE_PREOPT_CACHES, "preoptimized caches not supported"); +#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS + static constexpr uintptr_t maskShift = 48; + static constexpr uintptr_t maxMask = ((uintptr_t)1 << (64 - maskShift)) - 1; + static constexpr uintptr_t bucketsMask = ((uintptr_t)1 << maskShift) - 1; + static_assert(bucketsMask >= MACH_VM_MAX_ADDRESS, "Bucket field doesn't have enough bits for arbitrary pointers."); +#if CONFIG_USE_PREOPT_CACHES + static constexpr uintptr_t preoptBucketsMarker = 1ul; + static constexpr uintptr_t preoptBucketsMask = bucketsMask & ~preoptBucketsMarker; +#endif +#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16 + // _bucketsAndMaybeMask is a buckets_t pointer in the low 48 bits + // _maybeMask is unused, the mask is stored in the top 16 bits. + // How much the mask is shifted by. static constexpr uintptr_t maskShift = 48; - + // Additional bits after the mask which must be zero. msgSend // takes advantage of these additional bits to construct the value // `mask << 4` from `_maskAndBuckets` in a single instruction. static constexpr uintptr_t maskZeroBits = 4; - + // The largest mask value we can store. static constexpr uintptr_t maxMask = ((uintptr_t)1 << (64 - maskShift)) - 1; @@ -300,40 +384,107 @@ struct cache_t { static constexpr uintptr_t bucketsMask = ((uintptr_t)1 << (maskShift - maskZeroBits)) - 1; // Ensure we have enough bits for the buckets pointer. - static_assert(bucketsMask >= MACH_VM_MAX_ADDRESS, "Bucket field doesn't have enough bits for arbitrary pointers."); + static_assert(bucketsMask >= MACH_VM_MAX_ADDRESS, + "Bucket field doesn't have enough bits for arbitrary pointers."); + +#if CONFIG_USE_PREOPT_CACHES + static constexpr uintptr_t preoptBucketsMarker = 1ul; +#if __has_feature(ptrauth_calls) + // 63..60: hash_mask_shift + // 59..55: hash_shift + // 54.. 
1: buckets ptr + auth + // 0: always 1 + static constexpr uintptr_t preoptBucketsMask = 0x007ffffffffffffe; + static inline uintptr_t preoptBucketsHashParams(const preopt_cache_t *cache) { + uintptr_t value = (uintptr_t)cache->shift << 55; + // masks have 11 bits but can be 0, so we compute + // the right shift for 0x7fff rather than 0xffff + return value | ((objc::mask16ShiftBits(cache->mask) - 1) << 60); + } +#else + // 63..53: hash_mask + // 52..48: hash_shift + // 47.. 1: buckets ptr + // 0: always 1 + static constexpr uintptr_t preoptBucketsMask = 0x0000fffffffffffe; + static inline uintptr_t preoptBucketsHashParams(const preopt_cache_t *cache) { + return (uintptr_t)cache->hash_params << 48; + } +#endif +#endif // CONFIG_USE_PREOPT_CACHES #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4 - // _maskAndBuckets stores the mask shift in the low 4 bits, and - // the buckets pointer in the remainder of the value. The mask - // shift is the value where (0xffff >> shift) produces the correct - // mask. This is equal to 16 - log2(cache_size). - explicit_atomic _maskAndBuckets; - mask_t _mask_unused; + // _bucketsAndMaybeMask is a buckets_t pointer in the top 28 bits + // _maybeMask is unused, the mask length is stored in the low 4 bits static constexpr uintptr_t maskBits = 4; static constexpr uintptr_t maskMask = (1 << maskBits) - 1; static constexpr uintptr_t bucketsMask = ~maskMask; + static_assert(!CONFIG_USE_PREOPT_CACHES, "preoptimized caches not supported"); #else #error Unknown cache mask storage type. 
#endif - -#if __LP64__ - uint16_t _flags; + + bool isConstantEmptyCache() const; + bool canBeFreed() const; + mask_t mask() const; + +#if CONFIG_USE_PREOPT_CACHES + void initializeToPreoptCacheInDisguise(const preopt_cache_t *cache); + const preopt_cache_t *disguised_preopt_cache() const; #endif - uint16_t _occupied; -public: - static bucket_t *emptyBuckets(); - - struct bucket_t *buckets(); - mask_t mask(); - mask_t occupied(); void incrementOccupied(); void setBucketsAndMask(struct bucket_t *newBuckets, mask_t newMask); + + void reallocate(mask_t oldCapacity, mask_t newCapacity, bool freeOld); + void collect_free(bucket_t *oldBuckets, mask_t oldCapacity); + + static bucket_t *emptyBuckets(); + static bucket_t *allocateBuckets(mask_t newCapacity); + static bucket_t *emptyBucketsForCapacity(mask_t capacity, bool allocate = true); + static struct bucket_t * endMarker(struct bucket_t *b, uint32_t cap); + void bad_cache(id receiver, SEL sel) __attribute__((noreturn, cold)); + +public: + // The following four fields are public for objcdt's use only. + // objcdt reaches into fields while the process is suspended + // hence doesn't care for locks and pesky little details like this + // and can safely use these. 
+ unsigned capacity() const; + struct bucket_t *buckets() const; + Class cls() const; + +#if CONFIG_USE_PREOPT_CACHES + const preopt_cache_t *preopt_cache() const; +#endif + + mask_t occupied() const; void initializeToEmpty(); - unsigned capacity(); - bool isConstantEmptyCache(); - bool canBeFreed(); +#if CONFIG_USE_PREOPT_CACHES + bool isConstantOptimizedCache(bool strict = false, uintptr_t empty_addr = (uintptr_t)&_objc_empty_cache) const; + bool shouldFlush(SEL sel, IMP imp) const; + bool isConstantOptimizedCacheWithInlinedSels() const; + Class preoptFallbackClass() const; + void maybeConvertToPreoptimized(); + void initializeToEmptyOrPreoptimizedInDisguise(); +#else + inline bool isConstantOptimizedCache(bool strict = false, uintptr_t empty_addr = 0) const { return false; } + inline bool shouldFlush(SEL sel, IMP imp) const { + return cache_getImp(cls(), sel) == imp; + } + inline bool isConstantOptimizedCacheWithInlinedSels() const { return false; } + inline void initializeToEmptyOrPreoptimizedInDisguise() { initializeToEmpty(); } +#endif + + void insert(SEL sel, IMP imp, id receiver); + void copyCacheNolock(objc_imp_cache_entry *buffer, int len); + void destroy(); + void eraseNolock(const char *func); + + static void init(); + static void collectNolock(bool collectALot); + static size_t bytesForCapacity(uint32_t cap); #if __LP64__ bool getBit(uint16_t flags) const { @@ -396,14 +547,6 @@ public: // nothing } #endif - - static size_t bytesForCapacity(uint32_t cap); - static struct bucket_t * endMarker(struct bucket_t *b, uint32_t cap); - - void reallocate(mask_t oldCapacity, mask_t newCapacity, bool freeOld); - void insert(Class cls, SEL sel, IMP imp, id receiver); - - static void bad_cache(id receiver, SEL sel, Class isa) __attribute__((noreturn, cold)); }; @@ -424,6 +567,8 @@ struct RelativePointer: nocopy_t { int32_t offset; T get() const { + if (offset == 0) + return nullptr; uintptr_t base = (uintptr_t)&offset; uintptr_t signExtendedOffset = 
(uintptr_t)(intptr_t)offset; uintptr_t pointer = base + signExtendedOffset; @@ -445,7 +590,7 @@ struct stub_class_t { // A pointer modifier that does nothing to the pointer. struct PointerModifierNop { template - static T *modify(const ListType &list, T *ptr) { return ptr; } + static T *modify(__unused const ListType &list, T *ptr) { return ptr; } }; /*********************************************************************** @@ -573,6 +718,11 @@ struct entsize_list_tt { }; +namespace objc { +// Let method_t::small use this from objc-private.h. +static inline bool inSharedCache(uintptr_t ptr); +} + struct method_t { static const uint32_t smallMethodListFlag = 0x80000000; @@ -595,9 +745,16 @@ private: // The representation of a "small" method. This stores three // relative offsets to the name, types, and implementation. struct small { - RelativePointer name; + // The name field either refers to a selector (in the shared + // cache) or a selref (everywhere else). + RelativePointer name; RelativePointer types; RelativePointer imp; + + bool inSharedCache() const { + return (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && + objc::inSharedCache((uintptr_t)this)); + } }; small &small() const { @@ -631,8 +788,14 @@ public: return *(struct big *)this; } - SEL &name() const { - return isSmall() ? *small().name.get() : big().name; + SEL name() const { + if (isSmall()) { + return (small().inSharedCache() + ? (SEL)small().name.get() + : *(SEL *)small().name.get()); + } else { + return big().name; + } } const char *types() const { return isSmall() ? 
small().types.get() : big().types; @@ -648,13 +811,31 @@ public: return big().imp; } + SEL getSmallNameAsSEL() const { + ASSERT(small().inSharedCache()); + return (SEL)small().name.get(); + } + + SEL getSmallNameAsSELRef() const { + ASSERT(!small().inSharedCache()); + return *(SEL *)small().name.get(); + } + + void setName(SEL name) { + if (isSmall()) { + ASSERT(!small().inSharedCache()); + *(SEL *)small().name.get() = name; + } else { + big().name = name; + } + } + void setImp(IMP imp) { if (isSmall()) { remapImp(imp); } else { big().imp = imp; } - } objc_method_description *getDescription() const { @@ -800,7 +981,7 @@ struct protocol_t : objc_object { bool isCanonical() const; void clearIsCanonical(); -# define HAS_FIELD(f) (size >= offsetof(protocol_t, f) + sizeof(f)) +# define HAS_FIELD(f) ((uintptr_t)(&f) < ((uintptr_t)this + size)) bool hasExtendedMethodTypesField() const { return HAS_FIELD(_extendedMethodTypes); @@ -861,10 +1042,15 @@ struct class_ro_t { uint32_t reserved; #endif - const uint8_t * ivarLayout; - - const char * name; - WrappedPtr baseMethodList; + union { + const uint8_t * ivarLayout; + Class nonMetaclass; + }; + + explicit_atomic name; + // With ptrauth, this is signed if it points to a small list, but + // may be unsigned if it points to a big list. + void *baseMethodList; protocol_list_t * baseProtocols; const ivar_list_t * ivars; @@ -882,21 +1068,105 @@ struct class_ro_t { } } + const char *getName() const { + return name.load(std::memory_order_acquire); + } + + static const uint16_t methodListPointerDiscriminator = 0xC310; +#if 0 // FIXME: enable this when we get a non-empty definition of __ptrauth_objc_method_list_pointer from ptrauth.h. 
+ static_assert(std::is_same< + void * __ptrauth_objc_method_list_pointer *, + void * __ptrauth(ptrauth_key_method_list_pointer, 1, methodListPointerDiscriminator) *>::value, + "Method list pointer signing discriminator must match ptrauth.h"); +#endif + method_list_t *baseMethods() const { - return baseMethodList; +#if __has_feature(ptrauth_calls) + method_list_t *ptr = ptrauth_strip((method_list_t *)baseMethodList, ptrauth_key_method_list_pointer); + if (ptr == nullptr) + return nullptr; + + // Don't auth if the class_ro and the method list are both in the shared cache. + // This is secure since they'll be read-only, and this allows the shared cache + // to cut down on the number of signed pointers it has. + bool roInSharedCache = objc::inSharedCache((uintptr_t)this); + bool listInSharedCache = objc::inSharedCache((uintptr_t)ptr); + if (roInSharedCache && listInSharedCache) + return ptr; + + // Auth all other small lists. + if (ptr->isSmallList()) + ptr = ptrauth_auth_data((method_list_t *)baseMethodList, + ptrauth_key_method_list_pointer, + ptrauth_blend_discriminator(&baseMethodList, + methodListPointerDiscriminator)); + return ptr; +#else + return (method_list_t *)baseMethodList; +#endif + } + + uintptr_t baseMethodListPtrauthData() const { + return ptrauth_blend_discriminator(&baseMethodList, + methodListPointerDiscriminator); } class_ro_t *duplicate() const { - if (flags & RO_HAS_SWIFT_INITIALIZER) { - size_t size = sizeof(*this) + sizeof(_swiftMetadataInitializer_NEVER_USE[0]); - class_ro_t *ro = (class_ro_t *)memdup(this, size); + bool hasSwiftInitializer = flags & RO_HAS_SWIFT_INITIALIZER; + + size_t size = sizeof(*this); + if (hasSwiftInitializer) + size += sizeof(_swiftMetadataInitializer_NEVER_USE[0]); + + class_ro_t *ro = (class_ro_t *)memdup(this, size); + + if (hasSwiftInitializer) ro->_swiftMetadataInitializer_NEVER_USE[0] = this->_swiftMetadataInitializer_NEVER_USE[0]; - return ro; + +#if __has_feature(ptrauth_calls) + // Re-sign the method list 
pointer if it was signed. + // NOTE: It is possible for a signed pointer to have a signature + // that is all zeroes. This is indistinguishable from a raw pointer. + // This code will treat such a pointer as signed and re-sign it. A + // false positive is safe: method list pointers are either authed or + // stripped, so if baseMethods() doesn't expect it to be signed, it + // will ignore the signature. + void *strippedBaseMethodList = ptrauth_strip(baseMethodList, ptrauth_key_method_list_pointer); + void *signedBaseMethodList = ptrauth_sign_unauthenticated(strippedBaseMethodList, + ptrauth_key_method_list_pointer, + baseMethodListPtrauthData()); + if (baseMethodList == signedBaseMethodList) { + ro->baseMethodList = ptrauth_auth_and_resign(baseMethodList, + ptrauth_key_method_list_pointer, + baseMethodListPtrauthData(), + ptrauth_key_method_list_pointer, + ro->baseMethodListPtrauthData()); } else { - size_t size = sizeof(*this); - class_ro_t *ro = (class_ro_t *)memdup(this, size); - return ro; + // Special case: a class_ro_t in the shared cache pointing to a + // method list in the shared cache will not have a signed pointer, + // but the duplicate will be expected to have a signed pointer since + // it's not in the shared cache. Detect that and sign it. 
+ bool roInSharedCache = objc::inSharedCache((uintptr_t)this); + bool listInSharedCache = objc::inSharedCache((uintptr_t)strippedBaseMethodList); + if (roInSharedCache && listInSharedCache) + ro->baseMethodList = ptrauth_sign_unauthenticated(strippedBaseMethodList, + ptrauth_key_method_list_pointer, + ro->baseMethodListPtrauthData()); } +#endif + + return ro; + } + + Class getNonMetaclass() const { + ASSERT(flags & RO_META); + return nonMetaclass; + } + + const uint8_t *getIvarLayout() const { + if (flags & RO_META) + return nullptr; + return ivarLayout; } }; @@ -1036,10 +1306,9 @@ class list_array_tt { return iterator(e, e); } - - uint32_t countLists() { + inline uint32_t countLists(const std::function & peek) const { if (hasArray()) { - return array()->count; + return peek(array())->count; } else if (list) { return 1; } else { @@ -1047,6 +1316,10 @@ class list_array_tt { } } + uint32_t countLists() { + return countLists([](array_t *x) { return x; }); + } + const Ptr* beginLists() const { if (hasArray()) { return array()->lists; @@ -1317,12 +1590,10 @@ private: void setAndClearBits(uintptr_t set, uintptr_t clear) { ASSERT((set & clear) == 0); - uintptr_t oldBits; - uintptr_t newBits; + uintptr_t newBits, oldBits = LoadExclusive(&bits); do { - oldBits = LoadExclusive(&bits); newBits = (oldBits | set) & ~clear; - } while (!StoreReleaseExclusive(&bits, oldBits, newBits)); + } while (slowpath(!StoreReleaseExclusive(&bits, &oldBits, newBits))); } void setBits(uintptr_t set) { @@ -1352,7 +1623,7 @@ public: // Get the class's ro data, even in the presence of concurrent realization. 
// fixme this isn't really safe without a compiler barrier at least // and probably a memory barrier when realizeClass changes the data field - const class_ro_t *safe_ro() { + const class_ro_t *safe_ro() const { class_rw_t *maybe_rw = data(); if (maybe_rw->flags & RW_REALIZED) { // maybe_rw is rw @@ -1363,13 +1634,16 @@ public: } } - void setClassArrayIndex(unsigned Idx) { #if SUPPORT_INDEXED_ISA + void setClassArrayIndex(unsigned Idx) { // 0 is unused as then we can rely on zero-initialisation from calloc. ASSERT(Idx > 0); data()->index = Idx; -#endif } +#else + void setClassArrayIndex(__unused unsigned Idx) { + } +#endif unsigned classArrayIndex() { #if SUPPORT_INDEXED_ISA @@ -1412,11 +1686,49 @@ public: struct objc_class : objc_object { + objc_class(const objc_class&) = delete; + objc_class(objc_class&&) = delete; + void operator=(const objc_class&) = delete; + void operator=(objc_class&&) = delete; // Class ISA; Class superclass; cache_t cache; // formerly cache pointer and vtable class_data_bits_t bits; // class_rw_t * plus custom rr/alloc flags + Class getSuperclass() const { +#if __has_feature(ptrauth_calls) +# if ISA_SIGNING_AUTH_MODE == ISA_SIGNING_AUTH + if (superclass == Nil) + return Nil; + +#if SUPERCLASS_SIGNING_TREAT_UNSIGNED_AS_NIL + void *stripped = ptrauth_strip((void *)superclass, ISA_SIGNING_KEY); + if ((void *)superclass == stripped) { + void *resigned = ptrauth_sign_unauthenticated(stripped, ISA_SIGNING_KEY, ptrauth_blend_discriminator(&superclass, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS)); + if ((void *)superclass != resigned) + return Nil; + } +#endif + + void *result = ptrauth_auth_data((void *)superclass, ISA_SIGNING_KEY, ptrauth_blend_discriminator(&superclass, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS)); + return (Class)result; + +# else + return (Class)ptrauth_strip((void *)superclass, ISA_SIGNING_KEY); +# endif +#else + return superclass; +#endif + } + + void setSuperclass(Class newSuperclass) { +#if ISA_SIGNING_SIGN_MODE == 
ISA_SIGNING_SIGN_ALL + superclass = (Class)ptrauth_sign_unauthenticated((void *)newSuperclass, ISA_SIGNING_KEY, ptrauth_blend_discriminator(&superclass, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS)); +#else + superclass = newSuperclass; +#endif + } + class_rw_t *data() const { return bits.data(); } @@ -1568,6 +1880,30 @@ struct objc_class : objc_object { void setInstancesRequireRawIsaRecursively(bool inherited = false); void printInstancesRequireRawIsa(bool inherited); +#if CONFIG_USE_PREOPT_CACHES + bool allowsPreoptCaches() const { + return !(bits.data()->flags & RW_NOPREOPT_CACHE); + } + bool allowsPreoptInlinedSels() const { + return !(bits.data()->flags & RW_NOPREOPT_SELS); + } + void setDisallowPreoptCaches() { + bits.data()->setFlags(RW_NOPREOPT_CACHE | RW_NOPREOPT_SELS); + } + void setDisallowPreoptInlinedSels() { + bits.data()->setFlags(RW_NOPREOPT_SELS); + } + void setDisallowPreoptCachesRecursively(const char *why); + void setDisallowPreoptInlinedSelsRecursively(const char *why); +#else + bool allowsPreoptCaches() const { return false; } + bool allowsPreoptInlinedSels() const { return false; } + void setDisallowPreoptCaches() { } + void setDisallowPreoptInlinedSels() { } + void setDisallowPreoptCachesRecursively(const char *why) { } + void setDisallowPreoptInlinedSelsRecursively(const char *why) { } +#endif + bool canAllocNonpointer() { ASSERT(!isFuture()); return !instancesRequireRawIsa(); @@ -1589,6 +1925,28 @@ struct objc_class : objc_object { return bits.isSwiftStable_ButAllowLegacyForNow(); } + uint32_t swiftClassFlags() { + return *(uint32_t *)(&bits + 1); + } + + bool usesSwiftRefcounting() { + if (!isSwiftStable()) return false; + return bool(swiftClassFlags() & 2); //ClassFlags::UsesSwiftRefcounting + } + + bool canCallSwiftRR() { + // !hasCustomCore() is being used as a proxy for isInitialized(). 
All + // classes with Swift refcounting are !hasCustomCore() (unless there are + // category or swizzling shenanigans), but that bit is not set until a + // class is initialized. Checking isInitialized requires an extra + // indirection that we want to avoid on RR fast paths. + // + // In the unlikely event that someone causes a class with Swift + // refcounting to be hasCustomCore(), we'll fall back to sending -retain + // or -release, which is still correct. + return !hasCustomCore() && usesSwiftRefcounting(); + } + bool isStubClass() const { uintptr_t isa = (uintptr_t)isaBits(); return 1 <= isa && isa < 16; @@ -1608,8 +1966,7 @@ struct objc_class : objc_object { // Check the true legacy vs stable distinguisher. // The low bit of Swift's ClassFlags is SET for true legacy // and UNSET for stable pretending to be legacy. - uint32_t swiftClassFlags = *(uint32_t *)(&bits + 1); - bool isActuallySwiftLegacy = bool(swiftClassFlags & 1); + bool isActuallySwiftLegacy = bool(swiftClassFlags() & 1); return !isActuallySwiftLegacy; } @@ -1695,11 +2052,13 @@ struct objc_class : objc_object { // Returns true if this is an unrealized future class. // Locking: To prevent concurrent realization, hold runtimeLock. 
bool isFuture() const { + if (isStubClass()) + return false; return data()->flags & RW_FUTURE; } - bool isMetaClass() { - ASSERT(this); + bool isMetaClass() const { + ASSERT_THIS_NOT_NULL; ASSERT(isRealized()); #if FAST_CACHE_META return cache.getBit(FAST_CACHE_META); @@ -1712,31 +2071,46 @@ struct objc_class : objc_object { bool isMetaClassMaybeUnrealized() { static_assert(offsetof(class_rw_t, flags) == offsetof(class_ro_t, flags), "flags alias"); static_assert(RO_META == RW_META, "flags alias"); + if (isStubClass()) + return false; return data()->flags & RW_META; } // NOT identical to this->ISA when this is a metaclass Class getMeta() { - if (isMetaClass()) return (Class)this; + if (isMetaClassMaybeUnrealized()) return (Class)this; else return this->ISA(); } bool isRootClass() { - return superclass == nil; + return getSuperclass() == nil; } bool isRootMetaclass() { return ISA() == (Class)this; } + + // If this class does not have a name already, we can ask Swift to construct one for us. + const char *installMangledNameForLazilyNamedClass(); + + // Get the class's mangled name, or NULL if the class has a lazy + // name that hasn't been created yet. + const char *nonlazyMangledName() const { + return bits.safe_ro()->getName(); + } const char *mangledName() { // fixme can't assert locks here - ASSERT(this); + ASSERT_THIS_NOT_NULL; - if (isRealized() || isFuture()) { - return data()->ro()->name; - } else { - return ((const class_ro_t *)data())->name; + const char *result = nonlazyMangledName(); + + if (!result) { + // This class lazily instantiates its name. Emplace and + // return it. 
+ result = installMangledNameForLazilyNamedClass(); } + + return result; } const char *demangledName(bool needsLock); @@ -1765,7 +2139,7 @@ struct objc_class : objc_object { return word_align(unalignedInstanceSize()); } - size_t instanceSize(size_t extraBytes) const { + inline size_t instanceSize(size_t extraBytes) const { if (fastpath(cache.hasFastInstanceSize(extraBytes))) { return cache.fastInstanceSize(extraBytes); } diff --git a/runtime/objc-runtime-new.mm b/runtime/objc-runtime-new.mm index 1eabd5c..df3f9fa 100644 --- a/runtime/objc-runtime-new.mm +++ b/runtime/objc-runtime-new.mm @@ -32,7 +32,6 @@ #include "objc-private.h" #include "objc-runtime-new.h" #include "objc-file.h" -#include "objc-cache.h" #include "objc-zalloc.h" #include #include @@ -48,7 +47,7 @@ static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth); static method_t *search_method_list(const method_list_t *mlist, SEL sel); template static bool method_lists_contains_any(T *mlists, T *end, SEL sels[], size_t selcount); -static void flushCaches(Class cls); +static void flushCaches(Class cls, const char *func, bool (^predicate)(Class c)); static void initializeTaggedPointerObfuscator(void); #if SUPPORT_FIXUP static void fixupMessageRef(message_ref_t *msg); @@ -151,7 +150,19 @@ uintptr_t objc_indexed_classes_count = 0; asm("\n .globl _objc_absolute_packed_isa_class_mask" \ "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK)); -const uintptr_t objc_debug_isa_class_mask = ISA_MASK; +// a better definition is +// (uintptr_t)ptrauth_strip((void *)ISA_MASK, ISA_SIGNING_KEY) +// however we know that PAC uses bits outside of MACH_VM_MAX_ADDRESS +// so approximate the definition here to be constant +template +static constexpr T coveringMask(T n) { + for (T mask = 0; mask != ~T{0}; mask = (mask << 1) | 1) { + if ((n & mask) == n) return mask; + } + return ~T{0}; +} +const uintptr_t objc_debug_isa_class_mask = ISA_MASK & coveringMask(MACH_VM_MAX_ADDRESS - 1); + const uintptr_t 
objc_debug_isa_magic_mask = ISA_MAGIC_MASK; const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE; @@ -267,15 +278,9 @@ objc_method_description *method_t::getSmallDescription() const { /* Low two bits of mlist->entsize is used as the fixed-up marker. - PREOPTIMIZED VERSION: Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted). (Protocol method lists are not sorted because of their extra parallel data) Runtime fixed-up method lists get 3. - UN-PREOPTIMIZED VERSION: - Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted) - Shared cache's sorting and uniquing are not trusted, but do affect the - location of the selector name string. - Runtime fixed-up method lists get 2. High two bits of protocol->flags is used as the fixed-up marker. PREOPTIMIZED VERSION: @@ -287,18 +292,14 @@ objc_method_description *method_t::getSmallDescription() const { Runtime fixed-up protocols get 3<<30. */ -static uint32_t fixed_up_method_list = 3; -static uint32_t uniqued_method_list = 1; +static const uint32_t fixed_up_method_list = 3; +static const uint32_t uniqued_method_list = 1; static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1; static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL; void disableSharedCacheOptimizations(void) { - fixed_up_method_list = 2; - // It is safe to set uniqued method lists to 0 as we'll never call it unless - // the method list was already in need of being fixed up - uniqued_method_list = 0; fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2; // Its safe to just set canonical protocol to 0 as we'll never call // clearIsCanonical() unless isCanonical() returned true, which can't happen @@ -437,8 +438,7 @@ void *object_getIndexedIvars(id obj) { uint8_t *base = (uint8_t *)obj; - if (!obj) return nil; - if (obj->isTaggedPointer()) return nil; + if (obj->isTaggedPointerOrNil()) return nil; if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize(); @@ -682,7 +682,7 @@ 
foreach_realized_class_and_subclass_2(Class top, unsigned &count, cls = cls->data()->firstSubclass; } else { while (!cls->data()->nextSiblingClass && cls != top) { - cls = cls->superclass; + cls = cls->getSuperclass(); if (--count == 0) { _objc_fatal("Memory corruption in class list."); } @@ -852,20 +852,20 @@ class Mixin { static void scanAddedClassImpl(Class cls, bool isMeta) { - Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject()); bool setCustom = NO, inherited = NO; if (isNSObjectSwizzled(isMeta)) { setCustom = YES; - } else if (cls == NSOClass) { - // NSObject is default but we need to check categories + } else if (Traits::knownClassHasDefaultImpl(cls, isMeta)) { + // This class is known to have the default implementations, + // but we need to check categories. auto &methods = as_objc_class(cls)->data()->methods(); setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(), methods.endCategoryMethodLists(cls)); - } else if (!isMeta && !as_objc_class(cls)->superclass) { + } else if (!isMeta && !as_objc_class(cls)->getSuperclass()) { // Custom Root class setCustom = YES; - } else if (Traits::isCustom(as_objc_class(cls)->superclass)) { + } else if (Traits::isCustom(as_objc_class(cls)->getSuperclass())) { // Superclass is custom, therefore we are too. setCustom = YES; inherited = YES; @@ -883,6 +883,14 @@ class Mixin { } public: + static bool knownClassHasDefaultImpl(Class cls, bool isMeta) { + // Typically only NSObject has default implementations. + // Allow this to be extended by overriding (to allow + // SwiftObject, for example). + Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject()); + return cls == NSOClass; + } + // Scan a class that is about to be marked Initialized for particular // bundles of selectors, and mark the class and its children // accordingly. 
@@ -1047,6 +1055,16 @@ struct RRScanner : scanner::Mixin { + static bool knownClassHasDefaultImpl(Class cls, bool isMeta) { + if (scanner::Mixin::knownClassHasDefaultImpl(cls, isMeta)) + return true; + if ((cls->isRootClass() || cls->isRootMetaclass()) + && strcmp(cls->mangledName(), "_TtCs12_SwiftObject") == 0) + return true; + + return false; + } + static bool isCustom(Class cls) { return cls->hasCustomCore(); } @@ -1171,7 +1189,7 @@ public: if (slowpath(PrintConnecting)) { _objc_inform("CLASS: found category %c%s(%s)", - cls->isMetaClass() ? '+' : '-', + cls->isMetaClassMaybeUnrealized() ? '+' : '-', cls->nameForLogging(), lc.cat->name); } @@ -1251,7 +1269,7 @@ fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort) // Unique selectors in list. for (auto& meth : *mlist) { const char *name = sel_cname(meth.name()); - meth.name() = sel_registerNameNoLock(name, bundleCopy); + meth.setName(sel_registerNameNoLock(name, bundleCopy)); } } @@ -1274,7 +1292,7 @@ fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort) static void prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount, - bool baseMethods, bool methodsFromBundle) + bool baseMethods, bool methodsFromBundle, const char *why) { runtimeLock.assertLocked(); @@ -1286,6 +1304,16 @@ prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount, // Therefore we need not handle any special cases here. 
if (baseMethods) { ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore()); + } else if (cls->cache.isConstantOptimizedCache()) { + cls->setDisallowPreoptCachesRecursively(why); + } else if (cls->allowsPreoptInlinedSels()) { +#if CONFIG_USE_PREOPT_CACHES + SEL *sels = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_START]; + SEL *sels_end = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_END]; + if (method_lists_contains_any(addedLists, addedLists + addedCount, sels, sels_end - sels)) { + cls->setDisallowPreoptInlinedSelsRecursively(why); + } +#endif } // Add method lists to array. @@ -1390,7 +1418,7 @@ attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cat method_list_t *mlist = entry.cat->methodsForMeta(isMeta); if (mlist) { if (mcount == ATTACH_BUFSIZ) { - prepareMethodLists(cls, mlists, mcount, NO, fromBundle); + prepareMethodLists(cls, mlists, mcount, NO, fromBundle, __func__); rwe->methods.attachLists(mlists, mcount); mcount = 0; } @@ -1419,9 +1447,16 @@ attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cat } if (mcount > 0) { - prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle); + prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, + NO, fromBundle, __func__); rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount); - if (flags & ATTACH_EXISTING) flushCaches(cls); + if (flags & ATTACH_EXISTING) { + flushCaches(cls, __func__, [](Class c){ + // constant caches have been dealt with in prepareMethodLists + // if the class still is constant here, it's fine to keep + return !c->cache.isConstantOptimizedCache(); + }); + } } rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount); @@ -1454,10 +1489,7 @@ static void methodizeClass(Class cls, Class previously) // Install methods and properties that the class implements itself. 
method_list_t *list = ro->baseMethods(); if (list) { - if (list->isSmallList() && !_dyld_is_memory_immutable(list, list->byteSize())) - _objc_fatal("CLASS: class '%s' %p small method list %p is not in immutable memory", - cls->nameForLogging(), cls, list); - prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls)); + prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls), nullptr); if (rwe) rwe->methods.attachLists(&list, 1); } @@ -1690,6 +1722,8 @@ static char *copySwiftV1MangledName(const char *string, bool isProtocol = false) // This is a misnomer: gdb_objc_realized_classes is actually a list of // named classes not in the dyld shared cache, whether realized or not. +// This list excludes lazily named classes, which have to be looked up +// using a getClass hook. NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h uintptr_t objc_debug_realized_class_generation_count; @@ -1820,7 +1854,7 @@ static void addFutureNamedClass(const char *name, Class cls) class_rw_t *rw = objc::zalloc(); class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1); - ro->name = strdupIfMutable(name); + ro->name.store(strdupIfMutable(name), std::memory_order_relaxed); rw->set_ro(ro); cls->setData(rw); cls->data()->flags = RO_FUTURE; @@ -2012,7 +2046,7 @@ static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst) // special case for root metaclass // where inst == inst->ISA() == metacls is possible if (metacls->ISA() == metacls) { - Class cls = metacls->superclass; + Class cls = metacls->getSuperclass(); ASSERT(cls->isRealized()); ASSERT(!cls->isMetaClass()); ASSERT(cls->ISA() == metacls); @@ -2030,7 +2064,7 @@ static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst) ASSERT(!cls->isMetaClassMaybeUnrealized()); return cls; } - cls = cls->superclass; + cls = cls->getSuperclass(); } #if DEBUG _objc_fatal("cls is not an instance of metacls"); @@ -2039,6 +2073,10 @@ static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst) #endif } 
+ // See if the metaclass has a pointer to its nonmetaclass. + if (Class cls = metacls->bits.safe_ro()->getNonMetaclass()) + return cls; + // try name lookup { Class cls = getClassExceptSomeSwift(metacls->mangledName()); @@ -2263,9 +2301,15 @@ static void addSubclass(Class supercls, Class subcls) objc::RRScanner::scanAddedSubClass(subcls, supercls); objc::CoreScanner::scanAddedSubClass(subcls, supercls); + if (!supercls->allowsPreoptCaches()) { + subcls->setDisallowPreoptCachesRecursively(__func__); + } else if (!supercls->allowsPreoptInlinedSels()) { + subcls->setDisallowPreoptInlinedSelsRecursively(__func__); + } + // Special case: instancesRequireRawIsa does not propagate // from root class to root metaclass - if (supercls->instancesRequireRawIsa() && supercls->superclass) { + if (supercls->instancesRequireRawIsa() && supercls->getSuperclass()) { subcls->setInstancesRequireRawIsaRecursively(true); } } @@ -2282,7 +2326,7 @@ static void removeSubclass(Class supercls, Class subcls) runtimeLock.assertLocked(); ASSERT(supercls->isRealized()); ASSERT(subcls->isRealized()); - ASSERT(subcls->superclass == supercls); + ASSERT(subcls->getSuperclass() == supercls); objc_debug_realized_class_generation_count++; @@ -2329,15 +2373,23 @@ static NEVER_INLINE Protocol *getProtocol(const char *name) Protocol *result = (Protocol *)NXMapGet(protocols(), name); if (result) return result; + // Try table from dyld3 closure and dyld shared cache + result = getPreoptimizedProtocol(name); + if (result) return result; + // Try Swift-mangled equivalent of the given name. 
if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) { result = (Protocol *)NXMapGet(protocols(), swName); + + // Try table from dyld3 closure and dyld shared cache + if (!result) + result = getPreoptimizedProtocol(swName); + free(swName); - if (result) return result; + return result; } - // Try table from dyld3 closure and dyld shared cache - return getPreoptimizedProtocol(name); + return nullptr; } @@ -2526,10 +2578,22 @@ static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro class_ro_t *ro_w = make_ro_writeable(rw); ro = rw->ro(); moveIvars(ro_w, super_ro->instanceSize); - gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name); + gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->getName()); } } +static void validateAlreadyRealizedClass(Class cls) { + ASSERT(cls->isRealized()); +#if TARGET_OS_OSX + class_rw_t *rw = cls->data(); + size_t rwSize = malloc_size(rw); + + // Note: this check will need some adjustment if class_rw_t's + // size changes to not match the malloc bucket. + if (rwSize != sizeof(class_rw_t)) + _objc_fatal("realized class %p has corrupt data pointer %p", cls, rw); +#endif +} /*********************************************************************** * realizeClassWithoutSwift @@ -2548,7 +2612,10 @@ static Class realizeClassWithoutSwift(Class cls, Class previously) Class metacls; if (!cls) return nil; - if (cls->isRealized()) return cls; + if (cls->isRealized()) { + validateAlreadyRealizedClass(cls); + return cls; + } ASSERT(cls == remapClass(cls)); // fixme verify class is not in an un-dlopened part of the shared cache? 
@@ -2569,6 +2636,8 @@ static Class realizeClassWithoutSwift(Class cls, Class previously) cls->setData(rw); } + cls->cache.initializeToEmptyOrPreoptimizedInDisguise(); + #if FAST_CACHE_META if (isMeta) cls->cache.setBit(FAST_CACHE_META); #endif @@ -2592,7 +2661,7 @@ static Class realizeClassWithoutSwift(Class cls, Class previously) // or that Swift's initializers have already been called. // fixme that assumption will be wrong if we add support // for ObjC subclasses of Swift classes. - supercls = realizeClassWithoutSwift(remapClass(cls->superclass), nil); + supercls = realizeClassWithoutSwift(remapClass(cls->getSuperclass()), nil); metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil); #if SUPPORT_NONPOINTER_ISA @@ -2611,13 +2680,13 @@ static Class realizeClassWithoutSwift(Class cls, Class previously) // Non-pointer isa disabled by environment or app SDK version instancesRequireRawIsa = true; } - else if (!hackedDispatch && 0 == strcmp(ro->name, "OS_object")) + else if (!hackedDispatch && 0 == strcmp(ro->getName(), "OS_object")) { // hack for libdispatch et al - isa also acts as vtable pointer hackedDispatch = true; instancesRequireRawIsa = true; } - else if (supercls && supercls->superclass && + else if (supercls && supercls->getSuperclass() && supercls->instancesRequireRawIsa()) { // This is also propagated by addSubclass() @@ -2636,7 +2705,7 @@ static Class realizeClassWithoutSwift(Class cls, Class previously) #endif // Update superclass and metaclass in case of remapping - cls->superclass = supercls; + cls->setSuperclass(supercls); cls->initClassIsa(metacls); // Reconcile instance variable offsets / layout. 
@@ -2758,7 +2827,7 @@ static Class realizeSwiftClass(Class cls) ASSERT(remapClass(cls) == cls); ASSERT(cls->isSwiftStable_ButAllowLegacyForNow()); ASSERT(!cls->isMetaClassMaybeUnrealized()); - ASSERT(cls->superclass); + ASSERT(cls->getSuperclass()); runtimeLock.unlock(); #endif @@ -2850,13 +2919,13 @@ missingWeakSuperclass(Class cls) { ASSERT(!cls->isRealized()); - if (!cls->superclass) { + if (!cls->getSuperclass()) { // superclass nil. This is normal for root classes only. return (!(cls->data()->flags & RO_ROOT)); } else { // superclass not nil. Check if a higher superclass is missing. - Class supercls = remapClass(cls->superclass); - ASSERT(cls != cls->superclass); + Class supercls = remapClass(cls->getSuperclass()); + ASSERT(cls != cls->getSuperclass()); ASSERT(cls != supercls); if (!supercls) return YES; if (supercls->isRealized()) return NO; @@ -2975,6 +3044,10 @@ BOOL _class_isFutureClass(Class cls) return cls && cls->isFuture(); } +BOOL _class_isSwift(Class _Nullable cls) +{ + return cls && cls->isSwiftStable(); +} /*********************************************************************** * _objc_flush_caches @@ -2983,24 +3056,25 @@ BOOL _class_isFutureClass(Class cls) * and subclasses thereof. Nil flushes all classes.) 
* Locking: acquires runtimeLock **********************************************************************/ -static void flushCaches(Class cls) +static void flushCaches(Class cls, const char *func, bool (^predicate)(Class)) { runtimeLock.assertLocked(); #if CONFIG_USE_CACHE_LOCK mutex_locker_t lock(cacheUpdateLock); #endif + const auto handler = ^(Class c) { + if (predicate(c)) { + c->cache.eraseNolock(func); + } + + return true; + }; + if (cls) { - foreach_realized_class_and_subclass(cls, [](Class c){ - cache_erase_nolock(c); - return true; - }); - } - else { - foreach_realized_class_and_metaclass([](Class c){ - cache_erase_nolock(c); - return true; - }); + foreach_realized_class_and_subclass(cls, handler); + } else { + foreach_realized_class_and_metaclass(handler); } } @@ -3009,9 +3083,13 @@ void _objc_flush_caches(Class cls) { { mutex_locker_t lock(runtimeLock); - flushCaches(cls); - if (cls && cls->superclass && cls != cls->getIsa()) { - flushCaches(cls->getIsa()); + flushCaches(cls, __func__, [](Class c){ + return !c->cache.isConstantOptimizedCache(); + }); + if (cls && !cls->isMetaClass() && !cls->isRootClass()) { + flushCaches(cls->ISA(), __func__, [](Class c){ + return !c->cache.isConstantOptimizedCache(); + }); } else { // cls is a root class or root metaclass. Its metaclass is itself // or a subclass so the metaclass caches were already flushed. @@ -3025,7 +3103,7 @@ void _objc_flush_caches(Class cls) #else mutex_locker_t lock(runtimeLock); #endif - cache_collect(true); + cache_t::collectNolock(true); } } @@ -3246,7 +3324,7 @@ bool mustReadClasses(header_info *hi, bool hasDyldRoots) **********************************************************************/ Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized) { - const char *mangledName = cls->mangledName(); + const char *mangledName = cls->nonlazyMangledName(); if (missingWeakSuperclass(cls)) { // No superclass (probably weak-linked). 
@@ -3257,45 +3335,60 @@ Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized) cls->nameForLogging()); } addRemappedClass(cls, nil); - cls->superclass = nil; + cls->setSuperclass(nil); return nil; } cls->fixupBackwardDeployingStableSwift(); Class replacing = nil; - if (Class newCls = popFutureNamedClass(mangledName)) { - // This name was previously allocated as a future class. - // Copy objc_class to future class's struct. - // Preserve future's rw data block. - - if (newCls->isAnySwift()) { - _objc_fatal("Can't complete future class request for '%s' " - "because the real class is too big.", - cls->nameForLogging()); + if (mangledName != nullptr) { + if (Class newCls = popFutureNamedClass(mangledName)) { + // This name was previously allocated as a future class. + // Copy objc_class to future class's struct. + // Preserve future's rw data block. + + if (newCls->isAnySwift()) { + _objc_fatal("Can't complete future class request for '%s' " + "because the real class is too big.", + cls->nameForLogging()); + } + + class_rw_t *rw = newCls->data(); + const class_ro_t *old_ro = rw->ro(); + memcpy(newCls, cls, sizeof(objc_class)); + + // Manually set address-discriminated ptrauthed fields + // so that newCls gets the correct signatures. 
+ newCls->setSuperclass(cls->getSuperclass()); + newCls->initIsa(cls->getIsa()); + + rw->set_ro((class_ro_t *)newCls->data()); + newCls->setData(rw); + freeIfMutable((char *)old_ro->getName()); + free((void *)old_ro); + + addRemappedClass(cls, newCls); + + replacing = cls; + cls = newCls; } - - class_rw_t *rw = newCls->data(); - const class_ro_t *old_ro = rw->ro(); - memcpy(newCls, cls, sizeof(objc_class)); - rw->set_ro((class_ro_t *)newCls->data()); - newCls->setData(rw); - freeIfMutable((char *)old_ro->name); - free((void *)old_ro); - - addRemappedClass(cls, newCls); - - replacing = cls; - cls = newCls; } if (headerIsPreoptimized && !replacing) { // class list built in shared cache // fixme strict assert doesn't work because of duplicates // ASSERT(cls == getClass(name)); - ASSERT(getClassExceptSomeSwift(mangledName)); + ASSERT(mangledName == nullptr || getClassExceptSomeSwift(mangledName)); } else { - addNamedClass(cls, mangledName, replacing); + if (mangledName) { //some Swift generic classes can lazily generate their names + addNamedClass(cls, mangledName, replacing); + } else { + Class meta = cls->ISA(); + const class_ro_t *metaRO = meta->bits.safe_ro(); + ASSERT(metaRO->getNonMetaclass() && "Metaclass with lazy name must have a pointer to the corresponding nonmetaclass."); + ASSERT(metaRO->getNonMetaclass() == cls && "Metaclass nonmetaclass pointer must equal the original class."); + } addClassTableEntry(cls); } @@ -3384,9 +3477,8 @@ readProtocol(protocol_t *newproto, Class protocol_class, } } } - else if (newproto->size >= sizeof(protocol_t)) { - // New protocol from an un-preoptimized image - // with sufficient storage. Fix it up in place. + else { + // New protocol from an un-preoptimized image. Fix it up in place. 
// fixme duplicate protocols from unloadable bundle newproto->initIsa(protocol_class); // fixme pinned insertFn(protocol_map, newproto->mangledName, newproto); @@ -3395,26 +3487,6 @@ readProtocol(protocol_t *newproto, Class protocol_class, newproto, newproto->nameForLogging()); } } - else { - // New protocol from an un-preoptimized image - // with insufficient storage. Reallocate it. - // fixme duplicate protocols from unloadable bundle - size_t size = max(sizeof(protocol_t), (size_t)newproto->size); - protocol_t *installedproto = (protocol_t *)calloc(size, 1); - memcpy(installedproto, newproto, newproto->size); - installedproto->size = (typeof(installedproto->size))size; - - installedproto->initIsa(protocol_class); // fixme pinned - insertFn(protocol_map, installedproto->mangledName, installedproto); - if (PrintProtocols) { - _objc_inform("PROTOCOLS: protocol at %p is %s ", - installedproto, installedproto->nameForLogging()); - _objc_inform("PROTOCOLS: protocol at %p is %s " - "(reallocated to %p)", - newproto, installedproto->nameForLogging(), - installedproto); - } - } } /*********************************************************************** @@ -3472,12 +3544,11 @@ void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int un # if TARGET_OS_OSX // Disable non-pointer isa if the app is too old // (linked before OS X 10.11) - if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_11) { + if (!dyld_program_sdk_at_least(dyld_platform_version_macOS_10_11)) { DisableNonpointerIsa = true; if (PrintRawIsa) { _objc_inform("RAW ISA: disabling non-pointer isa because " - "the app is too old (SDK version " SDK_FORMAT ")", - FORMAT_SDK(dyld_get_program_sdk_version())); + "the app is too old."); } } @@ -3755,13 +3826,13 @@ void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int un } const method_list_t *mlist; - if ((mlist = ((class_ro_t *)cls->data())->baseMethods())) { + if ((mlist = cls->bits.safe_ro()->baseMethods())) { 
PreoptTotalMethodLists++; if (mlist->isFixedUp()) { PreoptOptimizedMethodLists++; } } - if ((mlist=((class_ro_t *)cls->ISA()->data())->baseMethods())) { + if ((mlist = cls->ISA()->bits.safe_ro()->baseMethods())) { PreoptTotalMethodLists++; if (mlist->isFixedUp()) { PreoptOptimizedMethodLists++; @@ -3805,7 +3876,7 @@ static void schedule_class_load(Class cls) if (cls->data()->flags & RW_LOADED) return; // Ensure superclass-first ordering - schedule_class_load(cls->superclass); + schedule_class_load(cls->getSuperclass()); add_class_to_loadable_list(cls); cls->setInfo(RW_LOADED); @@ -3990,13 +4061,17 @@ _method_setImplementation(Class cls, method_t *m, IMP imp) if (!imp) return nil; IMP old = m->imp(false); + SEL sel = m->name(); + m->setImp(imp); // Cache updates are slow if cls is nil (i.e. unknown) // RR/AWZ updates are slow if cls is nil (i.e. unknown) // fixme build list of classes whose Methods are known externally? - flushCaches(cls); + flushCaches(cls, __func__, [sel, old](Class c){ + return c->cache.shouldFlush(sel, old); + }); adjustCustomFlagsForMethodChange(cls, m); @@ -4012,6 +4087,12 @@ method_setImplementation(Method m, IMP imp) return _method_setImplementation(Nil, m, imp); } +extern void _method_setImplementationRawUnsafe(Method m, IMP imp) +{ + mutex_locker_t lock(runtimeLock); + m->setImp(imp); +} + void method_exchangeImplementations(Method m1, Method m2) { @@ -4019,16 +4100,22 @@ void method_exchangeImplementations(Method m1, Method m2) mutex_locker_t lock(runtimeLock); - IMP m1_imp = m1->imp(false); - m1->setImp(m2->imp(false)); - m2->setImp(m1_imp); + IMP imp1 = m1->imp(false); + IMP imp2 = m2->imp(false); + SEL sel1 = m1->name(); + SEL sel2 = m2->name(); + + m1->setImp(imp2); + m2->setImp(imp1); // RR/AWZ updates are slow because class is unknown // Cache updates are slow because class is unknown // fixme build list of classes whose Methods are known externally? 
- flushCaches(nil); + flushCaches(nil, __func__, [sel1, sel2, imp1, imp2](Class c){ + return c->cache.shouldFlush(sel1, imp1) || c->cache.shouldFlush(sel2, imp2); + }); adjustCustomFlagsForMethodChange(nil, m1); adjustCustomFlagsForMethodChange(nil, m2); @@ -4395,7 +4482,8 @@ _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel, const char * protocol_t::demangledName() { - ASSERT(hasDemangledNameField()); + if (!hasDemangledNameField()) + return mangledName; if (! _demangledName) { char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/); @@ -4981,24 +5069,6 @@ objc_copyRealizedClassList_nolock(unsigned int *outCount) return result; } -static void -class_getImpCache_nolock(Class cls, cache_t &cache, objc_imp_cache_entry *buffer, int len) -{ - bucket_t *buckets = cache.buckets(); - - uintptr_t count = cache.capacity(); - uintptr_t index; - int wpos = 0; - - for (index = 0; index < count && wpos < len; index += 1) { - if (buckets[index].sel()) { - buffer[wpos].imp = buckets[index].imp(cls); - buffer[wpos].sel = buckets[index].sel(); - wpos++; - } - } -} - /*********************************************************************** * objc_getClassList * Returns pointers to all classes. 
@@ -5078,7 +5148,7 @@ class_copyImpCache(Class cls, int *outCount) if (count) { buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry)); - class_getImpCache_nolock(cls, cache, buffer, count); + cache.copyCacheNolock(buffer, count); } if (outCount) *outCount = count; @@ -5524,6 +5594,32 @@ copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount) return names; } +Class * +copyClassesForImage_nolock(header_info *hi, unsigned int *outCount) +{ + runtimeLock.assertLocked(); + ASSERT(hi); + + size_t count; + classref_t const *classlist = _getObjc2ClassList(hi, &count); + Class *classes = (Class *) + malloc((count+1) * sizeof(Class)); + + size_t shift = 0; + for (size_t i = 0; i < count; i++) { + Class cls = remapClass(classlist[i]); + if (cls) { + classes[i-shift] = cls; + } else { + shift++; // ignored weak-linked class + } + } + count -= shift; + classes[count] = nil; + + if (outCount) *outCount = (unsigned int)count; + return classes; +} /*********************************************************************** @@ -5563,6 +5659,29 @@ objc_copyClassNamesForImage(const char *image, unsigned int *outCount) return copyClassNamesForImage_nolock(hi, outCount); } +Class * +objc_copyClassesForImage(const char *image, unsigned int *outCount) +{ + if (!image) { + if (outCount) *outCount = 0; + return nil; + } + + mutex_locker_t lock(runtimeLock); + + // Find the image. + header_info *hi; + for (hi = FirstHeader; hi != nil; hi = hi->getNext()) { + if (0 == strcmp(image, hi->fname())) break; + } + + if (!hi) { + if (outCount) *outCount = 0; + return nil; + } + + return copyClassesForImage_nolock(hi, outCount); +} /*********************************************************************** * objc_copyClassNamesForImageHeader @@ -5631,7 +5750,7 @@ objc_class::nameForLogging() // Handle the easy case directly. 
if (isRealized() || isFuture()) { if (!isAnySwift()) { - return data()->ro()->name; + return data()->ro()->getName(); } auto rwe = data()->ext(); if (rwe && rwe->demangledName) { @@ -5641,11 +5760,15 @@ objc_class::nameForLogging() char *result; - const char *name = mangledName(); - char *de = copySwiftV1DemangledName(name); - if (de) result = de; - else result = strdup(name); - + if (isStubClass()) { + asprintf(&result, "<stub class %p>", this); + } else if (const char *name = nonlazyMangledName()) { + char *de = copySwiftV1DemangledName(name); + if (de) result = de; + else result = strdup(name); + } else { + asprintf(&result, "<lazily named class %p>", this); + } saveTemporaryString(result); return result; } @@ -5669,8 +5792,8 @@ objc_class::demangledName(bool needsLock) if (isRealized() || isFuture()) { // Swift metaclasses don't have the is-Swift bit. // We can't take this shortcut for them. - if (!isMetaClass() && !isAnySwift()) { - return data()->ro()->name; + if (isFuture() || (!isMetaClass() && !isAnySwift())) { + return data()->ro()->getName(); } auto rwe = data()->ext(); if (rwe && rwe->demangledName) { @@ -5798,8 +5921,9 @@ class_setVersion(Class cls, int version) /*********************************************************************** * search_method_list_inline **********************************************************************/ +template<class getNameFunc> ALWAYS_INLINE static method_t * -findMethodInSortedMethodList(SEL key, const method_list_t *list) +findMethodInSortedMethodList(SEL key, const method_list_t *list, const getNameFunc &getName) { ASSERT(list); @@ -5813,13 +5937,13 @@ findMethodInSortedMethodList(SEL key, const method_list_t *list) for (count = list->count; count != 0; count >>= 1) { probe = base + (count >> 1); - uintptr_t probeValue = (uintptr_t)probe->name(); + uintptr_t probeValue = (uintptr_t)getName(probe); if (keyValue == probeValue) { // `probe` is a match. // Rewind looking for the *first* occurrence of this value. // This is required for correct category overrides. 
- while (probe > first && keyValue == (uintptr_t)(probe - 1)->name()) { + while (probe > first && keyValue == (uintptr_t)getName((probe - 1))) { probe--; } return &*probe; @@ -5834,6 +5958,44 @@ findMethodInSortedMethodList(SEL key, const method_list_t *list) return nil; } +ALWAYS_INLINE static method_t * +findMethodInSortedMethodList(SEL key, const method_list_t *list) +{ + if (list->isSmallList()) { + if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) { + return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); }); + } else { + return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); }); + } + } else { + return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.big().name; }); + } +} + +template<class getNameFunc> +ALWAYS_INLINE static method_t * +findMethodInUnsortedMethodList(SEL sel, const method_list_t *list, const getNameFunc &getName) +{ + for (auto& meth : *list) { + if (getName(meth) == sel) return &meth; + } + return nil; +} + +ALWAYS_INLINE static method_t * +findMethodInUnsortedMethodList(SEL key, const method_list_t *list) +{ + if (list->isSmallList()) { + if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) { + return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); }); + } else { + return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); }); + } + } else { + return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.big().name; }); + } +} + ALWAYS_INLINE static method_t * search_method_list_inline(const method_list_t *mlist, SEL sel) { @@ -5844,9 +6006,8 @@ search_method_list_inline(const method_list_t *mlist, SEL sel) return findMethodInSortedMethodList(sel, mlist); } else { // Linear search of unsorted method list - for (auto& meth : *mlist) { - if (meth.name() == sel) return &meth; - } + if (auto 
*m = findMethodInUnsortedMethodList(sel, mlist)) + return m; } #if DEBUG @@ -5889,11 +6050,9 @@ method_lists_contains_any(T *mlists, T *end, } } } else { - for (auto& meth : *mlist) { - for (size_t i = 0; i < selcount; i++) { - if (meth.name() == sels[i]) { - return true; - } + for (size_t i = 0; i < selcount; i++) { + if (findMethodInUnsortedMethodList(sels[i], mlist)) { + return true; } } } @@ -5901,6 +6060,7 @@ method_lists_contains_any(T *mlists, T *end, return false; } + /*********************************************************************** * getMethodNoSuper_nolock * fixme @@ -5951,7 +6111,7 @@ getMethod_nolock(Class cls, SEL sel) ASSERT(cls->isRealized()); while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) { - cls = cls->superclass; + cls = cls->getSuperclass(); } return m; @@ -6006,7 +6166,7 @@ static void resolveClassMethod(id inst, SEL sel, Class cls) ASSERT(cls->isRealized()); ASSERT(cls->isMetaClass()); - if (!lookUpImpOrNil(inst, @selector(resolveClassMethod:), cls)) { + if (!lookUpImpOrNilTryCache(inst, @selector(resolveClassMethod:), cls)) { // Resolver not implemented. return; } @@ -6026,7 +6186,7 @@ static void resolveClassMethod(id inst, SEL sel, Class cls) // Cache the result (good or bad) so the resolver doesn't fire next time. // +resolveClassMethod adds to self->ISA() a.k.a. cls - IMP imp = lookUpImpOrNil(inst, sel, cls); + IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); if (resolved && PrintResolving) { if (imp) { @@ -6059,7 +6219,7 @@ static void resolveInstanceMethod(id inst, SEL sel, Class cls) ASSERT(cls->isRealized()); SEL resolve_sel = @selector(resolveInstanceMethod:); - if (!lookUpImpOrNil(cls, resolve_sel, cls->ISA())) { + if (!lookUpImpOrNilTryCache(cls, resolve_sel, cls->ISA(/*authenticated*/true))) { // Resolver not implemented. return; } @@ -6069,7 +6229,7 @@ static void resolveInstanceMethod(id inst, SEL sel, Class cls) // Cache the result (good or bad) so the resolver doesn't fire next time. 
// +resolveInstanceMethod adds to self a.k.a. cls - IMP imp = lookUpImpOrNil(inst, sel, cls); + IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); if (resolved && PrintResolving) { if (imp) { @@ -6113,14 +6273,14 @@ resolveMethod_locked(id inst, SEL sel, Class cls, int behavior) // try [nonMetaClass resolveClassMethod:sel] // and [cls resolveInstanceMethod:sel] resolveClassMethod(inst, sel, cls); - if (!lookUpImpOrNil(inst, sel, cls)) { + if (!lookUpImpOrNilTryCache(inst, sel, cls)) { resolveInstanceMethod(inst, sel, cls); } } // chances are that calling the resolver have populated the cache // so attempt using it - return lookUpImpOrForward(inst, sel, cls, behavior | LOOKUP_CACHE); + return lookUpImpOrForwardTryCache(inst, sel, cls, behavior); } @@ -6142,22 +6302,94 @@ log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer) if (!cacheIt) return; } #endif - cache_fill(cls, sel, imp, receiver); + cls->cache.insert(sel, imp, receiver); } /*********************************************************************** -* lookUpImpOrForward. -* The standard IMP lookup. +* realizeAndInitializeIfNeeded_locked +* Realize the given class if not already realized, and initialize it if +* not already initialized. +* inst is an instance of cls or a subclass, or nil if none is known. +* cls is the class to initialize and realize. +* initializer is true to initialize the class, false to skip initialization. 
+**********************************************************************/ +static Class +realizeAndInitializeIfNeeded_locked(id inst, Class cls, bool initialize) +{ + runtimeLock.assertLocked(); + if (slowpath(!cls->isRealized())) { + cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock); + // runtimeLock may have been dropped but is now locked again + } + + if (slowpath(initialize && !cls->isInitialized())) { + cls = initializeAndLeaveLocked(cls, inst, runtimeLock); + // runtimeLock may have been dropped but is now locked again + + // If sel == initialize, class_initialize will send +initialize and + // then the messenger will send +initialize again after this + // procedure finishes. Of course, if this is not being called + // from the messenger then it won't happen. 2778172 + } + return cls; +} + +/*********************************************************************** +* lookUpImpOrForward / lookUpImpOrForwardTryCache / lookUpImpOrNilTryCache +* The standard IMP lookup. +* +* The TryCache variant attempts a fast-path lookup in the IMP Cache. +* Most callers should use lookUpImpOrForwardTryCache with LOOKUP_INITIALIZE +* * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails) -* Without LOOKUP_CACHE: skips optimistic unlocked lookup (but uses cache elsewhere) -* Most callers should use LOOKUP_INITIALIZE and LOOKUP_CACHE -* inst is an instance of cls or a subclass thereof, or nil if none is known. +* With LOOKUP_NIL: returns nil on negative cache hits +* +* inst is an instance of cls or a subclass thereof, or nil if none is known. * If cls is an un-initialized metaclass then a non-nil inst is faster. * May return _objc_msgForward_impcache. IMPs destined for external use * must be converted to _objc_msgForward or _objc_msgForward_stret. * If you don't want forwarding at all, use LOOKUP_NIL. 
**********************************************************************/ +ALWAYS_INLINE +static IMP _lookUpImpTryCache(id inst, SEL sel, Class cls, int behavior) +{ + runtimeLock.assertUnlocked(); + + if (slowpath(!cls->isInitialized())) { + // see comment in lookUpImpOrForward + return lookUpImpOrForward(inst, sel, cls, behavior); + } + + IMP imp = cache_getImp(cls, sel); + if (imp != NULL) goto done; +#if CONFIG_USE_PREOPT_CACHES + if (fastpath(cls->cache.isConstantOptimizedCache(/* strict */true))) { + imp = cache_getImp(cls->cache.preoptFallbackClass(), sel); + } +#endif + if (slowpath(imp == NULL)) { + return lookUpImpOrForward(inst, sel, cls, behavior); + } + +done: + if ((behavior & LOOKUP_NIL) && imp == (IMP)_objc_msgForward_impcache) { + return nil; + } + return imp; +} + +IMP lookUpImpOrForwardTryCache(id inst, SEL sel, Class cls, int behavior) +{ + return _lookUpImpTryCache(inst, sel, cls, behavior); +} + +IMP lookUpImpOrNilTryCache(id inst, SEL sel, Class cls, int behavior) +{ + return _lookUpImpTryCache(inst, sel, cls, behavior | LOOKUP_NIL); +} + +NEVER_INLINE IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) { const IMP forward_imp = (IMP)_objc_msgForward_impcache; @@ -6166,10 +6398,21 @@ IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) runtimeLock.assertUnlocked(); - // Optimistic cache lookup - if (fastpath(behavior & LOOKUP_CACHE)) { - imp = cache_getImp(cls, sel); - if (imp) goto done_nolock; + if (slowpath(!cls->isInitialized())) { + // The first message sent to a class is often +new or +alloc, or +self + // which goes through objc_opt_* or various optimized entry points. + // + // However, the class isn't realized/initialized yet at this point, + // and the optimized entry points fall down through objc_msgSend, + // which ends up here. + // + // We really want to avoid caching these, as it can cause IMP caches + // to be made with a single entry forever. 
+ // + // Note that this check is racy as several threads might try to + // message a given class for the first time at the same time, + // in which case we might cache anyway. + behavior |= LOOKUP_NOCACHE; } // runtimeLock is held during isRealized and isInitialized checking @@ -6191,25 +6434,12 @@ IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair. checkIsKnownClass(cls); - if (slowpath(!cls->isRealized())) { - cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock); - // runtimeLock may have been dropped but is now locked again - } - - if (slowpath((behavior & LOOKUP_INITIALIZE) && !cls->isInitialized())) { - cls = initializeAndLeaveLocked(cls, inst, runtimeLock); - // runtimeLock may have been dropped but is now locked again - - // If sel == initialize, class_initialize will send +initialize and - // then the messenger will send +initialize again after this - // procedure finishes. Of course, if this is not being called - // from the messenger then it won't happen. 2778172 - } - + cls = realizeAndInitializeIfNeeded_locked(inst, cls, behavior & LOOKUP_INITIALIZE); + // runtimeLock may have been dropped but is now locked again runtimeLock.assertLocked(); curClass = cls; - // The code used to lookpu the class's cache again right after + // The code used to lookup the class's cache again right after // we take the lock but for the vast majority of the cases // evidence shows this is a miss most of the time, hence a time loss. // @@ -6217,18 +6447,26 @@ IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) // kind of cache lookup is class_getInstanceMethod(). for (unsigned attempts = unreasonableClassCount();;) { - // curClass method list. 
- Method meth = getMethodNoSuper_nolock(curClass, sel); - if (meth) { - imp = meth->imp(false); - goto done; - } + if (curClass->cache.isConstantOptimizedCache(/* strict */true)) { +#if CONFIG_USE_PREOPT_CACHES + imp = cache_getImp(curClass, sel); + if (imp) goto done_unlock; + curClass = curClass->cache.preoptFallbackClass(); +#endif + } else { + // curClass method list. + Method meth = getMethodNoSuper_nolock(curClass, sel); + if (meth) { + imp = meth->imp(false); + goto done; + } - if (slowpath((curClass = curClass->superclass) == nil)) { - // No implementation found, and method resolver didn't help. - // Use forwarding. - imp = forward_imp; - break; + if (slowpath((curClass = curClass->getSuperclass()) == nil)) { + // No implementation found, and method resolver didn't help. + // Use forwarding. + imp = forward_imp; + break; + } } // Halt if there is a cycle in the superclass chain. @@ -6258,9 +6496,16 @@ IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) } done: - log_and_fill_cache(cls, imp, sel, inst, curClass); + if (fastpath((behavior & LOOKUP_NOCACHE) == 0)) { +#if CONFIG_USE_PREOPT_CACHES + while (cls->cache.isConstantOptimizedCache(/* strict */true)) { + cls = cls->cache.preoptFallbackClass(); + } +#endif + log_and_fill_cache(cls, imp, sel, inst, curClass); + } + done_unlock: runtimeLock.unlock(); - done_nolock: if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) { return nil; } @@ -6274,7 +6519,6 @@ IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) **********************************************************************/ IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel) { - Method meth; IMP imp; // fixme this is incomplete - no resolver, +initialize - @@ -6282,24 +6526,35 @@ IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel) ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct); // Search cache first. 
- imp = cache_getImp(cls, sel); - if (imp) return imp; + // + // If the cache used for the lookup is preoptimized, + // we ask for `_objc_msgForward_impcache` to be returned on cache misses, + // so that there's no TOCTOU race between using `isConstantOptimizedCache` + // and calling cache_getImp() when not under the runtime lock. + // + // For dynamic caches, a miss will return `nil` + imp = cache_getImp(cls, sel, _objc_msgForward_impcache); - // Cache miss. Search method list. + if (slowpath(imp == nil)) { + // Cache miss. Search method list. - mutex_locker_t lock(runtimeLock); + mutex_locker_t lock(runtimeLock); - meth = getMethodNoSuper_nolock(cls, sel); + if (auto meth = getMethodNoSuper_nolock(cls, sel)) { + // Hit in method list. Cache it. + imp = meth->imp(false); + } else { + imp = _objc_msgForward_impcache; + } - if (meth) { - // Hit in method list. Cache it. - cache_fill(cls, sel, meth->imp(false), nil); - return meth->imp(false); - } else { - // Miss in method list. Cache objc_msgForward. 
- cache_fill(cls, sel, _objc_msgForward_impcache, nil); - return _objc_msgForward_impcache; + // Note, because we do not hold the runtime lock above + // isConstantOptimizedCache might flip, so we need to double check + if (!cls->cache.isConstantOptimizedCache(true /* strict */)) { + cls->cache.insert(sel, imp, nil); + } } + + return imp; } @@ -6318,7 +6573,7 @@ objc_property_t class_getProperty(Class cls, const char *name) ASSERT(cls->isRealized()); - for ( ; cls; cls = cls->superclass) { + for ( ; cls; cls = cls->getSuperclass()) { for (auto& prop : cls->data()->properties()) { if (0 == strcmp(name, prop.name)) { return (objc_property_t)&prop; @@ -6376,6 +6631,15 @@ objc_class::setInitialized() objc::RRScanner::scanInitializedClass(cls, metacls); objc::CoreScanner::scanInitializedClass(cls, metacls); +#if CONFIG_USE_PREOPT_CACHES + cls->cache.maybeConvertToPreoptimized(); + metacls->cache.maybeConvertToPreoptimized(); +#endif + + if (PrintInitializing) { + _objc_inform("INITIALIZE: thread %p: setInitialized(%s)", + objc_thread_self(), cls->nameForLogging()); + } // Update the +initialize flags. // Do this last. metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING); @@ -6414,6 +6678,59 @@ void objc_class::setInstancesRequireRawIsaRecursively(bool inherited) }); } +#if CONFIG_USE_PREOPT_CACHES +void objc_class::setDisallowPreoptCachesRecursively(const char *why) +{ + Class cls = (Class)this; + runtimeLock.assertLocked(); + + if (!allowsPreoptCaches()) return; + + foreach_realized_class_and_subclass(cls, [=](Class c){ + if (!c->allowsPreoptCaches()) { + return false; + } + + if (c->cache.isConstantOptimizedCache(/* strict */true)) { + c->cache.eraseNolock(why); + } else { + if (PrintCaches) { + _objc_inform("CACHES: %sclass %s: disallow preopt cache (from %s)", + isMetaClass() ? 
"meta" : "", + nameForLogging(), why); + } + c->setDisallowPreoptCaches(); + } + return true; + }); +} + +void objc_class::setDisallowPreoptInlinedSelsRecursively(const char *why) +{ + Class cls = (Class)this; + runtimeLock.assertLocked(); + + if (!allowsPreoptInlinedSels()) return; + + foreach_realized_class_and_subclass(cls, [=](Class c){ + if (!c->allowsPreoptInlinedSels()) { + return false; + } + + if (PrintCaches) { + _objc_inform("CACHES: %sclass %s: disallow sel-inlined preopt cache (from %s)", + isMetaClass() ? "meta" : "", + nameForLogging(), why); + } + + c->setDisallowPreoptInlinedSels(); + if (c->cache.isConstantOptimizedCacheWithInlinedSels()) { + c->cache.eraseNolock(why); + } + return true; + }); +} +#endif /*********************************************************************** * Choose a class index. @@ -6439,6 +6756,62 @@ void objc_class::chooseClassArrayIndex() #endif } +static const char *empty_lazyClassNamer(Class cls __unused) { + return nullptr; +} + +static ChainedHookFunction LazyClassNamerHook{empty_lazyClassNamer}; + +void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue, + _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue) { + LazyClassNamerHook.set(newValue, oldOutValue); +} + +const char * objc_class::installMangledNameForLazilyNamedClass() { + auto lazyClassNamer = LazyClassNamerHook.get(); + if (!*lazyClassNamer) { + _objc_fatal("Lazily named class %p with no lazy name handler registered", this); + } + + // If this is called on a metaclass, extract the original class + // and make it do the installation instead. It will install + // the metaclass's name too. 
+ if (isMetaClass()) { + Class nonMeta = bits.safe_ro()->getNonMetaclass(); + return nonMeta->installMangledNameForLazilyNamedClass(); + } + + Class cls = (Class)this; + Class metaclass = ISA(); + + const char *name = lazyClassNamer((Class)this); + if (!name) { + _objc_fatal("Lazily named class %p wasn't named by lazy name handler", this); + } + + // Emplace the name into the class_ro_t. If we lose the race, + // then we'll free our name and use whatever got placed there + // instead of our name. + const char *previously = NULL; + class_ro_t *ro = (class_ro_t *)cls->bits.safe_ro(); + bool wonRace = ro->name.compare_exchange_strong(previously, name, std::memory_order_release, std::memory_order_acquire); + if (!wonRace) { + free((void *)name); + name = previously; + } + + // Emplace whatever name won the race in the metaclass too. + class_ro_t *metaRO = (class_ro_t *)metaclass->bits.safe_ro(); + + // Write our pointer if the current value is NULL. There's no + // need to loop or check success, since the only way this can + // fail is if another thread succeeded in writing the exact + // same pointer. 
+ const char *expected = NULL; + metaRO->name.compare_exchange_strong(expected, name, std::memory_order_release, std::memory_order_acquire); + + return name; +} /*********************************************************************** * Update custom RR and AWZ when a method changes its IMP @@ -6461,7 +6834,7 @@ adjustCustomFlagsForMethodChange(Class cls, method_t *meth) const uint8_t * class_getIvarLayout(Class cls) { - if (cls) return cls->data()->ro()->ivarLayout; + if (cls) return cls->data()->ro()->getIvarLayout(); else return nil; } @@ -6494,6 +6867,8 @@ class_setIvarLayout(Class cls, const uint8_t *layout) { if (!cls) return; + ASSERT(!cls->isMetaClass()); + mutex_locker_t lock(runtimeLock); checkIsKnownClass(cls); @@ -6509,7 +6884,7 @@ class_setIvarLayout(Class cls, const uint8_t *layout) class_ro_t *ro_w = make_ro_writeable(cls->data()); - try_free(ro_w->ivarLayout); + try_free(ro_w->getIvarLayout()); ro_w->ivarLayout = ustrdupMaybeNil(layout); } @@ -6583,7 +6958,7 @@ Class _class_getClassForIvar(Class cls, Ivar ivar) { mutex_locker_t lock(runtimeLock); - for ( ; cls; cls = cls->superclass) { + for ( ; cls; cls = cls->getSuperclass()) { if (auto ivars = cls->data()->ro()->ivars) { if (ivars->containsIvar(ivar)) { return cls; @@ -6605,7 +6980,7 @@ _class_getVariable(Class cls, const char *name) { mutex_locker_t lock(runtimeLock); - for ( ; cls; cls = cls->superclass) { + for ( ; cls; cls = cls->getSuperclass()) { ivar_t *ivar = getIvar(cls, name); if (ivar) { return ivar; @@ -6644,6 +7019,29 @@ BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen) return NO; } +static void +addMethods_finish(Class cls, method_list_t *newlist) +{ + auto rwe = cls->data()->extAllocIfNeeded(); + + if (newlist->count > 1) { + method_t::SortBySELAddress sorter; + std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter); + } + + prepareMethodLists(cls, &newlist, 1, NO, NO, __func__); + rwe->methods.attachLists(&newlist, 1); + + // If the class being 
modified has a constant cache, + // then all children classes are flattened constant caches + // and need to be flushed as well. + flushCaches(cls, __func__, [](Class c){ + // constant caches have been dealt with in prepareMethodLists + // if the class still is constant here, it's fine to keep + return !c->cache.isConstantOptimizedCache(); + }); +} + /********************************************************************** * addMethod @@ -6671,8 +7069,6 @@ addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace) result = _method_setImplementation(cls, m, imp); } } else { - auto rwe = cls->data()->extAllocIfNeeded(); - // fixme optimize method_list_t *newlist; newlist = (method_list_t *)calloc(method_list_t::byteSize(method_t::bigSize, 1), 1); @@ -6684,10 +7080,7 @@ addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace) first.types = strdupIfMutable(types); first.imp = imp; - prepareMethodLists(cls, &newlist, 1, NO, NO); - rwe->methods.attachLists(&newlist, 1); - flushCaches(cls); - + addMethods_finish(cls, newlist); result = nil; } @@ -6750,17 +7143,9 @@ addMethods(Class cls, const SEL *names, const IMP *imps, const char **types, } if (newlist->count > 0) { - auto rwe = cls->data()->extAllocIfNeeded(); - // fixme resize newlist because it may have been over-allocated above. // Note that realloc() alone doesn't work due to ptrauth. - - method_t::SortBySELAddress sorter; - std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter); - - prepareMethodLists(cls, &newlist, 1, NO, NO); - rwe->methods.attachLists(&newlist, 1); - flushCaches(cls); + addMethods_finish(cls, newlist); } else { // Attaching the method list to the class consumes it. If we don't // do that, we have to free the memory ourselves. 
@@ -7096,7 +7481,7 @@ objc_duplicateClass(Class original, const char *name, duplicate = alloc_class_for_subclass(original, extraBytes); duplicate->initClassIsa(original->ISA()); - duplicate->superclass = original->superclass; + duplicate->setSuperclass(original->getSuperclass()); duplicate->cache.initializeToEmpty(); @@ -7128,8 +7513,8 @@ objc_duplicateClass(Class original, const char *name, duplicate->chooseClassArrayIndex(); - if (duplicate->superclass) { - addSubclass(duplicate->superclass, duplicate); + if (duplicate->getSuperclass()) { + addSubclass(duplicate->getSuperclass(), duplicate); // duplicate->isa == original->isa so don't addSubclass() for it } else { addRootClass(duplicate); @@ -7137,7 +7522,7 @@ objc_duplicateClass(Class original, const char *name, // Don't methodize class - construction above is correct - addNamedClass(duplicate, ro->name); + addNamedClass(duplicate, ro->getName()); addClassTableEntry(duplicate, /*addMeta=*/false); if (PrintConnecting) { @@ -7198,8 +7583,8 @@ static void objc_initializeClassPair_internal(Class superclass, const char *name meta->setInstanceSize(meta_ro_w->instanceStart); } - cls_ro_w->name = strdupIfMutable(name); - meta_ro_w->name = strdupIfMutable(name); + cls_ro_w->name.store(strdupIfMutable(name), std::memory_order_release); + meta_ro_w->name.store(strdupIfMutable(name), std::memory_order_release); cls_ro_w->ivarLayout = &UnsetLayout; cls_ro_w->weakIvarLayout = &UnsetLayout; @@ -7222,14 +7607,14 @@ static void objc_initializeClassPair_internal(Class superclass, const char *name if (superclass) { meta->initClassIsa(superclass->ISA()->ISA()); - cls->superclass = superclass; - meta->superclass = superclass->ISA(); + cls->setSuperclass(superclass); + meta->setSuperclass(superclass->ISA()); addSubclass(superclass, cls); addSubclass(superclass->ISA(), meta); } else { meta->initClassIsa(meta); - cls->superclass = Nil; - meta->superclass = cls; + cls->setSuperclass(Nil); + meta->setSuperclass(cls); addRootClass(cls); 
addSubclass(cls, meta); } @@ -7336,7 +7721,7 @@ void objc_registerClassPair(Class cls) (cls->ISA()->data()->flags & RW_CONSTRUCTED)) { _objc_inform("objc_registerClassPair: class '%s' was already " - "registered!", cls->data()->ro()->name); + "registered!", cls->data()->ro()->getName()); return; } @@ -7345,7 +7730,7 @@ void objc_registerClassPair(Class cls) { _objc_inform("objc_registerClassPair: class '%s' was not " "allocated with objc_allocateClassPair!", - cls->data()->ro()->name); + cls->data()->ro()->getName()); return; } @@ -7354,7 +7739,7 @@ void objc_registerClassPair(Class cls) cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING); // Add to named class table. - addNamedClass(cls, cls->data()->ro()->name); + addNamedClass(cls, cls->data()->ro()->getName()); } @@ -7377,7 +7762,7 @@ Class objc_readClassPair(Class bits, const struct objc_image_info *info) // Fail if the superclass isn't kosher. bool rootOK = bits->data()->flags & RO_ROOT; - if (!verifySuperclass(bits->superclass, rootOK)){ + if (!verifySuperclass(bits->getSuperclass(), rootOK)){ return nil; } @@ -7416,7 +7801,7 @@ static void detach_class(Class cls, bool isMeta) // superclass's subclass list if (cls->isRealized()) { - Class supercls = cls->superclass; + Class supercls = cls->getSuperclass(); if (supercls) { removeSubclass(supercls, cls); } else { @@ -7448,7 +7833,7 @@ static void free_class(Class cls) auto rwe = rw->ext(); auto ro = rw->ro(); - cache_delete(cls); + cls->cache.destroy(); if (rwe) { for (auto& meth : rwe->methods) { @@ -7477,9 +7862,9 @@ static void free_class(Class cls) rwe->protocols.tryFree(); } - try_free(ro->ivarLayout); + try_free(ro->getIvarLayout()); try_free(ro->weakIvarLayout); - try_free(ro->name); + try_free(ro->getName()); try_free(ro); objc::zfree(rwe); objc::zfree(rw); @@ -7500,25 +7885,25 @@ void objc_disposeClassPair(Class cls) // disposing still-unregistered class is OK! 
_objc_inform("objc_disposeClassPair: class '%s' was not " "allocated with objc_allocateClassPair!", - cls->data()->ro()->name); + cls->data()->ro()->getName()); return; } if (cls->isMetaClass()) { _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, " - "not a class!", cls->data()->ro()->name); + "not a class!", cls->data()->ro()->getName()); return; } // Shouldn't have any live subclasses. if (cls->data()->firstSubclass) { _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, " - "including '%s'!", cls->data()->ro()->name, + "including '%s'!", cls->data()->ro()->getName(), cls->data()->firstSubclass->nameForLogging()); } if (cls->ISA()->data()->firstSubclass) { _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, " - "including '%s'!", cls->data()->ro()->name, + "including '%s'!", cls->data()->ro()->getName(), cls->ISA()->data()->firstSubclass->nameForLogging()); } @@ -7661,12 +8046,11 @@ class_createInstances(Class cls, size_t extraBytes, static id _object_copyFromZone(id oldObj, size_t extraBytes, void *zone) { - if (!oldObj) return nil; - if (oldObj->isTaggedPointer()) return oldObj; + if (oldObj->isTaggedPointerOrNil()) return oldObj; // fixme this doesn't handle C++ ivars correctly (#4619414) - Class cls = oldObj->ISA(); + Class cls = oldObj->ISA(/*authenticated*/true); size_t size; id obj = _class_createInstanceFromZone(cls, extraBytes, zone, OBJECT_CONSTRUCT_NONE, false, &size); @@ -7741,7 +8125,7 @@ void *objc_destructInstance(id obj) // This order is important. 
if (cxx) object_cxxDestruct(obj); - if (assoc) _object_remove_assocations(obj); + if (assoc) _object_remove_assocations(obj, /*deallocating*/true); obj->clearDeallocating(); } @@ -7826,6 +8210,8 @@ unsigned objc_debug_taggedpointer_ext_payload_lshift = 0; unsigned objc_debug_taggedpointer_ext_payload_rshift = 0; Class objc_debug_taggedpointer_ext_classes[1] = { nil }; +uintptr_t objc_debug_constant_cfstring_tag_bits = 0; + static void disableTaggedPointers() { } @@ -7853,6 +8239,13 @@ unsigned objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LS unsigned objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT; // objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s +#if OBJC_SPLIT_TAGGED_POINTERS +uint8_t objc_debug_tag60_permutations[8] = { 0, 1, 2, 3, 4, 5, 6, 7 }; +uintptr_t objc_debug_constant_cfstring_tag_bits = _OBJC_TAG_EXT_MASK | ((uintptr_t)(OBJC_TAG_Constant_CFString - OBJC_TAG_First52BitPayload) << _OBJC_TAG_EXT_SLOT_SHIFT); +#else +uintptr_t objc_debug_constant_cfstring_tag_bits = 0; +#endif + static void disableTaggedPointers() { @@ -7875,15 +8268,21 @@ disableTaggedPointers() static Class * classSlotForBasicTagIndex(objc_tag_index_t tag) { +#if OBJC_SPLIT_TAGGED_POINTERS + uintptr_t obfuscatedTag = _objc_basicTagToObfuscatedTag(tag); + return &objc_tag_classes[obfuscatedTag]; +#else uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK); uintptr_t obfuscatedTag = tag ^ tagObfuscator; + // Array index in objc_tag_classes includes the tagged bit itself -#if SUPPORT_MSB_TAGGED_POINTERS +# if SUPPORT_MSB_TAGGED_POINTERS return &objc_tag_classes[0x8 | obfuscatedTag]; -#else +# else return &objc_tag_classes[(obfuscatedTag << 1) | 1]; +# endif #endif } @@ -7899,6 +8298,10 @@ classSlotForTagIndex(objc_tag_index_t tag) if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) { int index = tag - OBJC_TAG_First52BitPayload; +#if 
OBJC_SPLIT_TAGGED_POINTERS + if (tag >= OBJC_TAG_FirstUnobfuscatedSplitTag) + return &objc_tag_ext_classes[index]; +#endif uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator >> _OBJC_TAG_EXT_INDEX_SHIFT) & _OBJC_TAG_EXT_INDEX_MASK); @@ -7922,16 +8325,28 @@ classSlotForTagIndex(objc_tag_index_t tag) static void initializeTaggedPointerObfuscator(void) { - if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) || - // Set the obfuscator to zero for apps linked against older SDKs, - // in case they're relying on the tagged pointer representation. - DisableTaggedPointerObfuscation) { - objc_debug_taggedpointer_obfuscator = 0; - } else { + if (!DisableTaggedPointerObfuscation && dyld_program_sdk_at_least(dyld_fall_2018_os_versions)) { // Pull random data into the variable, then shift away all non-payload bits. arc4random_buf(&objc_debug_taggedpointer_obfuscator, sizeof(objc_debug_taggedpointer_obfuscator)); objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK; + +#if OBJC_SPLIT_TAGGED_POINTERS + // The obfuscator doesn't apply to any of the extended tag mask or the no-obfuscation bit. + objc_debug_taggedpointer_obfuscator &= ~(_OBJC_TAG_EXT_MASK | _OBJC_TAG_NO_OBFUSCATION_MASK); + + // Shuffle the first seven entries of the tag permutator. + int max = 7; + for (int i = max - 1; i >= 0; i--) { + int target = arc4random_uniform(i + 1); + swap(objc_debug_tag60_permutations[i], + objc_debug_tag60_permutations[target]); + } +#endif + } else { + // Set the obfuscator to zero for apps linked against older SDKs, + // in case they're relying on the tagged pointer representation. 
+ objc_debug_taggedpointer_obfuscator = 0; } } @@ -8079,19 +8494,19 @@ static Class setSuperclass(Class cls, Class newSuper) ASSERT(cls->isRealized()); ASSERT(newSuper->isRealized()); - oldSuper = cls->superclass; + oldSuper = cls->getSuperclass(); removeSubclass(oldSuper, cls); removeSubclass(oldSuper->ISA(), cls->ISA()); - cls->superclass = newSuper; - cls->ISA()->superclass = newSuper->ISA(); + cls->setSuperclass(newSuper); + cls->ISA()->setSuperclass(newSuper->ISA(/*authenticated*/true)); addSubclass(newSuper, cls); addSubclass(newSuper->ISA(), cls->ISA()); // Flush subclass's method caches. - flushCaches(cls); - flushCaches(cls->ISA()); - + flushCaches(cls, __func__, [](Class c){ return true; }); + flushCaches(cls->ISA(), __func__, [](Class c){ return true; }); + return oldSuper; } diff --git a/runtime/objc-runtime.mm b/runtime/objc-runtime.mm index 08a1b77..e38b274 100644 --- a/runtime/objc-runtime.mm +++ b/runtime/objc-runtime.mm @@ -33,6 +33,7 @@ * Imports. **********************************************************************/ +#include // os_feature_enabled_simple() #include "objc-private.h" #include "objc-loadmethod.h" #include "objc-file.h" @@ -87,6 +88,9 @@ const option_t Settings[] = { #undef OPTION }; +namespace objc { + int PageCountWarning = 50; // Default value if the environment variable is not set +} // objc's key for pthread_getspecific #if SUPPORT_DIRECT_THREAD_KEYS @@ -338,6 +342,22 @@ void removeHeader(header_info *hi) #endif } +/*********************************************************************** +* SetPageCountWarning +* Convert environment variable value to integer value. +* If the value is valid, set the global PageCountWarning value. 
+**********************************************************************/ +void SetPageCountWarning(const char* envvar) { + if (envvar) { + long result = strtol(envvar, NULL, 10); + if (result <= INT_MAX && result >= -1) { + int32_t var = (int32_t)result; + if (var != 0) { // 0 is not a valid value for the env var + objc::PageCountWarning = var; + } + } + } +} /*********************************************************************** * environ_init @@ -352,6 +372,13 @@ void environ_init(void) return; } + // Turn off autorelease LRU coalescing by default for apps linked against + // older SDKs. LRU coalescing can reorder releases and certain older apps + // are accidentally relying on the ordering. + // rdar://problem/63886091 + if (!dyld_program_sdk_at_least(dyld_fall_2020_os_versions)) + DisableAutoreleaseCoalescingLRU = true; + bool PrintHelp = false; bool PrintOptions = false; bool maybeMallocDebugging = false; @@ -376,6 +403,11 @@ void environ_init(void) continue; } + if (0 == strncmp(*p, "OBJC_DEBUG_POOL_DEPTH=", 22)) { + SetPageCountWarning(*p + 22); + continue; + } + const char *value = strchr(*p, '='); if (!*value) continue; value++; @@ -388,10 +420,10 @@ void environ_init(void) *opt->var = (0 == strcmp(value, "YES")); break; } - } + } } - // Special case: enable some autorelease pool debugging + // Special case: enable some autorelease pool debugging // when some malloc debugging is enabled // and OBJC_DEBUG_POOL_ALLOCATION is not set to something other than NO. if (maybeMallocDebugging) { @@ -409,6 +441,10 @@ void environ_init(void) } } + if (!os_feature_enabled_simple(objc4, preoptimizedCaches, true)) { + DisablePreoptCaches = true; + } + // Print OBJC_HELP and OBJC_PRINT_OPTIONS output. 
if (PrintHelp || PrintOptions) { if (PrintHelp) { @@ -649,31 +685,25 @@ objc_getAssociatedObject(id object, const void *key) return _object_get_associative_reference(object, key); } -static void -_base_objc_setAssociatedObject(id object, const void *key, id value, objc_AssociationPolicy policy) -{ - _object_set_associative_reference(object, key, value, policy); -} - -static ChainedHookFunction SetAssocHook{_base_objc_setAssociatedObject}; +typedef void (*objc_hook_setAssociatedObject)(id _Nonnull object, const void * _Nonnull key, + id _Nullable value, objc_AssociationPolicy policy); void objc_setHook_setAssociatedObject(objc_hook_setAssociatedObject _Nonnull newValue, objc_hook_setAssociatedObject _Nullable * _Nonnull outOldValue) { - SetAssocHook.set(newValue, outOldValue); + // See objc_object::setHasAssociatedObjects() for a replacement } void objc_setAssociatedObject(id object, const void *key, id value, objc_AssociationPolicy policy) { - SetAssocHook.get()(object, key, value, policy); + _object_set_associative_reference(object, key, value, policy); } - void objc_removeAssociatedObjects(id object) { if (object && object->hasAssociatedObjects()) { - _object_remove_assocations(object); + _object_remove_assocations(object, /*deallocating*/false); } } diff --git a/runtime/objc-sel-set.mm b/runtime/objc-sel-set.mm index ab21b00..0fcf6f6 100644 --- a/runtime/objc-sel-set.mm +++ b/runtime/objc-sel-set.mm @@ -120,7 +120,7 @@ struct __objc_sel_set *__objc_sel_set_create(size_t selrefs) { sset->_count = 0; // heuristic to convert executable's selrefs count to table size -#if TARGET_OS_IPHONE && !TARGET_OS_IOSMAC +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST for (idx = 0; __objc_sel_set_capacities[idx] < selrefs; idx++); if (idx > 0 && selrefs < 1536) idx--; #else diff --git a/runtime/objc-sel-table.s b/runtime/objc-sel-table.s index 6d9710d..3fb517a 100644 --- a/runtime/objc-sel-table.s +++ b/runtime/objc-sel-table.s @@ -2,7 +2,12 @@ #include #if __LP64__ +#if 
__arm64e__ +// 0x6AE1 +# define PTR(x) .quad x@AUTH(da, 27361, addr) +#else # define PTR(x) .quad x +#endif #else # define PTR(x) .long x #endif diff --git a/runtime/objc-sel.mm b/runtime/objc-sel.mm index da4c228..a8623d8 100644 --- a/runtime/objc-sel.mm +++ b/runtime/objc-sel.mm @@ -24,10 +24,8 @@ #if __OBJC2__ #include "objc-private.h" -#include "objc-cache.h" #include "DenseMapExtras.h" - static objc::ExplicitInitDenseSet namedSelectors; static SEL search_builtins(const char *key); @@ -69,6 +67,16 @@ const char *sel_getName(SEL sel) } +unsigned long sel_hash(SEL sel) +{ + unsigned long selAddr = (unsigned long)sel; +#if CONFIG_USE_PREOPT_CACHES + selAddr ^= (selAddr >> 7); +#endif + return selAddr; +} + + BOOL sel_isMapped(SEL sel) { if (!sel) return NO; diff --git a/runtime/objc-weak.h b/runtime/objc-weak.h index 8c50050..535fc88 100644 --- a/runtime/objc-weak.h +++ b/runtime/objc-weak.h @@ -123,9 +123,15 @@ struct weak_table_t { uintptr_t max_hash_displacement; }; +enum WeakRegisterDeallocatingOptions { + ReturnNilIfDeallocating, + CrashIfDeallocating, + DontCheckDeallocating +}; + /// Adds an (object, weak pointer) pair to the weak table. id weak_register_no_lock(weak_table_t *weak_table, id referent, - id *referrer, bool crashIfDeallocating); + id *referrer, WeakRegisterDeallocatingOptions deallocatingOptions); /// Removes an (object, weak pointer) pair from the weak table. 
void weak_unregister_no_lock(weak_table_t *weak_table, id referent, id *referrer); diff --git a/runtime/objc-weak.mm b/runtime/objc-weak.mm index 4d9c43d..3289953 100644 --- a/runtime/objc-weak.mm +++ b/runtime/objc-weak.mm @@ -389,38 +389,43 @@ weak_unregister_no_lock(weak_table_t *weak_table, id referent_id, */ id weak_register_no_lock(weak_table_t *weak_table, id referent_id, - id *referrer_id, bool crashIfDeallocating) + id *referrer_id, WeakRegisterDeallocatingOptions deallocatingOptions) { objc_object *referent = (objc_object *)referent_id; objc_object **referrer = (objc_object **)referrer_id; - if (!referent || referent->isTaggedPointer()) return referent_id; + if (referent->isTaggedPointerOrNil()) return referent_id; // ensure that the referenced object is viable - bool deallocating; - if (!referent->ISA()->hasCustomRR()) { - deallocating = referent->rootIsDeallocating(); - } - else { - BOOL (*allowsWeakReference)(objc_object *, SEL) = - (BOOL(*)(objc_object *, SEL)) - object_getMethodImplementation((id)referent, - @selector(allowsWeakReference)); - if ((IMP)allowsWeakReference == _objc_msgForward) { - return nil; + if (deallocatingOptions == ReturnNilIfDeallocating || + deallocatingOptions == CrashIfDeallocating) { + bool deallocating; + if (!referent->ISA()->hasCustomRR()) { + deallocating = referent->rootIsDeallocating(); } - deallocating = + else { + // Use lookUpImpOrForward so we can avoid the assert in + // class_getInstanceMethod, since we intentionally make this + // callout with the lock held. + auto allowsWeakReference = (BOOL(*)(objc_object *, SEL)) + lookUpImpOrForwardTryCache((id)referent, @selector(allowsWeakReference), + referent->getIsa()); + if ((IMP)allowsWeakReference == _objc_msgForward) { + return nil; + } + deallocating = ! (*allowsWeakReference)(referent, @selector(allowsWeakReference)); - } + } - if (deallocating) { - if (crashIfDeallocating) { - _objc_fatal("Cannot form weak reference to instance (%p) of " - "class %s. 
It is possible that this object was " - "over-released, or is in the process of deallocation.", - (void*)referent, object_getClassName((id)referent)); - } else { - return nil; + if (deallocating) { + if (deallocatingOptions == CrashIfDeallocating) { + _objc_fatal("Cannot form weak reference to instance (%p) of " + "class %s. It is possible that this object was " + "over-released, or is in the process of deallocation.", + (void*)referent, object_getClassName((id)referent)); + } else { + return nil; + } } } diff --git a/runtime/objc.h b/runtime/objc.h index 6b974a3..9e22d90 100644 --- a/runtime/objc.h +++ b/runtime/objc.h @@ -67,7 +67,7 @@ typedef id _Nullable (*IMP)(id _Nonnull, SEL _Nonnull, ...); # endif #else // __OBJC_BOOL_IS_BOOL not set. -# if TARGET_OS_OSX || TARGET_OS_IOSMAC || ((TARGET_OS_IOS || TARGET_OS_BRIDGE) && !__LP64__ && !__ARM_ARCH_7K) +# if TARGET_OS_OSX || TARGET_OS_MACCATALYST || ((TARGET_OS_IOS || TARGET_OS_BRIDGE) && !__LP64__ && !__ARM_ARCH_7K) # define OBJC_BOOL_IS_BOOL 0 # else # define OBJC_BOOL_IS_BOOL 1 diff --git a/runtime/runtime.h b/runtime/runtime.h index c97129b..67145bd 100644 --- a/runtime/runtime.h +++ b/runtime/runtime.h @@ -1767,43 +1767,6 @@ OBJC_EXPORT void objc_setHook_getClass(objc_hook_getClass _Nonnull newValue, OBJC_AVAILABLE(10.14.4, 12.2, 12.2, 5.2, 3.2); #endif -/** - * Function type for a hook that assists objc_setAssociatedObject(). - * - * @param object The source object for the association. - * @param key The key for the association. - * @param value The value to associate with the key key for object. Pass nil to clear an existing association. - * @param policy The policy for the association. 
For possible values, see “Associative Object Behaviors.” - * - * @see objc_setAssociatedObject - * @see objc_setHook_setAssociatedObject - */ -typedef void (*objc_hook_setAssociatedObject)(id _Nonnull object, const void * _Nonnull key, - id _Nullable value, objc_AssociationPolicy policy); - -/** - * Install a hook for objc_setAssociatedObject(). - * - * @param newValue The hook function to install. - * @param outOldValue The address of a function pointer variable. On return, - * the old hook function is stored in the variable. - * - * @note The store to *outOldValue is thread-safe: the variable will be - * updated before objc_setAssociatedObject() calls your new hook to read it, - * even if your new hook is called from another thread before this - * setter completes. - * @note Your hook should always call the previous hook. - * - * @see objc_setAssociatedObject - * @see objc_hook_setAssociatedObject - */ -#if !(TARGET_OS_OSX && __i386__) -#define OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED 1 -OBJC_EXPORT void objc_setHook_setAssociatedObject(objc_hook_setAssociatedObject _Nonnull newValue, - objc_hook_setAssociatedObject _Nullable * _Nonnull outOldValue) - OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 4.0); -#endif - /** * Function type for a function that is called when an image is loaded. * @@ -1831,7 +1794,39 @@ typedef void (*objc_func_loadImage)(const struct mach_header * _Nonnull header); OBJC_EXPORT void objc_addLoadImageFunc(objc_func_loadImage _Nonnull func) OBJC_AVAILABLE(10.15, 13.0, 13.0, 6.0, 4.0); -/** +/** + * Function type for a hook that provides a name for lazily named classes. + * + * @param cls The class to generate a name for. + * @return The name of the class, or NULL if the name isn't known or can't me generated. + * + * @see objc_setHook_lazyClassNamer + */ +typedef const char * _Nullable (*objc_hook_lazyClassNamer)(_Nonnull Class cls); + +/** + * Install a hook to provide a name for lazily-named classes. 
+ * + * @param newValue The hook function to install. + * @param outOldValue The address of a function pointer variable. On return, + * the old hook function is stored in the variable. + * + * @note The store to *outOldValue is thread-safe: the variable will be + * updated before objc_getClass() calls your new hook to read it, + * even if your new hook is called from another thread before this + * setter completes. + * @note Your hook must call the previous hook for class names + * that you do not recognize. + */ +#if !(TARGET_OS_OSX && __i386__) +#define OBJC_SETHOOK_LAZYCLASSNAMER_DEFINED 1 +OBJC_EXPORT +void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue, + _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue) + OBJC_AVAILABLE(10.16, 14.0, 14.0, 7.0, 5.0); +#endif + +/** * Callback from Objective-C to Swift to perform Swift class initialization. */ #if !(TARGET_OS_OSX && __i386__) diff --git a/test/association.m b/test/association.m index e148fc5..35f81f4 100644 --- a/test/association.m +++ b/test/association.m @@ -3,6 +3,8 @@ #include "test.h" #include #include +#include +#include static int values; static int supers; @@ -85,6 +87,100 @@ static const char *key = "key"; } @end +@interface Sub59318867: NSObject @end +@implementation Sub59318867 ++ (void)initialize { + objc_setAssociatedObject(self, &key, self, OBJC_ASSOCIATION_ASSIGN); +} +@end + +@interface CallOnDealloc: NSObject @end +@implementation CallOnDealloc { + void (^_block)(void); +} +- (id)initWithBlock: (void (^)(void))block { + _block = (__bridge id)Block_copy((__bridge void *)block); + return self; +} +- (void)dealloc { + _block(); + _Block_release((__bridge void *)_block); + SUPER_DEALLOC(); +} +@end + +void TestReleaseLater(void) { + int otherObjsCount = 100; + char keys1[otherObjsCount]; + char keys2[otherObjsCount]; + char laterKey; + + __block int normalDeallocs = 0; + __block int laterDeallocs = 0; + + { + id target = [NSObject new]; + for (int i = 0; i < 
otherObjsCount; i++) { + id value = [[CallOnDealloc alloc] initWithBlock: ^{ normalDeallocs++; }]; + objc_setAssociatedObject(target, keys1 + i, value, OBJC_ASSOCIATION_RETAIN); + RELEASE_VALUE(value); + } + { + id laterValue = [[CallOnDealloc alloc] initWithBlock: ^{ + testassertequal(laterDeallocs, 0); + testassertequal(normalDeallocs, otherObjsCount * 2); + laterDeallocs++; + }]; + objc_setAssociatedObject(target, &laterKey, laterValue, (objc_AssociationPolicy)(OBJC_ASSOCIATION_RETAIN | _OBJC_ASSOCIATION_SYSTEM_OBJECT)); + RELEASE_VALUE(laterValue); + } + for (int i = 0; i < otherObjsCount; i++) { + id value = [[CallOnDealloc alloc] initWithBlock: ^{ normalDeallocs++; }]; + objc_setAssociatedObject(target, keys2 + i, value, OBJC_ASSOCIATION_RETAIN); + RELEASE_VALUE(value); + } + RELEASE_VALUE(target); + } + testassertequal(laterDeallocs, 1); + testassertequal(normalDeallocs, otherObjsCount * 2); +} + +void TestReleaseLaterRemoveAssociations(void) { + + char normalKey; + char laterKey; + + __block int normalDeallocs = 0; + __block int laterDeallocs = 0; + + @autoreleasepool { + id target = [NSObject new]; + { + id normalValue = [[CallOnDealloc alloc] initWithBlock: ^{ normalDeallocs++; }]; + id laterValue = [[CallOnDealloc alloc] initWithBlock: ^{ laterDeallocs++; }]; + objc_setAssociatedObject(target, &normalKey, normalValue, OBJC_ASSOCIATION_RETAIN); + objc_setAssociatedObject(target, &laterKey, laterValue, (objc_AssociationPolicy)(OBJC_ASSOCIATION_RETAIN | _OBJC_ASSOCIATION_SYSTEM_OBJECT)); + RELEASE_VALUE(normalValue); + RELEASE_VALUE(laterValue); + } + testassertequal(normalDeallocs, 0); + testassertequal(laterDeallocs, 0); + + objc_removeAssociatedObjects(target); + testassertequal(normalDeallocs, 1); + testassertequal(laterDeallocs, 0); + + id normalValue = objc_getAssociatedObject(target, &normalKey); + id laterValue = objc_getAssociatedObject(target, &laterKey); + testassert(!normalValue); + testassert(laterValue); + + RELEASE_VALUE(target); + } + + 
testassertequal(normalDeallocs, 1); + testassertequal(laterDeallocs, 1); +} int main() { @@ -123,5 +219,13 @@ int main() objc_setAssociatedObject(nil, &key, nil, OBJC_ASSOCIATION_ASSIGN); #pragma clang diagnostic pop + // rdar://problem/59318867 Make sure we don't reenter the association lock + // when setting an associated object on an uninitialized class. + Class Sub59318867Local = objc_getClass("Sub59318867"); + objc_setAssociatedObject(Sub59318867Local, &key, Sub59318867Local, OBJC_ASSOCIATION_ASSIGN); + + TestReleaseLater(); + TestReleaseLaterRemoveAssociations(); + succeed(__FILE__); } diff --git a/test/badPoolCompat-ios-tvos.m b/test/badPoolCompat-ios-tvos.m deleted file mode 100644 index 5f1b92c..0000000 --- a/test/badPoolCompat-ios-tvos.m +++ /dev/null @@ -1,14 +0,0 @@ -// Run test badPool as if it were built with an old SDK. - -// TEST_CONFIG MEM=mrc OS=iphoneos,iphonesimulator,appletvos,appletvsimulator -// TEST_CRASHES -// TEST_CFLAGS -DOLD=1 -Xlinker -sdk_version -Xlinker 9.0 - -/* -TEST_RUN_OUTPUT -objc\[\d+\]: Invalid or prematurely-freed autorelease pool 0x[0-9a-fA-f]+\. Set a breakpoint .* Proceeding anyway .* -OK: badPool.m -END -*/ - -#include "badPool.m" diff --git a/test/badPoolCompat-ios.m b/test/badPoolCompat-ios.m new file mode 100644 index 0000000..a5f684f --- /dev/null +++ b/test/badPoolCompat-ios.m @@ -0,0 +1,18 @@ +// Run test badPool as if it were built with an old SDK. + +// TEST_CONFIG MEM=mrc OS=iphoneos,iphonesimulator ARCH=x86_64,arm64 +// TEST_CRASHES +// TEST_CFLAGS -DOLD=1 -Xlinker -platform_version -Xlinker ios -Xlinker 9.0 -Xlinker 9.0 -miphoneos-version-min=9.0 + +/* +TEST_BUILD_OUTPUT +ld: warning: passed two min versions.*for platform.* +END + +TEST_RUN_OUTPUT +objc\[\d+\]: Invalid or prematurely-freed autorelease pool 0x[0-9a-fA-f]+\. 
Set a breakpoint .* Proceeding anyway .* +OK: badPool.m +END +*/ + +#include "badPool.m" diff --git a/test/badPoolCompat-macos.m b/test/badPoolCompat-macos.m index afd2117..1131c83 100644 --- a/test/badPoolCompat-macos.m +++ b/test/badPoolCompat-macos.m @@ -1,10 +1,14 @@ // Run test badPool as if it were built with an old SDK. -// TEST_CONFIG MEM=mrc OS=macosx +// TEST_CONFIG MEM=mrc OS=macosx ARCH=x86_64 // TEST_CRASHES -// TEST_CFLAGS -DOLD=1 -Xlinker -sdk_version -Xlinker 10.11 +// TEST_CFLAGS -DOLD=1 -Xlinker -platform_version -Xlinker macos -Xlinker 10.11 -Xlinker 10.11 -mmacosx-version-min=10.11 /* +TEST_BUILD_OUTPUT +ld: warning: passed two min versions.*for platform.* +END + TEST_RUN_OUTPUT objc\[\d+\]: Invalid or prematurely-freed autorelease pool 0x[0-9a-fA-f]+\. Set a breakpoint .* Proceeding anyway .* OK: badPool.m diff --git a/test/badPoolCompat-tvos.m b/test/badPoolCompat-tvos.m new file mode 100644 index 0000000..3adfacd --- /dev/null +++ b/test/badPoolCompat-tvos.m @@ -0,0 +1,18 @@ +// Run test badPool as if it were built with an old SDK. + +// TEST_CONFIG MEM=mrc OS=appletvos,appletvsimulator ARCH=x86_64,arm64 +// TEST_CRASHES +// TEST_CFLAGS -DOLD=1 -Xlinker -platform_version -Xlinker tvos -Xlinker 9.0 -Xlinker 9.0 -mtvos-version-min=9.0 + +/* +TEST_BUILD_OUTPUT +ld: warning: passed two min versions.*for platform.* +END + +TEST_RUN_OUTPUT +objc\[\d+\]: Invalid or prematurely-freed autorelease pool 0x[0-9a-fA-f]+\. 
Set a breakpoint .* Proceeding anyway .* +OK: badPool.m +END +*/ + +#include "badPool.m" diff --git a/test/badPoolCompat-watchos.m b/test/badPoolCompat-watchos.m index 6e89e44..19e8ca7 100644 --- a/test/badPoolCompat-watchos.m +++ b/test/badPoolCompat-watchos.m @@ -2,9 +2,13 @@ // TEST_CONFIG MEM=mrc OS=watchos,watchsimulator // TEST_CRASHES -// TEST_CFLAGS -DOLD=1 -Xlinker -sdk_version -Xlinker 2.0 +// TEST_CFLAGS -DOLD=1 -Xlinker -platform_version -Xlinker watchos -Xlinker 2.0 -Xlinker 2.0 -mwatchos-version-min=2.0 /* +TEST_BUILD_OUTPUT +ld: warning: passed two min versions.*for platform.* +END + TEST_RUN_OUTPUT objc\[\d+\]: Invalid or prematurely-freed autorelease pool 0x[0-9a-fA-f]+\. Set a breakpoint .* Proceeding anyway .* OK: badPool.m diff --git a/test/badSuperclass.m b/test/badSuperclass.m index 2fa0bc7..2ac22b5 100644 --- a/test/badSuperclass.m +++ b/test/badSuperclass.m @@ -26,7 +26,7 @@ int main() // Create a cycle in a superclass chain (Sub->supercls == Sub) // then attempt to walk that chain. Runtime should halt eventually. 
_objc_flush_caches(supercls); - ((Class *)(__bridge void *)subcls)[1] = subcls; + ((Class __ptrauth_objc_super_pointer *)(__bridge void *)subcls)[1] = subcls; #ifdef CACHE_FLUSH _objc_flush_caches(supercls); #else diff --git a/test/bigrc.m b/test/bigrc.m index 419bbb6..3918d8f 100644 --- a/test/bigrc.m +++ b/test/bigrc.m @@ -1,13 +1,4 @@ // TEST_CONFIG MEM=mrc -/* -TEST_RUN_OUTPUT -objc\[\d+\]: Deallocator object 0x[0-9a-fA-F]+ overreleased while already deallocating; break on objc_overrelease_during_dealloc_error to debug -OK: bigrc.m -OR -no overrelease enforcement -OK: bigrc.m -END - */ #include "test.h" #include "testroot.i" @@ -20,37 +11,15 @@ static size_t LOTS; -(void)dealloc { id o = self; - size_t rc = 1; - testprintf("Retain a lot during dealloc\n"); + testprintf("Retain/release during dealloc\n"); - testassert(rc == 1); - testassert([o retainCount] == rc); - do { - [o retain]; - if (rc % 0x100000 == 0) testprintf("%zx/%zx ++\n", rc, LOTS); - } while (++rc < LOTS); - - testassert([o retainCount] == rc); - - do { - [o release]; - if (rc % 0x100000 == 0) testprintf("%zx/%zx --\n", rc, LOTS); - } while (--rc > 1); - - testassert(rc == 1); - testassert([o retainCount] == rc); - - - testprintf("Overrelease during dealloc\n"); - - // Not all architectures enforce this. 
-#if !SUPPORT_NONPOINTER_ISA - testwarn("no overrelease enforcement"); - fprintf(stderr, "no overrelease enforcement\n"); -#endif + testassertequal([o retainCount], 0); + [o retain]; + testassertequal([o retainCount], 0); [o release]; + testassertequal([o retainCount], 0); [super dealloc]; } diff --git a/test/bool.c b/test/bool.c index c12cc32..f112414 100644 --- a/test/bool.c +++ b/test/bool.c @@ -5,7 +5,11 @@ #include #if TARGET_OS_OSX -# define RealBool 0 +# if __x86_64__ +# define RealBool 0 +# else +# define RealBool 1 +# endif #elif TARGET_OS_IOS || TARGET_OS_BRIDGE # if (__arm__ && !__armv7k__) || __i386__ # define RealBool 0 diff --git a/test/cacheflush-constant.m b/test/cacheflush-constant.m new file mode 100644 index 0000000..94da6e2 --- /dev/null +++ b/test/cacheflush-constant.m @@ -0,0 +1,44 @@ +// TEST_CFLAGS -framework Foundation +/* +TEST_RUN_OUTPUT +foo +bar +bar +foo +END +*/ + +// NOTE: This test won't catch problems when running against a root, so it's of +// limited utility, but it would at least catch things when testing against the +// shared cache. + +#include +#include + +@interface NSBlock: NSObject @end + +// NSBlock is a conveniently accessible superclass that (currently) has a constant cache. 
+@interface MyBlock: NSBlock ++(void)foo; ++(void)bar; +@end +@implementation MyBlock ++(void)foo { + printf("foo\n"); +} ++(void)bar { + printf("bar\n"); +} +@end + +int main() { + [MyBlock foo]; + [MyBlock bar]; + + Method m1 = class_getClassMethod([MyBlock class], @selector(foo)); + Method m2 = class_getClassMethod([MyBlock class], @selector(bar)); + method_exchangeImplementations(m1, m2); + + [MyBlock foo]; + [MyBlock bar]; +} diff --git a/test/category.m b/test/category.m index 80795e2..334bc5f 100644 --- a/test/category.m +++ b/test/category.m @@ -135,25 +135,25 @@ asm( "l_OBJC_$_CATEGORY_INSTANCE_METHODS_Super_$_Category_catlist2: \n" " .long 24 \n" " .long 1 \n" -" "PTR" L_catlist2MethodString \n" -" "PTR" L_catlist2MethodTypes \n" -" "PTR" _catlist2MethodImplementation"SIGNED_CATEGORY_IMP" \n" +" " PTR " L_catlist2MethodString \n" +" " PTR " L_catlist2MethodTypes \n" +" " PTR " _catlist2MethodImplementation" SIGNED_CATEGORY_IMP" \n" " .p2align 3 \n" "l_OBJC_$_CATEGORY_Super_$_Category_catlist2: \n" -" "PTR" L_catlist2CategoryName \n" -" "PTR" _OBJC_CLASS_$_Super \n" -" "PTR" l_OBJC_$_CATEGORY_INSTANCE_METHODS_Super_$_Category_catlist2 \n" -" "PTR" 0 \n" -" "PTR" 0 \n" -" "PTR" 0 \n" -" "PTR" 0 \n" +" " PTR " L_catlist2CategoryName \n" +" " PTR " _OBJC_CLASS_$_Super \n" +" " PTR " l_OBJC_$_CATEGORY_INSTANCE_METHODS_Super_$_Category_catlist2 \n" +" " PTR " 0 \n" +" " PTR " 0 \n" +" " PTR " 0 \n" +" " PTR " 0 \n" " .long 64 \n" " .space 4 \n" " .section __DATA,__objc_catlist2 \n" " .p2align 3 \n" -" "PTR" l_OBJC_$_CATEGORY_Super_$_Category_catlist2 \n" +" " PTR " l_OBJC_$_CATEGORY_Super_$_Category_catlist2 \n" " .text \n" ); diff --git a/test/consolidatePoolPointers.m b/test/consolidatePoolPointers.m new file mode 100644 index 0000000..241df6f --- /dev/null +++ b/test/consolidatePoolPointers.m @@ -0,0 +1,142 @@ +//TEST_CONFIG MEM=mrc ARCH=x86_64,ARM64,ARM64e +//TEST_ENV OBJC_DISABLE_AUTORELEASE_COALESCING=NO OBJC_DISABLE_AUTORELEASE_COALESCING_LRU=NO + 
+#include "test.h" +#import +#include + +@interface Counter: NSObject { +@public + int retains; + int releases; + int autoreleases; +} +@end +@implementation Counter + +- (id)retain { + retains++; + return [super retain]; +} + +- (oneway void)release { + releases++; + [super release]; +} + +- (id)autorelease { + autoreleases++; + return [super autorelease]; +} + +- (void)dealloc { + testprintf("%p dealloc\n", self); + [super dealloc]; +} + +@end + +// Create a number of objects, autoreleasing each one a number of times in a +// round robin fashion. Verify that each object gets sent retain, release, and +// autorelease the correct number of times. Verify that the gap between +// autoreleasepool pointers is the given number of objects. Note: this will not +// work when the pool hits a page boundary, to be sure to stay under that limit. +void test(int objCount, int autoreleaseCount, int expectedGap) { + testprintf("Testing %d objects, %d autoreleases, expecting gap of %d\n", + objCount, autoreleaseCount, expectedGap); + + Counter *objs[objCount]; + for (int i = 0; i < objCount; i++) + objs[i] = [Counter new]; + + for (int j = 0; j < autoreleaseCount; j++) + for (int i = 0; i < objCount; i++) + [objs[i] retain]; + + for (int i = 0; i < objCount; i++) { + testassertequal(objs[i]->retains, autoreleaseCount); + testassertequal(objs[i]->releases, 0); + testassertequal(objs[i]->autoreleases, 0); + } + + void *outer = objc_autoreleasePoolPush(); + uintptr_t outerAddr = (uintptr_t)outer; + for (int j = 0; j < autoreleaseCount; j++) + for (int i = 0; i < objCount; i++) + [objs[i] autorelease]; + for (int i = 0; i < objCount; i++) { + testassertequal(objs[i]->retains, autoreleaseCount); + testassertequal(objs[i]->releases, 0); + testassertequal(objs[i]->autoreleases, autoreleaseCount); + } + + void *inner = objc_autoreleasePoolPush(); + uintptr_t innerAddr = (uintptr_t)inner; + testprintf("outer=%p inner=%p\n", outer, inner); + // Do one more autorelease in the inner pool to 
make sure we correctly + // handle pool boundaries. + for (int i = 0; i < objCount; i++) + [[objs[i] retain] autorelease]; + for (int i = 0; i < objCount; i++) { + testassertequal(objs[i]->retains, autoreleaseCount + 1); + testassertequal(objs[i]->releases, 0); + testassertequal(objs[i]->autoreleases, autoreleaseCount + 1); + } + + objc_autoreleasePoolPop(inner); + for (int i = 0; i < objCount; i++) { + testassertequal(objs[i]->retains, autoreleaseCount + 1); + testassertequal(objs[i]->releases, 1); + testassertequal(objs[i]->autoreleases, autoreleaseCount + 1); + } + + objc_autoreleasePoolPop(outer); + for (int i = 0; i < objCount; i++) { + testassertequal(objs[i]->retains, autoreleaseCount + 1); + testassertequal(objs[i]->releases, autoreleaseCount + 1); + testassertequal(objs[i]->autoreleases, autoreleaseCount + 1); + } + + intptr_t gap = innerAddr - outerAddr; + testprintf("gap=%ld\n", gap); + testassertequal(gap, expectedGap * sizeof(id)); + + // Destroy our test objects. + for (int i = 0; i < objCount; i++) + [objs[i] release]; +} + +int main() +{ + // Push a pool here so test() doesn't see a placeholder. 
+ objc_autoreleasePoolPush(); + + test(1, 1, 2); + test(1, 2, 2); + test(1, 10, 2); + test(1, 100, 2); + test(1, 70000, 3); + + test(2, 1, 3); + test(2, 2, 3); + test(2, 10, 3); + test(2, 100, 3); + test(2, 70000, 5); + + test(3, 1, 4); + test(3, 2, 4); + test(3, 10, 4); + test(3, 100, 4); + test(3, 70000, 7); + + test(4, 1, 5); + test(4, 2, 5); + test(4, 10, 5); + test(4, 100, 5); + test(4, 70000, 9); + + test(5, 1, 6); + test(5, 2, 11); + + succeed(__FILE__); +} diff --git a/test/customrr-nsobject.m b/test/customrr-nsobject.m index 912f414..f25e4ad 100644 --- a/test/customrr-nsobject.m +++ b/test/customrr-nsobject.m @@ -10,6 +10,8 @@ typedef IMP __ptrauth_objc_method_list_imp MethodListIMP; typedef IMP MethodListIMP; #endif +EXTERN_C void _method_setImplementationRawUnsafe(Method m, IMP imp); + static int Retains; static int Releases; static int Autoreleases; @@ -64,7 +66,7 @@ int main(int argc __unused, char **argv) #if SWIZZLE_AWZ method_setImplementation(meth, (IMP)HackAllocWithZone); #else - ((MethodListIMP *)meth)[2] = (IMP)HackAllocWithZone; + _method_setImplementationRawUnsafe(meth, (IMP)HackAllocWithZone); #endif meth = class_getClassMethod(cls, @selector(new)); @@ -72,7 +74,7 @@ int main(int argc __unused, char **argv) #if SWIZZLE_CORE method_setImplementation(meth, (IMP)HackPlusNew); #else - ((MethodListIMP *)meth)[2] = (IMP)HackPlusNew; + _method_setImplementationRawUnsafe(meth, (IMP)HackPlusNew); #endif meth = class_getClassMethod(cls, @selector(self)); @@ -80,7 +82,7 @@ int main(int argc __unused, char **argv) #if SWIZZLE_CORE method_setImplementation(meth, (IMP)HackPlusSelf); #else - ((MethodListIMP *)meth)[2] = (IMP)HackPlusSelf; + _method_setImplementationRawUnsafe(meth, (IMP)HackPlusSelf); #endif meth = class_getInstanceMethod(cls, @selector(self)); @@ -88,7 +90,7 @@ int main(int argc __unused, char **argv) #if SWIZZLE_CORE method_setImplementation(meth, (IMP)HackSelf); #else - ((MethodListIMP *)meth)[2] = (IMP)HackSelf; + 
_method_setImplementationRawUnsafe(meth, (IMP)HackSelf); #endif meth = class_getInstanceMethod(cls, @selector(release)); @@ -96,25 +98,25 @@ int main(int argc __unused, char **argv) #if SWIZZLE_RELEASE method_setImplementation(meth, (IMP)HackRelease); #else - ((MethodListIMP *)meth)[2] = (IMP)HackRelease; + _method_setImplementationRawUnsafe(meth, (IMP)HackRelease); #endif // These other methods get hacked for counting purposes only meth = class_getInstanceMethod(cls, @selector(retain)); RealRetain = (typeof(RealRetain))method_getImplementation(meth); - ((MethodListIMP *)meth)[2] = (IMP)HackRetain; + _method_setImplementationRawUnsafe(meth, (IMP)HackRetain); meth = class_getInstanceMethod(cls, @selector(autorelease)); RealAutorelease = (typeof(RealAutorelease))method_getImplementation(meth); - ((MethodListIMP *)meth)[2] = (IMP)HackAutorelease; + _method_setImplementationRawUnsafe(meth, (IMP)HackAutorelease); meth = class_getClassMethod(cls, @selector(alloc)); RealAlloc = (typeof(RealAlloc))method_getImplementation(meth); - ((MethodListIMP *)meth)[2] = (IMP)HackAlloc; + _method_setImplementationRawUnsafe(meth, (IMP)HackAlloc); meth = class_getInstanceMethod(cls, @selector(init)); - ((MethodListIMP *)meth)[2] = (IMP)HackInit; + _method_setImplementationRawUnsafe(meth, (IMP)HackInit); // Verify that the swizzles occurred before +initialize by provoking it now testassert(PlusInitializes == 0); diff --git a/test/customrr.m b/test/customrr.m index 4ebcece..633c260 100644 --- a/test/customrr.m +++ b/test/customrr.m @@ -191,38 +191,31 @@ int main(int argc __unused, char **argv) // Don't use runtime functions to do this - // we want the runtime to think that these are NSObject's real code { -#if __has_feature(ptrauth_calls) - typedef IMP __ptrauth_objc_method_list_imp MethodListIMP; -#else - typedef IMP MethodListIMP; -#endif - Class cls = [NSObject class]; IMP imp = class_getMethodImplementation(cls, @selector(retain)); - MethodListIMP *m = (MethodListIMP *) - 
class_getInstanceMethod(cls, @selector(retain)); - testassert(m[2] == imp); // verify Method struct is as we expect - - m = (MethodListIMP *)class_getInstanceMethod(cls, @selector(retain)); - m[2] = (IMP)HackRetain; - m = (MethodListIMP *)class_getInstanceMethod(cls, @selector(release)); - m[2] = (IMP)HackRelease; - m = (MethodListIMP *)class_getInstanceMethod(cls, @selector(autorelease)); - m[2] = (IMP)HackAutorelease; - m = (MethodListIMP *)class_getInstanceMethod(cls, @selector(retainCount)); - m[2] = (IMP)HackRetainCount; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(retain)); - m[2] = (IMP)HackPlusRetain; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(release)); - m[2] = (IMP)HackPlusRelease; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(autorelease)); - m[2] = (IMP)HackPlusAutorelease; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(retainCount)); - m[2] = (IMP)HackPlusRetainCount; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(alloc)); - m[2] = (IMP)HackAlloc; - m = (MethodListIMP *)class_getClassMethod(cls, @selector(allocWithZone:)); - m[2] = (IMP)HackAllocWithZone; + Method m = class_getInstanceMethod(cls, @selector(retain)); + testassert(method_getImplementation(m) == imp); // verify Method struct is as we expect + + m = class_getInstanceMethod(cls, @selector(retain)); + _method_setImplementationRawUnsafe(m, (IMP)HackRetain); + m = class_getInstanceMethod(cls, @selector(release)); + _method_setImplementationRawUnsafe(m, (IMP)HackRelease); + m = class_getInstanceMethod(cls, @selector(autorelease)); + _method_setImplementationRawUnsafe(m, (IMP)HackAutorelease); + m = class_getInstanceMethod(cls, @selector(retainCount)); + _method_setImplementationRawUnsafe(m, (IMP)HackRetainCount); + m = class_getClassMethod(cls, @selector(retain)); + _method_setImplementationRawUnsafe(m, (IMP)HackPlusRetain); + m = class_getClassMethod(cls, @selector(release)); + _method_setImplementationRawUnsafe(m, 
(IMP)HackPlusRelease); + m = class_getClassMethod(cls, @selector(autorelease)); + _method_setImplementationRawUnsafe(m, (IMP)HackPlusAutorelease); + m = class_getClassMethod(cls, @selector(retainCount)); + _method_setImplementationRawUnsafe(m, (IMP)HackPlusRetainCount); + m = class_getClassMethod(cls, @selector(alloc)); + _method_setImplementationRawUnsafe(m, (IMP)HackAlloc); + m = class_getClassMethod(cls, @selector(allocWithZone:)); + _method_setImplementationRawUnsafe(m, (IMP)HackAllocWithZone); _objc_flush_caches(cls); diff --git a/test/evil-class-def.m b/test/evil-class-def.m index c49bda8..066691a 100644 --- a/test/evil-class-def.m +++ b/test/evil-class-def.m @@ -12,8 +12,14 @@ #if __has_feature(ptrauth_calls) # define SIGNED_METHOD_LIST_IMP "@AUTH(ia,0,addr) " +# define SIGNED_METHOD_LIST "@AUTH(da,0xC310,addr) " +# define SIGNED_ISA "@AUTH(da, 0x6AE1, addr) " +# define SIGNED_SUPER "@AUTH(da, 0xB5AB, addr) " #else # define SIGNED_METHOD_LIST_IMP +# define SIGNED_METHOD_LIST +# define SIGNED_ISA +# define SIGNED_SUPER #endif #define str(x) #x @@ -25,15 +31,15 @@ void* nop(void* self) { return self; } __END_DECLS asm( - ".globl _OBJC_CLASS_$_Super \n" - ".section __DATA,__objc_data \n" - ".align 3 \n" - "_OBJC_CLASS_$_Super: \n" - PTR "_OBJC_METACLASS_$_Super \n" - PTR "0 \n" - PTR "__objc_empty_cache \n" - PTR "0 \n" - PTR "L_ro \n" + ".globl _OBJC_CLASS_$_Super \n" + ".section __DATA,__objc_data \n" + ".align 3 \n" + "_OBJC_CLASS_$_Super: \n" + PTR "_OBJC_METACLASS_$_Super" SIGNED_ISA "\n" + PTR "0 \n" + PTR "__objc_empty_cache \n" + PTR "0 \n" + PTR "L_ro \n" // pad to OBJC_MAX_CLASS_SIZE PTR "0 \n" PTR "0 \n" @@ -63,12 +69,12 @@ asm( PTR "0 \n" PTR "0 \n" "" - "_OBJC_METACLASS_$_Super: \n" - PTR "_OBJC_METACLASS_$_Super \n" - PTR "_OBJC_CLASS_$_Super \n" - PTR "__objc_empty_cache \n" - PTR "0 \n" - PTR "L_meta_ro \n" + "_OBJC_METACLASS_$_Super: \n" + PTR "_OBJC_METACLASS_$_Super" SIGNED_ISA "\n" + PTR "_OBJC_CLASS_$_Super" SIGNED_SUPER "\n" + PTR 
"__objc_empty_cache \n" + PTR "0 \n" + PTR "L_meta_ro \n" // pad to OBJC_MAX_CLASS_SIZE PTR "0 \n" PTR "0 \n" @@ -108,9 +114,9 @@ asm( PTR "0 \n" PTR "L_super_name \n" #if EVIL_SUPER - PTR "L_evil_methods \n" + PTR "L_evil_methods" SIGNED_METHOD_LIST "\n" #else - PTR "L_good_methods \n" + PTR "L_good_methods" SIGNED_METHOD_LIST "\n" #endif PTR "0 \n" PTR "L_super_ivars \n" @@ -127,24 +133,24 @@ asm( PTR "0 \n" PTR "L_super_name \n" #if EVIL_SUPER_META - PTR "L_evil_methods \n" + PTR "L_evil_methods" SIGNED_METHOD_LIST "\n" #else - PTR "L_good_methods \n" + PTR "L_good_methods" SIGNED_METHOD_LIST "\n" #endif PTR "0 \n" PTR "0 \n" PTR "0 \n" PTR "0 \n" - ".globl _OBJC_CLASS_$_Sub \n" - ".section __DATA,__objc_data \n" - ".align 3 \n" - "_OBJC_CLASS_$_Sub: \n" - PTR "_OBJC_METACLASS_$_Sub \n" - PTR "_OBJC_CLASS_$_Super \n" - PTR "__objc_empty_cache \n" - PTR "0 \n" - PTR "L_sub_ro \n" + ".globl _OBJC_CLASS_$_Sub \n" + ".section __DATA,__objc_data \n" + ".align 3 \n" + "_OBJC_CLASS_$_Sub: \n" + PTR "_OBJC_METACLASS_$_Sub" SIGNED_ISA "\n" + PTR "_OBJC_CLASS_$_Super" SIGNED_SUPER "\n" + PTR "__objc_empty_cache \n" + PTR "0 \n" + PTR "L_sub_ro \n" // pad to OBJC_MAX_CLASS_SIZE PTR "0 \n" PTR "0 \n" @@ -174,12 +180,12 @@ asm( PTR "0 \n" PTR "0 \n" "" - "_OBJC_METACLASS_$_Sub: \n" - PTR "_OBJC_METACLASS_$_Super \n" - PTR "_OBJC_METACLASS_$_Super \n" - PTR "__objc_empty_cache \n" - PTR "0 \n" - PTR "L_sub_meta_ro \n" + "_OBJC_METACLASS_$_Sub: \n" + PTR "_OBJC_METACLASS_$_Super" SIGNED_ISA "\n" + PTR "_OBJC_METACLASS_$_Super" SIGNED_SUPER "\n" + PTR "__objc_empty_cache \n" + PTR "0 \n" + PTR "L_sub_meta_ro \n" // pad to OBJC_MAX_CLASS_SIZE PTR "0 \n" PTR "0 \n" @@ -219,9 +225,9 @@ asm( PTR "0 \n" PTR "L_sub_name \n" #if EVIL_SUB - PTR "L_evil_methods \n" + PTR "L_evil_methods" SIGNED_METHOD_LIST "\n" #else - PTR "L_good_methods \n" + PTR "L_good_methods" SIGNED_METHOD_LIST "\n" #endif PTR "0 \n" PTR "L_sub_ivars \n" @@ -238,9 +244,9 @@ asm( PTR "0 \n" PTR "L_sub_name \n" #if 
EVIL_SUB_META - PTR "L_evil_methods \n" + PTR "L_evil_methods" SIGNED_METHOD_LIST "\n" #else - PTR "L_good_methods \n" + PTR "L_good_methods" SIGNED_METHOD_LIST "\n" #endif PTR "0 \n" PTR "0 \n" diff --git a/test/exchangeImp.m b/test/exchangeImp.m index da84f94..489c691 100644 --- a/test/exchangeImp.m +++ b/test/exchangeImp.m @@ -24,6 +24,9 @@ END #include static int state; +static int swizzleOld; +static int swizzleNew; +static int swizzleB; #define ONE 1 #define TWO 2 @@ -36,6 +39,13 @@ static int state; +(void) two { state = TWO; } +(void) length { state = LENGTH; } +(void) count { state = COUNT; } + +-(void) swizzleTarget { + swizzleOld++; +} +-(void) swizzleReplacement { + swizzleNew++; +} @end #define checkExchange(s1, v1, s2, v2) \ @@ -90,6 +100,42 @@ static int state; testassert(state == v2); \ } while (0) +@interface A : Super +@end +@implementation A +@end + +@interface B : Super +@end +@implementation B +- (void) swizzleTarget { + swizzleB++; +} +@end + +@interface C : Super +@end +@implementation C +- (void) hello { } +@end + +static IMP findInCache(Class cls, SEL sel) +{ + struct objc_imp_cache_entry *ents; + int count; + IMP ret = nil; + + ents = class_copyImpCache(cls, &count); + for (int i = 0; i < count; i++) { + if (ents[i].sel == sel) { + ret = ents[i].imp; + break; + } + } + free(ents); + return ret; +} + int main() { // Check ordinary selectors @@ -102,5 +148,66 @@ int main() checkExchange(count, COUNT, one, ONE); checkExchange(two, TWO, length, LENGTH); + Super *s = [Super new]; + A *a = [A new]; + B *b = [B new]; + C *c = [C new]; + + // cache swizzleTarget in Super, A and B + [s swizzleTarget]; + testassert(swizzleOld == 1); + testassert(swizzleNew == 0); + testassert(swizzleB == 0); + testassert(findInCache([Super class], @selector(swizzleTarget)) != nil); + + [a swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 0); + testassert(swizzleB == 0); + testassert(findInCache([A class], @selector(swizzleTarget)) != nil); + 
+ [b swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 0); + testassert(swizzleB == 1); + testassert(findInCache([B class], @selector(swizzleTarget)) != nil); + + // prime C's cache too + [c hello]; + testassert(findInCache([C class], @selector(hello)) != nil); + + Method m1 = class_getInstanceMethod([Super class], @selector(swizzleTarget)); + Method m2 = class_getInstanceMethod([Super class], @selector(swizzleReplacement)); + method_exchangeImplementations(m1, m2); + + // this should invalidate Super, A, but: + // - not B because it overrides - swizzleTarget and hence doesn't care + // - not C because it neither called swizzleTarget nor swizzleReplacement + testassert(findInCache([Super class], @selector(swizzleTarget)) == nil); + testassert(findInCache([A class], @selector(swizzleTarget)) == nil); + testassert(findInCache([B class], @selector(swizzleTarget)) != nil); + testassert(findInCache([C class], @selector(hello)) != nil); + + // now check that all lookups do the right thing + [s swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 1); + testassert(swizzleB == 1); + + [a swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 2); + testassert(swizzleB == 1); + + [b swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 2); + testassert(swizzleB == 2); + + [c swizzleTarget]; + testassert(swizzleOld == 2); + testassert(swizzleNew == 3); + testassert(swizzleB == 2); + succeed(__FILE__); } diff --git a/test/fakeRealizedClass.m b/test/fakeRealizedClass.m new file mode 100644 index 0000000..cec1c12 --- /dev/null +++ b/test/fakeRealizedClass.m @@ -0,0 +1,74 @@ +/* +Make sure we detect classes with the RW_REALIZED bit set in the binary. 
rdar://problem/67692760 +TEST_CONFIG OS=macosx +TEST_CRASHES +TEST_RUN_OUTPUT +objc\[\d+\]: realized class 0x[0-9a-fA-F]+ has corrupt data pointer 0x[0-9a-fA-F]+ +objc\[\d+\]: HALTED +END +*/ + +#include "test.h" + +#include + +#define RW_REALIZED (1U<<31) + +struct ObjCClass { + struct ObjCClass * __ptrauth_objc_isa_pointer isa; + struct ObjCClass * __ptrauth_objc_super_pointer superclass; + void *cachePtr; + uintptr_t zero; + uintptr_t data; +}; + +struct ObjCClass_ro { + uint32_t flags; + uint32_t instanceStart; + uint32_t instanceSize; +#ifdef __LP64__ + uint32_t reserved; +#endif + + union { + const uint8_t * ivarLayout; + struct ObjCClass * nonMetaClass; + }; + + const char * name; + struct ObjCMethodList * __ptrauth_objc_method_list_pointer baseMethodList; + struct protocol_list_t * baseProtocols; + const struct ivar_list_t * ivars; + + const uint8_t * weakIvarLayout; + struct property_list_t *baseProperties; +}; + +extern struct ObjCClass OBJC_METACLASS_$_NSObject; +extern struct ObjCClass OBJC_CLASS_$_NSObject; + +struct ObjCClass_ro FakeSuperclassRO = { + .flags = RW_REALIZED +}; + +struct ObjCClass FakeSuperclass = { + &OBJC_METACLASS_$_NSObject, + &OBJC_METACLASS_$_NSObject, + NULL, + 0, + (uintptr_t)&FakeSuperclassRO +}; + +struct ObjCClass_ro FakeSubclassRO; + +struct ObjCClass FakeSubclass = { + &FakeSuperclass, + &FakeSuperclass, + NULL, + 0, + (uintptr_t)&FakeSubclassRO +}; + +static struct ObjCClass *class_ptr __attribute__((used)) __attribute((section("__DATA,__objc_nlclslist"))) = &FakeSubclass; + +int main() {} diff --git a/test/fakeRealizedClass2.m b/test/fakeRealizedClass2.m new file mode 100644 index 0000000..487c4d2 --- /dev/null +++ b/test/fakeRealizedClass2.m @@ -0,0 +1,74 @@ +/* +Variant on fakeRealizedClass which tests a fake class with no superclass rdar://problem/67692760 +TEST_CONFIG OS=macosx +TEST_CRASHES +TEST_RUN_OUTPUT +objc\[\d+\]: realized class 0x[0-9a-fA-F]+ has corrupt data pointer 0x[0-9a-fA-F]+ +objc\[\d+\]: HALTED +END 
+*/ + +#include "test.h" + +#include + +#define RW_REALIZED (1U<<31) + +struct ObjCClass { + struct ObjCClass * __ptrauth_objc_isa_pointer isa; + struct ObjCClass * __ptrauth_objc_super_pointer superclass; + void *cachePtr; + uintptr_t zero; + uintptr_t data; +}; + +struct ObjCClass_ro { + uint32_t flags; + uint32_t instanceStart; + uint32_t instanceSize; +#ifdef __LP64__ + uint32_t reserved; +#endif + + union { + const uint8_t * ivarLayout; + struct ObjCClass * nonMetaClass; + }; + + const char * name; + struct ObjCMethodList * __ptrauth_objc_method_list_pointer baseMethodList; + struct protocol_list_t * baseProtocols; + const struct ivar_list_t * ivars; + + const uint8_t * weakIvarLayout; + struct property_list_t *baseProperties; +}; + +extern struct ObjCClass OBJC_METACLASS_$_NSObject; +extern struct ObjCClass OBJC_CLASS_$_NSObject; + +struct ObjCClass_ro FakeSuperclassRO = { + .flags = RW_REALIZED +}; + +struct ObjCClass FakeSuperclass = { + &OBJC_METACLASS_$_NSObject, + NULL, + NULL, + 0, + (uintptr_t)&FakeSuperclassRO +}; + +struct ObjCClass_ro FakeSubclassRO; + +struct ObjCClass FakeSubclass = { + &FakeSuperclass, + &FakeSuperclass, + NULL, + 0, + (uintptr_t)&FakeSubclassRO +}; + +static struct ObjCClass *class_ptr __attribute__((used)) __attribute((section("__DATA,__objc_nlclslist"))) = &FakeSubclass; + +int main() {} diff --git a/test/forward.m b/test/forward.m index 517f5e2..e1d133d 100644 --- a/test/forward.m +++ b/test/forward.m @@ -67,7 +67,7 @@ long long forward_handler(id self, SEL _cmd, long i1, long i2, long i3, long i4, # define p "w" // arm64_32 # endif void *struct_addr; - __asm__ volatile("mov %"p"0, "p"8" : "=r" (struct_addr) : : p"8"); + __asm__ volatile("mov %" p "0, " p "8" : "=r" (struct_addr) : : p "8"); #endif testassert(self == receiver); diff --git a/test/gc-main.m b/test/gc-main.m deleted file mode 100644 index 44f7476..0000000 --- a/test/gc-main.m +++ /dev/null @@ -1,10 +0,0 @@ -#include "test.h" - -OBJC_ROOT_CLASS -@interface Main 
@end -@implementation Main @end - -int main(int argc __attribute__((unused)), char **argv) -{ - succeed(basename(argv[0])); -} diff --git a/test/gc.c b/test/gc.c deleted file mode 100644 index dab0f7b..0000000 --- a/test/gc.c +++ /dev/null @@ -1 +0,0 @@ -int GC(void) { return 42; } diff --git a/test/gc.m b/test/gc.m deleted file mode 100644 index 65ba5f9..0000000 --- a/test/gc.m +++ /dev/null @@ -1,8 +0,0 @@ -#import - -OBJC_ROOT_CLASS -@interface GC @end -@implementation GC @end - -// silence "no debug symbols in executable" warning -void foo(void) { } diff --git a/test/gcenforcer-app-aso.m b/test/gcenforcer-app-aso.m deleted file mode 100644 index 8507a62..0000000 --- a/test/gcenforcer-app-aso.m +++ /dev/null @@ -1,12 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-aso gcenforcer-app-aso.exe -END - -TEST_RUN_OUTPUT -.*No Info\.plist file in application bundle or no NSPrincipalClass in the Info\.plist file, exiting -END -*/ diff --git a/test/gcenforcer-app-gc.m b/test/gcenforcer-app-gc.m deleted file mode 100644 index a8ff65b..0000000 --- a/test/gcenforcer-app-gc.m +++ /dev/null @@ -1,14 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-gc gcenforcer-app-gc.exe -END - -TEST_CRASHES -TEST_RUN_OUTPUT -objc\[\d+\]: Objective-C garbage collection is no longer supported\. -objc\[\d+\]: HALTED -END -*/ diff --git a/test/gcenforcer-app-gcaso.m b/test/gcenforcer-app-gcaso.m deleted file mode 100644 index 2094937..0000000 --- a/test/gcenforcer-app-gcaso.m +++ /dev/null @@ -1,14 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-gcaso gcenforcer-app-gcaso.exe -END - -TEST_CRASHES -TEST_RUN_OUTPUT -objc\[\d+\]: Objective-C garbage collection is no longer supported\. 
-objc\[\d+\]: HALTED -END -*/ diff --git a/test/gcenforcer-app-gcaso2.m b/test/gcenforcer-app-gcaso2.m deleted file mode 100644 index 8231993..0000000 --- a/test/gcenforcer-app-gcaso2.m +++ /dev/null @@ -1,14 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-gcaso2 gcenforcer-app-gcaso2.exe -END - -TEST_CRASHES -TEST_RUN_OUTPUT -objc\[\d+\]: Objective-C garbage collection is no longer supported\. -objc\[\d+\]: HALTED -END -*/ diff --git a/test/gcenforcer-app-gconly.m b/test/gcenforcer-app-gconly.m deleted file mode 100644 index 1b8e6a6..0000000 --- a/test/gcenforcer-app-gconly.m +++ /dev/null @@ -1,14 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-gconly gcenforcer-app-gconly.exe -END - -TEST_CRASHES -TEST_RUN_OUTPUT -objc\[\d+\]: Objective-C garbage collection is no longer supported\. -objc\[\d+\]: HALTED -END -*/ diff --git a/test/gcenforcer-app-nogc.m b/test/gcenforcer-app-nogc.m deleted file mode 100644 index d99db0f..0000000 --- a/test/gcenforcer-app-nogc.m +++ /dev/null @@ -1,12 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-nogc gcenforcer-app-nogc.exe -END - -TEST_RUN_OUTPUT -running -END -*/ diff --git a/test/gcenforcer-app-noobjc.m b/test/gcenforcer-app-noobjc.m deleted file mode 100644 index ad746c3..0000000 --- a/test/gcenforcer-app-noobjc.m +++ /dev/null @@ -1,12 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/$C{ARCH}-noobjc gcenforcer-app-noobjc.exe -END - -TEST_RUN_OUTPUT - -END -*/ diff --git a/test/gcenforcer-dylib-nogc.m b/test/gcenforcer-dylib-nogc.m deleted file mode 100644 index b10fbe1..0000000 --- a/test/gcenforcer-dylib-nogc.m +++ /dev/null @@ -1,11 +0,0 @@ -// gc-off app loading gc-off dylib: should work - -/* -fixme disabled in BATS because 
of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/libnogc.dylib . - $C{COMPILE} $DIR/gc-main.m -x none libnogc.dylib -o gcenforcer-dylib-nogc.exe -END -*/ diff --git a/test/gcenforcer-dylib-noobjc.m b/test/gcenforcer-dylib-noobjc.m deleted file mode 100644 index a06fa54..0000000 --- a/test/gcenforcer-dylib-noobjc.m +++ /dev/null @@ -1,9 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/libnoobjc.dylib . - $C{COMPILE} $DIR/gc-main.m -x none libnoobjc.dylib -o gcenforcer-dylib-noobjc.exe -END -*/ diff --git a/test/gcenforcer-dylib-requiresgc.m b/test/gcenforcer-dylib-requiresgc.m deleted file mode 100644 index 69a4d25..0000000 --- a/test/gcenforcer-dylib-requiresgc.m +++ /dev/null @@ -1,22 +0,0 @@ -// gc-off app loading gc-required dylib: should crash -// linker sees librequiresgc.fake.dylib, runtime uses librequiresgc.dylib - -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 -TEST_CRASHES - -TEST_RUN_OUTPUT -dyld: Library not loaded: librequiresgc\.dylib - Referenced from: .*gcenforcer-dylib-requiresgc.exe - Reason: no suitable image found\. Did find: - (.*librequiresgc\.dylib: cannot load '.*librequiresgc\.dylib' because Objective-C garbage collection is not supported(\n)?)+ - librequiresgc.dylib: cannot load 'librequiresgc\.dylib' because Objective-C garbage collection is not supported( - .*librequiresgc\.dylib: cannot load '.*librequiresgc\.dylib' because Objective-C garbage collection is not supported(\n)?)* -END - -TEST_BUILD - cp $DIR/gcfiles/librequiresgc.dylib . 
- $C{COMPILE} $DIR/gc-main.m -x none $DIR/gcfiles/librequiresgc.fake.dylib -o gcenforcer-dylib-requiresgc.exe -END -*/ diff --git a/test/gcenforcer-dylib-supportsgc.m b/test/gcenforcer-dylib-supportsgc.m deleted file mode 100644 index d8ce9e3..0000000 --- a/test/gcenforcer-dylib-supportsgc.m +++ /dev/null @@ -1,9 +0,0 @@ -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/libsupportsgc.dylib . - $C{COMPILE} $DIR/gc-main.m -x none libsupportsgc.dylib -o gcenforcer-dylib-supportsgc.exe -END -*/ diff --git a/test/gcenforcer-preflight.m b/test/gcenforcer-preflight.m deleted file mode 100644 index 828cc33..0000000 --- a/test/gcenforcer-preflight.m +++ /dev/null @@ -1,88 +0,0 @@ -#pragma clang diagnostic ignored "-Wcomment" -/* -fixme disabled in BATS because of gcfiles -TEST_CONFIG OS=macosx BATS=0 - -TEST_BUILD - cp $DIR/gcfiles/* . - $C{COMPILE} $DIR/gcenforcer-preflight.m -o gcenforcer-preflight.exe -END -*/ - -#include "test.h" -#include - -void check(int expected, const char *name) -{ - int fd = open(name, O_RDONLY); - testassert(fd >= 0); - - int result = objc_appRequiresGC(fd); - - close(fd); - testprintf("want %2d got %2d for %s\n", expected, result, name); - if (result != expected) { - fail("want %2d got %2d for %s\n", expected, result, name); - } - testassert(result == expected); -} - -int main() -{ - int i; - for (i = 0; i < 1000; i++) { - // dlopen_preflight - - testassert(dlopen_preflight("libsupportsgc.dylib")); - testassert(dlopen_preflight("libnoobjc.dylib")); - testassert(! 
dlopen_preflight("librequiresgc.dylib")); - testassert(dlopen_preflight("libnogc.dylib")); - - // objc_appRequiresGC - - // noobjc: no ObjC content - // nogc: ordinary not GC - // aso: trivial AppleScriptObjC wrapper that can run without GC - // gc: -fobjc-gc - // gconly: -fobjc-gc-only - // gcaso: non-trivial AppleScriptObjC with too many classrefs - // gcaso2: non-trivial AppleScriptObjC with too many class impls - - check(0, "x86_64-noobjc"); - check(0, "x86_64-nogc"); - check(0, "x86_64-aso"); - check(1, "x86_64-gc"); - check(1, "x86_64-gconly"); - check(1, "x86_64-gcaso"); - check(1, "x86_64-gcaso2"); - - check(0, "i386-noobjc"); - check(0, "i386-nogc"); - check(0, "i386-aso"); - check(1, "i386-gc"); - check(1, "i386-gconly"); - check(1, "i386-gcaso"); - check(1, "i386-gcaso2"); - - // fat files - check(0, "i386-aso--x86_64-aso"); - check(0, "i386-nogc--x86_64-nogc"); - check(1, "i386-gc--x86_64-gc"); - check(1, "i386-gc--x86_64-nogc"); - check(1, "i386-nogc--x86_64-gc"); - - // broken files - check(-1, "x86_64-broken"); - check(-1, "i386-broken"); - check(-1, "i386-broken--x86_64-gc"); - check(-1, "i386-broken--x86_64-nogc"); - check(-1, "i386-gc--x86_64-broken"); - check(-1, "i386-nogc--x86_64-broken"); - - // evil files - // evil1: claims to have 4 billion load commands of size 0 - check(-1, "evil1"); - } - - succeed(__FILE__); -} diff --git a/test/gcfiles/evil1 b/test/gcfiles/evil1 deleted file mode 100644 index 88bd337c1adf4cf57147abe815ff7740f598744d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 441 zcmZXQOGuOf5XV17T)Rl1hl0HfB=cB&uz1j!F8KfjK~Z5*I+;i)Dq0eR1%YmfDEQbt zb@1dt6!awc_z+S|4_ZDJQ8Z8qIxN*eMUA!3tIlD5fBwu|ew$bEgwJu3GK;Sdk{!o+ zxIUL~J#Kov2j}ta@}_sP$u%NYys55Ha#L4_UVkS1LKK0REeIgxTLE!h4DA%VNdsG| z9asE@p_alc@U8QLGm08S3A);4gr>2vOz5q$Qqhut{QH4M!I$>SK4G-)yi%$PW~Gn_ zF()&k3hyNSkLpy`&+Wqj2JYJDqvkVh+1(N0ZR7(DKpV+&qn(F&Y=4nTsh-qA_V?p@ 
zi6#@7@Q{B`lt-SzPLSpLxrMYkz`DS~;+rV>QhBpML6RVa;=_82latp(xQkG2f`lWS%_}U=k@dMlhCc-m_7S?$D`cy l7;nX(76WE4j&8^}P0t*`S_&9>@?Y?WAVon{CHMjS2}H#c4}C#MeL_6-0i;H15by&K!kn`ojlHqVGcZ@VJ2UsroVjy$ zHkw`EJAeH4*CRrRhoEE7C!t9gJ`veO0`)hbQm*FTF1=Sezk)_26H(=G$#I|IB4wp? zsnX}?m}neI&O06|dr9dTTOMnH#K%uQCqy2u$)iz^uuO{W$RefNZB|>BbiGa$#yCa= z7-`rmz+jw_q{ah`hhYE)2Fr$FwGGE_53XY!7+KiYfkDn5NQl98_D0Lc9E{@@FwVe!D;`HUCkw9eTd0($@)eR7NfwppeuVcT z+5_w&r40@PD%+kjbbN{(F}6Qu@)xzGwDeIJsjLvhNX0O?FOpCS<3EFolq-dIioPkh zyov(jL^dcnhVe(^z`x~6$JlUTgbW++_st%}%YN98MD<0=rs2B!rV--AF_MUZ@h7t* zF+a#4s? zI)TBGJhBj0Kj*%mfo%dR zjy&q?iKtE6*FwC6wQ^ki|BL_s0(Sa>y?qR|gwV=e*E5>hl38_hr>mWI^rmsG<$UOB z#a6AQ=QbjPcF}NLvt_$lK5+SQHwG}X|Ad)uw=JVwbIi84QoT|fjvnje&okdQPa^Rh z=tk@jXW%VZMy`+pe`Yn{^itm#AwI%m@ye+6V%l@2H!P&07WjZi9@htO?;)1C#0VZw zw>oxR_dwln>;r#@-@jHN?!h{^)>idOEtij0D#VNE!9B5seTHDf%VC@QSBYXkF`yVw z3@8Q^1OFQaF6``Gy#8zEJ{Np>_tT$oJlMUlhwIMLOmZhVbAK9+-5dLT7F5Oc+nLKB z>~m0WmTxw_S)S<|d^zaN3S4jUyeIa5>GgVzXIT3QwdHaFCmhQVP2IG^s@2l#gXcJ- z=9yQGVsQBL-)MM#=J}V>U-Ep)^DECm)C=MJCF^`YO5;2<0ga1=cgd?zC5i#XfMP%~ zpcqgLC^Ql=&`_7U)PHTa?VKludWDY}j?!C<$axH>9y;)HZjxKLO3nhJ=xtB*1`1*i9=Gh`c0Xe5aydKo)7Nwx>f08H%b*3~t$ouvO x2(dVr2Yxt$oF8h<(p^{DNZG}j!pw^ys~cR`Az~c8?!2jKCl_9uKRJ(J{|1kHi{$_S diff --git a/test/gcfiles/i386-aso--x86_64-aso b/test/gcfiles/i386-aso--x86_64-aso deleted file mode 100755 index 93b331649cdfb041bf5fa300f34f51b8b1d3aa67..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29060 zcmeHPU2I&%6`mU$Sb+r&S*QzvZj1wI>UyC{qKF^KYh!SNNgS_}Mo=(|<_wL#z z?F%XA$a81T{CxM!cW14+b7#&c-+b_)Qfd>zH3(NJ)r07s30%58+9)gH4 zAPfit!hkR!3JkBv?q3U!Xl1ZxP}y^M?2Q=`YGLY3IYwL{GNZnlB7v~KwG4WM;CnNUpEP~b#W~NAr!q#j6_N)VkBZ192Z>(6xu(Ai`IvR4~&?s;Bq%2v{R`dW*ge?+JXO4UDy1Rb!0+1 zwKsKBtM;-U#v@UUXg%xK>$y|D#ff8d!3Nsjm0B00lAkO2tIsgnk8WV_hTSm?WQFV~ z&e94y4BGm#&DybT9Srgq{FcY}AADr=!Ts)cIi%DC3xUCyJW_Sihark0Q=UYWgwpE@ z5f4CT`7dzUhM3Qu<OdwE*?*i+)XU4L)oU=Ib{Oggy`|-D&o~zf= zrQ&otzfj6UzZHQtBnSHc@OOXj!kJwcKKS(C1HbM^UJru0?phOeIh}d$DYX-QWxM$Q 
z7xVuD`78^@_RUCnDxF=ZH~iW3cyYRxt1YA-s^w<=rz^Ez*3%=EnM!W(q{~Pj^=tKF zrCd*Ef+AygA%dCO2s2Zymi+8YtypawntpPmGkUC&xzFyC&<$SII>N;bu zmRtOAUzIwK+-P4b(@)L}W?WB|x)U`xCgw5DV2t{))j57egaKhd7!U@80b$^K!@!Z| zg`=lGPhMn)k1ze|FPI)Iy?O!H=6HWsv#bAN9}1RUy%fqpQk_1RJpRNbHd@)iH=C6m zoYNQhaS=BkR&^o2p)Q)8VZ{+(;X2f}h6zb8Lb?TvN-cmPiii zgIR7(D9)HonU7-A0t>ot9@5-dsb}k_vVJ*_Jc@@M)CKL&`!jP5AN%QZg!+NmLi^h7 zO65`IeuUxC+-B~*yurij1OB+r7CRKM=Txbm1!)$m(I9P>B(g!8TN+yXzD)zAhNAnx z+btM0rp}ad^|~%&%P#t4cV2YpU9?|q#ME;6+P&%Yp1t?%-m{y=iU)k$tjeG=Li}Mb|qs0sTf?w0}|3UR!h77vjT>S6ovn{`E|+~S!!wc@=L8myKytyLg_u>EE-AtscFABsH)U6y#nIbiB zV{*o^d#^rQ>PML&Y6Bk!@&AjwPxQ?H|L)^}b`rOMK+b~kmO^S>-wDy9ZVe7ung#=G-(!0{|13$_?&;v zyUI(l5aK;Esl53fuc!N(#4^Fxkb-@0V=?Es?mhGQQ2rWCC0iH}2801&Ko}4PgaKhd z7!U@80bxKG5C*Y}j6n#$JVo}qE5FsN_nU#Zjo59WYIr>K%$Fw(;W+TU9?hKfz$;HR8^RBX8f9oUHSvcz3QEJ?|bi_ zx$n$4dNXr-^TD70JfoD_2OWZrK=1EY>T_jfGO*u+YCSi3y8Ll@;T#+z!^RkF8TSAyf{^?oTYW-tVgsG>5sQwRq8Z`tiwuW%_O#2wP{?f!;MC@>FF?P zSJByzQ3pmI{dHh4Pb0bg3C00*00Y6g?szRHi2c3mh=5_CKLQ3hdn7#!`dY7qQQ-P( zy}|xGt^ngDj9mdnTGDu0xBZK*U(>F?+Qg)OjH|#ngZ``id87cj#$TXXPfspQrUoq7 z=6dw5g}DbfOKTq-CRDbf;IZ*JPDE@vXYyx!t-Z>VFmjes#K`qw@V+qDm-(N;LF;o< z@1KNYa8^TfLLL_%&$V{*At#Aw#qp0j%m_pT!c44#l&7~bGgQhB%yX3@J12G&vl#{F)$Td@8cgFGg`<(ady zAC#xhn%`xqcP3zzFo`^Ju90I(ZDL3%97fTMqAi#~v=i{h(9WTqK+837oI$)7ZlPU9 zdk{JYO$+Bz%{5Dp8T)z<$M~i_1mhJbf6I0|3~bM>+O-X@2LEL!bGT+5KlIb`iAz7< z^lu-S{oxztISf??pN;h`lVuoPe-5rdj=Tr8sFKlsb+XT>m9-iW8_edwz0(;fM$W!hkR!3b*(i&=IT0`?AB^J^qMetSBpQ8wV zK2X~#$c>bA3q|eGD!u#*CCw63-3Zqf9lwS?NJgWEdPq0w+F5s2+L41g`U)YAHY%>K z#`#iVpZfj?8Xo8#&fv`h{=FF6sAIr82-Y^B)*&uMemAjM#-OGXJAO4Wld@UQ$_f1c m1dx*PEmTVn=Az?VigEUFk%jhAx~m@*UhG|$S@QrZIMKgpur(k6 diff --git a/test/gcfiles/i386-broken--x86_64-gc b/test/gcfiles/i386-broken--x86_64-gc deleted file mode 100755 index b22be31828f6f0905f149b02e06526076f2099e8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPTWnNC7@jRfR;jQcF*Pb4u;mugg`kEPk}XhJv;u9x5EC7GIb~P3+h%tgis7LN 
z!MIjTP0+*#6CO12ff$~AF!7Q|BKUw0UV;e^Jb)<#6JvZJN~qsAXJ&i$Y+Jx5!2eI@ z{PWM{J2T&$)0~-|f4;x@#aBwHI)r%$bCgOztxw_75JTFiN!tx`8KSBIK|~l32801& zKo}4PgaKhd7!U@80bxKG5C-ls2G0Hc`%OFXH;Vv^(f&W?Ds@zOG0gwfMg*;Qw{CBH zxvg_osB>HXX-yX+vy}eH^6uR3BW*Qo$+%+e!3gDPW|L zek$6I5Fq>b4T9ENTlchvGCaiHc-XPP;!mYNTIa!GK;=-WFm3%f;ZQf+X7cB3t#jS8 zVZ=S9h!Kxq@VRi~%l4;n(R$bB=eNLcvJMf{Q(mB1hwXRuz<;i&3x0nQiIC~)o3h?i zdsz<8BTVCM-3NS&6UC@U8;DWw&5AMDH=OgQ#<02{abWO<xc4$;?*xn#3Xw;ab+TNkQDiBF%Mz$1(2Xt= zdM)fG=-tq3q1gxKvl5?$i_rU_=OgS!2n)+HxG@Xox%e52<7}rFAo2tP-{s_Rv5?GV zdy;*Ja(%ELM_?OHdn*=vxqtoPug~Q#J+SlRciGMaf|~!332U6qG{`D6oAvnrB`mzh zegx*Ry-Hm|lh?+QU5AP#e<0bO?J0B@4kfo0x(ED&gN4_N$!&wf`M&N_b}--4?^2U5 z`Gq1UrYHJ4r3`0a15C((+VL%uV2801&KK8=n4ifg%{ zsl2DDzT9?WVkPp%Uc7Pay%9vpOPb1U*C!rEbp6?K+h64~M@~1s^6mI!Gr#aBn;US= zaKbRz%&E(GMnSGzot&J^@RRn+MZ|V?ZN}GQ&R4y;?qX3DhVyw$B=OA(;kP-zdnsYf z?^1rZ@;j1&^IHnzoEJ5qX2R5FYORc7F8MZsh%g`w2m``^Fdz&F1HynXAPfit!hkR! z4BUSV@OS8s5cs$?B?EIkqigaKhd7!U@80bxKG5C((+VL%uV2L5jh zELtA#d|GvG?pVC+>e8XQq1aGe$Kn(>(gvI0d$9i;o8UVNYFriKi&J_Co7zkLv>F~s zX>Kvq1H}Vfe!dTBkeJ3E>N-7;(f)|vJ6!UykG`7_%Lcl$dDYBCg*ocPQYc<%E{tL2 z0e_F0IQB7M4T5_cu-CzCMSd=ba~p%&8pO@gM0+UC{j4lNe*#F#*&cRF*S1BQwzQbm zn@tuvkFC49vhkw2U#`zJtOWOxt1uu82m``^Fdz&F1HynXAPfit!hkSv*E8@#;Qt@P z+O~0W;kXO`f75pQsHYL8Jp;g$u#2k7fexz*VLIg8g6 z6j!v(VurQhm^jZ_@xY&C~Ei{w|MUUw8K(`ZSZD4m*4}7_n-x#9hO?W+0Q6A zVe$A3QD{dof}G&0wV%<(W!l-*<1u0J=qpX3Z5D_Bk;J1@N4JXn#G67q=B? 
zhuhuPIr+(d){VL%uV2801&Kp2>ffxYSS<@AwjiS*c9o`;tn zd-hKhPnW-&;1@>v*rpC+wp~A^Ciu-}o&M8DPA81l|4F1xHh+ezQtii{d>^W{YqI$$ zwDtY}t>XKC0kNpdKkixO=MU4^{Dunq`*U7OnZe&bwj{__bKdAwv5`b9(J(JQPVk5v zw)3kut!6gw<>r}@;$jK|!hkR!3V&<(KA@AFcKSQHw%d>-+;zxeFIqV4*UaH+&BtAa}C1y y2|#n%qT(xH*D1pO0?1Yyf^wCg5L*MAFh+LZ3Y?0~-%?rlPW%UEVC;^Z2mS?nVR-8R diff --git a/test/gcfiles/i386-broken--x86_64-nogc b/test/gcfiles/i386-broken--x86_64-nogc deleted file mode 100755 index a401acce90c1eea84e67f306c0b47ef70137b3ff..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPU2IfE6rP0wD^%E4Vro>pV2fZ#7yJtkl5By(q7`ThhM36E(pz?QyKQ#2p%@-& z2*$NyYJ$F*@Zf_F#PDWf;!h%ppb0*ipz(nR&_XaV#s{K=`h9a}ws-Hg1$+WLC!Krd z%+GgczPY!#GdpL#yYcy#N~t*r3lQcim4I58!lgcjv{93`6XtS6RXu`;Fdz&F1HynX zAPfit!hkR!3$rN*yf1EL0!wLK@<&RH1rn|AJ*L!FWO+-Js&j>? z>(r<-wJshS=pD@IVrjS+iBXIUFt~`30S4RYBt9>|n2!Wtpt8>Rxgoz`?CO4$fZ-v% z1Pm^mRK#M?)_Q-jRLJHJ*vx1+9q z+Kvz)`}j42)|=aQwS_V~#NBwFS%Z-c);8 z4$mV|jc7gK7mGaye2Wvss6!iwQRmHyG1xbp^QXqJx*u_1@P_2V@dlF;YDac}t+0}Kel&}*RC2j;U9pM?w1`=A#g>_i9)%QCnz3+K7`8H?j=rxzpg2m;^bC% zeTQ;=updTX8%}#G7k{yD-Qlm!OdJ2b&4`j!Qp&gPboW?Z|!%f$(Q{? 
z5h=ywrcgf6i2n?N!;i(z0zWp@FZ3LeTgaKhd7!U@80bxMxE?+uwwP}J(>=}FeM|?hwjsJvexxTr) ztGTY+etqHzh?JK$m)oyR+>hwGv*q@`%4d$8ZhG~b@yQl`;ZL^Ie|@Zxcthpp*cgbp*fw6DQ=_1{e++Zy zQBJBG5YEGK7ykdI?etMkBTRb+fGJ@o={7!-BF+eI5cOeac{9o9=7X?UoBI+tr>Cae z^o#)bJrHk$DXnHbD+p`Dg>rA=X*#S9=*BlgA zw9R6Mwc(gM>Y~)zE=BmI43{Ws_#3x)?*6pHQiW~sWP~J(}Gv@-RA?US8Bxii~K7q|K^_DBO5Bk5JZxBa)_wE9wB6J4x z5cq#5dcFaYDrX#g|K{H3n>ReQ^|OWlw9Nhax0U?AVu-o}&p23*wvjt>cq?ZdpqS}@ zxZbUtbHI8l5y-;{FBsR=*5=f1jP$V$oyKgxc1lg~o6S1?r;nUY7_a}6NSkc=6j!A>jy?JTR9p9C%TZ|S z`~Pdj_x~5fqAvfqXO*8nOk?vKD(vsic`0QEfB)FhAY09Sqf5m`60t=6g7`SWBXZcz zuimoSWxIK%vqcL7!hkR!3cPNJN^SRFm_wc1OEbwId#UXeI(dH{abMt| z^-}55QfizrCJrsni&MqQSz1T#dPFOc{&@RUrOsl=KCD#6Ok$f+o5s~T+-Ovqbsa|S z3Of5SYQV^$zXlBEX(YQp!8m{pU?5o6+_zSb*Y6nOqx zZ?Hd)YruF3W7mL@mNcH$ZU3_8SGDJ_HZiFm<2o?Tq5pb+9w|Vs@h7O()00b+sR0|d zxgNc1VeSFW(%J`y36-rVcx-%@6A_!vnfw`F>w5V~7+G5>Vr2U;cwd<7%lyybp!J2R zcTdAH@+J(#DLZKy!~9Jg_^*dLa92ZgLLL_%&$V~+@WU$T6ihF%*xFq8WvgH-l&=;E$o5Lpy<%Yv4G8crV;RyNvcA zbPk#p&ZU}bmL4Ns5w9$_N(Q{Yx;#%)9YMxgAhHTb2>E+G~&MnHTafw3;5RVy0Et7RE4Pm z*639RV~`UN_Qo_TAFmW9O@va%F@x*B0#0!Pb)x4d7Z{F+Fdz&F1HynXAPfit!hrgq z^V`+m^E+hX($;6cVEeSS{VR^0+*oI6EYm4H*m)geTkk*k^6PaNozbyQ>F=HAV4k|w zDgD*CdG$vAqo23CR2ZfWn`fi=50h23M_RV(#! 
z7^J7%>(|u7~800z&i-mHlWraE=7Jjv028TrV~4UH8GR2Si@@FrH(2uhV{SN2&eV#*6nRC zw{`9cHICZ^GYHGQgNN38+V<@UWnvju4nE(DR~g4#T9^Eh5<`JR%X?2LwH-;`LZ#|m zCi?Yi)Tvq*4-NDT=5(<%+=Iv{Mg|z%#K-`H^>h-S8(=(u2wwNS@fgyM10yV9JgtZG2ebJ;oz3?TB2yIO6fn|= zKNYP<2#{_322Sg(t$SKS2_C|(KWtxM^QY1tt@Ge8pmL~Gn6`YJa3~wDGx>A6*17K4 zFyfw4#E3^QI4)fOvi@m2wBEJ(`7KbKtV00hlo$9c!}_~&;6GQ?1;4+DNXT^MO92k5dxp2HerG)B{9bhf& zFsNxuBQVHgqc<}Kc?`bg_8mK4XxqBOeU~XD@J+x-ArpCIUMI_y8by*xUjlClltz~b ze=YPT_`Bh+h0iuHorO3SF2dgre?Hu9xUeuUo$Ir3nhT$?I8Jx|0tB9bY}XFZZuM{Pnr~r3ZF?{4VR6fK&4yGH#9283$Q~YO@^wzl4SF z*bm1v)>o-ZsPfuavg=T>3 z$iem>2B&y|T5a{9gQ18B1HynXAPfit!hkR!45+>3%SWy?PLPRxV{iS8>C@QwuXvUl zn#y~c>dS36CRQSC?8O_$-Wx%nyrikzc75Vu1lOM}xBXQg42PhEv)r7ZKXowHdR=oUeLw-Nm9R4CnJ$NMgK zyp;1+&Lio#zNIkCbx{LKCQNC@*2*Z>l5fL_2m``^Fdz&F1HynXAPfit!hkR!3XHaE*+{HiVf9uEKcztZEy&_2j|am2)?7B##JG{IHiYhsJ+xrt>KZB z<`GjpP(0A(=lc)`iD{gnuG0ex6@M!CAjl>OpwXOfF7LPW%6sHFo?5^$I1GIIf-0JRtoW*Mj zvMcIlGsEgI%pG+>`r2+q_@xZDC~Ei{w|MUT)WcGRZSZElm*4}7_n-x#9+q0X+3zSg zVevSIDAc1EK}zt{+RtcXGVS#0{+O_Mw3Vh%HycBNTJz+%JC6ZTbN>OxSJk+4<1aMs zLiaE6gSwyDFYZP!n!3C`K9(SQ2L>4dTRKZ&@>=Fjj{s{PoL@55{D znruD_-`f8FRS z6Fee^?fU9X>ob|}a_h`Uc2R`^VL%uV2801&Ko}4PgaKhd7!U@80b$@CU|`YmL}x4Z zFcziE1?zfCduvyhZkjp@-%(gMK8-=Z=oxHQ7>SLunT14^Z9wC+wgJp|2lfHXZVUzB za}UDU3Bc#JMa5RY?o)*A1(2*Z1or;5t#}GGa;yR!%Lr~Sji3kJ2fG{8o z2m``^Fdz&F1HynXAPfit!oWSoz`4JFziAi#W)olu>i@?)rH(2uhV{SN2&Z*N>-M&n z+q!mz8pmyd8HDBD!9(laZF_fzGO>&+2cPf7tBhkVtxNt$iJ?HE<-Mnr+Kwb|ky3Ro z6a6|h>Qt?ZhX#5FbGld>?nPu2BLfU>Vq}29dOC^E3osr)1TauoXZ+ldUod)gJ4(Rt z5MKfYH%=;IF{o?3zgQ|{^9O8Vv>wNS@fgyM10yV9JgtZG2ebJ;oz3?TB2yIO6fn|= zKNYP<2#{_322Sg(t-D)82_C|(KWtxM^QY1tt@Ge8pmL~Gn6`YJa3~wDGx>A6*14Y9 zFyfw4#E3^QI4)fOvi@m2wBEJ(`7KbKtV00hlo$9c!}_~&;6GQ?1;4+DNXT^MO92k5dxp2HerG)B{9bhf& zFsNxuBQVHgqc<}Kc?`bg_8mK4XxqBOeU~XD@J+x-ArpCIUMI_y8by*xZ-CbTrO_qA zUkkkneh2)u@Yx2Yvk=F^Mfm&RFM#WS3k&noxjqZ0x$qf_<8Vj-E! 
z_9pud<@%sM4#zs2_Es$Xa^L#HU!TigdSK_r@3Ni=IJMv*|nm7-^C_h z@(V@86q8#*=|Cg?GjI++7CQ_4*i^gFw%}DQn_JMvY;SED)CmaHWd?g++uzdaN+`7g zIoSTg;1n-VtF0b%FccACKo}4PgaKhd7!U@80kx-m`N*}#2{N&F?5&?MeHt7870+^g zQ+an&UAgVX#7d-%y?Eo;dm{*x8=A^(*C!rEaQ)eG+h64~M@~1s^6mI!GiUgd&GmR@ zxL}xU=F(+6qaatVPEJl{IHkRE5uu&kn=yOL`D%Zzr&v^l;d~woNz7RxoSSprO9}g& zmvY|9c_bazw-koCE~-b#gelF~S{cP!@@+T~VL%uV2801&Ko}4PgaKhd7!U@80bxKG zxc?a7@6aE?@pnJ|Hgqzy2N3+$b^obe#5B%O&(Q-J?T`5Thf6-r(f1Hy*+5S=ubR23Fjt*e3Xcz}i(=S$ zz~7@LjB^awgW%Z)oOLiqk)I2~JjS5524S-`(H;u(JS#KMo&b_^x`)%!wRO>^E-j|^ z=8%QXS?%X?*K44>>}NUXYz*qPrf@_G0mELP{a1g`1n zQ*L=j0PG%!x5?yIv)&aD;L+CK8i^bHYFqzTEgo%jDNYYG*j?MZ2Waa|xz*hPIg8g6 zWLMP9W`@;am^jvsfVQs+u+T9FTn>E??DSfJuJ0&v)@s0 z!s2lZQK&~Tf|TH?wV%<(WZLP~{V`$jXe&*jZZ?JjwdTojcOC;F-{KYhTyTd%6mL}ok9xR%_+}9^B^J+Ywh=Lkw=RN5J&XsWD!i%xOOg8wyhy)7 zr;M7;p5=Z6FStX2R$3|KcarwZy#Q(m{yIdGGmgDap|elDWlHUX|6lJnh@-swb^%in z8iRRo{J#^u-vCjSI}Sei@xtoQf7-C}!!`f>a&mBO7yqvqqVB>w4mP5057kfG{8o2m``^Fdz)f#=xF*`EvTmwM2Ss9`D0T zk3IV*vZu@6O>l;hKDMdTsBPCzsR_>6tkHk^$mxW!`ag-d$>z`SRI2^hlkdZ8?VfBt z3g6oP|5ma6zkpEGvh#;(bk3o|_WoR#Qf9FCk2M6zYVI3dDmIdcCFfG{8o2m``^Fdz&F1HynXAPfit!hkSv4=}KJd7`Todl-vT z=7M#-rM;&L*+oEDCVD~A)_5w&&8-je5pAcIEoiIjr;0c_H&fij*`A+NyW?<}&j0gS&1-Exw diff --git a/test/gcfiles/i386-gc--x86_64-nogc b/test/gcfiles/i386-gc--x86_64-nogc deleted file mode 100755 index 3a237e19cb2e19990cce1a370e23ab68f735fcb2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPTWnNC7@ma!D^%DbF*Pb4uthMW3*N$mBwL`cXocP|#6*Uc9@y3Gw%Of=VtA+_ z7}tua3HoNjgAYCs!<&hTmqZdl6MQm3;{y+%g>71E= zF5j8?W==CRJO6xlhbKo}4PgaKhd z7!U@80bxKG5C((+VL%wT%NRKK_wP6C!ryEHEJgkQn5WcH<;Af6R~z88?r7cC_DWmV z&QRmHO)!J7+`D*a-P5+aCzOe0TsioBFJ5IFb7@`jM@kF@5-smNrqnhhc}tY4bD8MZ zsZpnDT|6|>N)$J$&!$W)t z7~D9ih{d3;^}b@Mkj)>kiP3r-2gXB4I}VJngz>Z<&L7O?`*k+Ie-N3X7^i@dM*OL0 zJwkwN00M{XTykl zN)aO-!Qi-X{mc5N@z8qbrWZCtak35plv7^dvkdF+%7On}Q5XFEMMOfTD{u08Q}ty& zj7Opx(R#oy7JCo)7AJ~PhdL0W&YKluuzxt`PxWDSJL16L3(1A!4Jsv6kL&^JTIb19xbJ@OR 
z|Djwz^oQYChtuAw#b4}QfB36&`HS;+eEc5knSfJ^78>`Y(-{X@jcT(T|G$KV@7N2+ zG}c$Ci>UJ2ShD+2vE&aV+p~Rz-ol~e)b0{5X#D50P;m2ZUfghV{7upuQs%29P+L-ODErU7%p}Ndq-)s9?T3rdHRv`!5 ze;AzN1!|4egARrwA`A!v!hkR!3cu1`FHw6T}2AA5fUf%3AZa@)0u`w?7!w%qnt`OJ~ijjw()KH1C}{$z7Ko*6C} zCY!l*8P6!lv@SxUzg`&QNSEAGkkUM2 zst1Y(y8V1V;vg}NGt_hRKt}r`{=VUok8||Bgjg}qo6W0cZYs=GCziwGgX)qPb{_Ee zpb6s~1NI!aUE)474YJq@3>Iv~+DtaYpchs0};wn?*hkAB4r~JeR;VJ$=ee z?+Ae11MxPR+-lal0s=hR`dcG$gI{gyzsKUyMwjCBK!e@2y?cPR&Xk+o9gwqlO+j`= z-E3x99frB1E=XV7tq8xA;TA;=f8!RBS0 zj`J;E(a!~UC`9pANAReJ>xXX^F;imk%w`)Aqj>8gc+|sqK&rx<`o9#p&%lfHD|E`J z>FinVH}HZx6lkTDLVhP{&)f^3hTyM5Bst^Q`vf}s)LWs{Uikm@euFs5yKfgT6`?Vh z2gmLeTgaKhd7!U@8f!P??l`daOAGw-HkImzKcbKQ|{ zLT5aVE5LXLzAM0pN)%7ZmUXFSRbf^O z&m(*mY7MZLlomJ)sI)z2-|;DS#Mo%eq%uMfBNfBox=_!T@ect|%Cl3i zOrxS?0tUwU4+15}Fn*R4DI2C&l;wKSb%T2MctUWzikDRgQGLNPRKvr1C&mN-h%u4r zjp0hiT=t{rp2rdZIM<~Z2L3N1aWqWVEv}d$PMnKMh*9Z@5j@N=j4nQ_z~Bw5F$^?C z<7ie|b@QL~*vZFgh_Tueqt@U|!X@1m#|ALCCN_Fta7_g7rPH%>ujWtAs&`Bl4n_-% zYv{zaz`jb332_-N-#>z41eIasM0o=BF_eoa`G3WALI0PiGp3PAA+}K-hMt2)g=>$> z_t`;`#CZOH#N~c8Fdn{_jF#&d^;*fOtko;1KMZ9IKU{eGlixm_nB3}Le{cAU#_NnT z4Hbv)^_A}uzs@$uQ7=v zlbjLjx>z31smO$Q0zLTrKA#BQ2TzA}u01W90nLDBKr^5j&oWw zmp575+{hmhVsm>I*Y?0zdtofu&fngB628qdx37NChS44xYv=#iy$|N(^>+UE_IK;w z4!`-$_TKSSMu@%R1GrX`zA13s=H5u$`gw0}ugY!Tt)F1c6|nCy%bv)udQK6Wn7P6f zwp?{n1iwF=mSy3WBsO}%ewKShO0?u&j(a`sJE;79OQFqu=>TBU(1XOrZ^?I{S~LTi z0nLDBKr^5j&Gy|Fe&46Y=GoTsx|1rSt(A2BpZ-1U6^!(%iXS6?06z~zHOFj(o zK0^6fXtNXh^Sdd(&84AV!N+fpl;C?*VC$=9Kr^5j&Gy|Fe&46Y=GoTsJ3}^;4 z1OFZaM~h@ z+DLBqmRTEkFe{nPC>!od!L%xVj;!dD;*oK`*-v7S7V=4Dkl-){XQL|3RkPgkOyt=Y z3H@TDShK`SJOt<$+oQPbWRWm28k`FZ-V>Y~JOuY(0w)Lfd(O9E;tb(=*f?A86CBP2 zwtp zxELscaVkuqxD$kK1#uyCSV3hn#osx#ym9 z&-uL%?t3}E{`JQ{PYNLpfycm;;M-3K@tKH6BIv&jmU21$ZsxJkJ^6S_0l((W1# zQm$n#u7&ip(ROIL?%^(TmXyA|;jAH?htb$w8lvf%k{#ZmDp zx!1=H_?X7{4fqIC*q@Y5=SsyX%8FB}V^V)Ex8UP6#&7k<5pu{JZ-b>gm0nAS6mj(R z`RJYteGPDylnyWqpltZ=Q2Qt+!ZsW;@zb)D)!Zl_(YO%w5$*HA>q4Jj#(x?IDVOHn 
zn}?w3b#!1S;z3Ir#;@&wf7O$&UGgvp8fLHJj~~R#dAJ_wRgsi6+w*ekcGpk8k0@f` zddx%TOv64O^KLyOzvHyeMy%|n=i4>wTqW=3+)Zo2 z&DHEHb@vm`TBtXjV$QGBon%RmS{H2B!-!|ihxG8Ko=0d8{Kp|h#0dZCMx$zH3vQ+1 zFXb=K4_hQ{^-WUiyct?261jssu#UzL7zQaN=aRs(k{=e^E^NLU72AB>wMXZlwtD9v zCSnkd4HyR)gkr1bFBg(&9hnd>V+POXr?^HClz6o(2Nx^^Lk0nZfI+|@U=T0}7zBnO zaK5#B;p$$ZP1VKi&mUqxvAy#*j;&atwU!ufW%k<(&~3lJzkPk5+V7QiGSTv>eb~mX zk=ie>e}hg7564?3Mr znMU`2LvJxVhuve<7KLig^Tc8nTRgkq3%BVwc%#JTHQ1wb|4RzDYJZCTEB8`lzUPwI z=lf9%`@M;t9X_?#_~Kpi3$P)BfI+|@U=T0}7z7Lg1_6VBLBJqj5HJWB1Rg&G_&@Yb z@Yi7eUxzOo;)l?8fZ+eG$1iV_kwL&9U=T0}7z7Lg1_6VBLBJqj5HJWB1pZG5JTo0# zeMw~JR-Qfi;6!7r5owIA980l~HYkG6qy8L4@GGv^5pMKYN;Xi`?w2UVo=ZuVWXhVi zp0%AK#y}B?dfqWvD@%LBE;M}`b@Vy9I9bb895KTW3Wvnj2{b%VJQ~4Q2mFq!J`VW) zz?ufsA*)j4w}L)PJXH-b235)MrC^+84Wbn4Q_0EQy5W?TF diff --git a/test/gcfiles/i386-gconly b/test/gcfiles/i386-gconly deleted file mode 100755 index 910274770f9e14fcd5b663d0236e3ec37f77fd7f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12608 zcmeHO&u<$=6rN38u&ANIM92tKwk6@0A}=6>I67(E9}wF7DpW{lYR8*+!CqUtYb6mq zM5>T#0#cB2LPFxii9`PcBsjDePH-rv9*UG!RN}w^p$hYTyPjram)=3|sc+}KnR(y5 z`F0)6+OI$Ub?5Jgl~M&RV?XeBZjZ$GQlNetPCmCBe&Y%^-pxLSuB&1$Qm!>Ch5 z=O9KM7&-LUfx$eDWDh1751|7X2-bDC(RPE_-@lIt7&iJNV34zW(#N2$^>P>mp1;~3 z9L!?}7*Anr2N-Ed<7wUT*F3+bJ%6QzNrMh^9RI$gg=<`Za*X*ZbrUI1TF&A|7`aVn)~g1UV8b~P zRGRKuEBGjMPP96HtrB@Hzqn$0op;?JL{I1(PmKeO_^(3^zGYnlzO}b6>@8WUI9HiyQ0jTi;QlXyQ!Jof>ifwJh9e>j2m``^Fdz&F1HynXpw4xF zz4TjQmrR`By8JVaPg~p9aqZ^DyG!GlZu#!+3mDsa=kDiUti$Mzjd#m`?mi0h)ob1I zAKj~$ekgqK{dRAHC;Z+-4%a$g7+W(Va#2TLA;i&U#q-q!3l$Ei zuaBVNh3;Vs-#p;old+9D27Cv>+6L4*#HGk@BsR+!)J$SCOwFcj*0XW~|33kwWPBUd g(!;svIG19aeOzRreU$F%dxaPK_ht4x#0nPr2ZBa1l>h($ diff --git a/test/gcfiles/i386-nogc b/test/gcfiles/i386-nogc deleted file mode 100755 index 4349810f0d0eaa39d8ac541048bc09bcd48d49f3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12608 zcmeHO--{bn6uz@fZLra9w~IuSI&F7bl{KRv>SK1h=?`7o{bebF+$NjZO>{B|GZVTC 
zLKll*T!j_-D)P@#R%Cm($4gTl6`^q~(@it+nqa=Wuh?H^Fjp=a(n_uli} zbHAA^caqbe?*94D9;MVS=n!-SdV9B0A1m9kV7~#?dUo<;>D|)&SvW?9jWO6V?lWAp zUM!toOpPNWFTA!VI z^EezMFT+5bvXhoE%-_U;e=pR5yBwku@}T&5uDzWv=izxIYAjke+%PP!xILVHj0|!h zM#g?9Mzh-X-0gd~b01k?a6ximc!NtxB=D+9zQ=3*~3oX@`O1)hkYQ&8xzH2Fe^hdg(}W{?hB;ezX6; zUqAnN={EBmhN``f$NHl2*#~(ZX)}(0U(&)gE6cNgU~tAZ2Q%6RB!r)Wz*}NbAu2)p>sSn4m9Gw3N`qabshND_P(&UWU0bb z0eh@h24j#D5O&5iEAKBACQXD=M=^u@{}7yF0rf)9Pi`)+6L4*#HGluB{s_#)O2DqOwFWh*0XW~|33kwXnY&h g(u29^IF~}4eOzRreU$F%`-K-f_ht4xzzP=n7j?!l%K!iX diff --git a/test/gcfiles/i386-nogc--x86_64-broken b/test/gcfiles/i386-nogc--x86_64-broken deleted file mode 100755 index 6570d398af036d501a9c0b9d6060fb2afadeab28..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPTWDNG7@loYyV__su}DO%r)^SgiP=cKy$GAO(XGasn<9cvljfwmy4el88=8pF zV5qppLIf3j5c(qcP^3>jh^U}q>w|spf|76b0 zKbP;!d^0D^%+5cb{Pp&`N~tw)>*3Zam4J6^3XjGZ;-*bpFVrmvszx{wVL%uV2801& zKo}4PgaKhd7!U@80bxKGxWpLv^!K0svI~E+3DAuCpI@icQRT(3{#SRwY2Dj(Z}+3! 
z{SSs3$8CaTgymkqL+iorM+QTgSjLru&-db0#xa-HWq-2FP$1Fr-i=D#izIKOQVlK> z{RTDdRIN)>W5b14NGKR50djb7W1GB7;E zmw~~Jld4z@>RKNxm5bT@A)6Sj$4kJt7HKa5BP?M&ttaw_v-uI7&5st4DT;9t7-__x zjMgIr$Tq$Yr}f^h!LCq(hp_7p+ZWjUskTS!JU9%f94{A_EFUKv%7*Jq{+zCLZfGTp zxTh2`;t>pv3)jD_e;N<1AKZQ49w<&a5kNWR1wPBL{;nMO&y{r1A1xsgvQ&AK*ITSF z^I<#^)ri((eyKEc$hSCAj0V(!7!BTv7=@9EoWIzIwe5%lgD)f(jyI^3P(89^tc4v0 zb!}+@26=4pmd7BE!MEJAZ~y(>d-u8TGKB=b2^cA4B9F}LWUErsNHXc0;BA7^;u7Jv zLvMxO3%?yc+rV@d;#fEb{{Z|AaJ_J0VO~1dXW=v#K4o#7?)<9|xE_w*a&n?nOy;t~ z$&snt2=wdVSchkB*;eR3a_4*RG+q7knU9W~XFU^eYQq)A?Q}ZhAUC7hEXV&ZVc|Ot zz%h;WRq7j5xjmK~m@1Y1v1CtnxHwdtO5Rf(8uJeqicgl3_Y@}bBSYnEA>T3TVv`U1 z#S&sl$vvTTpb`HzIENpLod$kvv0Z3e@T!j89cW{AxV{YP1ccf$h2bX-c67NCN^M6D zw*P5xiWjIIRu4KDiij{E3tz~i$>-rjgaKhd7!U@80bxKG5C((+VL%uV2801& z;J?QJe}{ezj=%fyx1kfV2k_tPWNDEwAPfit!hkR!3tK!|-w}j)j6v-U!YrnGLSdd~Wd_<4KvGWka9X;)F51+k!_?j!ve0=P z-PP5D7q#tjZLVV_c$Ykd0bxKG5C((+VL%uV2801&Ko}4Pgn^5mfir>qe++BqX)dap z;J$+4F6{qJ-D#tqhFkIu0F%Qm(rtJ-k2sTfLDYtw`OPAqhY!MHb)HM$nw~yo)jI-U z_dvY6O>VW~T>$|eZT+p0xWTWr^?%&r(MFfz^gx5%wY__Qw$7B*?heRVyw)JQqHZ=b ztPaE6X&0og?N)?e&TxyOj=wpJ=iW~}EH&5$Z{>RlUa@$WTM+7DsmELSj)LPBk7I~J zJ&F;e1W&E~j5a3IPOt5cd5cF|X$p0-F%+nEPma5xXX^F^gjH%w`)Aqj;SWJnCUQAl2Y4{$GmRXW&Kp6*^_w zboL7O8+gGT3bfKnA-|KfXYK`1{j;+>-uiskO|S0!x{*6__)qRQKsM9< zaJheS&jHJAha(Tiy`W!rSe^YQA`A!v!hkR!3}HUb@3>CU$Jv;34?&?Q`oF95}RW)i-{WBfR-g~1DNpw>;stH7z)7W9)z(I xfX{7PFA>aH=|gOJ(K@u^(85u?sRD_#1(3d3yi= diff --git a/test/gcfiles/i386-nogc--x86_64-gc b/test/gcfiles/i386-nogc--x86_64-gc deleted file mode 100755 index 3f3b82c3832ef9857d316ee902c278b6577eb445..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPTWnNC7@mb9t5(@!V`@}9V9Ui|x5gVUBwL`eC<<-C5EC6*ddjYDcbna9Yz+yG z4aT*S)C5g@FyY0-2V!{g!5B3WBlsW>UV;e^Jcua-jWIqLHP-K&GqXK=wiodU_5ag3 zGyhz^GxN=yW@dK&`TWoKKU7LBfm;r@RH+2K`%-u`#t=7d;&ww_ji73T6A=c40bxKG z5C((+VL%uV2801&Ko}4Pgn@q<17H01!=HBHZ#Dr|qW(WEQ|gHFVp#vHO>kQ8?%dw> zR9DZgP~*5wFpsd@t9WR=r)%GyP$rgf<>2$Zc$IO?rFGdKEi)8Iw7hqhQrnT_tx&4L 
zWuo7p#+|BlX>6#!kkh5|NIxQ@7#U!26C(o**3(IRS%7gJB7lLyI^*Yt{i4xp+ffFF zhxjruxN%Yyi$PuM1Eq2?n?Gn1qxCojjN6cQ3>aYv<7qvTe?FTZ(AoT80hyv0CxMYh z{K;rNLV#@J$8cJ2>)g{BO7IYN{bBn8n?Kd|Xq^X#0hPn$;+*B-{4eFuGu6UAsi9f;B3Er?MV7|HpweOTL$I57A^a^ZM`N(t2?JH%Sp zVNlnW7GRLa7H@tG@)&%}J9j+sSl6~4?z>DOfo}pv3Yo|w^Ez3h)Hsq%dNaIcC@n4# z{zmAn@OQ)C2%l|WIty_uT!6nH{*7?E;ljeabgs|BX)b));yB&;HzRNd9KYq{NU502 zW&4u@W4Qt7x5KdxFW!51q37_!AAQht%a3P2J^T~vnSfI_-eBB4PG=lsJ*v%e{QnXb zzGFWe(^y}nzC)Ea#*)2brLsSi+?nk!_7%sHj}-fc{O1eB=Ss;(3M2V}zH+vZZy$8A z$tV3{2{EPQ)=)aoi2neb!;i(z06#X{F0?IpRr{89v@zRXUj}soLT#Bs|FZ|$J6#E- z)*=Vne+Zo71!{xUgARrwA`A!v!hkR!366z8~k$aR|P*og$2Z?E%pH4~8QS3wJTlkKG;}(x& zh(bMz5u^l9t^JHPCeu!@?T=}TM_XwMb+a)PsC7?{yK@*2b@v}&eASFQH~vE7F0`M7 z@{8PuVDTIb_p`$FV*iBOz1ZCdi3lF`Fdn}L?r}M>0HMRjQbJ_+l<5k!PFuO4nfX_V$VijL0nXkrvU>?S<%6Q-}P#<^e diff --git a/test/gcfiles/i386-nogc--x86_64-nogc b/test/gcfiles/i386-nogc--x86_64-nogc deleted file mode 100755 index 48dce6ab2d6a3f3ccae5a3ad72116e0df59fa9c3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 29056 zcmeHPU2IfE7@dV8D^%G6F*Pb)phYm)1%G*vWD67)MW8i(FbC9GciU@#0Z+mlL;Chcn~cF4KY3tCDe1~&TQ}AZ43AW_MzT;I!V}x~c7@wvKI~ z#&Mfq3Sqf-@X)%eZD&^~6U(@A@cCZ6$~flIy5tX)7z!j>-g{W7O-S+v2i?nl~tzz9niPwT<_{%pQiXY+gektvFC0vKt;pNQ5Y z1jsgi38(dj)~?o2f`_o{58D^m{He4@>pVCNs2nI2CM_Q)9Lk34O#Ym%b*_6ljJT&1 zG2#&njtkentbZC0t+%avZaowys}MjrP(b_|Fw}!QWd%BxJJkCa*V9U*^Mj zB&rdu`}|_Dd!KJ{q8N3k12O8nX)*eH2Xp>JA6B;`4h+7KTsYpKQbP5}_OTXr7}T_- z5g6pL(VH5BJOyUkGm@ltz~be22jEzTHy&Hs-*M=fFFtEn@W+|24_#(G6L4zo9OE8!I^!VAQEisv|Cg}v9lPO} z#`-Gt3#z;_mh3!GEct!OE!mzzci}*CW1+jx-``(&wV2%4KbY_BE@k`kEqh&T@iI|3vCNt)v~SyZOr!6mO-6>P+g|K=aoGzt*(Sp%aDWZ zKL}3o0=2^GK?g$-5e9?-VL%uV2801&Kp0Rv%I6PXY8)dIJ4fF70n?|E(I4?F*Ef~B zn(E4JSH~Vf+QJqWHoU2glUeCqJY#+Sbu9dG6gf4sRK&kPp~ z z1NdDYB=3NzKQMez8bIxmKu2mIYOpwXOdyi$@z>iqiuPcGvdq0opoKu6K7p&f+x%*%fuOnPGJp z<_^0ceQmcQ{8ENn6gB*fT0HlD>S3wEHh9zDOYnikyW4_L4@)iH^mh~-vv?ds6zWlo zASHNe?Ps(xnRa@0e~ei?+DcQXn~kABt$A|XoyCBtx&Hv;t7_c2@fR9*q5UM3zs`LK 
z7SF+OKPy}>_D{Io*Si}b5y7J##^d+EJuWA9oS2IOcuzpIy`II3+XB?X^>H}Pw|GTA z7u=x`#akZ1qaLmwzFEXfh{ZFTZA6UXt%~4L590x;3UA{7Qsh1ZFVe5jDZ{3-r@7z2 z3+_;$l~xM*ouoZ;FMt|=zY3A$jAQR3=nLquujQ>{*QMce72WwC_az_qtH>^Bi%Ko}4PgaKhd7!U@gV_-+Rd_H~nQX)Mvi}&HBN1pl<+0*53 z#yG=BA6?sS)V3=p)EMV%*62Td_+-LZ{U1l%c=N}2;`i#K54{hswR61r2z+b%|7*qe z|7V1vCjWS6m7PCSqjL@ww)f|{lrn|Ae{5lptY*H}p<+XcSfYM*e3al3IcV2cZ(83a zyLG11#TN#I0bxKG5C((+VL%uV2801&Ko}4Pgn`?KfrcfCj#lhpG^ET0>w5i`*3M4d zG;tKZy|8L@5`%!@Q`oF95*uY}3yCV*fW}E}1DNq9>;stH7z)7WeuuFWfX{7e|y@GoBXb>;v7 diff --git a/test/gcfiles/i386-noobjc b/test/gcfiles/i386-noobjc deleted file mode 100755 index 8f01860aba841a005c4accf46f4d7cb404973b38..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4228 zcmeHKK`TU26u!@pLRd*rh7i)sD9K8Z7aAc(Nx03MdGa*#)XaMsyID}|tX(<3SJvGEQfADJOyrz3stzmrHUR8PlhU&*Ug(GM_xkIeu;#i=i2AQI z22~YG>AQ!Ho0pEedw|WKGt_X$Joan2L-kd)YW;fm;=)qeoLx|Fh$G|U4aaafKXaA! z2~ka{;I9>TjN+XAJAo8p@}I%V`etASh#GT=Xus(BUp~KSH_!qk{cGy|Fe&46Y=GoTsxuMDi8 zJ&)aP-WB288sL|olB!njoAV%)*e-nhJ*&Mlf^x0SL~ec*>ca$=IxT` zEoL284DiC3p!5#@7`7P@X83gkez?yWFjF8;<51nd?~}oOl%2Bd`IfYx($NO?4%XCP ADB(pKjom4d zM%9cQ`=TgK!Zy%DZatN3dni2=HixtgrIbQ?Fx|Gh6be1GkV8^ROP~}=VGm2czxgA} zV>@gPg(iF-e)Im!dvD%+W_-}VoB7TCpZ`)Rbqetdh!aXpBl*?`*i0oxbvIFbZC~mT z87M#j3Q&Lo6rcbFC_n)UP=Epy_>2ks{@%y;!yo?Y4Ca0oaW|>d8+ZfN>n;+YgMSd(shT$c_9wb`n>ZcskjKP@}5M~*EIVEh1ssC!sGJVrXJl#C&1Z2n_oxO?q})AA>Qwpt$^ z<0bT!#*&^qM%!ujTh@W&;W4`57?PemhTX1jkA^rhk0JU_Dy1YnX$-j*ONd&R*2>Qn z3v1CmIYea^5$Ol$EZ0KXM&S`;E+Nh!j@P&BM#tGUQ_Vf(uve> z;ZXjV&!8;F_!H7eR6dOSRYVC7g;Cnd!?W?@qxeAKQM8h|-}%SWKYqKEx$ukgubsYg zJOAA|)cNzA!ch>PjLJwQ$nh2trLP=csSgno%ILd21E(y}X?(*>rP3Q&Lo6rcbFC_n)U zP=Epypa2CZKmiI+fC3bt00k&O0SZun0u-PC1t>rP3Q&Lo6rcbFC_n)UP=Epypa2CZ zKmiI+fC3bt00lln0-JAs^y2#~ScfjFa?ghoA5UeSudJ(=b-xmS60VPzysXckLeF4j zKMDxZ`uQl8{<8k}#E=d4MC~7RJ+rivb?UMrP3Q&Lo6rcbFC_n)UP=Epypa2CZKmiI+fC3bt00k&O0SZun0u-PC1t>rP z3Q&Lo6rcbFC_n)UP=Epypa2DwI`^3BCcATp>F(qPL{8}~$59tj@?r4c`}4X}Z(ABK 
z9k@85ZoQ(C>YZUiGX6Kkn<3(Dq*4AZ@>>%L_17E4YqIV?g^j$E;Co2t!u$`Az8>~P zR?Q)vMwEOR8_Dk@okc1^>YK7b_%qV8Xdv$s_^0F%W&D35#h<+9K+P(n+p~A-o@IFL zuHjj(w-Ec>`9h;_x0*(7Bfpxlzp`{?kk4n#R=u-5C={=jDle=T%QaogZLAb)nak7uPfGo;j#gGwr(RxVwXm&2ltK z)7q-{Ti&2rE0orjGnkJ#KR>8dDy95tZmpa_b7R}II$KW9w0fyvehOcxZ+~vi1}Ebv z{}wZ@Ijw%%a&et3({}oopYOAsed*%krK-`3>*?7)PqLXYG2(gdskj{C<}pbC3{~gA%q$H5}!U(fZ&4h5vja zHoxe-htct)ax{j#=Wsk|-S51NAC20^uW#&#&+ol(eyd?8DaXecA5YxA?Yk9nwcOaA zY?wbG6*rkPE`cA$&$DH_p4P{YHyO59*#q;iN?x}(3gLKmyS{D7sm8~f%mz)P@ud7o z=NZk<>A3#YjgL1Sj`vnDtdeqcO~R@}NjP4!?$!PJ_;@$bU&7TOQBwY-@#H$lHPgDZ zR(`HnSc~rAAu1(g{jG}buKjQ@xemB|qwolFmk?(V$LsM7r{myB1s*-v4eYy!k%GADyD1AO{coZGvI6wN&&h1|p zo}Rz)_Sa_rUb^?kIn>E<)zm|Nd@?Fa9l{ji5~B1Cj{6~U31#%%o`F-ADF2D6+=u}C zgMMkG2mM6taKC!rbD~~Hk8?K}Z(rSWJ*%Bsj{3-P9!HerP3Q&Lo6rcbFC_n)UP=Epypa2CZKmiI+fC3bt z00k&O0SZun0u-PC1t>rP3Q&Lo6!;$tY_7if(TnfnU3{{d_I!A-@l@96%G&uwq!ND` zu3MM9tRJ32&uPCm3Rqx!GwY|LjkJ+<&JxE%eEt0RzT@&Itx^88rP3Q&Lo6rcbFC_n)UP=Epypa2CZKmiI+fC3bt00k&O0SZun z0u-PC1t>rP3Q&Lo6rcbFC_n)UP=Epypa2CZKmiI+fC3bt00k&O0Sf#-37mUOb!WTT z?xpTbb@_Rv&Sk?duv_h}x2Mgz`E^S@|IK$2@4gjGJE7HSb!y9T)R!vD&*rt7nNYW0 z!N3*( zZ+U}itx#H9&R|C7{QRI+sg&}oxwUcz&5do->TEeZ)9R&y8LDkF)&3lv4^GEXT5fgY zn$zmHEf?3yGHs{t8s?5`$an7-{YwT5oVoV@A;#|epd$9q@PMu4u*du>HuqXh?Rx!& NZrGis)r;=S{{kfq;2!`0 diff --git a/test/gcfiles/libnoobjc.dylib b/test/gcfiles/libnoobjc.dylib deleted file mode 100755 index 56167e2988e548060e5b67bab17adf9af211be26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 41640 zcmeI4U1%It7>3W9YW<1W%lZR~#-f(&BsGK}EZxoSk_0!qCD|0ckTE+ui7WdPc4uRH zGYwb~F9mNEEEIdwURMPbyihFmN4ymDrlkm83%v-w@64RAEfQ1EQp5X^Z@%-LIcMga zXL7R#zW0Z%FTOIy+y~zeA2Q|u!t@$0!>R4QH`#ZVeQ=owh=2%)fCz|y2#A0Ph=2%) zfCz}d|BS$;n}2L2Km5%DSUZ2<7jPQz9?k*q``>&6cin37wJI8emQ4~w&c7edLj;=Z z2L476x8wZQ`R9)s^Du5O-eUXArp;Zq-+rgjuDgx)N(Y^{ui*mb&Z3{uzSqvRk?Ws= zyKZT&{A#fK}$^>~L5ht*1@l%LJbl`|Obta?7w z3c8-(O~>mqt6tjMGrPfNI)?u)&hbvO-|~C-OupCX^m~rC-gEeUCZewe5ZE?HLMWl` 
z_JV%RtwGGe@5UcqR}l~a5fA|p5CIVo0TB=Z5fFj9PT+E^|Ci#~fYtxo#32Ip|7efH zZ@_QY-{ZZ=vPrZd$?sGX@LY0EK`lI*&EpK$0c}9`?M5Gv4@05-5dUa#;HP%^!O_ zw!`L7XwvTw{oe2W-n`%Mz4^@OFaz(+Z|?r$mrAKq(9c3gl$wC~#=9uSQu}Q;;eK;Z z+MpB?AOR8}0TLhq5+DH*AOR8}0TLjAPZ)teeE7lL@WEew26G>Wem$+!t9TE9=YN$( zp>?fvwU#)seCWeU{Anm&9{8fQXYF{Q1Cxxu^;FPb>>)|3VdQIVx?a!jY=sTU9B(2P z(^O*PA}=x`(YoJx(e5<0-Pv@|n9Ok-u}f$d8;^_P#R`LO`wXh{my59@dDLSXF%%9W zTDL8)uFY26bp!usd{z$RkBluZV0;0CC_MBJ=g8)jk{n_a^N;0lciRo8otN_T(IPyS}xb;>bLPh#gf*i9IQYT#GrV*5&2O)1`&w z=$;&+F%N}(7r}BZq;Dkmqw+=QBsAXMvKu|?THo$j?v|Np?!s5+kVTkEu6>B&CuCx% z)OobYTqW<5@MWC0VMoz;2K7~_$bF$oUwMHRp99=`k_XXC#`#t42S2&o{c`We&wb}B zpC5nyN7HEY=Q)N^ppQpBtcZ*^3zb+IU#a(?DP{ECo`Dkw!Jvj6(7>sjs0d=&F2GRnN?hQa^%vCtQe6cQi-5+DH*AOR8}0TLhq z5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH* zAOR8}0TLhq5+DH*AOR8}0TTFx5ZHL_{pa6W#5#0Yk^4|M>A00OzOt@f*8Gb8G+ZAq zby=T3hR9%LKO9K3em=4iFKd5Klzea`>VL0mnWdksQ!I5(nhy`<9WjbTbE{`bV2A-vzhM)f;Xh z$h!Xw3fYt3b=c{!{(ab2!dO(*H1sr7>J=1H--Dfk6_NI)6p(jd$I(Id6Zogpp_2dK zu=vR~2WmSy_v+P&Sx5ZyVW!bYsICU{iV4}gJLmfw(6a&!9r=Z zTzz(>RH^A&VQsNg%U#T7E+$gLqtQ5-SYB84`lnWI!ES7RWWtpD6ZOxd2mC{PC z-7^Q(^<2AdI_}P3ZKD!pX~T8 zo@wSN&c3l==evIXqNrT`OI!!AVL!(GdEw&!0TQ`w!!=T6@-xhe{~PeDeG*672;T z`9Uknv>NW$QPKL~1%>~9A~C;c-@|D9$d7W!K8JD9y5D&b4~^Q!(>D$z=XX1t-%=PP zelkZqpK$w@&uyVlE5wfE!}tEeuzKJm3jUcTOKe{Gi(;*Y)Yu3HG-=55O6Y(Oe zfl=arR6e;5a?P|ZFIS!}Ei6a(@DPn6dB0TA-L)4clk0%Xw=egjmW572+Rj7#6MP-Vg zgG$WDl?M?Z$ z8GW~B;G{+6ADJn{AfSiDWuXUgLho=~z3(|u)X{P7q?7&EcU{kFXXc|ABvg+=W&S%u zrR3gyEcAsZg#<`|1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14c zNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCTMbJ_k4G9ef}Ax&gR1}uv_h}x2w&% z`Bh6jbM1}Pn>T_fr?fh)PHj4ldZ0Rgxv16Th`RL>J_=qe7uMJHw8rKHmsU!(Ld<_M zt!}=YQfak^G7t$7k$nQ1ek~||pYZ<#e{&?I{&Br@MS4hl28HZ#Ao4xbr(^yPQGX>k zH$Qd-Hid*-0Jo@>`l$K4sMZB(KxO>48>Z+V0D+Cq7GK8G2Z7cLBH)oQu8R9LR$ z(B0TFtJwhJ5dS*1u$6 pfiu_sA0l?&2Mvio!#(zrVUPJMHg{W1?Rx!&ZrGis)r;=S{{U2%Gzo*_IPZE z&7shw-v>SK^FHsq&-1?XnbBbe-gn-=^XuO#rA|S=0v%Oq0_K$uQH+o5w_St#gFR`3 
zQb>RVNPq-LfCNZ@1W14cNPq-LfCRo^1paXQlRM#qzdD1tpM`!urPOP94}j-?l|rF) zt$3xDII&#l!%F;VC|(};qP1u3c%cK6jKBV^pugBdl2XIS*V=Twp556B87X&0<2}SKqg`w~E{Yc`41VM@s4P5Hh#kqH9@B`Sa1hbD zZFzNVw(71M_($V2av*nPYCa09-5Sy5PEQh<>Za6J}66mY-;T+E+ zRyvD4IY--R_FL8g$HO_gVGgk;=dj!Lt^E{7<}pOBe@Ti$DxzZczerk^sK9WyJxvuX1cixU!6x5VJf-yA&Q@niJ?*# z&?a-0yidZHasC23hQ@nQUxSL=6RPx;7ijT0z`ZMZ0KH_Kw|?@cPrviKZ~o>N)xzvw zZ>RUB(dN%{9HT%Vi+orS8E+0Mu`<3=A3;Zy(RX_WPFU1we9uhhLjX7`F~Z;=MrM3C zrr!6QDB|cCw^GUe>$|RJwbKhx%p=Gs^PV3D|KmqPUpP`ofCNZ@1W14cNPq-LfCNZ@ z1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14c zNPq-LfCNZ@1W14cNPq-L;0r=vyRn3I=&~aB!En-XD{FjZUA?UN75!znK3?jw zK7SmM!ODI(kZApUWF=nK{+=kg;7HW}Zr3tPKUt?PYqMqD@2@bg=i!S4>ym>yOA;Uf z5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH* zAOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*_!I6tbczob&??cs=2{B`@y5ZVW@QT-O`o1-J@AFmfL z%ewzG3fYt3$FS33{b#VRhOwxsY3OOF)XONOz6U!6D-n zsq*YH$T3pSx zd*-0Bo^97n$K4rJH_B0#rnOn`x4gl6ZLzenki~q=ix&sAN~Kg-&aafS=x%J8R%g@c znN}|y%unIl^zARr+2CXx<=8? 
zFQwFL{w{q=eGUFR9;&sj6|dCbBqf(<2z}Y-PoClZhjCb~J!{89C6r`7d43m>Ifz7l z;Gs;Z;eH(ztq)#M`0pnY^NaR9jK+`rC`WS7L9P3p7xB=jZ9IMZ`46A(d*S?+!vx|d zb40nt3b!Bn+!ph-eC$Xrte;?oo6H%Pz?b-Wwrtna`uKcfVSkl7U>-Kfc8mKG=Cj-N zElW-{nQtr?bdBN7p25I%L9p&AM0j+mrdOAzoxH zFiQN-$|u)Bu9?=QmGaZY#g*tD9->hs=a(wFyY|9lavgB__T@g*GSEqAydCdwde+sx z-LqW0ufPikyWz*2M@H!{c_&~dV5eY{k{Jf`JB0(1dkA0p4_`(*8t+AY4JsmaQJLab zp%U|X+xOwx&H@^GIcNj zQAXeG88~TC`A4SnF$m}(aT(}AoX|TQSMPgH6m@i*Td8FK^f0mlyctY+M61*4)TZO8`zs4i6||ZhRo7p_N5KoF{QA0{*4Uij(rU4mkNHog z)U}sKR7&lk3`9aiWS@YhUki$x3IEUVH%CX*KVL6imL3wHMj?9~i2M}w>6rf(>aPZe z{1UF3hMo!A#4n?W{NJNK6&@4w_k<5uHKhpUU#OqO$Kj)W21aoL;z#=rOp1>_(fl5Q z4|hL9Ch)_r%*)tDw`XtHJvuksZz*h2Bmx%4NJu=zR+%&p1p0& zn1j{gYPQ`o2bJ|~yKXw}&Y-$cjYRgQwzeMMQ({YrRTiv+qwEAt!#kI0b+v&T8x$PSAz56-;l7R)z nT>F2B*nJ-~B>oI{*-M5!=C9b?Z8f#)^&7fjcbZl&x-b6&uxRCk diff --git a/test/gcfiles/libsupportsgc.dylib b/test/gcfiles/libsupportsgc.dylib deleted file mode 100755 index 8a1efdac309cb57c33805b544ce9ff7e6ab022ee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 74696 zcmeI5-)|#V700hP-X&_YY!(CxYAdCysM>ak?Omc(Bx+&DF-cngNE0X8K2*2%c$^Nh zJ;pOm6A8IqXtk}x!~O$E3lH$PeOOAx11}X)1T8EG2_8V|LyLHz0wH)Q(u(hyAIW5r zRNDt6TE0g<_uO;mo^$VfX8bTC-8;Xy_tT##rA|RV107Rp66V|QqL@hRx7~vKojqxT zQb>RVNPq-LfCNZ@1W14cNPq-LfCN5a1b+452ls*pe{}|PKL`C*QmI$)9stk(Dv3ht zTJc&fc4E1}hn4u#P`o_wL~GaFaRUb?9)J65et)rtB&kNBueIU0eXF|_G{keguZlEc~UH0-uF3G~(aaE|8@ zE1kujoTFnm2W|6!y7gxf2 za)ibl6!u*N%e9ccp`1nK%g`xkw7qRLoI$T=_g!bpNHuris|&~?OeNPoMDb%XIa2B( z+GMVh_euCN&L3dM(fA>Z=Mqv1^Ptj!-~jwb5Mzu@s)ZHno#M1(@*1sMeW8nj8r}VfTI#43=U#s#)o6- z1J@2Cj*f9R8SlTo>$qkowGhTUhKw@r`H}xWemwAnBZUM=fCNZ@1W14cNPq-LfCNZ@ z1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14cNPq-LfCNZ@1W14c zNPq-LfCNZ@1W14cNPq-BAp|yFd;ht&maq<8R^&b$Ogd_1jjyb$mo>klKMvN%OI_CI zPax7?*$)R2uAdLB#LL>>6D8*#3H#sgT4w1d>(pg!wygX83Fh@Yd=Y)oy4VsW)p zeYRYz)O0Pswp6TTFK1GhuMC$q@@tFbe5q6|WHZB3zJi9OViu@O!?m`}Sz}l(mb0C{ zF|4j(?#}EB5M|{sgtq0u~@zAI(Jbiol51;Rw!Tgqk1meeY 
zgt^5Ew^u!Gi}_kUawHejPq4yG=8Q|=N%TD1mg8!De7^CZzsemj51M4V#eE6#S)KZp zDW@9GH=gsmhWW(*sPhcxXLlX%>c;a;2KjFKX_fflH3^yynIK=Y?$*8bc)nYR7g_a< z691#}$#sxxrgdqh@^o==CA^16XcWnLr3&w^y&#!f2VA~=Ig45bIt7ik;~kDO==JQr z>)?F_UO-q4FXjR=N`J{a2{Q>h4I7ut$e-US9FW{2_|kuL9qnj*5cO54h}1=8il2o_ z%*U075Fq3HzC8AuH|~7(=huJqxwHTL`tLSp&?e)mi3dD=BJ`yVG69{3N~}Nbd#EK; zdf@caIB8M&N2c--22gN3!Xa-Q`HiIqMV1N9n%BEEh; zdhEEmzef1zk&l~aFLyLYwB=fGJnR=epJ=~u9eT8lbrK)}5+DH*AOR8}0TLhq5+DH* zAOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8}0TLhq5+DH*AOR8} z0TLhq5+DH*AOR8}0TLhq5+DH*_+KG#{!!JN?&W$HdsFKR&nR_17kq);?DX7SZPbky zO!dq+Ur*e*=}$SK)oFFAW!viW)rF@DT1}0q+b`jx;Du6teO=FJY)){sT&(3I{*y^{ z>v}>Z)gDS;Bt%5^321sXzj!C+{}z60ETR5#qqr$OBtC^g_BasvKI$_O|A(l*>>u(< zxM~J^CTJ7Cf+F6EN6G-uj;FF!$KizwCmli z;bL*MRDHHwtkiTZzqVAYWiMw^m#++$Hu7tW<$S4BEo3vpQoe$QrD7IeXg3Yl+BRp6 zVYyh&cKXJ!x}NRS4cplnu5DDpEKRdjAGF=!dTp_^vXI4$j7yh>wQ99gSkA9hvgn@P zGR$tv?i*%5<7%Q6rQDpT5Uv)2*v!$R4oK3SwIP@pq0FKU7Xf5Ar2)ff?;iM;w|eR?XGD8 zsuT%0xB2MV;=++jLG*y&R3VTs-`kzBch^qNNX(PQGjHC^H}8Ej zJD%P7{;%KuIl!2}Ok1opaCcR=r_W z5^q`H$?>HwJER1jw1;@SVL2VkMWZC%s=#|#0FX|+4`s~1cJ*?NH!Z)>GMm;Io-6Q9 z2msQf62pdp^KR>k-KulDwbl;w_q zWwcJt7%N7k8P-E=oyhePSto=$nuqmvWSJoD$#^L_1aP8$5l%3kuyfvWxVPD?wjJK_ z-7&nB@K3r$dJ-?nBj=8}7hXo-N&iTX;?Z-lU;2)m^I)CWO>^C%sZGY4P2iCp^(O)q z6a}7VIj*&a$0Lb1Bk+&~LVDCcJhRdi#j81{=MiEO@1($!@sb|J14s;Sv@ewxE}So( zS&(~5Hk`MQ<>Wypkvj1vO;mpr``+_Z$TCJ_r;slpm$I4-Ol(8s#2KyS9q6=1rk`M} zgZvPZ>__-AQTymTL5&Og)Bcu}dr^@^nnKdMo~t`{Rj+S4b?79IAdw%}>QDXt>3662 z&MRM?`|{G&o5xU>Mq-B!2I(28Q{C6tgAp(FOYVy_&MFew!gX;3C5`EoP0zQQdfBeJ zrn{-1rAPfr+kMZ|3+-CloDH6Rs?aZ5u4lJfo^FID^bh@SARpmoq3wxlbUKbzskwH? 
zU#wm(OdcW8DB$U2WE@TV7PMm=$sPC$Lz&Ibf=9cG#*p#2Kg4uAw^!U4`n>H=a;u<+ zHi7#t@=@Hk;Lizd4?fSfyREwEgTl$!BfG$l#>?0()W+kb9bwjpM$6cfXhrMe^K7`@ zpBDNKbR~)b#eiZ!F`yXu|1t1JsrR$-rg5>dw9vcL`@6J#I)g)8X?tEnR^C4S11K>D zKceDKluEsuP(2^X>PoLU{V2w^EGF9;LpJXijG|I>XH&r&K|igvxYoo$++ow&F5u$yt%(RWPmYzD0>J#`b(h%=$WTtbje9ycZso$lms#Xjr1{4E|0mXn~ zKrx^gPz)#r6a$I@#eibqKgPg)M>Cz&GGq7W=yEBz4Chtc599LsbvM<2AjcQ;=L)<~ zHYycX&avsOSFBc@ZQa1vjX<6Hu;nfowU3)xiX(Bs?lrNgWH5o2Z*kLWW;R?QP(ESQNV^DP< z&*B&Q@JcNo>huXCkR|-WKD=6_0jaa-kdqEf@@_&ryRZ`{W{g?ath+Dk`us~T%+1fe G$o>UZ$ed*W diff --git a/test/gcfiles/x86_64-broken b/test/gcfiles/x86_64-broken deleted file mode 100755 index 4346f8ac435152adab696f59ce65004d50094f49..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8576 zcmeHM&x;&I7_Cl>8I#4#Mv;g==!C2$A?&O~!AoFfHxp-d$&Z~?M9|92PVdfWPtVdl zLw3c(K!~t}z+4o(?M3hq#FPJkg2~B)B01!!vH=AV52E1sUUl_sb&s37P#;uRy{fNX zfAy-msqXsruV4S!FNCvKi2fcS#3a()hlRK_cKsld|ircjI!5(|qnB3JMIL16G|C% zd-Z5km5uKm%{QSv5}b6%X+B7%EasC<&tLOG)MWX5&39b;6I1t*jm6igzq>5kUbh+4 z+TIx7K=UQ{CpIm`jRq-~qi)y=R<%<$zK=EEBf6c~v~j*(aJ3aQWGh(dz%k3WrTMsq zXvA5LxFx^_5z8$@(+a9eKI- zXi^5gj5gZ!jxX1`;jVlM4cD(37w5+ux0HVEL40}iXM71CvB}5h!Cco;s&&$8*H%4F zMRvTd=KF0J8?hM==u)QnBF_)K6+9kUz7v`+r+X2b@j*A#?3m_T_G?i@#w_1VhL70f z15=7`=e@Krf8n+Ax%uRs;%4R*nATU59lu9(QPm$q(+o?l5FASmc@cSH6<3H80BgwU zN6onQJ_F{Qx<`e$g8U%TB_z}6%Vf@z+0Jqf{gY?cYIMdEv>(2K%?->MfH8-4?r z_7oE1`1HGbC%^pu%=4d|`um4lozshauPC7o9zakB#Hj=$r(rC8_TzieG~VY565ANF z5I-X9(@vqf5p}(GVWCwIYvD%WJRi-go$$S=aK6(E8nte#6HKcImbHa9yf8vdRG2f` z!AAae5>i%X^gW~hchhO4{@P3-H$7Ox@7he;yHLxhb=dr*OMA^ z-mF;#tO8a6tAJI&Dqt0OkOHq)Hh-#I`z2qwvCnbvDSYFl-{D@_{AQawjLOZkmlZ7k z`Gwf#p6!J@o8>V{LF<0{0$&1XJBR;muCt|K2bS$Y3wY{;AJ zdjB3uYJ)7oUJ&3L4O=+eQ#?&Sn3;2r#y#AN$eH+EN@JP(<_Yx7tKO^?&f_XW+xv^c zS5lJ-%i+x6aFHiZ>ZDxOaoCHK?fz|j zkT`S(9C4UGT$F1#SnjTffe4R0R*VlM+tF&(3mT}?=182W?vd@L^wz!QUf07}d5w%m z+qG68ruegAuh@E4yC(%!A9B>mmij;Ogu+7o14;goARqnFID>Y8(Gg-P#-vd;k1N_o h7>7~HS)cb76YS#Tb39h5V%_!nS2NTC1# diff --git 
a/test/gcfiles/x86_64-gc b/test/gcfiles/x86_64-gc deleted file mode 100755 index 23246c22b53107dab8a66a5c06b88481390e5dfc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8576 zcmeHM&x;&I7_Cl>8I#4#Mv;g==!C2$A?&O~!AoFfHxp-d$&Z~?M9|92PVdfWPtVfb zLw3c(K!~t}z+4o(?M3hq#FPJkg2~B)B01z3WdnK<52E1sUUl_MO|P4~P#;uRzpuV} z{ne}Lrn~F=zkd5?zYxw|A$og+5R)i(9~I)daB(KY(1IJ%7y$;mPv(n(w%dC#LR0+mTEzQR@ zL?g~}BrVBNO0OwbLJn0~mTz109nqbLv37SguDiQf5K^8iEtQNgtgo?km##l!-I14T zk0xc{%V?ur@Az^p4tM2CX}EsPyf{DRxTW-K597XvAhdiFb6=e39pe-U@DyEZ>O?AF-Jqbc1JPns3>!MG+aZd@~t7Vv`R{ zV|=6M(!%_OH_GSc(`SmCnO9(1Pfd369MPxBe+*4CJadJhHaXNq)TvcmAx;3Sp=KO4 z{Or`wEz4nBo%yz)E3E1TbK^9`eN^Xz2>%YS|) zw)xKX(w)unADg!;*S^ia`%Ql^bql`1)R#C4v2gRbPf(SrgQ@GN`%O9@{4y8}n(X@E z9?sMYvIx6DfNwOs!r_|YS;oQ4n(t_QhkF?{3%^TgJm-7!1bXIGZ+eCEq^i*N;i7QX z^G<$Zf38pRjC}+ix6|f3e1*`&A0jq#jz$#!BunJfOtOEZp z1tyQ>7fbkuF9cd$F_KyN#a} z%A7t&9Oe%fWIWog zwE{84pACD(*7G_%J)!%Mqjt8`|B0s*7U~~J`i}(l@Q=nBv;z#c5K9Rrjk0+h(LTi3 gj8egRzqgoT7dxLjbCoLBT@)K1cq+;c1(Q7f1%l~7zyJUM diff --git a/test/gcfiles/x86_64-gcaso b/test/gcfiles/x86_64-gcaso deleted file mode 100755 index 9a58c23c2353415bf3e89506ea46fe2663d39895..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8920 zcmeHNO>7%Q6rLrN28ifZDX54aIaPuJs^V5EQZAM3I4&-Pq={1m6=-5Ro7iQ&wszNT z!UaneKT*}n9QY9@4shTWAugbTkU%1pkjSwI4jfvPN+5y6A>vT;z1g2P z3OV=uM$zl2biQf9Hy~O_EIafIzI%isE(w&0WVA*@O-U&-^`inA;l>GoX}eD{ia z!m9N0HJpnTXOUN&r5YOQd>;rtib)c}I!9O&{&H@W`I1Xh4MgYrS@0beoe1Mv*j2i5 zJei(PE6otEkvNv|ti&<#%5~!0vAMTeE!J#a_g&0$XFf?o*Mn*ot&ciy&h5fpd?xyn zeUgu`%176kdYyjusXkhMEQ~S z)f_Ll8|Zw;1Yb+^A*|V_uyyS7EZen~a68cXz7c$KzJygi+@qlBinNceTVmAtHZ(rM zDj%43;#(~EgnCUPoE+ffXJ;*k=KxtshB z9(k`)+dKe}K%)D0vf;T&yHZRpuG)*hB#$7G9kcKMVZLRZNMHTp^@XX;uYMSY4z7bi z_XX)GiAe{+T$uOh0NQ^Q)e3Rg+VdqFZoRlc(rbbg|~X>LpLr z8V=U8Qgg)AhP-5gU9|eL>{gcxcMzG&Ys- z4iqG09g*Lm9mT&W&~um)$(v%l|H>o3S3{P6{*~Va@_l)?9xKYccWV>tV-Lh0h&>Q{ zAof7)f!G7F2VxJz9*8{MiFjXAwFf 
z2FL#f`gpa>tre@(@GYFl7s%kzYN6t=qroiMaoW@9W39(=YNJ{fZp)mNhKBH5AKM&4 z>+K=Eu*F1@k~qPl>mJTH)b%=s@8DM+_%+JXi5^`o*1ea1 VIxnljbyZ^^y3#TVdLlBI*xyyL45|PC diff --git a/test/gcfiles/x86_64-gcaso2 b/test/gcfiles/x86_64-gcaso2 deleted file mode 100755 index 3ac79a2f133918ea56a660a31c210729f126b194..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8640 zcmeHMO^6&t6t2#XS(C*~+$a%$=v}fBf7o8pfS16`P9}EZl1+A2Sp}(`pWdC0Jv~Er z57`xkkr2tS1jd5_ZyrTFcu?>dR4_St_24m)2`G3|R2;uoT{Y9wlU?vA)Pw4(_v+PG zufKlX)6?&VKY#t_0At2p#@c%rV^e6K!f=(D7%}!3nvKT!h1@HJHw(*4WE~Y_RK2f8 zIGfmj^OeHt%BYK2pBc3=sUsA&AB}nv&Ld|tipQ~@!aIzUAwT5;eV-arzmK((D(7L; zDu)A8I=(fDHz_@ot`RN-|es`;h5 zGltidcxr#rlQM#h3C=5F6uAC|bV|o}L*hLs=Sfe>I9|)Y?D|#i`s)oirtvF=5}?E9(&E|kuNF?9RnHVNbzPy8+!Pl(evZhYu+QMRH%}vzG2&(d{VIATYbIkS zpj<;we#9B&-c9Iq4eV(=*U%qAyMUGy@};J7C^@Ib^H{I$U1g*GAv6<>o;y}63@pzr zTh%SE3Z3L}G>YS!YhTTL*gEyWr=VP0uM-0=F40l`rM*w@J=QIf;EXe6CGy|Fe&46Y=GoTsxZw$O}y|uk?yHmK+xn*DZ&V1|JZh!VO*!r`d z=xsEaxsVb|lM}7S82)elb@p^5gt` zaRJpG&tVsX`R-JfDo>(P&dVtFqO#q-g^v|NjPB8~wW!xPl?au!ZjhPE@+Jz-(K_q0 z;MgPCxKLRSH;Rs5g-wdVQ77BO>ox9dI+a%BpoU(eou=z0*Jrcz5n?ZEKPBDOfXs&t zQSlPrEgls*9fsdd@I7PjrQ=0RK|yUNmUFQ_DABFP`k?f9`NqusTd zAP$zQB894wFXfyg2ZZT&f2?c(mT?8((KHe`R466 zZ)R7sJKz2F%Rfhiunr5+J0ygdLfU;;h?~MjNr)$qDo9dZDZN&Hr@VZbwP~}`@IMiG zHgN+fSITd%q+L}1e7a6(jM&5k5_6iAp|crA)^osXbyGcgSfzNTw{}lTF9naD3Ec`CQF+R{IlE_o0o&*Qvk1D%(!j^lEKq zjIXcx;`TkF6v%eSNXxQ1xN zS&pbBx=QIZ<(ki-D$DZiX}(js6EW8AfyQ;Wiv=O&h0;nX;fD2?nddDxQxqavH|cV?VwZlg9g8Lt-0a7UD;Q z{eo4fZUv#!E-ba`e$C%1T;!v9z2kon6fSnUUZWPaI^K+WU|Cyu%kcx$1cik}JJ`to z3R28(S@*zijmC!YMYo!npTRg=bvVPa{SX&w)BU*eXG+OlLOcsE>ab%+^ZJ~|oHt`e z0i%FXz$jo8FbWt29;CpVmF@eL8$ahOw?Qi$E!>HUoe^tTqpI?eS z?%C$gC%qB(YS|u2{{wLOKB`~-#m$)dDWY>!aAukw0*cNtj)ZY zpFEQ5lRRS|frr%}>zvKppT^&r#Kw#QMggOMQNSo*6fg=H1&jhl0i%FXz$jo8_$aaim;qut{ItB+giWJ~>@cwAwj{(;2*NRSWzXuODafZ-8hDZ->twumd*hZu)Z e%2}WH7GrGVY}T7@ln?HEBehf>043+E#H@5Q@W>#DPe5QnwC7{fGlXNOWq)o7k{kN4sk^ 
zK^%;#B1KgtU*Mc02ZXrs13;n`2d)+3$VV!{1#v(iVV-Ap*4bT?-jU{&W@n#w-g)+& zXJ%KsJKz2F+doHyuqK4)9}+@LquhN^h#SJjnGlboR8ge7R6bvMyRveT-n3b1_@9V8 zTeyIftChD_(;=#VKJ614BR6pvg*8pe(Af&3^9ZN;>|+=V8s>Ef@dNwe1pOqGG6;K( zVC2f?cSZ9}>4+G|9deovk|~S$WXo|k9Uq=7pR4(1bUZP2AK6%b-Nw6Xvg3p;uikOS z_y(FUzCLl%Qru{eaxDn`wzr{!viW_Y`5w~k#7P_H>v`AOUQ@Qc^)3Rle7l;DYludi z<%n9Mqm)iduKOISvMk@8<~yM~5o7HhXk2%9u^^;8TV5?E!mz$l>n>h@iFHR_u05KR zo-2dxPNVC}&CoxPFQ(!8P3Fb? zcx}y>Lq8gEGM~u1f7E<|mDcR2R9Qw!iZnEuR02_tRonkNWi*CD7Y;LNX z%;Vb+ue>@l|HAn%C;u)?eE-X7K39}bM<&YLl# zfKk9GU=%P47zK<1_fz1F>dsHq>%Zo!HxFADK80_-{5!&{JKyZ_4WoK%{<4CVKfe%r zd}n+9&Q9fzo!iyxU+3TXYA`H(2H&voIgUas-+Jm}ROQ;Ra0B(AMd!Vrhr?luUGM#X zGxdTj{GR9G8x5~;xTbiLaj;~~cQn4ky@Z;D-=#F3^SyZrJ@cwJy~27_RcL#EMOa&T zD?fE4Hz0Y&J^&BhAL%-7j=wWwY-*!`QNSo*6fg=H1&jhl0i%FXz$jo8FbWt2{yz## zpUSV4@egCVBrh(!wkQ{u%e9)Ekr!$U*xhj*aoL|694~R_N$r%&1~z-Kv)#XqpB2ii z0Y~fSkC)^oHkQM6F%bTVM@rF#WGC3DIbIV!ZI;BI>LJ-_NoUJh>xB;X%Ijo2*{QcZ zQQ(h_39C~YF*hxJq?6E+w9{Bz7&e-? z>k=3K1pk056W!oOegYd;#y{YKaL)AA&eZC{^}ETP_s)HH&b{x@tgyYsXwpm)4n^e2BT;py@N_4c61|^E#>8RtCYRu7 zDKgd2wnGQ74Dxj*aT&zNT?+ANKb_zx4pm$A7eT$EL4EH42ZMaaj*lA9h^f=xOx0F( zuTiSmZpI+rnd7_W2#9-k8^+n!e`9TPz3^bIrycZ3=;rh~b|lm-I(K*$-*;(Pfx#Gf zn8!P^$2>6mtGf6E_Ci=Y2%&;Kn8Ps4MqW+S&BlyjBC{R%McI-GrcIsN+M zR&n{!$KW9K>dlAuw-G~2A`_P*U+7`RU~{-H-{bd>8|&oZJd1iZC-^-~zc%VWK5T}z zws?OLeFWw1+jp_B-Vz@Cx8@zDKHj_nUIDLwSHLUa74Qmp1-t@Y0k6RSDDY(U?f2zR z&p&gPy5R`oD`ytYH21L5-?{mQd(LTT%rcq&Bxa@%qorPMi)m*}COtOZi4STsubIvar = 10; - uintptr_t *subwords = (uintptr_t *)(__bridge void*)sub; - testassert(subwords[2] == 10); + testassertequal(readWord(sub, 2), 10); #ifdef __cplusplus - testassert(subwords[5] == 1); - testassert(sub->cxx.magic == 1); + testassertequal(readWord(sub, 5), 1); + testassertequal(sub->cxx.magic, 1); sub->cxx.magic++; - testassert(subwords[5] == 2); - testassert(sub->cxx.magic == 2); + testassertequal(readWord(sub, 5), 2); + testassertequal(sub->cxx.magic, 2); # if __has_feature(objc_arc) sub = nil; # else @@ -254,15 +261,14 @@ int main(int argc 
__attribute__((unused)), char **argv) */ Sub2 *sub2 = [Sub2 new]; - uintptr_t *sub2words = (uintptr_t *)(__bridge void*)sub2; sub2->subIvar = (void *)10; - testassert(sub2words[11] == 10); + testassertequal(readWord(sub2, 11), 10); - testassert(class_getInstanceSize([Sub2 class]) == 13*sizeof(void*)); + testassertequal(class_getInstanceSize([Sub2 class]), 13*sizeof(void*)); ivar = class_getInstanceVariable([Sub2 class], "subIvar"); testassert(ivar); - testassert(11*sizeof(void*) == (size_t)ivar_getOffset(ivar)); + testassertequal(11*sizeof(void*), (size_t)ivar_getOffset(ivar)); testassert(0 == strcmp(ivar_getName(ivar), "subIvar")); ivar = class_getInstanceVariable([ShrinkingSuper class], "superIvar"); diff --git a/test/lazyClassName.m b/test/lazyClassName.m new file mode 100644 index 0000000..264c20f --- /dev/null +++ b/test/lazyClassName.m @@ -0,0 +1,136 @@ +/* +TEST_RUN_OUTPUT +LazyClassName +LazyClassName2 +END +*/ + +#include "test.h" +#include "testroot.i" + +typedef const char * _Nullable (*objc_hook_lazyClassNamer)(_Nonnull Class); + +void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue, + _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue); + +#define RW_COPIED_RO (1<<27) + +struct ObjCClass { + struct ObjCClass * __ptrauth_objc_isa_pointer isa; + struct ObjCClass * __ptrauth_objc_super_pointer superclass; + void *cachePtr; + uintptr_t zero; + uintptr_t data; +}; + +struct ObjCClass_ro { + uint32_t flags; + uint32_t instanceStart; + uint32_t instanceSize; +#ifdef __LP64__ + uint32_t reserved; +#endif + + union { + const uint8_t * ivarLayout; + struct ObjCClass * nonMetaClass; + }; + + const char * name; + struct ObjCMethodList * __ptrauth_objc_method_list_pointer baseMethodList; + struct protocol_list_t * baseProtocols; + const struct ivar_list_t * ivars; + + const uint8_t * weakIvarLayout; + struct property_list_t *baseProperties; +}; + +extern struct ObjCClass OBJC_METACLASS_$_NSObject; +extern struct ObjCClass 
OBJC_CLASS_$_NSObject; + +extern struct ObjCClass LazyClassName; +extern struct ObjCClass LazyClassName2; + +struct ObjCClass_ro LazyClassNameMetaclass_ro = { + .flags = 1, + .instanceStart = 40, + .instanceSize = 40, + .nonMetaClass = &LazyClassName, +}; + +struct ObjCClass LazyClassNameMetaclass = { + .isa = &OBJC_METACLASS_$_NSObject, + .superclass = &OBJC_METACLASS_$_NSObject, + .cachePtr = &_objc_empty_cache, + .data = (uintptr_t)&LazyClassNameMetaclass_ro, +}; + +struct ObjCClass_ro LazyClassName_ro = { + .instanceStart = 8, + .instanceSize = 8, +}; + +struct ObjCClass LazyClassName = { + .isa = &LazyClassNameMetaclass, + .superclass = &OBJC_CLASS_$_NSObject, + .cachePtr = &_objc_empty_cache, + .data = (uintptr_t)&LazyClassName_ro + 2, +}; + +struct ObjCClass_ro LazyClassName2Metaclass_ro = { + .flags = 1, + .instanceStart = 40, + .instanceSize = 40, + .nonMetaClass = &LazyClassName2, +}; + +struct ObjCClass LazyClassName2Metaclass = { + .isa = &OBJC_METACLASS_$_NSObject, + .superclass = &OBJC_METACLASS_$_NSObject, + .cachePtr = &_objc_empty_cache, + .data = (uintptr_t)&LazyClassName2Metaclass_ro, +}; + +struct ObjCClass_ro LazyClassName2_ro = { + .instanceStart = 8, + .instanceSize = 8, +}; + +struct ObjCClass LazyClassName2 = { + .isa = &LazyClassName2Metaclass, + .superclass = &OBJC_CLASS_$_NSObject, + .cachePtr = &_objc_empty_cache, + .data = (uintptr_t)&LazyClassName2_ro + 2, +}; + +static objc_hook_lazyClassNamer OrigNamer; + +static const char *ClassNamer(Class cls) { + if (cls == (__bridge Class)&LazyClassName) + return "LazyClassName"; + return OrigNamer(cls); +} + +static objc_hook_lazyClassNamer OrigNamer2; + +static const char *ClassNamer2(Class cls) { + if (cls == (__bridge Class)&LazyClassName2) + return "LazyClassName2"; + return OrigNamer2(cls); +} + +__attribute__((section("__DATA,__objc_classlist,regular,no_dead_strip"))) +struct ObjCClass *LazyClassNamePtr = &LazyClassName; 
+__attribute__((section("__DATA,__objc_classlist,regular,no_dead_strip"))) +struct ObjCClass *LazyClassNamePtr2 = &LazyClassName2; + +int main() { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wunguarded-availability" + objc_setHook_lazyClassNamer(ClassNamer, &OrigNamer); + objc_setHook_lazyClassNamer(ClassNamer2, &OrigNamer2); +#pragma clang diagnostic pop + + printf("%s\n", class_getName([(__bridge id)&LazyClassName class])); + printf("%s\n", class_getName([(__bridge id)&LazyClassName2 class])); +} diff --git a/test/libraryPath.c b/test/libraryPath.c index 40cf7bd..af3151a 100644 --- a/test/libraryPath.c +++ b/test/libraryPath.c @@ -8,11 +8,16 @@ // etc.) then the typical result is a silent failure and we end up testing // /usr/lib/libobjc.A.dylib instead. This test detects when DYLD_LIBRARY_PATH is // set but libobjc isn't loaded from it. -int main() { +int main(int argc __unused, char **argv) { + char *containingDirectory = realpath(dirname(argv[0]), NULL); + testprintf("containingDirectory is %s\n", containingDirectory); + char *dyldLibraryPath = getenv("DYLD_LIBRARY_PATH"); testprintf("DYLD_LIBRARY_PATH is %s\n", dyldLibraryPath); + if (dyldLibraryPath != NULL && strlen(dyldLibraryPath) > 0) { int foundMatch = 0; + int foundNonMatch = 0; dyldLibraryPath = strdup(dyldLibraryPath); @@ -27,6 +32,10 @@ int main() { while ((path = strsep(&cursor, ":"))) { char *resolved = realpath(path, NULL); testprintf("Resolved %s to %s\n", path, resolved); + if (strcmp(resolved, containingDirectory) == 0) { + testprintf("This is equal to our containing directory, ignoring.\n"); + continue; + } testprintf("Comparing %s and %s\n", resolved, info.dli_fname); int comparison = strncmp(resolved, info.dli_fname, strlen(resolved)); free(resolved); @@ -34,11 +43,13 @@ int main() { testprintf("Found a match!\n"); foundMatch = 1; break; + } else { + foundNonMatch = 1; } } - testprintf("Finished searching, foundMatch=%d\n", foundMatch); - testassert(foundMatch); + 
testprintf("Finished searching, foundMatch=%d foundNonMatch=%d\n", foundMatch, foundNonMatch); + testassert(foundMatch || !foundNonMatch); } succeed(__FILE__); } diff --git a/test/methodCacheLeaks.m b/test/methodCacheLeaks.m index 968bf5a..cb624c0 100644 --- a/test/methodCacheLeaks.m +++ b/test/methodCacheLeaks.m @@ -61,5 +61,10 @@ int main() exit(1); } wait4(pid, NULL, 0, NULL); - printf("objs=%p\n", objs); + + // Clean up. Otherwise leaks can end up seeing this as a leak, oddly enough. + for (int i = 0; i < classCount; i++) { + [objs[i] release]; + } + free(objs); } diff --git a/test/methodListSmall.h b/test/methodListSmall.h index c6f32e2..233e9c0 100644 --- a/test/methodListSmall.h +++ b/test/methodListSmall.h @@ -1,8 +1,8 @@ #include "test.h" struct ObjCClass { - struct ObjCClass *isa; - struct ObjCClass *superclass; + struct ObjCClass * __ptrauth_objc_isa_pointer isa; + struct ObjCClass * __ptrauth_objc_super_pointer superclass; void *cachePtr; uintptr_t zero; struct ObjCClass_ro *data; @@ -19,7 +19,7 @@ struct ObjCClass_ro { const uint8_t * ivarLayout; const char * name; - struct ObjCMethodList * baseMethodList; + struct ObjCMethodList * __ptrauth_objc_method_list_pointer baseMethodList; struct protocol_list_t * baseProtocols; const struct ivar_list_t * ivars; @@ -142,6 +142,8 @@ _BoringMethodType: .asciz "v16@0:8" _MyMethodStretName: .asciz "myMethodStret" +_MyMethodNullTypesName: + .asciz "myMethodNullTypes" _StretType: .asciz "{BigStruct=QQQQQQQ}16@0:8" )ASM"); @@ -157,6 +159,8 @@ _MyMethod3NameRef: .quad _MyMethod3Name _MyMethodStretNameRef: .quad _MyMethodStretName +_MyMethodNullTypesNameRef: + .quad _MyMethodNullTypesName )ASM"); #else asm(R"ASM( @@ -169,6 +173,8 @@ _MyMethod3NameRef: .long _MyMethod3Name _MyMethodStretNameRef: .long _MyMethodStretName +_MyMethodNullTypesNameRef: + .long _MyMethodNullTypesName )ASM"); #endif @@ -182,7 +188,7 @@ asm(R"ASM( .p2align 2 _Foo_methodlistSmall: .long 12 | 0x80000000 - .long 4 + .long 5 .long _MyMethod1NameRef 
- . .long _BoringMethodType - . @@ -199,6 +205,10 @@ _Foo_methodlistSmall: .long _MyMethodStretNameRef - . .long _StretType - . .long _myMethodStret - . + + .long _MyMethodNullTypesNameRef - . + .long 0 + .long _myMethod1 - . )ASM"); struct ObjCClass_ro Foo_ro = { diff --git a/test/methodListSmall.mm b/test/methodListSmall.mm index c10f29d..82f157a 100644 --- a/test/methodListSmall.mm +++ b/test/methodListSmall.mm @@ -68,6 +68,12 @@ void testClass(Class c) { auto *descstret = method_getDescription(mstret); testassert(descstret->name == @selector(myMethodStret)); testassert(descstret->types == method_getTypeEncoding(mstret)); + + Method nullTypeMethod = class_getInstanceMethod(c, @selector(myMethodNullTypes)); + testassert(nullTypeMethod); + testassert(method_getName(nullTypeMethod) == @selector(myMethodNullTypes)); + testassertequal(method_getTypeEncoding(nullTypeMethod), NULL); + testassertequal(method_getImplementation(nullTypeMethod), (IMP)myMethod1); } int main() { diff --git a/test/methodListSmallMutableMemory.mm b/test/methodListSmallMutableMemory.mm deleted file mode 100644 index 9250fea..0000000 --- a/test/methodListSmallMutableMemory.mm +++ /dev/null @@ -1,18 +0,0 @@ -/* -TEST_CFLAGS -std=c++11 -TEST_CRASHES -TEST_RUN_OUTPUT -objc\[\d+\]: CLASS: class 'Foo' 0x[0-9a-fA-F]+ small method list 0x[0-9a-fA-F]+ is not in immutable memory -objc\[\d+\]: HALTED -END -*/ - -#define MUTABLE_METHOD_LIST 1 - -#include "methodListSmall.h" - -int main() { - Class fooClass = (__bridge Class)&FooClass; - [fooClass new]; - fail("Should have crashed"); -} diff --git a/test/nonpointerisa.m b/test/nonpointerisa.m index d7b007f..659aed5 100644 --- a/test/nonpointerisa.m +++ b/test/nonpointerisa.m @@ -14,7 +14,9 @@ # if __x86_64__ # define RC_ONE (1ULL<<56) # elif __arm64__ && __LP64__ -# define RC_ONE (1ULL<<45) +// Quiet the warning about redefining the macro from isa.h. +# undef RC_ONE +# define RC_ONE (objc_debug_isa_magic_value == 1 ? 
1ULL<<56 : 1ULL<<45) # elif __ARM_ARCH_7K__ >= 2 || (__arm64__ && !__LP64__) # define RC_ONE (1ULL<<25) # else @@ -29,9 +31,9 @@ void check_raw_pointer(id obj, Class cls) testassert(!NONPOINTER(obj)); uintptr_t isa = ISA(obj); - testassert((Class)isa == cls); - testassert((Class)(isa & objc_debug_isa_class_mask) == cls); - testassert((Class)(isa & ~objc_debug_isa_class_mask) == 0); + testassertequal(ptrauth_strip((void *)isa, ptrauth_key_process_independent_data), (void *)cls); + testassertequal((Class)(isa & objc_debug_isa_class_mask), cls); + testassertequal(ptrauth_strip((void *)(isa & ~objc_debug_isa_class_mask), ptrauth_key_process_independent_data), 0); CFRetain(obj); testassert(ISA(obj) == isa); @@ -80,37 +82,37 @@ int main() void check_nonpointer(id obj, Class cls) { - testassert(object_getClass(obj) == cls); + testassertequal(object_getClass(obj), cls); testassert(NONPOINTER(obj)); uintptr_t isa = ISA(obj); if (objc_debug_indexed_isa_magic_mask != 0) { // Indexed isa. - testassert((isa & objc_debug_indexed_isa_magic_mask) == objc_debug_indexed_isa_magic_value); + testassertequal((isa & objc_debug_indexed_isa_magic_mask), objc_debug_indexed_isa_magic_value); testassert((isa & ~objc_debug_indexed_isa_index_mask) != 0); uintptr_t index = (isa & objc_debug_indexed_isa_index_mask) >> objc_debug_indexed_isa_index_shift; testassert(index < objc_indexed_classes_count); - testassert(objc_indexed_classes[index] == cls); + testassertequal(objc_indexed_classes[index], cls); } else { // Packed isa. 
- testassert((Class)(isa & objc_debug_isa_class_mask) == cls); + testassertequal((Class)(isa & objc_debug_isa_class_mask), cls); testassert((Class)(isa & ~objc_debug_isa_class_mask) != 0); - testassert((isa & objc_debug_isa_magic_mask) == objc_debug_isa_magic_value); + testassertequal((isa & objc_debug_isa_magic_mask), objc_debug_isa_magic_value); } CFRetain(obj); - testassert(ISA(obj) == isa + RC_ONE); - testassert([obj retainCount] == 2); + testassertequal(ISA(obj), isa + RC_ONE); + testassertequal([obj retainCount], 2); [obj retain]; - testassert(ISA(obj) == isa + RC_ONE*2); - testassert([obj retainCount] == 3); + testassertequal(ISA(obj), isa + RC_ONE*2); + testassertequal([obj retainCount], 3); CFRelease(obj); - testassert(ISA(obj) == isa + RC_ONE); - testassert([obj retainCount] == 2); + testassertequal(ISA(obj), isa + RC_ONE); + testassertequal([obj retainCount], 2); [obj release]; - testassert(ISA(obj) == isa); - testassert([obj retainCount] == 1); + testassertequal(ISA(obj), isa); + testassertequal([obj retainCount], 1); } @@ -152,14 +154,21 @@ int main() # if !OBJC_HAVE_NONPOINTER_ISA || !OBJC_HAVE_PACKED_NONPOINTER_ISA || OBJC_HAVE_INDEXED_NONPOINTER_ISA # error wrong # endif - testassert(objc_debug_isa_class_mask == (uintptr_t)&objc_absolute_packed_isa_class_mask); + void *absoluteMask = (void *)&objc_absolute_packed_isa_class_mask; +#if __has_feature(ptrauth_calls) + absoluteMask = ptrauth_strip(absoluteMask, ptrauth_key_process_independent_data); +#endif + // absoluteMask should "cover" objc_debug_isa_class_mask + testassert((objc_debug_isa_class_mask & (uintptr_t)absoluteMask) == objc_debug_isa_class_mask); + // absoluteMask should only possibly differ in the high bits + testassert((objc_debug_isa_class_mask & 0xffff) == ((uintptr_t)absoluteMask & 0xffff)); // Indexed isa variables DO NOT exist on packed-isa platforms testassert(!dlsym(RTLD_DEFAULT, "objc_absolute_indexed_isa_magic_mask")); testassert(!dlsym(RTLD_DEFAULT, 
"objc_absolute_indexed_isa_magic_value")); testassert(!dlsym(RTLD_DEFAULT, "objc_absolute_indexed_isa_index_mask")); testassert(!dlsym(RTLD_DEFAULT, "objc_absolute_indexed_isa_index_shift")); - + #elif SUPPORT_INDEXED_ISA # if !OBJC_HAVE_NONPOINTER_ISA || OBJC_HAVE_PACKED_NONPOINTER_ISA || !OBJC_HAVE_INDEXED_NONPOINTER_ISA # error wrong @@ -175,7 +184,7 @@ int main() #else # error unknown nonpointer isa format #endif - + testprintf("Isa with index\n"); id index_o = [Fake_OS_object new]; check_nonpointer(index_o, [Fake_OS_object class]); diff --git a/test/preopt-caches.entitlements b/test/preopt-caches.entitlements new file mode 100644 index 0000000..bc4acf2 --- /dev/null +++ b/test/preopt-caches.entitlements @@ -0,0 +1,12 @@ + + + + + com.apple.springboard-ui.client + + com.apple.security.system-groups + + systemgroup.com.apple.powerlog + + + diff --git a/test/preopt-caches.mm b/test/preopt-caches.mm new file mode 100644 index 0000000..7aec275 --- /dev/null +++ b/test/preopt-caches.mm @@ -0,0 +1,380 @@ +/* +TEST_ENTITLEMENTS preopt-caches.entitlements +TEST_CONFIG OS=iphoneos MEM=mrc +TEST_BUILD + mkdir -p $T{OBJDIR} + /usr/sbin/dtrace -h -s $DIR/../runtime/objc-probes.d -o $T{OBJDIR}/objc-probes.h + $C{COMPILE} $DIR/preopt-caches.mm -std=gnu++17 -isystem $C{SDK_PATH}/System/Library/Frameworks/System.framework/PrivateHeaders -I$T{OBJDIR} -ldsc -o preopt-caches.exe +END +*/ +// +// check_preopt_caches.m +// check-preopt-caches +// +// Created by Thomas Deniau on 11/06/2020. 
+// + +#define TEST_CALLS_OPERATOR_NEW + +#include "test-defines.h" +#include "../runtime/objc-private.h" +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "test.h" + +int validate_dylib_in_forked_process(const char * const toolPath, const char * const dylib) +{ + int out_pipe[2] = {-1}; + int err_pipe[2] = {-1}; + int exit_code = -1; + pid_t pid = 0; + int rval = 0; + + std::string child_stdout; + std::string child_stderr; + + posix_spawn_file_actions_t actions = NULL; + const char * const args[] = {toolPath, dylib, NULL}; + int ret = 0; + + if (pipe(out_pipe)) { + exit(3); + } + + if (pipe(err_pipe)) { + exit(3); + } + + //Do-si-do the FDs + posix_spawn_file_actions_init(&actions); + posix_spawn_file_actions_addclose(&actions, out_pipe[0]); + posix_spawn_file_actions_addclose(&actions, err_pipe[0]); + posix_spawn_file_actions_adddup2(&actions, out_pipe[1], 1); + posix_spawn_file_actions_adddup2(&actions, err_pipe[1], 2); + posix_spawn_file_actions_addclose(&actions, out_pipe[1]); + posix_spawn_file_actions_addclose(&actions, err_pipe[1]); + + // Fork so that we can dlopen the dylib in a clean context + ret = posix_spawnp(&pid, args[0], &actions, NULL, (char * const *)args, NULL); + + if (ret != 0) { + fail("posix_spawn for %s failed: returned %d, %s\n", dylib, ret, strerror(ret)); + exit(3); + } + + posix_spawn_file_actions_destroy(&actions); + close(out_pipe[1]); + close(err_pipe[1]); + + std::string buffer(4096,' '); + std::vector plist = { {out_pipe[0],POLLIN,0}, {err_pipe[0],POLLIN,0} }; + while (( (rval = poll(&plist[0],(nfds_t)plist.size(), 100000)) > 0 ) || ((rval < 0) && (errno == EINTR))) { + if (rval < 0) { + // EINTR + continue; + } + + ssize_t bytes_read = 0; + + if (plist[0].revents&(POLLERR|POLLHUP) || plist[1].revents&(POLLERR|POLLHUP)) { + bytes_read = read(out_pipe[0], &buffer[0], buffer.length()); + bytes_read = 
read(err_pipe[0], &buffer[0], buffer.length()); + break; + } + + if (plist[0].revents&POLLIN) { + bytes_read = read(out_pipe[0], &buffer[0], buffer.length()); + child_stdout += buffer.substr(0, static_cast(bytes_read)); + } + else if ( plist[1].revents&POLLIN ) { + bytes_read = read(err_pipe[0], &buffer[0], buffer.length()); + child_stderr += buffer.substr(0, static_cast(bytes_read)); + } + else break; // nothing left to read + + plist[0].revents = 0; + plist[1].revents = 0; + } + if (rval == 0) { + // Early timeout so try to clean up. + fail("Failed to validate dylib %s: timeout!\n", dylib); + return 1; + } + + + if (err_pipe[0] != -1) { + close(err_pipe[0]); + } + + if (out_pipe[0] != -1) { + close(out_pipe[0]); + } + + if (pid != 0) { + if (waitpid(pid, &exit_code, 0) < 0) { + fail("Could not wait for PID %d (dylib %s): err %s\n", pid, dylib, strerror(errno)); + } + + if (!WIFEXITED(exit_code)) { + fail("PID %d (%s) did not exit: %d. stdout: %s\n stderr: %s\n", pid, dylib, exit_code, child_stdout.c_str(), child_stderr.c_str()); + } + if (WEXITSTATUS(exit_code) != 0) { + fail("Failed to validate dylib %s\nstdout: %s\nstderr: %s\n", dylib, child_stdout.c_str(), child_stderr.c_str()); + } + } + + testprintf("%s", child_stdout.c_str()); + + return 0; +} + +bool check_class(Class cls, unsigned & cacheCount) { + // printf("%s %s\n", class_getName(cls), class_isMetaClass(cls) ? 
"(metaclass)" : ""); + + // For the initialization of the cache so that we setup the constant cache if any + class_getMethodImplementation(cls, @selector(initialize)); + + if (objc_cache_isConstantOptimizedCache(&(cls->cache), true, (uintptr_t)&_objc_empty_cache)) { + cacheCount++; + // printf("%s has a preopt cache\n", class_getName(cls)); + + // Make the union of all selectors until the preopt fallback class + const class_ro_t * fallback = ((const objc_class *) objc_cache_preoptFallbackClass(&(cls->cache)))->data()->ro(); + + std::unordered_map methods; + + Method *methodList; + unsigned count; + Class currentClass = cls; + unsigned dynamicCount = 0; + while (currentClass->data()->ro() != fallback) { + methodList = class_copyMethodList(currentClass, &count); + // printf("%d methods in method list for %s\n", count, class_getName(currentClass)); + for (unsigned i = 0 ; i < count ; i++) { + SEL sel = method_getName(methodList[i]); + if (methods.find(sel) == methods.end()) { + const char *name = sel_getName(sel); + // printf("[dynamic] %s -> %p\n", name, method_getImplementation(methodList[i])); + methods[sel] = ptrauth_strip(method_getImplementation(methodList[i]), ptrauth_key_function_pointer); + if ( (currentClass == cls) || + ( (strcmp(name, ".cxx_construct") != 0) + && (strcmp(name, ".cxx_destruct") != 0))) { + dynamicCount++; + } + } + } + if (count > 0) { + free(methodList); + } + currentClass = class_getSuperclass(currentClass); + } + + // Check we have an equality between the two caches + + // Count the methods in the preopt cache + unsigned preoptCacheCount = 0; + unsigned capacity = objc_cache_preoptCapacity(&(cls->cache)); + const preopt_cache_entry_t *buckets = objc_cache_preoptCache(&(cls->cache))->entries; + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wcast-of-sel-type" + const uint8_t *selOffsetsBase = (const uint8_t*)@selector(🤯); +#pragma clang diagnostic pop + for (unsigned i = 0 ; i < capacity ; i++) { + uint32_t selOffset = 
buckets[i].sel_offs; + if (selOffset != 0xFFFFFFFF) { + SEL sel = (SEL)(selOffsetsBase + selOffset); + IMP imp = (IMP)((uint8_t*)cls - buckets[i].imp_offs); + if (methods.find(sel) == methods.end()) { + fail("ERROR: %s: %s not found in dynamic method list\n", class_getName(cls), sel_getName(sel)); + return false; + } + IMP dynamicImp = methods.at(sel); + // printf("[static] %s -> %p\n", sel_getName(sel), imp); + if (imp != dynamicImp) { + fail("ERROR: %s: %s has different implementations %p vs %p in static and dynamic caches", class_getName(cls), sel_getName(sel), imp, dynamicImp); + return false; + } + preoptCacheCount++; + } + } + + if (preoptCacheCount != dynamicCount) { + testwarn("Methods in preopt cache:\n"); + + for (unsigned i = 0 ; i < capacity ; i++) { + uint32_t selOffset = buckets[i].sel_offs; + if (selOffset != 0xFFFFFFFF) { + SEL sel = (SEL)(selOffsetsBase + selOffset); + testwarn("%s\n", sel_getName(sel)); + } + } + + testwarn("Methods in dynamic cache:\n"); + + for (const auto & [sel, imp] : methods) { + testwarn("%s\n", sel_getName(sel)); + } + + fail("ERROR: %s's preoptimized cache is missing some methods\n", class_getName(cls)); + + return false; + } + + } else { + // printf("%s does NOT have a preopt cache\n", class_getName(cls)); + } + + return true; +} + +bool check_library(const char *path) { + std::set blacklistedClasses { + "PNPWizardScratchpadInkView", // Can only be +initialized on Pencil-capable devices + "CACDisplayManager", // rdar://64929282 (CACDisplayManager does layout in +initialize!) + }; + + testprintf("Checking %s… ", path); + + __unused void *lib = dlopen(path, RTLD_NOW); + extern uint32_t _dyld_image_count(void) __OSX_AVAILABLE_STARTING(__MAC_10_1, __IPHONE_2_0); + unsigned outCount = 0; + + // Realize all classes first. 
+ Class *allClasses = objc_copyClassList(&outCount); + if (allClasses != NULL) { + free(allClasses); + } + + allClasses = objc_copyClassesForImage(path, &outCount); + if (allClasses != NULL) { + unsigned classCount = 0; + unsigned cacheCount = 0; + + for (const Class * clsPtr = allClasses ; *clsPtr != nil ; clsPtr++) { + classCount++; + Class cls = *clsPtr; + + if (blacklistedClasses.find(class_getName(cls)) != blacklistedClasses.end()) { + continue; + } + + if (!check_class(cls, cacheCount)) { + return false; + } + + if (!class_isMetaClass(cls)) { + if (!check_class(object_getClass(cls), cacheCount)) { + return false; + } + } + } + testprintf("checked %d caches in %d classes\n", cacheCount, classCount); + free(allClasses); + } else { + testprintf("could not find %s or no class names inside\n", path); + } + + return true; +} + +size_t size_of_shared_cache_with_uuid(uuid_t uuid) { + DIR* dfd = opendir(IPHONE_DYLD_SHARED_CACHE_DIR); + if (!dfd) { + fail("Error: unable to open shared cache dir %s\n", + IPHONE_DYLD_SHARED_CACHE_DIR); + exit(1); + } + + uint64_t shared_cache_size = 0; + + struct dirent *dp; + while ((dp = readdir(dfd))) { + char full_filename[512]; + snprintf(full_filename, sizeof(full_filename), "%s%s", + IPHONE_DYLD_SHARED_CACHE_DIR, dp->d_name); + + struct stat stat_buf; + if (stat(full_filename, &stat_buf) != 0) + continue; + + if ((stat_buf.st_mode & S_IFMT) == S_IFDIR) + continue; + + int fd = open(full_filename, O_RDONLY); + if (fd < 0) { + fprintf(stderr, "Error: unable to open file %s\n", full_filename); + continue; + } + + struct dyld_cache_header header; + if (read(fd, &header, sizeof(header)) != sizeof(header)) { + fprintf(stderr, "Error: unable to read dyld shared cache header from %s\n", + full_filename); + close(fd); + continue; + } + + if (uuid_compare(header.uuid, uuid) == 0) { + shared_cache_size = stat_buf.st_size; + break; + } + } + + closedir(dfd); + + return shared_cache_size; +} + +int main (int argc, const char * argv[]) +{ + if 
(argc == 1) { + int err = 0; + dyld_process_info process_info = _dyld_process_info_create(mach_task_self(), 0, &err); + if (NULL == process_info) { + mach_error("_dyld_process_info_create", err); + fail("_dyld_process_info_create"); + return 2; + } + dyld_process_cache_info cache_info; + _dyld_process_info_get_cache(process_info, &cache_info); + + __block std::set dylibsSet; + size_t size = size_of_shared_cache_with_uuid(cache_info.cacheUUID); + dyld_shared_cache_iterate((void*)cache_info.cacheBaseAddress, (uint32_t)size, ^(const dyld_shared_cache_dylib_info* dylibInfo, __unused const dyld_shared_cache_segment_info* segInfo) { + if (dylibInfo->isAlias) return; + std::string path(dylibInfo->path); + dylibsSet.insert(path); + }); + std::vector dylibs(dylibsSet.begin(), dylibsSet.end()); + + dispatch_apply(dylibs.size(), DISPATCH_APPLY_AUTO, ^(size_t idx) { + validate_dylib_in_forked_process(argv[0], dylibs[idx].c_str()); + }); + } else { + const char *libraryName = argv[1]; + if (!check_library(libraryName)) { + fail("checking library %s\n", libraryName); + return 1; + } + } + + succeed(__FILE__); + return 0; +} diff --git a/test/protocolSmall.m b/test/protocolSmall.m new file mode 100644 index 0000000..a3f6fa6 --- /dev/null +++ b/test/protocolSmall.m @@ -0,0 +1,91 @@ +// TEST_CFLAGS -framework Foundation +// need Foundation to get NSObject compatibility additions for class Protocol +// because ARC calls [protocol retain] +/* +TEST_BUILD_OUTPUT +.*protocolSmall.m:\d+:\d+: warning: cannot find protocol definition for 'SmallProto' +.*protocolSmall.m:\d+:\d+: note: protocol 'SmallProto' has no definition +END +*/ + +#include "test.h" +#include "testroot.i" +#include + +struct MethodListOneEntry { + uint32_t entSizeAndFlags; + uint32_t count; + SEL name; + const char *types; + void *imp; +}; + +struct SmallProtoStructure { + Class isa; + const char *mangledName; + struct protocol_list_t *protocols; + void *instanceMethods; + void *classMethods; + void 
*optionalInstanceMethods; + void *optionalClassMethods; + void *instanceProperties; + uint32_t size; // sizeof(protocol_t) + uint32_t flags; +}; + +struct MethodListOneEntry SmallProtoMethodList = { + .entSizeAndFlags = 3 * sizeof(void *), + .count = 1, + .name = NULL, + .types = "v@:", + .imp = NULL, +}; + +struct SmallProtoStructure SmallProtoData + __asm__("__OBJC_PROTOCOL_$_SmallProto") + = { + .mangledName = "SmallProto", + .instanceMethods = &SmallProtoMethodList, + .size = sizeof(struct SmallProtoStructure), +}; + +void *SmallProtoListEntry + __attribute__((section("__DATA,__objc_protolist,coalesced,no_dead_strip"))) + = &SmallProtoData; + +@protocol SmallProto; +@protocol NormalProto +- (void)protoMethod; +@end + +@interface C: TestRoot @end +@implementation C +- (void)protoMethod {} +@end + +int main() +{ + // Fix up the method list selector by hand, getting the compiler to generate a + // proper selref as a compile-time constant is a pain. + SmallProtoMethodList.name = @selector(protoMethod); + unsigned protoCount; + + Protocol * __unsafe_unretained *protos = class_copyProtocolList([C class], &protoCount); + for (unsigned i = 0; i < protoCount; i++) { + testprintf("Checking index %u protocol %p\n", i, protos[i]); + const char *name = protocol_getName(protos[i]); + testprintf("Name is %s\n", name); + testassert(strcmp(name, "SmallProto") == 0 || strcmp(name, "NormalProto") == 0); + + objc_property_t *classProperties = protocol_copyPropertyList2(protos[i], NULL, YES, NO); + testassert(classProperties == NULL); + + struct objc_method_description desc = protocol_getMethodDescription(protos[i], @selector(protoMethod), YES, YES); + testprintf("Protocol protoMethod name is %s types are %s\n", desc.name, desc.types); + testassert(desc.name == @selector(protoMethod)); + testassert(desc.types[0] == 'v'); + } + free(protos); + + succeed(__FILE__); +} diff --git a/test/readClassPair.m b/test/readClassPair.m index 80313b2..ebc8587 100644 --- a/test/readClassPair.m +++ 
b/test/readClassPair.m @@ -48,10 +48,16 @@ int main() // Read a non-root class. testassert(!objc_getClass("Sub")); - extern intptr_t OBJC_CLASS_$_Sub[OBJC_MAX_CLASS_SIZE/sizeof(void*)]; + // Clang assumes too much alignment on this by default (rdar://problem/60881608), + // so tell it that it's only as aligned as an intptr_t. + extern _Alignas(intptr_t) intptr_t OBJC_CLASS_$_Sub[OBJC_MAX_CLASS_SIZE/sizeof(void*)]; // Make a duplicate of class Sub for use later. intptr_t Sub2_buf[OBJC_MAX_CLASS_SIZE/sizeof(void*)]; memcpy(Sub2_buf, &OBJC_CLASS_$_Sub, sizeof(Sub2_buf)); + // Re-sign the isa and super pointers in the new location. + ((Class __ptrauth_objc_isa_pointer *)(void *)Sub2_buf)[0] = ((Class __ptrauth_objc_isa_pointer *)(void *)&OBJC_CLASS_$_Sub)[0]; + ((Class __ptrauth_objc_super_pointer *)(void *)Sub2_buf)[1] = ((Class __ptrauth_objc_super_pointer *)(void *)&OBJC_CLASS_$_Sub)[1]; + Class Sub = objc_readClassPair((__bridge Class)(void*)&OBJC_CLASS_$_Sub, &ii); testassert(Sub); diff --git a/test/rr-sidetable.m b/test/rr-sidetable.m index daa4090..ac3606a 100644 --- a/test/rr-sidetable.m +++ b/test/rr-sidetable.m @@ -9,7 +9,7 @@ #include "test.h" #import -#define OBJECTS 1 +#define OBJECTS 10 #define LOOPS 256 #define THREADS 16 #if __x86_64__ diff --git a/test/runtime.m b/test/runtime.m index 50bd68c..4e22606 100644 --- a/test/runtime.m +++ b/test/runtime.m @@ -221,6 +221,13 @@ int main() testassert(strcmp(class_getName([SwiftV1Class3 class]), class_getName(object_getClass([SwiftV1Class3 class]))) == 0); testassert(strcmp(class_getName([SwiftV1Class4 class]), class_getName(object_getClass([SwiftV1Class4 class]))) == 0); + testassert(!_class_isSwift([TestRoot class])); + testassert(!_class_isSwift([Sub class])); + testassert(_class_isSwift([SwiftV1Class class])); + testassert(_class_isSwift([SwiftV1Class2 class])); + testassert(_class_isSwift([SwiftV1Class3 class])); + testassert(_class_isSwift([SwiftV1Class4 class])); + succeed(__FILE__); } diff --git 
a/test/setAssociatedObjectHook.m b/test/setAssociatedObjectHook.m index e244d5c..97f78c1 100644 --- a/test/setAssociatedObjectHook.m +++ b/test/setAssociatedObjectHook.m @@ -1,47 +1,46 @@ -// TEST_CONFIG +/* + TEST_CONFIG MEM=mrc + TEST_ENV OBJC_DISABLE_NONPOINTER_ISA=YES +*/ #include "test.h" #include "testroot.i" -id sawObject; -const void *sawKey; -id sawValue; -objc_AssociationPolicy sawPolicy; +bool hasAssociations = false; -objc_hook_setAssociatedObject originalSetAssociatedObject; +@interface TestRoot (AssocHooks) +@end -void hook(id _Nonnull object, const void * _Nonnull key, id _Nullable value, objc_AssociationPolicy policy) { - sawObject = object; - sawKey = key; - sawValue = value; - sawPolicy = policy; - originalSetAssociatedObject(object, key, value, policy); +@implementation TestRoot (AssocHooks) + +- (void)_noteAssociatedObjects { + hasAssociations = true; +} + +// -_noteAssociatedObjects is currently limited to raw-isa custom-rr to avoid overhead +- (void) release { } +@end + int main() { id obj = [TestRoot new]; id value = [TestRoot new]; const void *key = "key"; objc_setAssociatedObject(obj, key, value, OBJC_ASSOCIATION_RETAIN); - testassert(sawObject == nil); - testassert(sawKey == nil); - testassert(sawValue == nil); - testassert(sawPolicy == 0); + testassert(hasAssociations == true); id out = objc_getAssociatedObject(obj, key); testassert(out == value); - objc_setHook_setAssociatedObject(hook, &originalSetAssociatedObject); - + hasAssociations = false; key = "key2"; objc_setAssociatedObject(obj, key, value, OBJC_ASSOCIATION_RETAIN); - testassert(sawObject == obj); - testassert(sawKey == key); - testassert(sawValue == value); - testassert(sawPolicy == OBJC_ASSOCIATION_RETAIN); + testassert(hasAssociations == false); //only called once + out = objc_getAssociatedObject(obj, key); testassert(out == value); succeed(__FILE__); -} \ No newline at end of file +} diff --git a/test/swift-class-def.m b/test/swift-class-def.m index 6bc2d05..9ca2f16 100644 
--- a/test/swift-class-def.m +++ b/test/swift-class-def.m @@ -15,9 +15,15 @@ #if __has_feature(ptrauth_calls) # define SIGNED_METHOD_LIST_IMP "@AUTH(ia,0,addr) " # define SIGNED_STUB_INITIALIZER "@AUTH(ia,0xc671,addr) " +# define SIGNED_METHOD_LIST "@AUTH(da,0xC310,addr) " +# define SIGNED_ISA "@AUTH(da, 0x6AE1, addr) " +# define SIGNED_SUPER "@AUTH(da, 0xB5AB, addr) " #else # define SIGNED_METHOD_LIST_IMP # define SIGNED_STUB_INITIALIZER +# define SIGNED_METHOD_LIST +# define SIGNED_ISA +# define SIGNED_SUPER #endif #define str(x) #x @@ -41,8 +47,8 @@ asm( \ ".section __DATA,__objc_data \n" \ ".align 3 \n" \ "_OBJC_CLASS_$_" #name ": \n" \ - PTR "_OBJC_METACLASS_$_" #name "\n" \ - PTR "_OBJC_CLASS_$_" #superclass "\n" \ + PTR "_OBJC_METACLASS_$_" #name SIGNED_ISA "\n" \ + PTR "_OBJC_CLASS_$_" #superclass SIGNED_SUPER "\n" \ PTR "__objc_empty_cache \n" \ PTR "0 \n" \ PTR "L_" #name "_ro + 2 \n" \ @@ -82,8 +88,8 @@ asm( \ PTR "0 \n" \ \ "_OBJC_METACLASS_$_" #name ": \n" \ - PTR "_OBJC_METACLASS_$_" #superclass "\n" \ - PTR "_OBJC_METACLASS_$_" #superclass "\n" \ + PTR "_OBJC_METACLASS_$_" #superclass SIGNED_ISA "\n" \ + PTR "_OBJC_METACLASS_$_" #superclass SIGNED_SUPER "\n" \ PTR "__objc_empty_cache \n" \ PTR "0 \n" \ PTR "L_" #name "_meta_ro \n" \ @@ -123,7 +129,7 @@ asm( \ ONLY_LP64(".long 0 \n") \ PTR "0 \n" \ PTR "L_" #name "_name \n" \ - PTR "L_" #name "_methods \n" \ + PTR "L_" #name "_methods" SIGNED_METHOD_LIST "\n" \ PTR "0 \n" \ PTR "L_" #name "_ivars \n" \ PTR "0 \n" \ @@ -137,7 +143,7 @@ asm( \ ONLY_LP64(".long 0 \n") \ PTR "0 \n" \ PTR "L_" #name "_name \n" \ - PTR "L_" #name "_meta_methods \n" \ + PTR "L_" #name "_meta_methods" SIGNED_METHOD_LIST "\n" \ PTR "0 \n" \ PTR "0 \n" \ PTR "0 \n" \ diff --git a/test/swiftMetadataInitializerRealloc.m b/test/swiftMetadataInitializerRealloc.m index c50d1dc..9e72211 100644 --- a/test/swiftMetadataInitializerRealloc.m +++ b/test/swiftMetadataInitializerRealloc.m @@ -65,6 +65,9 @@ Class initSub(Class cls, void 
*arg) // Example: rdar://problem/50707074 Class HeapSwiftSub = (Class)malloc(OBJC_MAX_CLASS_SIZE); memcpy(HeapSwiftSub, RawRealSwiftSub, OBJC_MAX_CLASS_SIZE); + // Re-sign the isa and super pointers in the new location. + ((Class __ptrauth_objc_isa_pointer *)(void *)HeapSwiftSub)[0] = ((Class __ptrauth_objc_isa_pointer *)(void *)RawRealSwiftSub)[0]; + ((Class __ptrauth_objc_super_pointer *)(void *)HeapSwiftSub)[1] = ((Class __ptrauth_objc_super_pointer *)(void *)RawRealSwiftSub)[1]; testprintf("initSub beginning _objc_realizeClassFromSwift\n"); _objc_realizeClassFromSwift(HeapSwiftSub, cls); diff --git a/test/taggedPointers.m b/test/taggedPointers.m index 76f1617..490838b 100644 --- a/test/taggedPointers.m +++ b/test/taggedPointers.m @@ -295,6 +295,22 @@ void testGenericTaggedPointer(objc_tag_index_t tag, Class cls) RELEASE_VAR(w); } +#if OBJC_SPLIT_TAGGED_POINTERS +void testConstantTaggedPointerRoundTrip(void *ptr) +{ + uintptr_t tagged = (uintptr_t)ptr | objc_debug_constant_cfstring_tag_bits; + void *untagged = _objc_getTaggedPointerRawPointerValue((void *)tagged); + testassert(ptr == untagged); +} + +void testConstantTaggedPointers(void) +{ + testConstantTaggedPointerRoundTrip(0); + testConstantTaggedPointerRoundTrip((void *)sizeof(void *)); + testConstantTaggedPointerRoundTrip((void *)(MACH_VM_MAX_ADDRESS - sizeof(void *))); +} +#endif + int main() { testassert(objc_debug_taggedpointer_mask != 0); @@ -336,6 +352,10 @@ int main() objc_getClass("TaggedNSObjectSubclass")); testGenericTaggedPointer(OBJC_TAG_NSManagedObjectID, objc_getClass("TaggedNSObjectSubclass")); + +#if OBJC_SPLIT_TAGGED_POINTERS + testConstantTaggedPointers(); +#endif } POP_POOL; succeed(__FILE__); diff --git a/test/taggedPointersTagObfuscationDisabled.m b/test/taggedPointersTagObfuscationDisabled.m index a3aad8b..e9fee7d 100644 --- a/test/taggedPointersTagObfuscationDisabled.m +++ b/test/taggedPointersTagObfuscationDisabled.m @@ -14,7 +14,13 @@ int main() int main() { - 
testassert(_objc_getTaggedPointerTag((void *)1) == 0); +#if OBJC_SPLIT_TAGGED_POINTERS + void *obj = (void *)0; +#else + void *obj = (void *)1; +#endif + + testassert(_objc_getTaggedPointerTag(obj) == 0); succeed(__FILE__); } diff --git a/test/test-defines.h b/test/test-defines.h new file mode 100644 index 0000000..0a74274 --- /dev/null +++ b/test/test-defines.h @@ -0,0 +1 @@ +#define TEST_OVERRIDES_NEW 1 diff --git a/test/test.h b/test/test.h index 4ae8137..33f223a 100644 --- a/test/test.h +++ b/test/test.h @@ -15,7 +15,8 @@ #include #if __cplusplus #include -using namespace std; +using std::atomic_int; +using std::memory_order_relaxed; #else #include #endif @@ -83,6 +84,40 @@ static inline void fail(const char *msg, ...) #define __testassert(cond, file, line) \ (fail("failed assertion '%s' at %s:%u", cond, __FILE__, __LINE__)) +static inline char *hexstring(uint8_t *data, size_t size) +{ + char *str; + switch (size) { + case sizeof(unsigned long long): + asprintf(&str, "%016llx", *(unsigned long long *)data); + break; + case sizeof(unsigned int): + asprintf(&str, "%08x", *(unsigned int*)data); + break; + case sizeof(uint16_t): + asprintf(&str, "%04x", *(uint16_t *)data); + break; + default: + str = (char *)malloc(size * 2 + 1); + for (size_t i = 0; i < size; i++) { + sprintf(str + i, "%02x", data[i]); + } + } + return str; +} + +static inline void failnotequal(uint8_t *lhs, size_t lhsSize, uint8_t *rhs, size_t rhsSize, const char *lhsStr, const char *rhsStr, const char *file, unsigned line) +{ + fprintf(stderr, "BAD: failed assertion '%s != %s' (0x%s != 0x%s) at %s:%u\n", lhsStr, rhsStr, hexstring(lhs, lhsSize), hexstring(rhs, rhsSize), file, line); + exit(1); +} + +#define testassertequal(lhs, rhs) do {\ + __typeof__(lhs) __lhs = lhs; \ + __typeof__(rhs) __rhs = rhs; \ + if ((lhs) != (rhs)) failnotequal((uint8_t *)&__lhs, sizeof(__lhs), (uint8_t *)&__rhs, sizeof(__rhs), #lhs, #rhs, __FILE__, __LINE__); \ +} while(0) + /* time-sensitive assertion, disabled under 
valgrind */ #define timecheck(name, time, fast, slow) \ if (getenv("VALGRIND") && 0 != strcmp(getenv("VALGRIND"), "NO")) { \ @@ -208,17 +243,20 @@ static inline void testonthread(__unsafe_unretained testblock_t code) `#define TEST_CALLS_OPERATOR_NEW` before including test.h. */ #if __cplusplus && !defined(TEST_CALLS_OPERATOR_NEW) +#if !defined(TEST_OVERRIDES_NEW) +#define TEST_OVERRIDES_NEW 1 +#endif #pragma clang diagnostic push #pragma clang diagnostic ignored "-Winline-new-delete" #import -inline void* operator new(std::size_t) throw (std::bad_alloc) { fail("called global operator new"); } -inline void* operator new[](std::size_t) throw (std::bad_alloc) { fail("called global operator new[]"); } -inline void* operator new(std::size_t, const std::nothrow_t&) throw() { fail("called global operator new(nothrow)"); } -inline void* operator new[](std::size_t, const std::nothrow_t&) throw() { fail("called global operator new[](nothrow)"); } -inline void operator delete(void*) throw() { fail("called global operator delete"); } -inline void operator delete[](void*) throw() { fail("called global operator delete[]"); } -inline void operator delete(void*, const std::nothrow_t&) throw() { fail("called global operator delete(nothrow)"); } -inline void operator delete[](void*, const std::nothrow_t&) throw() { fail("called global operator delete[](nothrow)"); } +inline void* operator new(std::size_t) { fail("called global operator new"); } +inline void* operator new[](std::size_t) { fail("called global operator new[]"); } +inline void* operator new(std::size_t, const std::nothrow_t&) noexcept(true) { fail("called global operator new(nothrow)"); } +inline void* operator new[](std::size_t, const std::nothrow_t&) noexcept(true) { fail("called global operator new[](nothrow)"); } +inline void operator delete(void*) noexcept(true) { fail("called global operator delete"); } +inline void operator delete[](void*) noexcept(true) { fail("called global operator delete[]"); } +inline void 
operator delete(void*, const std::nothrow_t&) noexcept(true) { fail("called global operator delete(nothrow)"); } +inline void operator delete[](void*, const std::nothrow_t&) noexcept(true) { fail("called global operator delete[](nothrow)"); } #pragma clang diagnostic pop #endif @@ -301,7 +339,7 @@ static inline void leak_mark(void) leak_dump_heap("HEAP AT leak_check"); \ } \ inuse = leak_inuse(); \ - if (inuse > _leak_start + n) { \ + if (inuse > _leak_start + (n)) { \ fprintf(stderr, "BAD: %zu bytes leaked at %s:%u " \ "(try LEAK_HEAP and HANG_ON_LEAK to debug)\n", \ inuse - _leak_start, __FILE__, __LINE__); \ diff --git a/test/test.pl b/test/test.pl index 46c3d03..88221aa 100755 --- a/test/test.pl +++ b/test/test.pl @@ -6,6 +6,16 @@ use strict; use File::Basename; +use Config; +my $supportsParallelBuilds = $Config{useithreads}; + +if ($supportsParallelBuilds) { + require threads; + import threads; + require Thread::Queue; + import Thread::Queue; +} + # We use encode_json() to write BATS plist files. # JSON::PP does not exist on iOS devices, but we need not write plists there. # So we simply load JSON:PP if it exists. @@ -13,6 +23,13 @@ if (eval { require JSON::PP; 1; }) { JSON::PP->import(); } +# iOS also doesn't have Text::Glob. We don't need it there. +my $has_match_glob = 0; +if (eval { require Text::Glob; 1; }) { + Text::Glob->import(); + $has_match_glob = 1; +} + chdir dirname $0; chomp (my $DIR = `pwd`); @@ -31,6 +48,8 @@ options: ARCH= OS=[sdk version][-[-]] ROOT=/path/to/project.roots/ + HOST= + DEVICE= CC= @@ -44,6 +63,8 @@ options: BATS=0|1 (build for and/or run in BATS?) 
BUILD_SHARED_CACHE=0|1 (build a dyld shared cache with the root and test against that) DYLD=2|3 (test in dyld 2 or dyld 3 mode) + PARALLELBUILDS=N (number of parallel builds to run simultaneously) + SHAREDCACHEDIR=/path/to/custom/shared/cache/directory examples: @@ -108,6 +129,11 @@ my $BATS; my $HOST; my $PORT; +my $DEVICE; + +my $PARALLELBUILDS; + +my $SHAREDCACHEDIR; my @TESTLIBNAMES = ("libobjc.A.dylib", "libobjc-trampolines.dylib"); my $TESTLIBDIR = "/usr/lib"; @@ -223,31 +249,20 @@ my %languages_for_extension = ( # Run some newline-separated commands like `make` would, stopping if any fail # run("cmd1 \n cmd2 \n cmd3") sub make { + my ($cmdstr, $cwd) = @_; my $output = ""; - my @cmds = split("\n", $_[0]); + my @cmds = split("\n", $cmdstr); die if scalar(@cmds) == 0; $? = 0; foreach my $cmd (@cmds) { chomp $cmd; next if $cmd =~ /^\s*$/; $cmd .= " 2>&1"; - print "$cmd\n" if $VERBOSE; - eval { - local $SIG{ALRM} = sub { die "alarm\n" }; - # Timeout after 600 seconds so a deadlocked test doesn't wedge the - # entire test suite. Increase to an hour for B&I builds. 
- if (exists $ENV{"RC_XBS"}) { - alarm 3600; - } else { - alarm 600; - } - $output .= `$cmd`; - alarm 0; - }; - if ($@) { - die unless $@ eq "alarm\n"; - $output .= "\nTIMED OUT"; + if (defined $cwd) { + $cmd = "cd $cwd; $cmd"; } + print "$cmd\n" if $VERBOSE; + $output .= `$cmd`; last if $?; } print "$output\n" if $VERBOSE; @@ -262,7 +277,7 @@ sub chdir_verbose { sub rm_rf_verbose { my $dir = shift || die; - print "mkdir -p $dir\n" if $VERBOSE; + print "rm -rf $dir\n" if $VERBOSE; `rm -rf '$dir'`; die "couldn't rm -rf $dir" if $?; } @@ -749,6 +764,7 @@ sub gather_simple { # TEST_BUILD build instructions # TEST_BUILD_OUTPUT expected build stdout/stderr # TEST_RUN_OUTPUT expected run stdout/stderr + # TEST_ENTITLEMENTS path to entitlements file open(my $in, "< $file") || die; my $contents = join "", <$in>; @@ -758,11 +774,15 @@ sub gather_simple { my ($conditionstring) = ($contents =~ /\bTEST_CONFIG\b(.*)$/m); my ($envstring) = ($contents =~ /\bTEST_ENV\b(.*)$/m); my ($cflags) = ($contents =~ /\bTEST_CFLAGS\b(.*)$/m); + my ($entitlements) = ($contents =~ /\bTEST_ENTITLEMENTS\b(.*)$/m); + $entitlements =~ s/^\s+|\s+$//g; my ($buildcmd) = extract_multiline("TEST_BUILD", $contents, $name); my ($builderror) = extract_multiple_multiline("TEST_BUILD_OUTPUT", $contents, $name); my ($runerror) = extract_multiple_multiline("TEST_RUN_OUTPUT", $contents, $name); - return 0 if !$test_h && !$disabled && !$crashes && !defined($conditionstring) && !defined($envstring) && !defined($cflags) && !defined($buildcmd) && !defined($builderror) && !defined($runerror); + return 0 if !$test_h && !$disabled && !$crashes && !defined($conditionstring) + && !defined($envstring) && !defined($cflags) && !defined($buildcmd) + && !defined($builderror) && !defined($runerror) && !defined($entitlements); if ($disabled) { colorprint $yellow, "SKIP: $name (disabled by $disabled)"; @@ -828,6 +848,7 @@ sub gather_simple { TEST_RUN => $run, DSTDIR => "$C{DSTDIR}/$name.build", OBJDIR => 
"$C{OBJDIR}/$name.build", + ENTITLEMENTS => $entitlements, }; return 1; @@ -873,22 +894,34 @@ sub build_simple { my $name = shift; my %T = %{$C{"TEST_$name"}}; - mkdir_verbose $T{DSTDIR}; - chdir_verbose $T{DSTDIR}; + my $dstdir = $T{DSTDIR}; + if (-e "$dstdir/build-succeeded") { + # We delete the whole test directory before building (if it existed), + # so if this file exists now, that means another configuration already + # did an equivalent build. + print "note: $name is already built at $dstdir, skipping the build\n" if $VERBOSE; + return 1; + } + + mkdir_verbose $dstdir; # we don't mkdir $T{OBJDIR} because most tests don't use it my $ext = $ALL_TESTS{$name}; my $file = "$DIR/$name.$ext"; if ($T{TEST_CRASHES}) { - `echo '$crashcatch' > crashcatch.c`; - make("$C{COMPILE_C} -dynamiclib -o libcrashcatch.dylib -x c crashcatch.c"); - die "$?" if $?; + `echo '$crashcatch' > $dstdir/crashcatch.c`; + my $output = make("$C{COMPILE_C} -dynamiclib -o libcrashcatch.dylib -x c crashcatch.c", $dstdir); + if ($?) { + colorprint $red, "FAIL: building crashcatch.c"; + colorprefix $red, $output; + return 0; + } } my $cmd = $T{TEST_BUILD} ? 
eval "return \"$T{TEST_BUILD}\"" : "$C{COMPILE} $T{TEST_CFLAGS} $file -o $name.exe"; - my $output = make($cmd); + my $output = make($cmd, $dstdir); # ignore out-of-date text-based stubs (caused by ditto into SDK) $output =~ s/ld: warning: text-based stub file.*\n//g; @@ -901,6 +934,7 @@ sub build_simple { $output =~ s/^warning: callee: [^\n]+\n//g; # rdar://38710948 $output =~ s/ld: warning: ignoring file [^\n]*libclang_rt\.bridgeos\.a[^\n]*\n//g; + $output =~ s/ld: warning: building for iOS Simulator, but[^\n]*\n//g; # ignore compiler logging of CCC_OVERRIDE_OPTIONS effects if (defined $ENV{CCC_OVERRIDE_OPTIONS}) { $output =~ s/### (CCC_OVERRIDE_OPTIONS:|Adding argument|Deleting argument|Replacing) [^\n]*\n//g; @@ -943,23 +977,36 @@ sub build_simple { } if ($ok) { - foreach my $file (glob("*.exe *.dylib *.bundle")) { + foreach my $file (glob("$dstdir/*.exe $dstdir/*.dylib $dstdir/*.bundle")) { if (!$BATS) { # not for BATS to save space and build time # fixme use SYMROOT? - make("xcrun dsymutil $file"); + make("xcrun dsymutil $file", $dstdir); } if ($C{OS} eq "macosx" || $C{OS} =~ /simulator/) { # setting any entitlements disables dyld environment variables } else { # get-task-allow entitlement is required # to enable dyld environment variables - make("xcrun codesign -s - --entitlements $DIR/get_task_allow_entitlement.plist $file"); - die "$?" if $?; + if (!$T{ENTITLEMENTS}) { + $T{ENTITLEMENTS} = "get_task_allow_entitlement.plist"; + } + my $output = make("xcrun codesign -s - --entitlements $DIR/$T{ENTITLEMENTS} $file", $dstdir); + if ($?) { + colorprint $red, "FAIL: codesign $file"; + colorprefix $red, $output; + return 0; + } } } } + # Mark the build as successful so other configs with the same build + # requirements can skip buildiing. 
+ if ($ok) { + make("touch build-succeeded", $dstdir); + } + return $ok; } @@ -993,6 +1040,10 @@ sub run_simple { die "unknown DYLD setting $C{DYLD}"; } + if ($SHAREDCACHEDIR) { + $env .= " DYLD_SHARED_REGION=private DYLD_SHARED_CACHE_DIR=$SHAREDCACHEDIR"; + } + my $output; if ($C{ARCH} =~ /^arm/ && `uname -p` !~ /^arm/) { @@ -1008,23 +1059,12 @@ sub run_simple { $env .= " DYLD_INSERT_LIBRARIES=$remotedir/libcrashcatch.dylib"; } - my $cmd = "ssh -p $PORT $HOST 'cd $remotedir && env $env ./$name.exe'"; + my $cmd = "ssh $PORT $HOST 'cd $remotedir && env $env ./$name.exe'"; $output = make("$cmd"); } elsif ($C{OS} =~ /simulator/) { # run locally in a simulator - # fixme selection of simulated OS version - my $simdevice; - if ($C{OS} =~ /iphonesimulator/) { - $simdevice = 'iPhone X'; - } elsif ($C{OS} =~ /watchsimulator/) { - $simdevice = 'Apple Watch Series 4 - 40mm'; - } elsif ($C{OS} =~ /tvsimulator/) { - $simdevice = 'Apple TV 1080p'; - } else { - die "unknown simulator $C{OS}\n"; - } - my $sim = "xcrun -sdk iphonesimulator simctl spawn '$simdevice'"; + my $sim = "xcrun -sdk iphonesimulator simctl spawn '$DEVICE'"; # Add test dir and libobjc's dir to DYLD_LIBRARY_PATH. # Insert libcrashcatch.dylib if necessary. 
$env .= " DYLD_LIBRARY_PATH=$testdir"; @@ -1138,11 +1178,11 @@ sub make_one_config { # set the config name now, after massaging the language and OS versions, # but before adding other settings - my $configname = config_name(%C); - die if ($configname =~ /'/); - die if ($configname =~ / /); - ($C{NAME} = $configname) =~ s/~/ /g; - (my $configdir = $configname) =~ s#/##g; + my $configdirname = config_dir_name(%C); + die if ($configdirname =~ /'/); + die if ($configdirname =~ / /); + ($C{NAME} = $configdirname) =~ s/~/ /g; + (my $configdir = $configdirname) =~ s#/##g; $C{DSTDIR} = "$DSTROOT$BUILDDIR/$configdir"; $C{OBJDIR} = "$OBJROOT$BUILDDIR/$configdir"; @@ -1404,9 +1444,9 @@ sub make_one_config { $C{XCRUN} = "env LANG=C /usr/bin/xcrun -toolchain '$C{TOOLCHAIN}'"; $C{COMPILE_C} = "$C{XCRUN} '$C{CC}' $cflags -x c -std=gnu99"; - $C{COMPILE_CXX} = "$C{XCRUN} '$C{CXX}' $cflags -x c++"; + $C{COMPILE_CXX} = "$C{XCRUN} '$C{CXX}' $cflags -x c++ -std=gnu++17"; $C{COMPILE_M} = "$C{XCRUN} '$C{CC}' $cflags $objcflags -x objective-c -std=gnu99"; - $C{COMPILE_MM} = "$C{XCRUN} '$C{CXX}' $cflags $objcflags -x objective-c++"; + $C{COMPILE_MM} = "$C{XCRUN} '$C{CXX}' $cflags $objcflags -x objective-c++ -std=gnu++17"; $C{COMPILE_SWIFT} = "$C{XCRUN} '$C{SWIFT}' $swiftflags"; $C{COMPILE} = $C{COMPILE_C} if $C{LANGUAGE} eq "c"; @@ -1483,10 +1523,13 @@ sub make_configs { return @newresults; } -sub config_name { +sub config_dir_name { my %config = @_; my $name = ""; for my $key (sort keys %config) { + # Exclude settings that only influence the run, not the build. + next if $key eq "DYLD" || $key eq "GUARDMALLOC"; + $name .= '~' if $name ne ""; $name .= "$key=$config{$key}"; } @@ -1496,7 +1539,7 @@ sub config_name { sub rsync_ios { my ($src, $timeout) = @_; for (my $i = 0; $i < 10; $i++) { - make("$DIR/timeout.pl $timeout rsync -e 'ssh -p $PORT' -av $src $HOST:/$REMOTEBASE/"); + make("$DIR/timeout.pl $timeout rsync -e 'ssh $PORT' -av $src $HOST:/$REMOTEBASE/"); return if $? 
== 0; colorprint $yellow, "WARN: RETRY\n" if $VERBOSE; } @@ -1521,8 +1564,15 @@ sub build_and_run_one_config { if ($ALL_TESTS{$test}) { gather_simple(\%C, $test) || next; # not pass, not fail push @gathertests, $test; - } else { - die "No test named '$test'\n"; + } elsif ($has_match_glob) { + my @matched = Text::Glob::match_glob($test, (keys %ALL_TESTS)); + if (not @matched) { + die "No test matched '$test'\n"; + } + foreach my $match (@matched) { + gather_simple(\%C, $match) || next; # not pass, not fail + push @gathertests, $match; + } } } @@ -1530,7 +1580,56 @@ sub build_and_run_one_config { if (!$BUILD) { @builttests = @gathertests; $testcount = scalar(@gathertests); + } elsif ($PARALLELBUILDS > 1 && $supportsParallelBuilds) { + my $workQueue = Thread::Queue->new(); + my $resultsQueue = Thread::Queue->new(); + my @threads = map { + threads->create(sub { + while (defined(my $test = $workQueue->dequeue())) { + local *STDOUT; + local *STDERR; + my $output; + open STDOUT, '>>', \$output; + open STDERR, '>>', \$output; + + my $success = build_simple(\%C, $test); + $resultsQueue->enqueue({ test => $test, success => $success, output => $output }); + } + }); + } (1 .. $PARALLELBUILDS); + + foreach my $test (@gathertests) { + if ($VERBOSE) { + print "\nBUILD $test\n"; + } + if ($ALL_TESTS{$test}) { + $testcount++; + $workQueue->enqueue($test); + } else { + die "No test named '$test'\n"; + } + } + $workQueue->end(); + foreach (@gathertests) { + my $result = $resultsQueue->dequeue(); + my $test = $result->{test}; + my $success = $result->{success}; + my $output = $result->{output}; + + print $output; + if ($success) { + push @builttests, $test; + } else { + $failcount++; + } + } + foreach my $thread (@threads) { + $thread->join(); + } } else { + if ($PARALLELBUILDS > 1) { + print "WARNING: requested parallel builds, but this perl interpreter does not support threads. 
Falling back to sequential builds.\n"; + } foreach my $test (@gathertests) { if ($VERBOSE) { print "\nBUILD $test\n"; @@ -1553,7 +1652,7 @@ sub build_and_run_one_config { # nothing to do } else { - if ($C{ARCH} =~ /^arm/ && `uname -p` !~ /^arm/) { + if ($HOST && $C{ARCH} =~ /^arm/ && `uname -p` !~ /^arm/) { # upload timeout - longer for slow watch devices my $timeout = ($C{OS} =~ /watch/) ? 120 : 20; @@ -1686,8 +1785,16 @@ $args{DYLD} = getargs("DYLD", "2,3"); $args{CC} = getargs("CC", "clang"); -$HOST = getarg("HOST", "iphone"); -$PORT = getarg("PORT", "10022"); +$HOST = getarg("HOST", 0); +$PORT = getarg("PORT", ""); +if ($PORT) { + $PORT = "-p $PORT"; +} +$DEVICE = getarg("DEVICE", "booted"); + +$PARALLELBUILDS = getarg("PARALLELBUILDS", `sysctl -n hw.ncpu`); + +$SHAREDCACHEDIR = getarg("SHAREDCACHEDIR", ""); { my $guardmalloc = getargs("GUARDMALLOC", 0); @@ -1760,6 +1867,8 @@ for my $configref (@configs) { } } +make("find $DSTROOT$BUILDDIR -name build-succeeded -delete", "/"); + print "note: -----\n"; my $color = ($failconfigs ? $red : ""); colorprint $color, "note: $testconfigs configurations, " . diff --git a/test/unload.m b/test/unload.m index ccd99b7..0cf437c 100644 --- a/test/unload.m +++ b/test/unload.m @@ -138,6 +138,9 @@ void cycle(void) int main() { + char *useClosures = getenv("DYLD_USE_CLOSURES"); + int dyld3 = useClosures != NULL && useClosures[0] != '0'; + objc_setForwardHandler((void*)&forward_handler, (void*)&forward_handler); #if defined(__arm__) || defined(__arm64__) @@ -153,10 +156,11 @@ int main() #endif leak_mark(); - while (count--) { + for (int i = 0; i < count; i++) { cycle(); } - leak_check(0); + // dyld3 currently leaks 8 bytes for each dlopen/dlclose pair, so accommodate it. rdar://problem/53769254 + leak_check(dyld3 ? 
(count * sizeof(void *)) : 0); // 5359412 Make sure dylibs with nothing other than image_info can close void *dylib = dlopen("unload3.dylib", RTLD_LAZY); @@ -164,7 +168,9 @@ int main() int err = dlclose(dylib); testassert(err == 0); err = dlclose(dylib); - testassert(err == -1); // already closed + // dyld3 doesn't error when dlclosing the dylib twice. This is probably expected. rdar://problem/53769374 + if (!dyld3) + testassert(err == -1); // already closed // Make sure dylibs with real objc content cannot close dylib = dlopen("unload4.dylib", RTLD_LAZY); @@ -172,7 +178,9 @@ int main() err = dlclose(dylib); testassert(err == 0); err = dlclose(dylib); - testassert(err == -1); // already closed + // dyld3 doesn't error when dlclosing the dylib twice. This is probably expected. rdar://problem/53769374 + if (!dyld3) + testassert(err == -1); // already closed succeed(__FILE__); } diff --git a/test/weakReferenceHook.m b/test/weakReferenceHook.m new file mode 100644 index 0000000..890173d --- /dev/null +++ b/test/weakReferenceHook.m @@ -0,0 +1,49 @@ +/* + TEST_CONFIG MEM=mrc + TEST_ENV OBJC_DISABLE_NONPOINTER_ISA=YES +*/ + +#include "test.h" +#include "testroot.i" + +bool hasWeakRefs = false; + +@interface TestRoot (WeakHooks) +@end + +@implementation TestRoot (WeakHooks) + +- (void)_setWeaklyReferenced { + hasWeakRefs = true; +} + +// -_setWeaklyReferenced is currently limited to raw-isa custom-rr to avoid overhead +- (void) release { +} + +@end + +int main() { + id obj = [TestRoot new]; + id wobj = nil; + objc_storeWeak(&wobj, obj); + testassert(hasWeakRefs == true); + + id out = objc_loadWeak(&wobj); + testassert(out == obj); + + objc_storeWeak(&wobj, nil); + out = objc_loadWeak(&wobj); + testassert(out == nil); + + hasWeakRefs = false; + objc_storeWeak(&wobj, obj); + testassert(hasWeakRefs == true); + + + out = objc_loadWeak(&wobj); + testassert(out == obj); + objc_storeWeak(&wobj, nil); + + succeed(__FILE__); +} -- 2.45.2