_gc_fixup_barrier_stubs
__objc_update_stubs_in_mach_header
_sel_init
-_sel_lock
___sel_registerName
__objc_search_builtins
__ZNK8objc_opt13objc_selopt_t3getEPKc
__ZNK8objc_opt13objc_selopt_t4hashEPKc
-_sel_unlock
_sel_registerName
_arr_init
__ZN4objc8DenseMapIP11objc_objectmLb1ENS_12DenseMapInfoIS2_EENS3_ImEEE4initEj
393CEAC60DC69E67000B69DE /* objc-references.h in Headers */ = {isa = PBXBuildFile; fileRef = 393CEAC50DC69E67000B69DE /* objc-references.h */; };
39ABD72312F0B61800D1054C /* objc-weak.h in Headers */ = {isa = PBXBuildFile; fileRef = 39ABD71F12F0B61800D1054C /* objc-weak.h */; };
39ABD72412F0B61800D1054C /* objc-weak.mm in Sources */ = {isa = PBXBuildFile; fileRef = 39ABD72012F0B61800D1054C /* objc-weak.mm */; };
+ 7593EC58202248E50046AB96 /* objc-object.h in Headers */ = {isa = PBXBuildFile; fileRef = 7593EC57202248DF0046AB96 /* objc-object.h */; };
+ 75A9504F202BAA0600D7D56F /* objc-locks-new.h in Headers */ = {isa = PBXBuildFile; fileRef = 75A9504E202BAA0300D7D56F /* objc-locks-new.h */; };
+ 75A95051202BAA9A00D7D56F /* objc-locks.h in Headers */ = {isa = PBXBuildFile; fileRef = 75A95050202BAA9A00D7D56F /* objc-locks.h */; };
+ 75A95053202BAC4100D7D56F /* objc-lockdebug.h in Headers */ = {isa = PBXBuildFile; fileRef = 75A95052202BAC4100D7D56F /* objc-lockdebug.h */; };
+ 8306440920D24A5D00E356D2 /* objc-block-trampolines.h in Headers */ = {isa = PBXBuildFile; fileRef = 8306440620D24A3E00E356D2 /* objc-block-trampolines.h */; settings = {ATTRIBUTES = (Private, ); }; };
830F2A740D737FB800392440 /* objc-msg-arm.s in Sources */ = {isa = PBXBuildFile; fileRef = 830F2A690D737FB800392440 /* objc-msg-arm.s */; };
830F2A750D737FB900392440 /* objc-msg-i386.s in Sources */ = {isa = PBXBuildFile; fileRef = 830F2A6A0D737FB800392440 /* objc-msg-i386.s */; };
830F2A7D0D737FBB00392440 /* objc-msg-x86_64.s in Sources */ = {isa = PBXBuildFile; fileRef = 830F2A720D737FB800392440 /* objc-msg-x86_64.s */; };
834DF8B715993EE1002F2BC9 /* objc-sel-old.mm in Sources */ = {isa = PBXBuildFile; fileRef = 834DF8B615993EE1002F2BC9 /* objc-sel-old.mm */; };
834EC0A411614167009B2563 /* objc-abi.h in Headers */ = {isa = PBXBuildFile; fileRef = 834EC0A311614167009B2563 /* objc-abi.h */; settings = {ATTRIBUTES = (Private, ); }; };
83725F4A14CA5BFA0014370E /* objc-opt.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83725F4914CA5BFA0014370E /* objc-opt.mm */; };
- 8379996E13CBAF6F007C2B5F /* a1a2-blocktramps-arm64.s in Sources */ = {isa = PBXBuildFile; fileRef = 8379996D13CBAF6F007C2B5F /* a1a2-blocktramps-arm64.s */; };
- 8383A3A3122600E9009290B8 /* a1a2-blocktramps-arm.s in Sources */ = {isa = PBXBuildFile; fileRef = 8383A3A1122600E9009290B8 /* a1a2-blocktramps-arm.s */; };
- 8383A3A4122600E9009290B8 /* a2a3-blocktramps-arm.s in Sources */ = {isa = PBXBuildFile; fileRef = 8383A3A2122600E9009290B8 /* a2a3-blocktramps-arm.s */; };
838485BF0D6D687300CEA253 /* hashtable2.h in Headers */ = {isa = PBXBuildFile; fileRef = 838485B70D6D687300CEA253 /* hashtable2.h */; settings = {ATTRIBUTES = (Public, ); }; };
838485C00D6D687300CEA253 /* hashtable2.mm in Sources */ = {isa = PBXBuildFile; fileRef = 838485B80D6D687300CEA253 /* hashtable2.mm */; };
838485C30D6D687300CEA253 /* maptable.h in Headers */ = {isa = PBXBuildFile; fileRef = 838485BB0D6D687300CEA253 /* maptable.h */; settings = {ATTRIBUTES = (Private, ); }; };
83C9C3391668B50E00F4E544 /* objc-msg-simulator-x86_64.s in Sources */ = {isa = PBXBuildFile; fileRef = 83C9C3381668B50E00F4E544 /* objc-msg-simulator-x86_64.s */; };
83D49E4F13C7C84F0057F1DD /* objc-msg-arm64.s in Sources */ = {isa = PBXBuildFile; fileRef = 83D49E4E13C7C84F0057F1DD /* objc-msg-arm64.s */; };
83EB007B121C9EC200B92C16 /* objc-sel-table.s in Sources */ = {isa = PBXBuildFile; fileRef = 83EB007A121C9EC200B92C16 /* objc-sel-table.s */; };
+ 83EF5E9820D2298400F486A4 /* objc-blocktramps-i386.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9C116AB2820071B552 /* objc-blocktramps-i386.s */; };
+ 83EF5E9920D2298400F486A4 /* objc-blocktramps-x86_64.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9D116AB2820071B552 /* objc-blocktramps-x86_64.s */; };
+ 83EF5E9C20D2299E00F486A4 /* objc-blocktramps-arm.s in Sources */ = {isa = PBXBuildFile; fileRef = 8383A3A1122600E9009290B8 /* objc-blocktramps-arm.s */; };
83F4B52815E843B100E0926F /* NSObjCRuntime.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F4B52615E843B100E0926F /* NSObjCRuntime.h */; settings = {ATTRIBUTES = (Public, ); }; };
83F4B52915E843B100E0926F /* NSObject.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F4B52715E843B100E0926F /* NSObject.h */; settings = {ATTRIBUTES = (Public, ); }; };
83F550E0155E030800E95D3B /* objc-cache-old.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83F550DF155E030800E95D3B /* objc-cache-old.mm */; };
87BB4EA70EC39854005D08E1 /* objc-probes.d in Sources */ = {isa = PBXBuildFile; fileRef = 87BB4E900EC39633005D08E1 /* objc-probes.d */; };
9672F7EE14D5F488007CEC96 /* NSObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = 9672F7ED14D5F488007CEC96 /* NSObject.mm */; };
- E8923DA1116AB2820071B552 /* a1a2-blocktramps-i386.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9C116AB2820071B552 /* a1a2-blocktramps-i386.s */; };
- E8923DA2116AB2820071B552 /* a1a2-blocktramps-x86_64.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9D116AB2820071B552 /* a1a2-blocktramps-x86_64.s */; };
- E8923DA3116AB2820071B552 /* a2a3-blocktramps-i386.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9E116AB2820071B552 /* a2a3-blocktramps-i386.s */; };
- E8923DA4116AB2820071B552 /* a2a3-blocktramps-x86_64.s in Sources */ = {isa = PBXBuildFile; fileRef = E8923D9F116AB2820071B552 /* a2a3-blocktramps-x86_64.s */; };
E8923DA5116AB2820071B552 /* objc-block-trampolines.mm in Sources */ = {isa = PBXBuildFile; fileRef = E8923DA0116AB2820071B552 /* objc-block-trampolines.mm */; };
+ F9BCC71B205C68E800DD9AFC /* objc-blocktramps-arm64.s in Sources */ = {isa = PBXBuildFile; fileRef = 8379996D13CBAF6F007C2B5F /* objc-blocktramps-arm64.s */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
remoteGlobalIDString = D2AAC0620554660B00DB518D;
remoteInfo = objc;
};
+ F9BCC728205C6A0900DD9AFC /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 08FB7793FE84155DC02AAC07 /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = F9BCC6CA205C68E800DD9AFC;
+ remoteInfo = "objc-trampolines";
+ };
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
393CEAC50DC69E67000B69DE /* objc-references.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-references.h"; path = "runtime/objc-references.h"; sourceTree = "<group>"; };
39ABD71F12F0B61800D1054C /* objc-weak.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-weak.h"; path = "runtime/objc-weak.h"; sourceTree = "<group>"; };
39ABD72012F0B61800D1054C /* objc-weak.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "objc-weak.mm"; path = "runtime/objc-weak.mm"; sourceTree = "<group>"; };
+ 7593EC57202248DF0046AB96 /* objc-object.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "objc-object.h"; path = "runtime/objc-object.h"; sourceTree = "<group>"; };
+ 75A9504E202BAA0300D7D56F /* objc-locks-new.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "objc-locks-new.h"; path = "runtime/objc-locks-new.h"; sourceTree = "<group>"; };
+ 75A95050202BAA9A00D7D56F /* objc-locks.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-locks.h"; path = "runtime/objc-locks.h"; sourceTree = "<group>"; };
+ 75A95052202BAC4100D7D56F /* objc-lockdebug.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-lockdebug.h"; path = "runtime/objc-lockdebug.h"; sourceTree = "<group>"; };
+ 8306440620D24A3E00E356D2 /* objc-block-trampolines.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "objc-block-trampolines.h"; path = "runtime/objc-block-trampolines.h"; sourceTree = "<group>"; };
830F2A690D737FB800392440 /* objc-msg-arm.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-msg-arm.s"; path = "runtime/Messengers.subproj/objc-msg-arm.s"; sourceTree = "<group>"; };
830F2A6A0D737FB800392440 /* objc-msg-i386.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-msg-i386.s"; path = "runtime/Messengers.subproj/objc-msg-i386.s"; sourceTree = "<group>"; };
830F2A720D737FB800392440 /* objc-msg-x86_64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-msg-x86_64.s"; path = "runtime/Messengers.subproj/objc-msg-x86_64.s"; sourceTree = "<group>"; tabWidth = 8; usesTabs = 1; };
834DF8B615993EE1002F2BC9 /* objc-sel-old.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "objc-sel-old.mm"; path = "runtime/objc-sel-old.mm"; sourceTree = "<group>"; };
834EC0A311614167009B2563 /* objc-abi.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "objc-abi.h"; path = "runtime/objc-abi.h"; sourceTree = "<group>"; };
83725F4914CA5BFA0014370E /* objc-opt.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "objc-opt.mm"; path = "runtime/objc-opt.mm"; sourceTree = "<group>"; };
- 8379996D13CBAF6F007C2B5F /* a1a2-blocktramps-arm64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a1a2-blocktramps-arm64.s"; path = "runtime/a1a2-blocktramps-arm64.s"; sourceTree = "<group>"; };
- 8383A3A1122600E9009290B8 /* a1a2-blocktramps-arm.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a1a2-blocktramps-arm.s"; path = "runtime/a1a2-blocktramps-arm.s"; sourceTree = "<group>"; };
- 8383A3A2122600E9009290B8 /* a2a3-blocktramps-arm.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a2a3-blocktramps-arm.s"; path = "runtime/a2a3-blocktramps-arm.s"; sourceTree = "<group>"; };
+ 8379996D13CBAF6F007C2B5F /* objc-blocktramps-arm64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-arm64.s"; path = "runtime/objc-blocktramps-arm64.s"; sourceTree = "<group>"; };
+ 8383A3A1122600E9009290B8 /* objc-blocktramps-arm.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-arm.s"; path = "runtime/objc-blocktramps-arm.s"; sourceTree = "<group>"; };
838485B30D6D682B00CEA253 /* libobjc.order */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = libobjc.order; sourceTree = "<group>"; };
838485B40D6D683300CEA253 /* APPLE_LICENSE */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = APPLE_LICENSE; sourceTree = "<group>"; };
838485B50D6D683300CEA253 /* ReleaseNotes.rtf */ = {isa = PBXFileReference; lastKnownFileType = text.rtf; path = ReleaseNotes.rtf; sourceTree = "<group>"; };
9672F7ED14D5F488007CEC96 /* NSObject.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = NSObject.mm; path = runtime/NSObject.mm; sourceTree = "<group>"; };
BC8B5D1212D3D48100C78A5B /* libauto.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libauto.dylib; path = /usr/lib/libauto.dylib; sourceTree = "<absolute>"; };
D2AAC0630554660B00DB518D /* libobjc.A.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; includeInIndex = 0; path = libobjc.A.dylib; sourceTree = BUILT_PRODUCTS_DIR; };
- E8923D9C116AB2820071B552 /* a1a2-blocktramps-i386.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a1a2-blocktramps-i386.s"; path = "runtime/a1a2-blocktramps-i386.s"; sourceTree = "<group>"; };
- E8923D9D116AB2820071B552 /* a1a2-blocktramps-x86_64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a1a2-blocktramps-x86_64.s"; path = "runtime/a1a2-blocktramps-x86_64.s"; sourceTree = "<group>"; };
- E8923D9E116AB2820071B552 /* a2a3-blocktramps-i386.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a2a3-blocktramps-i386.s"; path = "runtime/a2a3-blocktramps-i386.s"; sourceTree = "<group>"; };
- E8923D9F116AB2820071B552 /* a2a3-blocktramps-x86_64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "a2a3-blocktramps-x86_64.s"; path = "runtime/a2a3-blocktramps-x86_64.s"; sourceTree = "<group>"; };
+ E8923D9C116AB2820071B552 /* objc-blocktramps-i386.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-i386.s"; path = "runtime/objc-blocktramps-i386.s"; sourceTree = "<group>"; };
+ E8923D9D116AB2820071B552 /* objc-blocktramps-x86_64.s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.asm; name = "objc-blocktramps-x86_64.s"; path = "runtime/objc-blocktramps-x86_64.s"; sourceTree = "<group>"; };
E8923DA0116AB2820071B552 /* objc-block-trampolines.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "objc-block-trampolines.mm"; path = "runtime/objc-block-trampolines.mm"; sourceTree = "<group>"; };
+ F9BCC727205C68E800DD9AFC /* libobjc-trampolines.dylib */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.dylib"; includeInIndex = 0; path = "libobjc-trampolines.dylib"; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
);
runOnlyForDeploymentPostprocessing = 0;
};
+ F9BCC721205C68E800DD9AFC /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
08FB7795FE84155DC02AAC07 /* Source */,
838485B20D6D67F900CEA253 /* Other */,
1AB674ADFE9D54B511CA2CBB /* Products */,
+ F9BCC72A205C6A1600DD9AFC /* Frameworks */,
);
name = objc;
sourceTree = "<group>";
08FB7795FE84155DC02AAC07 /* Source */ = {
isa = PBXGroup;
children = (
- 8383A3A1122600E9009290B8 /* a1a2-blocktramps-arm.s */,
- 8383A3A2122600E9009290B8 /* a2a3-blocktramps-arm.s */,
838485B80D6D687300CEA253 /* hashtable2.mm */,
838485BC0D6D687300CEA253 /* maptable.mm */,
9672F7ED14D5F488007CEC96 /* NSObject.mm */,
834DF8B615993EE1002F2BC9 /* objc-sel-old.mm */,
838485EA0D6D68A200CEA253 /* objc-sync.mm */,
838485EB0D6D68A200CEA253 /* objc-typeencoding.mm */,
- 8379996D13CBAF6F007C2B5F /* a1a2-blocktramps-arm64.s */,
- E8923D9C116AB2820071B552 /* a1a2-blocktramps-i386.s */,
- E8923D9D116AB2820071B552 /* a1a2-blocktramps-x86_64.s */,
- E8923D9E116AB2820071B552 /* a2a3-blocktramps-i386.s */,
- E8923D9F116AB2820071B552 /* a2a3-blocktramps-x86_64.s */,
+ 8383A3A1122600E9009290B8 /* objc-blocktramps-arm.s */,
+ 8379996D13CBAF6F007C2B5F /* objc-blocktramps-arm64.s */,
+ E8923D9C116AB2820071B552 /* objc-blocktramps-i386.s */,
+ E8923D9D116AB2820071B552 /* objc-blocktramps-x86_64.s */,
830F2A690D737FB800392440 /* objc-msg-arm.s */,
83D49E4E13C7C84F0057F1DD /* objc-msg-arm64.s */,
830F2A6A0D737FB800392440 /* objc-msg-i386.s */,
isa = PBXGroup;
children = (
D2AAC0630554660B00DB518D /* libobjc.A.dylib */,
+ F9BCC727205C68E800DD9AFC /* libobjc-trampolines.dylib */,
);
name = Products;
sourceTree = "<group>";
834EC0A311614167009B2563 /* objc-abi.h */,
838485BB0D6D687300CEA253 /* maptable.h */,
834266D70E665A8B002E4DA2 /* objc-gdb.h */,
+ 8306440620D24A3E00E356D2 /* objc-block-trampolines.h */,
);
name = "Private Headers";
sourceTree = "<group>";
isa = PBXGroup;
children = (
838485CF0D6D68A200CEA253 /* objc-config.h */,
- 83BE02E60FCCB24D00661494 /* objc-file.h */,
83BE02E50FCCB24D00661494 /* objc-file-old.h */,
+ 83BE02E60FCCB24D00661494 /* objc-file.h */,
838485D40D6D68A200CEA253 /* objc-initialize.h */,
838485D90D6D68A200CEA253 /* objc-loadmethod.h */,
+ 75A9504E202BAA0300D7D56F /* objc-locks-new.h */,
+ 75A95052202BAC4100D7D56F /* objc-lockdebug.h */,
+ 75A95050202BAA9A00D7D56F /* objc-locks.h */,
+ 7593EC57202248DF0046AB96 /* objc-object.h */,
831C85D30E10CF850066E64C /* objc-os.h */,
838485DC0D6D68A200CEA253 /* objc-private.h */,
393CEAC50DC69E67000B69DE /* objc-references.h */,
name = "Project Headers";
sourceTree = "<group>";
};
+ F9BCC72A205C6A1600DD9AFC /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
+ 8306440820D24A5300E356D2 /* Headers */ = {
+ isa = PBXHeadersBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 8306440920D24A5D00E356D2 /* objc-block-trampolines.h in Headers */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
D2AAC0600554660B00DB518D /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
83A4AEDE1EA08C7200ACADDE /* ObjectiveC.apinotes in Headers */,
+ 75A95051202BAA9A00D7D56F /* objc-locks.h in Headers */,
83A4AEDC1EA0840800ACADDE /* module.modulemap in Headers */,
830F2A980D738DC200392440 /* hashtable.h in Headers */,
838485BF0D6D687300CEA253 /* hashtable2.h in Headers */,
838485F80D6D68A200CEA253 /* objc-exception.h in Headers */,
83BE02E80FCCB24D00661494 /* objc-file-old.h in Headers */,
83BE02E90FCCB24D00661494 /* objc-file.h in Headers */,
+ 75A9504F202BAA0600D7D56F /* objc-locks-new.h in Headers */,
834266D80E665A8B002E4DA2 /* objc-gdb.h in Headers */,
838485FB0D6D68A200CEA253 /* objc-initialize.h in Headers */,
+ 7593EC58202248E50046AB96 /* objc-object.h in Headers */,
83112ED40F00599600A5FBAF /* objc-internal.h in Headers */,
838485FE0D6D68A200CEA253 /* objc-load.h in Headers */,
838486000D6D68A200CEA253 /* objc-loadmethod.h in Headers */,
+ 75A95053202BAC4100D7D56F /* objc-lockdebug.h in Headers */,
831C85D50E10CF850066E64C /* objc-os.h in Headers */,
838486030D6D68A200CEA253 /* objc-private.h in Headers */,
393CEAC60DC69E67000B69DE /* objc-references.h in Headers */,
buildRules = (
);
dependencies = (
+ F9BCC729205C6A0900DD9AFC /* PBXTargetDependency */,
);
name = objc;
productName = objc;
productReference = D2AAC0630554660B00DB518D /* libobjc.A.dylib */;
productType = "com.apple.product-type.library.dynamic";
};
+ F9BCC6CA205C68E800DD9AFC /* objc-trampolines */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = F9BCC724205C68E800DD9AFC /* Build configuration list for PBXNativeTarget "objc-trampolines" */;
+ buildPhases = (
+ 8306440820D24A5300E356D2 /* Headers */,
+ F9BCC6EF205C68E800DD9AFC /* Sources */,
+ F9BCC721205C68E800DD9AFC /* Frameworks */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = "objc-trampolines";
+ productName = objc;
+ productReference = F9BCC727205C68E800DD9AFC /* libobjc-trampolines.dylib */;
+ productType = "com.apple.product-type.library.dynamic";
+ };
/* End PBXNativeTarget section */
/* Begin PBXProject section */
targets = (
D2AAC0620554660B00DB518D /* objc */,
837F67A81A771F63004D34FA /* objc-simulator */,
+ F9BCC6CA205C68E800DD9AFC /* objc-trampolines */,
);
};
/* End PBXProject section */
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
- shellScript = "set -x\n/usr/bin/xcrun -sdk macosx clang++ -Wall -mmacosx-version-min=10.9 -arch x86_64 -std=c++11 \"${SRCROOT}/markgc.cpp\" -o \"${BUILT_PRODUCTS_DIR}/markgc\"\n\"${BUILT_PRODUCTS_DIR}/markgc\" \"${BUILT_PRODUCTS_DIR}/libobjc.A.dylib\"";
+ shellScript = "set -x\n/usr/bin/xcrun -sdk macosx.internal clang++ -Wall -mmacosx-version-min=10.12 -arch x86_64 -std=c++11 \"${SRCROOT}/markgc.cpp\" -o \"${BUILT_PRODUCTS_DIR}/markgc\"\n\"${BUILT_PRODUCTS_DIR}/markgc\" \"${BUILT_PRODUCTS_DIR}/libobjc.A.dylib\"";
};
830F2AFA0D73BC5800392440 /* Run Script (symlink) */ = {
isa = PBXShellScriptBuildPhase;
831C85D60E10CF850066E64C /* objc-os.mm in Sources */,
87BB4EA70EC39854005D08E1 /* objc-probes.d in Sources */,
83BE02E40FCCB23400661494 /* objc-file-old.mm in Sources */,
- E8923DA1116AB2820071B552 /* a1a2-blocktramps-i386.s in Sources */,
- E8923DA2116AB2820071B552 /* a1a2-blocktramps-x86_64.s in Sources */,
- E8923DA3116AB2820071B552 /* a2a3-blocktramps-i386.s in Sources */,
- E8923DA4116AB2820071B552 /* a2a3-blocktramps-x86_64.s in Sources */,
E8923DA5116AB2820071B552 /* objc-block-trampolines.mm in Sources */,
83B1A8BE0FF1AC0D0019EA5B /* objc-msg-simulator-i386.s in Sources */,
83EB007B121C9EC200B92C16 /* objc-sel-table.s in Sources */,
- 8383A3A3122600E9009290B8 /* a1a2-blocktramps-arm.s in Sources */,
- 8383A3A4122600E9009290B8 /* a2a3-blocktramps-arm.s in Sources */,
39ABD72412F0B61800D1054C /* objc-weak.mm in Sources */,
83D49E4F13C7C84F0057F1DD /* objc-msg-arm64.s in Sources */,
- 8379996E13CBAF6F007C2B5F /* a1a2-blocktramps-arm64.s in Sources */,
9672F7EE14D5F488007CEC96 /* NSObject.mm in Sources */,
83725F4A14CA5BFA0014370E /* objc-opt.mm in Sources */,
83F550E0155E030800E95D3B /* objc-cache-old.mm in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
+ F9BCC6EF205C68E800DD9AFC /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 83EF5E9C20D2299E00F486A4 /* objc-blocktramps-arm.s in Sources */,
+ 83EF5E9820D2298400F486A4 /* objc-blocktramps-i386.s in Sources */,
+ 83EF5E9920D2298400F486A4 /* objc-blocktramps-x86_64.s in Sources */,
+ F9BCC71B205C68E800DD9AFC /* objc-blocktramps-arm64.s in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
target = D2AAC0620554660B00DB518D /* objc */;
targetProxy = 837F67AC1A771F6E004D34FA /* PBXContainerItemProxy */;
};
+ F9BCC729205C6A0900DD9AFC /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = F9BCC6CA205C68E800DD9AFC /* objc-trampolines */;
+ targetProxy = F9BCC728205C6A0900DD9AFC /* PBXContainerItemProxy */;
+ };
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
/System/Library/Frameworks/System.framework/PrivateHeaders,
);
INSTALL_PATH = /usr/lib;
+ IS_ZIPPERED = YES;
ORDER_FILE = "$(SDKROOT)/AppleInternal/OrderFiles/libobjc.order";
"ORDER_FILE[sdk=iphonesimulator*]" = "";
OTHER_CFLAGS = (
"-Xlinker",
interposable.txt,
);
+ OTHER_TAPI_FLAGS = "-exclude-public-header $(DSTROOT)/usr/include/objc/ObjectiveC.apinotes -exclude-public-header $(DSTROOT)/usr/include/objc/module.modulemap -Xparser -Wno-deprecated-declarations -Xparser -Wno-unavailable-declarations -Xparser -D_OBJC_PRIVATE_H_=1 -DOBJC_DECLARE_SYMBOLS=1";
PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc;
PRODUCT_NAME = objc.A;
PUBLIC_HEADERS_FOLDER_PATH = /usr/include/objc;
+ SUPPORTS_TEXT_BASED_API = YES;
+ TAPI_VERIFY_MODE = Pedantic;
UNEXPORTED_SYMBOLS_FILE = unexported_symbols;
};
name = Debug;
/System/Library/Frameworks/System.framework/PrivateHeaders,
);
INSTALL_PATH = /usr/lib;
+ IS_ZIPPERED = YES;
ORDER_FILE = "$(SDKROOT)/AppleInternal/OrderFiles/libobjc.order";
"ORDER_FILE[sdk=iphonesimulator*]" = "";
OTHER_CFLAGS = (
"-Xlinker",
interposable.txt,
);
+ OTHER_TAPI_FLAGS = "-exclude-public-header $(DSTROOT)/usr/include/objc/ObjectiveC.apinotes -exclude-public-header $(DSTROOT)/usr/include/objc/module.modulemap -Xparser -Wno-deprecated-declarations -Xparser -Wno-unavailable-declarations -Xparser -D_OBJC_PRIVATE_H_=1 -DOBJC_DECLARE_SYMBOLS=1";
PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc;
PRODUCT_NAME = objc.A;
PUBLIC_HEADERS_FOLDER_PATH = /usr/include/objc;
+ SUPPORTS_TEXT_BASED_API = YES;
+ TAPI_VERIFY_MODE = Pedantic;
UNEXPORTED_SYMBOLS_FILE = unexported_symbols;
+ WARNING_CFLAGS = (
+ "$(inherited)",
+ "-Wglobal-constructors",
+ );
};
name = Release;
};
1DEB914F08733D8E0010E9CD /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_LINK_OBJC_RUNTIME = NO;
CLANG_OBJC_RUNTIME = NO;
1DEB915008733D8E0010E9CD /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_LINK_OBJC_RUNTIME = NO;
CLANG_OBJC_RUNTIME = NO;
};
name = Release;
};
+ F9BCC725205C68E800DD9AFC /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
+ COPY_HEADERS_RUN_UNIFDEF = YES;
+ COPY_HEADERS_UNIFDEF_FLAGS = "-UBUILD_FOR_OSX";
+ "COPY_HEADERS_UNIFDEF_FLAGS[sdk=macosx*]" = "-DBUILD_FOR_OSX";
+ COPY_PHASE_STRIP = NO;
+ DYLIB_CURRENT_VERSION = 228;
+ EXECUTABLE_PREFIX = lib;
+ GCC_CW_ASM_SYNTAX = NO;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = NO;
+ HEADER_SEARCH_PATHS = (
+ "$(DSTROOT)/usr/include/**",
+ "$(DSTROOT)/usr/local/include/**",
+ "$(CONFIGURATION_BUILD_DIR)/usr/include/**",
+ "$(CONFIGURATION_BUILD_DIR)/usr/local/include/**",
+ /System/Library/Frameworks/System.framework/PrivateHeaders,
+ );
+ INSTALL_PATH = /usr/lib;
+ IS_ZIPPERED = YES;
+ OTHER_CFLAGS = (
+ "-fdollars-in-identifiers",
+ "$(OTHER_CFLAGS)",
+ );
+ OTHER_LDFLAGS = (
+ "-Xlinker",
+ "-not_for_dyld_shared_cache",
+ );
+ PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PUBLIC_HEADERS_FOLDER_PATH = /usr/include/objc;
+ SUPPORTS_TEXT_BASED_API = YES;
+ TAPI_VERIFY_MODE = Pedantic;
+ };
+ name = Debug;
+ };
+ F9BCC726205C68E800DD9AFC /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ COPY_HEADERS_RUN_UNIFDEF = YES;
+ COPY_HEADERS_UNIFDEF_FLAGS = "-UBUILD_FOR_OSX";
+ "COPY_HEADERS_UNIFDEF_FLAGS[sdk=macosx*]" = "-DBUILD_FOR_OSX";
+ DYLIB_CURRENT_VERSION = 228;
+ EXECUTABLE_PREFIX = lib;
+ GCC_CW_ASM_SYNTAX = NO;
+ GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = NO;
+ HEADER_SEARCH_PATHS = (
+ "$(DSTROOT)/usr/include/**",
+ "$(DSTROOT)/usr/local/include/**",
+ "$(CONFIGURATION_BUILD_DIR)/usr/include/**",
+ "$(CONFIGURATION_BUILD_DIR)/usr/local/include/**",
+ /System/Library/Frameworks/System.framework/PrivateHeaders,
+ );
+ INSTALL_PATH = /usr/lib;
+ IS_ZIPPERED = YES;
+ OTHER_CFLAGS = (
+ "-fdollars-in-identifiers",
+ "$(OTHER_CFLAGS)",
+ );
+ OTHER_LDFLAGS = (
+ "-Xlinker",
+ "-not_for_dyld_shared_cache",
+ );
+ PRIVATE_HEADERS_FOLDER_PATH = /usr/local/include/objc;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PUBLIC_HEADERS_FOLDER_PATH = /usr/include/objc;
+ SUPPORTS_TEXT_BASED_API = YES;
+ TAPI_VERIFY_MODE = Pedantic;
+ };
+ name = Release;
+ };
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
+ F9BCC724205C68E800DD9AFC /* Build configuration list for PBXNativeTarget "objc-trampolines" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ F9BCC725205C68E800DD9AFC /* Debug */,
+ F9BCC726205C68E800DD9AFC /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
/* End XCConfigurationList section */
};
rootObject = 08FB7793FE84155DC02AAC07 /* Project object */;
#ifdef __arm__
#include <arm/arch.h>
+#include "objc-config.h"
+#include "isa.h"
#ifndef _ARM_ARCH_7
# error requires armv7
#endif
-// Define SUPPORT_INDEXED_ISA for targets which store the class in the ISA as
-// an index in to a class table.
-// Note, keep this in sync with objc-config.h.
-// FIXME: Remove this duplication. We should get this from objc-config.h.
-#if __ARM_ARCH_7K__ >= 2
-# define SUPPORT_INDEXED_ISA 1
-#else
-# define SUPPORT_INDEXED_ISA 0
-#endif
-
-// Note, keep these in sync with objc-private.h
-#define ISA_INDEX_IS_NPI 1
-#define ISA_INDEX_MASK 0x0001FFFC
-#define ISA_INDEX_SHIFT 2
-#define ISA_INDEX_BITS 15
-#define ISA_INDEX_COUNT (1 << ISA_INDEX_BITS)
-#define ISA_INDEX_MAGIC_MASK 0x001E0001
-#define ISA_INDEX_MAGIC_VALUE 0x001C0001
-
.syntax unified
#define MI_EXTERN(var) \
.long LExit_objc_msgLookupSuper2_stret
.long 0
-
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-7:
- .section __DATA,__objc_msg_break
- .long 7b
- .long ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-7:
- .section __DATA,__objc_msg_break
- .long 7b
- .long FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-7:
- .section __DATA,__objc_msg_break
- .long 7b
- .long SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-7:
- .section __DATA,__objc_msg_break
- .long 7b
- .long NIL_EXIT
- .text
-.endmacro
-
/********************************************************************
* Names for relative labels
* Reserved labels: 6: 7: 8: 9:
********************************************************************/
// 6: used by CacheLookup
-// 7: used by MI_GET_ADDRESS etc and MESSENGER_START etc
+// 7: used by MI_GET_ADDRESS etc
// 8: used by CacheLookup
#define LNilReceiver 9
#define LNilReceiver_f 9f
#define CACHE 8
#define CACHE_MASK 12
+/* Field offsets in method cache bucket */
+#define CACHED_SEL 0
+#define CACHED_IMP 4
+
/* Selected field offsets in method structure */
#define METHOD_NAME 0
#define METHOD_TYPES 4
and r12, r12, r1 // r12 = index = SEL & mask
.endif
add r9, r9, r12, LSL #3 // r9 = bucket = buckets+index*8
- ldr r12, [r9] // r12 = bucket->sel
+ ldr r12, [r9, #CACHED_SEL] // r12 = bucket->sel
6:
.if $0 == STRET
teq r12, r2
teq r12, r1
.endif
bne 8f
- ldr r12, [r9, #4] // r12 = bucket->imp
+ ldr r12, [r9, #CACHED_IMP] // r12 = bucket->imp
.if $0 == STRET
tst r12, r12 // set ne for stret forwarding
.endmacro
.macro CacheLookup2
-
+#if CACHED_SEL != 0
+# error this code requires that SEL be at offset 0
+#endif
8:
cmp r12, #1
blo 8f // if (bucket->sel == 0) cache miss
it eq // if (bucket->sel == 1) cache wrap
- ldreq r9, [r9, #4] // bucket->imp is before first bucket
+ ldreq r9, [r9, #CACHED_IMP] // bucket->imp is before first bucket
ldr r12, [r9, #8]! // r12 = (++bucket)->sel
b 6b
8:
// Note: We are doing a little wasted work here to load values we might not
// need. Branching turns out to be even worse when performance was measured.
MI_GET_ADDRESS(r12, _objc_indexed_classes)
- tst.w r9, #ISA_INDEX_IS_NPI
+ tst.w r9, #ISA_INDEX_IS_NPI_MASK
itt ne
ubfxne r9, r9, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS
ldrne.w r9, [r12, r9, lsl #2]
********************************************************************/
ENTRY _objc_msgSend
- MESSENGER_START
cbz r0, LNilReceiver_f
GetClassFromIsa // r9 = class
CacheLookup NORMAL
// cache hit, IMP in r12, eq already set for nonstret forwarding
- MESSENGER_END_FAST
bx r12 // call imp
CacheLookup2 NORMAL
// cache miss
ldr r9, [r0] // r9 = self->isa
GetClassFromIsa // r9 = class
- MESSENGER_END_SLOW
b __objc_msgSend_uncached
LNilReceiver:
mov r2, #0
mov r3, #0
FP_RETURN_ZERO
- MESSENGER_END_NIL
bx lr
END_ENTRY _objc_msgSend
********************************************************************/
ENTRY _objc_msgSend_stret
- MESSENGER_START
cbz r1, LNilReceiver_f
GetClassFromIsa // r9 = class
CacheLookup STRET
// cache hit, IMP in r12, ne already set for stret forwarding
- MESSENGER_END_FAST
bx r12
CacheLookup2 STRET
// cache miss
ldr r9, [r1] // r9 = self->isa
GetClassFromIsa // r9 = class
- MESSENGER_END_SLOW
b __objc_msgSend_stret_uncached
LNilReceiver:
- MESSENGER_END_NIL
bx lr
END_ENTRY _objc_msgSend_stret
********************************************************************/
ENTRY _objc_msgSendSuper
- MESSENGER_START
ldr r9, [r0, #CLASS] // r9 = struct super->class
CacheLookup NORMAL
// cache hit, IMP in r12, eq already set for nonstret forwarding
ldr r0, [r0, #RECEIVER] // load real receiver
- MESSENGER_END_FAST
bx r12 // call imp
CacheLookup2 NORMAL
// cache miss
ldr r9, [r0, #CLASS] // r9 = struct super->class
ldr r0, [r0, #RECEIVER] // load real receiver
- MESSENGER_END_SLOW
b __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper
********************************************************************/
ENTRY _objc_msgSendSuper2
- MESSENGER_START
ldr r9, [r0, #CLASS] // class = struct super->class
ldr r9, [r9, #SUPERCLASS] // class = class->superclass
CacheLookup NORMAL
// cache hit, IMP in r12, eq already set for nonstret forwarding
ldr r0, [r0, #RECEIVER] // load real receiver
- MESSENGER_END_FAST
bx r12 // call imp
CacheLookup2 NORMAL
ldr r9, [r0, #CLASS] // class = struct super->class
ldr r9, [r9, #SUPERCLASS] // class = class->superclass
ldr r0, [r0, #RECEIVER] // load real receiver
- MESSENGER_END_SLOW
b __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper2
********************************************************************/
ENTRY _objc_msgSendSuper_stret
- MESSENGER_START
ldr r9, [r1, #CLASS] // r9 = struct super->class
CacheLookup STRET
// cache hit, IMP in r12, ne already set for stret forwarding
ldr r1, [r1, #RECEIVER] // load real receiver
- MESSENGER_END_FAST
bx r12 // call imp
CacheLookup2 STRET
// cache miss
ldr r9, [r1, #CLASS] // r9 = struct super->class
ldr r1, [r1, #RECEIVER] // load real receiver
- MESSENGER_END_SLOW
b __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper_stret
********************************************************************/
ENTRY _objc_msgSendSuper2_stret
- MESSENGER_START
ldr r9, [r1, #CLASS] // class = struct super->class
ldr r9, [r9, #SUPERCLASS] // class = class->superclass
CacheLookup STRET
// cache hit, IMP in r12, ne already set for stret forwarding
ldr r1, [r1, #RECEIVER] // load real receiver
- MESSENGER_END_FAST
bx r12 // call imp
CacheLookup2 STRET
ldr r9, [r1, #CLASS] // class = struct super->class
ldr r9, [r9, #SUPERCLASS] // class = class->superclass
ldr r1, [r1, #RECEIVER] // load real receiver
- MESSENGER_END_SLOW
b __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper2_stret
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band Z is 0 (EQ) for normal, 1 (NE) for stret
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
-
beq __objc_msgForward
b __objc_msgForward_stret
********************************************************************/
#ifdef __arm64__
-
-#include <arm/arch.h>
+#include <arm/arch.h>
+#include "isa.h"
+#include "arm64-asm.h"
.data
.align 4
.private_extern _objc_entryPoints
_objc_entryPoints:
- .quad _cache_getImp
- .quad _objc_msgSend
- .quad _objc_msgSendSuper
- .quad _objc_msgSendSuper2
- .quad _objc_msgLookup
- .quad _objc_msgLookupSuper2
- .quad 0
+ PTR _cache_getImp
+ PTR _objc_msgSend
+ PTR _objc_msgSendSuper
+ PTR _objc_msgSendSuper2
+ PTR _objc_msgLookup
+ PTR _objc_msgLookupSuper2
+ PTR 0
.private_extern _objc_exitPoints
_objc_exitPoints:
- .quad LExit_cache_getImp
- .quad LExit_objc_msgSend
- .quad LExit_objc_msgSendSuper
- .quad LExit_objc_msgSendSuper2
- .quad LExit_objc_msgLookup
- .quad LExit_objc_msgLookupSuper2
- .quad 0
-
-
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad NIL_EXIT
- .text
-.endmacro
+ PTR LExit_cache_getImp
+ PTR LExit_objc_msgSend
+ PTR LExit_objc_msgSendSuper
+ PTR LExit_objc_msgSendSuper2
+ PTR LExit_objc_msgLookup
+ PTR LExit_objc_msgLookupSuper2
+ PTR 0
/* objc_super parameter to sendSuper */
#define RECEIVER 0
-#define CLASS 8
+#define CLASS __SIZEOF_POINTER__
/* Selected field offsets in class structure */
-#define SUPERCLASS 8
-#define CACHE 16
-
-/* Selected field offsets in isa field */
-#define ISA_MASK 0x0000000ffffffff8
+#define SUPERCLASS __SIZEOF_POINTER__
+#define CACHE (2 * __SIZEOF_POINTER__)
/* Selected field offsets in method structure */
#define METHOD_NAME 0
-#define METHOD_TYPES 8
-#define METHOD_IMP 16
+#define METHOD_TYPES __SIZEOF_POINTER__
+#define METHOD_IMP (2 * __SIZEOF_POINTER__)
+
+#define BUCKET_SIZE (2 * __SIZEOF_POINTER__)
+
+
+/********************************************************************
+ * GetClassFromIsa_p16 src
+ * src is a raw isa field. Sets p16 to the corresponding class pointer.
+ * The raw isa might be an indexed isa to be decoded, or a
+ * packed isa that needs to be masked.
+ *
+ * On exit:
+ * $0 is unchanged
+ * p16 is a class pointer
+ * x10 is clobbered
+ ********************************************************************/
+
+#if SUPPORT_INDEXED_ISA
+ .align 3
+ .globl _objc_indexed_classes
+_objc_indexed_classes:
+ .fill ISA_INDEX_COUNT, PTRSIZE, 0
+#endif
+
+.macro GetClassFromIsa_p16 /* src */
+
+#if SUPPORT_INDEXED_ISA
+ // Indexed isa
+ mov p16, $0 // optimistically set dst = src
+ tbz p16, #ISA_INDEX_IS_NPI_BIT, 1f // done if not non-pointer isa
+ // isa in p16 is indexed
+ adrp x10, _objc_indexed_classes@PAGE
+ add x10, x10, _objc_indexed_classes@PAGEOFF
+ ubfx p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS // extract index
+ ldr p16, [x10, p16, UXTP #PTRSHIFT] // load class from array
+1:
+
+#elif __LP64__
+ // 64-bit packed isa
+ and p16, $0, #ISA_MASK
+
+#else
+ // 32-bit raw isa
+ mov p16, $0
+
+#endif
+
+.endmacro
/********************************************************************
********************************************************************/
.macro UNWIND
.section __LD,__compact_unwind,regular,debug
- .quad $0
+ PTR $0
.set LUnwind$0, LExit$0 - $0
.long LUnwind$0
.long $1
- .quad 0 /* no personality */
- .quad 0 /* no LSDA */
+ PTR 0 /* no personality */
+ PTR 0 /* no LSDA */
.text
.endmacro
#define GETIMP 1
#define LOOKUP 2
+// CacheHit: x17 = cached IMP, x12 = address of cached IMP
.macro CacheHit
.if $0 == NORMAL
- MESSENGER_END_FAST
- br x17 // call imp
+ TailCallCachedImp x17, x12 // authenticate and call imp
.elseif $0 == GETIMP
- mov x0, x17 // return imp
- ret
+ mov p0, p17
+ AuthAndResignAsIMP x0, x12 // authenticate imp and re-sign as IMP
+ ret // return IMP
.elseif $0 == LOOKUP
+ AuthAndResignAsIMP x17, x12 // authenticate imp and re-sign as IMP
ret // return imp via x17
.else
.abort oops
.macro CheckMiss
// miss if bucket->sel == 0
.if $0 == GETIMP
- cbz x9, LGetImpMiss
+ cbz p9, LGetImpMiss
.elseif $0 == NORMAL
- cbz x9, __objc_msgSend_uncached
+ cbz p9, __objc_msgSend_uncached
.elseif $0 == LOOKUP
- cbz x9, __objc_msgLookup_uncached
+ cbz p9, __objc_msgLookup_uncached
.else
.abort oops
.endif
.endmacro
.macro CacheLookup
- // x1 = SEL, x16 = isa
- ldp x10, x11, [x16, #CACHE] // x10 = buckets, x11 = occupied|mask
+ // p1 = SEL, p16 = isa
+ ldp p10, p11, [x16, #CACHE] // p10 = buckets, p11 = occupied|mask
+#if !__LP64__
+ and w11, w11, 0xffff // p11 = mask
+#endif
and w12, w1, w11 // x12 = _cmd & mask
- add x12, x10, x12, LSL #4 // x12 = buckets + ((_cmd & mask)<<4)
+ add p12, p10, p12, LSL #(1+PTRSHIFT)
+ // p12 = buckets + ((_cmd & mask) << (1+PTRSHIFT))
- ldp x9, x17, [x12] // {x9, x17} = *bucket
-1: cmp x9, x1 // if (bucket->sel != _cmd)
+ ldp p17, p9, [x12] // {imp, sel} = *bucket
+1: cmp p9, p1 // if (bucket->sel != _cmd)
b.ne 2f // scan more
CacheHit $0 // call or return imp
-2: // not hit: x12 = not-hit bucket
+2: // not hit: p12 = not-hit bucket
CheckMiss $0 // miss if bucket->sel == 0
- cmp x12, x10 // wrap if bucket == buckets
+ cmp p12, p10 // wrap if bucket == buckets
b.eq 3f
- ldp x9, x17, [x12, #-16]! // {x9, x17} = *--bucket
+ ldp p17, p9, [x12, #-BUCKET_SIZE]! // {imp, sel} = *--bucket
b 1b // loop
-3: // wrap: x12 = first bucket, w11 = mask
- add x12, x12, w11, UXTW #4 // x12 = buckets+(mask<<4)
+3: // wrap: p12 = first bucket, w11 = mask
+ add p12, p12, w11, UXTW #(1+PTRSHIFT)
+ // p12 = buckets + (mask << 1+PTRSHIFT)
// Clone scanning loop to miss instead of hang when cache is corrupt.
// The slow path may detect any corruption and halt later.
- ldp x9, x17, [x12] // {x9, x17} = *bucket
-1: cmp x9, x1 // if (bucket->sel != _cmd)
+ ldp p17, p9, [x12] // {imp, sel} = *bucket
+1: cmp p9, p1 // if (bucket->sel != _cmd)
b.ne 2f // scan more
CacheHit $0 // call or return imp
-2: // not hit: x12 = not-hit bucket
+2: // not hit: p12 = not-hit bucket
CheckMiss $0 // miss if bucket->sel == 0
- cmp x12, x10 // wrap if bucket == buckets
+ cmp p12, p10 // wrap if bucket == buckets
b.eq 3f
- ldp x9, x17, [x12, #-16]! // {x9, x17} = *--bucket
+ ldp p17, p9, [x12, #-BUCKET_SIZE]! // {imp, sel} = *--bucket
b 1b // loop
3: // double wrap
*
********************************************************************/
+#if SUPPORT_TAGGED_POINTERS
.data
.align 3
.globl _objc_debug_taggedpointer_classes
.globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
.fill 256, 8, 0
+#endif
ENTRY _objc_msgSend
UNWIND _objc_msgSend, NoFrame
- MESSENGER_START
- cmp x0, #0 // nil check and tagged pointer check
+ cmp p0, #0 // nil check and tagged pointer check
+#if SUPPORT_TAGGED_POINTERS
b.le LNilOrTagged // (MSB tagged pointer looks negative)
- ldr x13, [x0] // x13 = isa
- and x16, x13, #ISA_MASK // x16 = class
+#else
+ b.eq LReturnZero
+#endif
+ ldr p13, [x0] // p13 = isa
+ GetClassFromIsa_p16 p13 // p16 = class
LGetIsaDone:
CacheLookup NORMAL // calls imp or objc_msgSend_uncached
+#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
b.eq LReturnZero // nil check
// tagged
- mov x10, #0xf000000000000000
- cmp x0, x10
- b.hs LExtTag
adrp x10, _objc_debug_taggedpointer_classes@PAGE
add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
ubfx x11, x0, #60, #4
ldr x16, [x10, x11, LSL #3]
- b LGetIsaDone
+ adrp x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
+ add x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
+ cmp x10, x16
+ b.ne LGetIsaDone
-LExtTag:
// ext tagged
adrp x10, _objc_debug_taggedpointer_ext_classes@PAGE
add x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
ubfx x11, x0, #52, #8
ldr x16, [x10, x11, LSL #3]
b LGetIsaDone
-
+// SUPPORT_TAGGED_POINTERS
+#endif
+
LReturnZero:
// x0 is already zero
mov x1, #0
movi d1, #0
movi d2, #0
movi d3, #0
- MESSENGER_END_NIL
ret
END_ENTRY _objc_msgSend
ENTRY _objc_msgLookup
UNWIND _objc_msgLookup, NoFrame
-
- cmp x0, #0 // nil check and tagged pointer check
+ cmp p0, #0 // nil check and tagged pointer check
+#if SUPPORT_TAGGED_POINTERS
b.le LLookup_NilOrTagged // (MSB tagged pointer looks negative)
- ldr x13, [x0] // x13 = isa
- and x16, x13, #ISA_MASK // x16 = class
+#else
+ b.eq LLookup_Nil
+#endif
+ ldr p13, [x0] // p13 = isa
+ GetClassFromIsa_p16 p13 // p16 = class
LLookup_GetIsaDone:
CacheLookup LOOKUP // returns imp
+#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
b.eq LLookup_Nil // nil check
ubfx x11, x0, #52, #8
ldr x16, [x10, x11, LSL #3]
b LLookup_GetIsaDone
+// SUPPORT_TAGGED_POINTERS
+#endif
LLookup_Nil:
adrp x17, __objc_msgNil@PAGE
ENTRY _objc_msgSendSuper
UNWIND _objc_msgSendSuper, NoFrame
- MESSENGER_START
- ldp x0, x16, [x0] // x0 = real receiver, x16 = class
+ ldp p0, p16, [x0] // p0 = real receiver, p16 = class
CacheLookup NORMAL // calls imp or objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper
ENTRY _objc_msgSendSuper2
UNWIND _objc_msgSendSuper2, NoFrame
- MESSENGER_START
- ldp x0, x16, [x0] // x0 = real receiver, x16 = class
- ldr x16, [x16, #SUPERCLASS] // x16 = class->superclass
+ ldp p0, p16, [x0] // p0 = real receiver, p16 = class
+ ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
CacheLookup NORMAL
END_ENTRY _objc_msgSendSuper2
ENTRY _objc_msgLookupSuper2
UNWIND _objc_msgLookupSuper2, NoFrame
- ldp x0, x16, [x0] // x0 = real receiver, x16 = class
- ldr x16, [x16, #SUPERCLASS] // x16 = class->superclass
+ ldp p0, p16, [x0] // p0 = real receiver, p16 = class
+ ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
CacheLookup LOOKUP
END_ENTRY _objc_msgLookupSuper2
.macro MethodTableLookup
// push frame
+ SignLR
stp fp, lr, [sp, #-16]!
mov fp, sp
mov x2, x16
bl __class_lookupMethodAndLoadCache3
- // imp in x0
+ // IMP in x0
mov x17, x0
// restore registers and return
mov sp, fp
ldp fp, lr, [sp], #16
+ AuthenticateLR
.endmacro
UNWIND __objc_msgSend_uncached, FrameWithNoSaves
// THIS IS NOT A CALLABLE C FUNCTION
- // Out-of-band x16 is the class to search
+ // Out-of-band p16 is the class to search
MethodTableLookup
- br x17
+ TailCallFunctionPointer x17
END_ENTRY __objc_msgSend_uncached
UNWIND __objc_msgLookup_uncached, FrameWithNoSaves
// THIS IS NOT A CALLABLE C FUNCTION
- // Out-of-band x16 is the class to search
+ // Out-of-band p16 is the class to search
MethodTableLookup
ret
STATIC_ENTRY _cache_getImp
- and x16, x0, #ISA_MASK
+ GetClassFromIsa_p16 p0
CacheLookup GETIMP
LGetImpMiss:
- mov x0, #0
+ mov p0, #0
ret
END_ENTRY _cache_getImp
STATIC_ENTRY __objc_msgForward_impcache
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
-
// No stret specialization.
b __objc_msgForward
ENTRY __objc_msgForward
adrp x17, __objc_forward_handler@PAGE
- ldr x17, [x17, __objc_forward_handler@PAGEOFF]
- br x17
+ ldr p17, [x17, __objc_forward_handler@PAGEOFF]
+ TailCallFunctionPointer x17
END_ENTRY __objc_msgForward
ENTRY _method_invoke
// x1 is method triplet instead of SEL
- ldr x17, [x1, #METHOD_IMP]
- ldr x1, [x1, #METHOD_NAME]
- br x17
+ add p16, p1, #METHOD_IMP
+ ldr p17, [x16]
+ ldr p1, [x1, #METHOD_NAME]
+ TailCallMethodListImp x17, x16
END_ENTRY _method_invoke
#endif
.long 0
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long NIL_EXIT
- .text
-.endmacro
-
-
/********************************************************************
*
* Common offsets.
/////////////////////////////////////////////////////////////////////
.macro MethodTableLookup
- MESSENGER_END_SLOW
-
// stack has return address and nothing else
subl $$(12+5*16), %esp
********************************************************************/
ENTRY _objc_msgSend
- MESSENGER_START
CALL_MCOUNTER
// load receiver and selector
movl isa(%eax), %edx // class = self->isa
CacheLookup WORD_RETURN, MSG_SEND, LMsgSendCacheMiss
xor %edx, %edx // set nonstret for msgForward_internal
- MESSENGER_END_FAST
jmp *%eax
// cache miss: go search the method lists
movl $0,%edx
xorps %xmm0, %xmm0
LMsgSendDone:
- MESSENGER_END_NIL
ret
// guaranteed non-nil entry point (disabled for now)
********************************************************************/
ENTRY _objc_msgSendSuper
- MESSENGER_START
CALL_MCOUNTER
// load selector and class to search
// search the cache (class in %edx)
CacheLookup WORD_RETURN, MSG_SENDSUPER, LMsgSendSuperCacheMiss
xor %edx, %edx // set nonstret for msgForward_internal
- MESSENGER_END_FAST
jmp *%eax // goto *imp
// cache miss: go search the method lists
LMsgSendSuperIgnored:
movl super(%esp), %eax
movl receiver(%eax), %eax
- MESSENGER_END_NIL
ret
LMsgSendSuperExit:
********************************************************************/
ENTRY _objc_msgSend_fpret
- MESSENGER_START
CALL_MCOUNTER
// load receiver and selector
movl isa(%eax), %edx // class = self->isa
CacheLookup WORD_RETURN, MSG_SEND, LMsgSendFpretCacheMiss
xor %edx, %edx // set nonstret for msgForward_internal
- MESSENGER_END_FAST
jmp *%eax // goto *imp
// cache miss: go search the method lists
// %eax is already zero
fldz
LMsgSendFpretDone:
- MESSENGER_END_NIL
ret
LMsgSendFpretExit:
********************************************************************/
ENTRY _objc_msgSend_stret
- MESSENGER_START
CALL_MCOUNTER
// load receiver and selector
movl isa(%eax), %edx // class = self->isa
CacheLookup STRUCT_RETURN, MSG_SEND, LMsgSendStretCacheMiss
movl $1, %edx // set stret for objc_msgForward
- MESSENGER_END_FAST
jmp *%eax // goto *imp
// cache miss: go search the method lists
// message sent to nil: redirect to nil receiver, if any
LMsgSendStretNilSelf:
- MESSENGER_END_NIL
ret $4 // pop struct return address (#2995932)
// guaranteed non-nil entry point (disabled for now)
********************************************************************/
ENTRY _objc_msgSendSuper_stret
- MESSENGER_START
CALL_MCOUNTER
// load selector and class to search
// search the cache (class in %edx)
CacheLookup STRUCT_RETURN, MSG_SENDSUPER, LMsgSendSuperStretCacheMiss
movl $1, %edx // set stret for objc_msgForward
- MESSENGER_END_FAST
jmp *%eax // goto *imp
// cache miss: go search the method lists
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band register %edx is nonzero for stret, zero otherwise
-
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
// Check return type (stret or not)
testl %edx, %edx
.long 0
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-4:
- .section __DATA,__objc_msg_break
- .long 4b
- .long NIL_EXIT
- .text
-.endmacro
-
-
/********************************************************************
* Names for relative labels
* DO NOT USE THESE LABELS ELSEWHERE
// Selected field offsets in class structure
#define isa 0
#define superclass 4
+#define cache_buckets 8
+#define cache_mask 12
+
+// Method cache
+#define cached_sel 0
+#define cached_imp 4
// Method descriptor
#define method_name 0
// eax = found bucket
.if $1 == GETIMP
- movl 4(%eax), %eax // return imp
+ movl cached_imp(%eax), %eax // return imp
ret
.else
.endif
.if $1 == CALL
- MESSENGER_END_FAST
- jmp *4(%eax) // call imp
+ jmp *cached_imp(%eax) // call imp
.elseif $1 == LOOKUP
- movl 4(%eax), %eax // return imp
+ movl cached_imp(%eax), %eax // return imp
ret
.else
.macro CacheLookup
- movzwl 12(%edx), %eax // eax = mask
+ movzwl cache_mask(%edx), %eax // eax = mask
andl %ecx, %eax // eax = SEL & mask
shll $$3, %eax // eax = offset = (SEL & mask) * 8
- addl 8(%edx), %eax // eax = bucket = cache->buckets+offset
- cmpl (%eax), %ecx // if (bucket->sel != SEL)
+ addl cache_buckets(%edx), %eax // eax = bucket = buckets+offset
+ cmpl cached_sel(%eax), %ecx // if (bucket->sel != SEL)
jne 1f // scan more
// The `jne` above sets flags for CacheHit
CacheHit $0, $1 // call or return imp
1:
// loop
- cmpl $$1, (%eax)
+ cmpl $$1, cached_sel(%eax)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addl $$8, %eax // bucket++
2:
- cmpl (%eax), %ecx // if (bucket->sel != sel)
+ cmpl cached_sel(%eax), %ecx // if (bucket->sel != sel)
jne 1b // scan more
// The `jne` above sets flags for CacheHit
CacheHit $0, $1 // call or return imp
// wrap or miss
jb LCacheMiss_f // if (bucket->sel < 1) cache miss
// wrap
- movl 4(%eax), %eax // bucket->imp is really first bucket
+ movl cached_imp(%eax), %eax // bucket->imp is really first bucket
jmp 2f
// Clone scanning loop to miss instead of hang when cache is corrupt.
1:
// loop
- cmpq $$1, (%eax)
+ cmpl $$1, cached_sel(%eax)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addl $$8, %eax // bucket++
2:
- cmpl (%eax), %ecx // if (bucket->sel != sel)
+ cmpl cached_sel(%eax), %ecx // if (bucket->sel != sel)
jne 1b // scan more
// The `jne` above sets flags for CacheHit
CacheHit $0, $1 // call or return imp
.if $0 == NORMAL
ZeroReturn
- MESSENGER_END_NIL
ret
.elseif $0 == FPRET
ZeroReturnFPRET
- MESSENGER_END_NIL
ret
.elseif $0 == STRET
ZeroReturnSTRET
- MESSENGER_END_NIL
ret $$4
.else
.abort oops
ENTRY _objc_msgSend
UNWIND _objc_msgSend, NoFrame
- MESSENGER_START
movl selector(%esp), %ecx
movl self(%esp), %eax
LCacheMiss:
// isa still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend
/********************************************************************
*
* id objc_msgSendSuper(struct objc_super *super, SEL _cmd, ...);
- * IMP objc_msgLookupSuper(struct objc_super *super, SEL _cmd, ...);
*
********************************************************************/
ENTRY _objc_msgSendSuper
UNWIND _objc_msgSendSuper, NoFrame
- MESSENGER_START
movl selector(%esp), %ecx
movl super(%esp), %eax // struct objc_super
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper
-
- ENTRY _objc_msgLookupSuper
- UNWIND _objc_msgLookupSuper, NoFrame
-
- movl selector(%esp), %ecx
- movl super(%esp), %eax // struct objc_super
- movl class(%eax), %edx // struct objc_super->class
- movl receiver(%eax), %eax // struct objc_super->receiver
- movl %eax, super(%esp) // replace super arg with receiver
- CacheLookup NORMAL, LOOKUP // returns IMP on success
-
-LCacheMiss:
- // class still in edx
- jmp __objc_msgLookup_uncached
-
- END_ENTRY _objc_msgLookupSuper
-
-
/********************************************************************
*
* id objc_msgSendSuper2(struct objc_super *super, SEL _cmd, ...);
ENTRY _objc_msgSendSuper2
UNWIND _objc_msgSendSuper2, NoFrame
- MESSENGER_START
movl selector(%esp), %ecx
movl super(%esp), %eax // struct objc_super
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper2
ENTRY _objc_msgSend_fpret
UNWIND _objc_msgSend_fpret, NoFrame
- MESSENGER_START
movl selector(%esp), %ecx
movl self(%esp), %eax
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend_fpret
ENTRY _objc_msgSend_stret
UNWIND _objc_msgSend_stret, NoFrame
- MESSENGER_START
movl selector_stret(%esp), %ecx
movl self_stret(%esp), %eax
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSend_stret
/********************************************************************
*
* void objc_msgSendSuper_stret(void *st_addr, struct objc_super *super, SEL _cmd, ...);
- * IMP objc_msgLookupSuper_stret(void *st_addr, struct objc_super *super, SEL _cmd, ...);
*
********************************************************************/
ENTRY _objc_msgSendSuper_stret
UNWIND _objc_msgSendSuper_stret, NoFrame
- MESSENGER_START
movl selector_stret(%esp), %ecx
movl super_stret(%esp), %eax // struct objc_super
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper_stret
- ENTRY _objc_msgLookupSuper_stret
- UNWIND _objc_msgLookupSuper_stret, NoFrame
-
- movl selector_stret(%esp), %ecx
- movl super_stret(%esp), %eax // struct objc_super
- movl class(%eax), %edx // struct objc_super->class
- movl receiver(%eax), %eax // struct objc_super->receiver
- movl %eax, super_stret(%esp) // replace super arg with receiver
- CacheLookup STRET, LOOKUP // returns IMP on success
-
-LCacheMiss:
- // class still in edx
- jmp __objc_msgLookup_stret_uncached
-
- END_ENTRY _objc_msgLookupSuper_stret
-
-
/********************************************************************
*
* void objc_msgSendSuper2_stret(void *st_addr, struct objc_super *super, SEL _cmd, ...);
ENTRY _objc_msgSendSuper2_stret
UNWIND _objc_msgSendSuper2_stret, NoFrame
- MESSENGER_START
movl selector_stret(%esp), %ecx
movl super_stret(%esp), %eax // struct objc_super
// cache miss: go search the method lists
LCacheMiss:
// class still in edx
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper2_stret
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band condition register is NE for stret, EQ otherwise.
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
-
jne __objc_msgForward_stret
jmp __objc_msgForward
*/
#include <TargetConditionals.h>
-#if __x86_64__ && TARGET_OS_SIMULATOR
+#if __x86_64__ && TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC
/********************************************************************
********************************************************************
.quad 0
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad NIL_EXIT
- .text
-.endmacro
-
-
/********************************************************************
* Recommended multi-byte NOP instructions
* (Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2B)
#define method_name 0
#define method_imp 16
+// Method cache
+#define cached_sel 0
+#define cached_imp 8
+
//////////////////////////////////////////////////////////////////////
//
// r11 = found bucket
.if $1 == GETIMP
- movq 8(%r11), %rax // return imp
+ movq cached_imp(%r11), %rax // return imp
ret
.else
.endif
.if $1 == CALL
- MESSENGER_END_FAST
- jmp *8(%r11) // call imp
+ jmp *cached_imp(%r11) // call imp
.elseif $1 == LOOKUP
- movq 8(%r11), %r11 // return imp
+ movq cached_imp(%r11), %r11 // return imp
ret
.else
addq 16(%r10), %r11 // r11 = class->cache.buckets + offset
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1f // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
1:
// loop
- cmpq $$1, (%r11)
+ cmpq $$1, cached_sel(%r11)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addq $$16, %r11 // bucket++
2:
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1b // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
// wrap or miss
jb LCacheMiss_f // if (bucket->sel < 1) cache miss
// wrap
- movq 8(%r11), %r11 // bucket->imp is really first bucket
+ movq cached_imp(%r11), %r11 // bucket->imp is really first bucket
jmp 2f
// Clone scanning loop to miss instead of hang when cache is corrupt.
1:
// loop
- cmpq $$1, (%r11)
+ cmpq $$1, cached_sel(%r11)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addq $$16, %r11 // bucket++
2:
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1b // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
.else
movq %a2, %r11
.endif
- shrq $$60, %r11
- cmpl $$0xf, %r11d
- je 1f
// basic tagged
+ shrq $$60, %r11
leaq _objc_debug_taggedpointer_classes(%rip), %r10
movq (%r10, %r11, 8), %r10 // read isa from table
- jmp LGetIsaDone_b
-1:
+ leaq _OBJC_CLASS_$___NSUnrecognizedTaggedPointer(%rip), %r11
+ cmp %r10, %r11
+ jne LGetIsaDone_b
// ext tagged
.if $0 != STRET
movq %a1, %r11
.else
.abort oops
.endif
- MESSENGER_END_NIL
ret
.endmacro
ENTRY _objc_msgSend
UNWIND _objc_msgSend, NoFrame
- MESSENGER_START
GetIsaCheckNil NORMAL // r10 = self->isa, or return zero
CacheLookup NORMAL, CALL // calls IMP on success
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend
ENTRY _objc_msgSendSuper
UNWIND _objc_msgSendSuper, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a1)
movq class(%a1), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// class still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper
ENTRY _objc_msgSendSuper2
UNWIND _objc_msgSendSuper2, NoFrame
- MESSENGER_START
// objc_super->class is superclass of class to search
// cache miss: go search the method lists
LCacheMiss:
// superclass still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper2
ENTRY _objc_msgSend_fpret
UNWIND _objc_msgSend_fpret, NoFrame
- MESSENGER_START
GetIsaCheckNil FPRET // r10 = self->isa, or return zero
CacheLookup FPRET, CALL // calls IMP on success
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend_fpret
ENTRY _objc_msgSend_fp2ret
UNWIND _objc_msgSend_fp2ret, NoFrame
- MESSENGER_START
GetIsaCheckNil FP2RET // r10 = self->isa, or return zero
CacheLookup FP2RET, CALL // calls IMP on success
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend_fp2ret
ENTRY _objc_msgSend_stret
UNWIND _objc_msgSend_stret, NoFrame
- MESSENGER_START
GetIsaCheckNil STRET // r10 = self->isa, or return zero
CacheLookup STRET, CALL // calls IMP on success
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSend_stret
ENTRY _objc_msgSendSuper_stret
UNWIND _objc_msgSendSuper_stret, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a2)
movq class(%a2), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// class still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper_stret
ENTRY _objc_msgSendSuper2_stret
UNWIND _objc_msgSendSuper2_stret, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a2)
movq class(%a2), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// superclass still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper2_stret
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band condition register is NE for stret, EQ otherwise.
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
-
jne __objc_msgForward_stret
jmp __objc_msgForward
*/
#include <TargetConditionals.h>
-#if __x86_64__ && !TARGET_OS_SIMULATOR
+#if __x86_64__ && !(TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC)
+
+#include "isa.h"
/********************************************************************
********************************************************************
.quad 0
-/********************************************************************
-* List every exit insn from every messenger for debugger use.
-* Format:
-* (
-* 1 word instruction's address
-* 1 word type (ENTER or FAST_EXIT or SLOW_EXIT or NIL_EXIT)
-* )
-* 1 word zero
-*
-* ENTER is the start of a dispatcher
-* FAST_EXIT is method dispatch
-* SLOW_EXIT is uncached method lookup
-* NIL_EXIT is returning zero from a message sent to nil
-* These must match objc-gdb.h.
-********************************************************************/
-
-#define ENTER 1
-#define FAST_EXIT 2
-#define SLOW_EXIT 3
-#define NIL_EXIT 4
-
-.section __DATA,__objc_msg_break
-.globl _gdb_objc_messenger_breakpoints
-_gdb_objc_messenger_breakpoints:
-// contents populated by the macros below
-
-.macro MESSENGER_START
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad ENTER
- .text
-.endmacro
-.macro MESSENGER_END_FAST
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad FAST_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_SLOW
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad SLOW_EXIT
- .text
-.endmacro
-.macro MESSENGER_END_NIL
-4:
- .section __DATA,__objc_msg_break
- .quad 4b
- .quad NIL_EXIT
- .text
-.endmacro
-
-
/********************************************************************
* Recommended multi-byte NOP instructions
* (Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2B)
#define method_name 0
#define method_imp 16
+// Method cache
+#define cached_sel 0
+#define cached_imp 8
+
//////////////////////////////////////////////////////////////////////
//
// r11 = found bucket
.if $1 == GETIMP
- movq 8(%r11), %rax // return imp
+ movq cached_imp(%r11), %rax // return imp
ret
.else
.endif
.if $1 == CALL
- MESSENGER_END_FAST
- jmp *8(%r11) // call imp
+ jmp *cached_imp(%r11) // call imp
.elseif $1 == LOOKUP
- movq 8(%r11), %r11 // return imp
+ movq cached_imp(%r11), %r11 // return imp
ret
.else
addq 16(%r10), %r11 // r11 = class->cache.buckets + offset
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1f // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
1:
// loop
- cmpq $$1, (%r11)
+ cmpq $$1, cached_sel(%r11)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addq $$16, %r11 // bucket++
2:
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1b // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
// wrap or miss
jb LCacheMiss_f // if (bucket->sel < 1) cache miss
// wrap
- movq 8(%r11), %r11 // bucket->imp is really first bucket
+ movq cached_imp(%r11), %r11 // bucket->imp is really first bucket
jmp 2f
// Clone scanning loop to miss instead of hang when cache is corrupt.
1:
// loop
- cmpq $$1, (%r11)
+ cmpq $$1, cached_sel(%r11)
jbe 3f // if (bucket->sel <= 1) wrap or miss
addq $$16, %r11 // bucket++
2:
.if $0 != STRET
- cmpq (%r11), %a2 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a2 // if (bucket->sel != _cmd)
.else
- cmpq (%r11), %a3 // if (bucket->sel != _cmd)
+ cmpq cached_sel(%r11), %a3 // if (bucket->sel != _cmd)
.endif
jne 1b // scan more
// CacheHit must always be preceded by a not-taken `jne` instruction
testb $$1, %a1b
PN
jnz LGetIsaSlow_f
- movq $$0x00007ffffffffff8, %r10
+ movq $$ ISA_MASK, %r10
andq (%a1), %r10
.else
testb $$1, %a2b
PN
jnz LGetIsaSlow_f
- movq $$0x00007ffffffffff8, %r10
+ movq $$ ISA_MASK, %r10
andq (%a2), %r10
.endif
LGetIsaDone:
movl %a2d, %r11d
.endif
andl $$0xF, %r11d
- cmp $$0xF, %r11d
- je 1f
// basic tagged
leaq _objc_debug_taggedpointer_classes(%rip), %r10
movq (%r10, %r11, 8), %r10 // read isa from table
- jmp LGetIsaDone_b
-1:
+ leaq _OBJC_CLASS_$___NSUnrecognizedTaggedPointer(%rip), %r11
+ cmp %r10, %r11
+ jne LGetIsaDone_b
// extended tagged
.if $0 != STRET
movl %a1d, %r11d
.else
.abort oops
.endif
- MESSENGER_END_NIL
ret
.endmacro
ENTRY _objc_msgSend
UNWIND _objc_msgSend, NoFrame
- MESSENGER_START
NilTest NORMAL
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend
ENTRY _objc_msgSendSuper
UNWIND _objc_msgSendSuper, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a1)
movq class(%a1), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// class still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper
ENTRY _objc_msgSendSuper2
UNWIND _objc_msgSendSuper2, NoFrame
- MESSENGER_START
// objc_super->class is superclass of class to search
// cache miss: go search the method lists
LCacheMiss:
// superclass still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSendSuper2
ENTRY _objc_msgSend_fpret
UNWIND _objc_msgSend_fpret, NoFrame
- MESSENGER_START
NilTest FPRET
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend_fpret
ENTRY _objc_msgSend_fp2ret
UNWIND _objc_msgSend_fp2ret, NoFrame
- MESSENGER_START
NilTest FP2RET
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_uncached
END_ENTRY _objc_msgSend_fp2ret
ENTRY _objc_msgSend_stret
UNWIND _objc_msgSend_stret, NoFrame
- MESSENGER_START
NilTest STRET
// cache miss: go search the method lists
LCacheMiss:
// isa still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSend_stret
ENTRY _objc_msgSendSuper_stret
UNWIND _objc_msgSendSuper_stret, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a2)
movq class(%a2), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// class still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper_stret
ENTRY _objc_msgSendSuper2_stret
UNWIND _objc_msgSendSuper2_stret, NoFrame
- MESSENGER_START
// search the cache (objc_super in %a2)
movq class(%a2), %r10 // class = objc_super->class
// cache miss: go search the method lists
LCacheMiss:
// superclass still in r10
- MESSENGER_END_SLOW
jmp __objc_msgSend_stret_uncached
END_ENTRY _objc_msgSendSuper2_stret
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band condition register is NE for stret, EQ otherwise.
-
- MESSENGER_START
- nop
- MESSENGER_END_SLOW
jne __objc_msgForward_stret
jmp __objc_msgForward
#include <TargetConditionals.h>
#include <objc/objc.h>
-#if __LP64__ || (TARGET_OS_EMBEDDED && !TARGET_OS_IPHONE) || TARGET_OS_WIN32 || NS_BUILD_32_LIKE_64
+#if __LP64__ || TARGET_OS_WIN32 || NS_BUILD_32_LIKE_64
typedef long NSInteger;
typedef unsigned long NSUInteger;
#else
@end
-#if TARGET_OS_MAC
-
-// NSObject used to be in Foundation/CoreFoundation.
-
-#define SYMBOL_ELSEWHERE_IN_3(sym, vers, n) \
- OBJC_EXPORT const char elsewhere_ ##n __asm__("$ld$hide$os" #vers "$" #sym); const char elsewhere_ ##n = 0
-#define SYMBOL_ELSEWHERE_IN_2(sym, vers, n) \
- SYMBOL_ELSEWHERE_IN_3(sym, vers, n)
-#define SYMBOL_ELSEWHERE_IN(sym, vers) \
- SYMBOL_ELSEWHERE_IN_2(sym, vers, __COUNTER__)
-
-#if __OBJC2__
-# define NSOBJECT_ELSEWHERE_IN(vers) \
- SYMBOL_ELSEWHERE_IN(_OBJC_CLASS_$_NSObject, vers); \
- SYMBOL_ELSEWHERE_IN(_OBJC_METACLASS_$_NSObject, vers); \
- SYMBOL_ELSEWHERE_IN(_OBJC_IVAR_$_NSObject.isa, vers)
-#else
-# define NSOBJECT_ELSEWHERE_IN(vers) \
- SYMBOL_ELSEWHERE_IN(.objc_class_name_NSObject, vers)
-#endif
-
-#if TARGET_OS_IOS
- NSOBJECT_ELSEWHERE_IN(5.1);
- NSOBJECT_ELSEWHERE_IN(5.0);
- NSOBJECT_ELSEWHERE_IN(4.3);
- NSOBJECT_ELSEWHERE_IN(4.2);
- NSOBJECT_ELSEWHERE_IN(4.1);
- NSOBJECT_ELSEWHERE_IN(4.0);
- NSOBJECT_ELSEWHERE_IN(3.2);
- NSOBJECT_ELSEWHERE_IN(3.1);
- NSOBJECT_ELSEWHERE_IN(3.0);
- NSOBJECT_ELSEWHERE_IN(2.2);
- NSOBJECT_ELSEWHERE_IN(2.1);
- NSOBJECT_ELSEWHERE_IN(2.0);
-#elif TARGET_OS_OSX
- NSOBJECT_ELSEWHERE_IN(10.7);
- NSOBJECT_ELSEWHERE_IN(10.6);
- NSOBJECT_ELSEWHERE_IN(10.5);
- NSOBJECT_ELSEWHERE_IN(10.4);
- NSOBJECT_ELSEWHERE_IN(10.3);
- NSOBJECT_ELSEWHERE_IN(10.2);
- NSOBJECT_ELSEWHERE_IN(10.1);
- NSOBJECT_ELSEWHERE_IN(10.0);
-#else
- // NSObject has always been in libobjc on these platforms.
-#endif
-
-// TARGET_OS_MAC
-#endif
-
-
/***********************************************************************
* Weak ivar support
**********************************************************************/
#if __OBJC2__
-__OSX_AVAILABLE(10.0)
-__IOS_UNAVAILABLE __TVOS_UNAVAILABLE
-__WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE
-OBJC_ROOT_CLASS
-@interface Object {
- Class isa;
-}
-@end
-
@implementation Object
+ (id)initialize
+++ /dev/null
-#if __arm__
-
-#include <arm/arch.h>
-#include <mach/vm_param.h>
-
-.syntax unified
-
-.text
-
- .private_extern __a1a2_tramphead
- .private_extern __a1a2_firsttramp
- .private_extern __a1a2_trampend
-
-// Trampoline machinery assumes the trampolines are Thumb function pointers
-#if !__thumb2__
-# error sorry
-#endif
-
-.thumb
-.thumb_func __a1a2_tramphead
-.thumb_func __a1a2_firsttramp
-.thumb_func __a1a2_trampend
-
-.align PAGE_MAX_SHIFT
-__a1a2_tramphead:
- /*
- r0 == self
- r12 == pc of trampoline's first instruction + PC bias
- lr == original return address
- */
-
- mov r1, r0 // _cmd = self
-
- // Trampoline's data is one page before the trampoline text.
- // Also correct PC bias of 4 bytes.
- sub r12, #PAGE_MAX_SIZE
- ldr r0, [r12, #-4] // self = block object
- ldr pc, [r0, #12] // tail call block->invoke
- // not reached
-
- // Align trampolines to 8 bytes
-.align 3
-
-.macro TrampolineEntry
- mov r12, pc
- b __a1a2_tramphead
-.align 3
-.endmacro
-
-.macro TrampolineEntryX16
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-.endmacro
-
-.macro TrampolineEntryX256
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-.endmacro
-
-.private_extern __a1a2_firsttramp
-__a1a2_firsttramp:
- // 2048-2 trampolines to fill 16K page
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- // TrampolineEntry
- // TrampolineEntry
-
-.private_extern __a1a2_trampend
-__a1a2_trampend:
-
-#endif
+++ /dev/null
-#if __arm64__
-
-#include <mach/vm_param.h>
-
-.text
-
- .private_extern __a1a2_tramphead
- .private_extern __a1a2_firsttramp
- .private_extern __a1a2_trampend
-
-.align PAGE_MAX_SHIFT
-__a1a2_tramphead:
-L_a1a2_tramphead:
- /*
- x0 == self
- x17 == address of called trampoline's data (1 page before its code)
- lr == original return address
- */
-
- mov x1, x0 // _cmd = self
- ldr x0, [x17] // self = block object
- ldr x16, [x0, #16] // tail call block->invoke
- br x16
-
- // pad up to TrampolineBlockPagePair header size
- nop
- nop
-
-.macro TrampolineEntry
- // load address of trampoline data (one page before this instruction)
- adr x17, -PAGE_MAX_SIZE
- b L_a1a2_tramphead
-.endmacro
-
-.macro TrampolineEntryX16
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-.endmacro
-
-.macro TrampolineEntryX256
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-.endmacro
-
-.align 3
-.private_extern __a1a2_firsttramp
-__a1a2_firsttramp:
- // 2048-3 trampolines to fill 16K page
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- // TrampolineEntry
- // TrampolineEntry
- // TrampolineEntry
-
-.private_extern __a1a2_trampend
-__a1a2_trampend:
-
-#endif
+++ /dev/null
-/*
- * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
-
-#ifdef __i386__
-
-#include <mach/vm_param.h>
-
-.text
- .private_extern __a1a2_tramphead
- .private_extern __a1a2_firsttramp
- .private_extern __a1a2_nexttramp
- .private_extern __a1a2_trampend
-
-.align PAGE_SHIFT
-__a1a2_tramphead:
- popl %eax
- andl $0xFFFFFFF8, %eax
- subl $ PAGE_SIZE, %eax
- movl 4(%esp), %ecx // self -> ecx
- movl %ecx, 8(%esp) // ecx -> _cmd
- movl (%eax), %ecx // blockPtr -> ecx
- movl %ecx, 4(%esp) // ecx -> self
- jmp *12(%ecx) // tail to block->invoke
-
-.macro TrampolineEntry
- call __a1a2_tramphead
- nop
- nop
- nop
-.endmacro
-
-.align 5
-__a1a2_firsttramp:
- TrampolineEntry
-__a1a2_nexttramp: // used to calculate size of each trampoline
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
-__a1a2_trampend:
-
-#endif
+++ /dev/null
-/*
- * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
-
-#ifdef __x86_64__
-
-#include <mach/vm_param.h>
-
- .text
- .private_extern __a1a2_tramphead
- .private_extern __a1a2_firsttramp
- .private_extern __a1a2_nexttramp
- .private_extern __a1a2_trampend
-
-.align PAGE_SHIFT
-__a1a2_tramphead:
- popq %r10
- andq $0xFFFFFFFFFFFFFFF8, %r10
- subq $ PAGE_SIZE, %r10
- movq %rdi, %rsi // arg1 -> arg2
- movq (%r10), %rdi // block -> arg1
- jmp *16(%rdi)
-
-.macro TrampolineEntry
- callq __a1a2_tramphead
- nop
- nop
- nop
-.endmacro
-
-.align 5
-__a1a2_firsttramp:
- TrampolineEntry
-__a1a2_nexttramp:
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
-__a1a2_trampend:
-
-#endif
+++ /dev/null
-#if __arm__
-
-#include <arm/arch.h>
-#include <mach/vm_param.h>
-
-.syntax unified
-
-.text
-
- .private_extern __a2a3_tramphead
- .private_extern __a2a3_firsttramp
- .private_extern __a2a3_trampend
-
-// Trampoline machinery assumes the trampolines are Thumb function pointers
-#if !__thumb2__
-# error sorry
-#endif
-
-.thumb
-.thumb_func __a2a3_tramphead
-.thumb_func __a2a3_firsttramp
-.thumb_func __a2a3_trampend
-
-.align PAGE_MAX_SHIFT
-__a2a3_tramphead:
- /*
- r1 == self
- r12 == pc of trampoline's first instruction + PC bias
- lr == original return address
- */
-
- mov r2, r1 // _cmd = self
-
- // Trampoline's data is one page before the trampoline text.
- // Also correct PC bias of 4 bytes.
- sub r12, #PAGE_MAX_SIZE
- ldr r1, [r12, #-4] // self = block object
- ldr pc, [r1, #12] // tail call block->invoke
- // not reached
-
- // Align trampolines to 8 bytes
-.align 3
-
-.macro TrampolineEntry
- mov r12, pc
- b __a2a3_tramphead
-.align 3
-.endmacro
-
-.macro TrampolineEntryX16
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-.endmacro
-
-.macro TrampolineEntryX256
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-.endmacro
-
-.private_extern __a2a3_firsttramp
-__a2a3_firsttramp:
- // 2048-2 trampolines to fill 16K page
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX256
- TrampolineEntryX256
- TrampolineEntryX256
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntryX16
- TrampolineEntryX16
- TrampolineEntryX16
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
- TrampolineEntry
- TrampolineEntry
- // TrampolineEntry
- // TrampolineEntry
-
-.private_extern __a2a3_trampend
-__a2a3_trampend:
-
-#endif
+++ /dev/null
-/*
- * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
-
-#ifdef __i386__
-
-#include <mach/vm_param.h>
-
-.text
- .private_extern __a2a3_tramphead
- .private_extern __a2a3_firsttramp
- .private_extern __a2a3_nexttramp
- .private_extern __a2a3_trampend
-
-.align PAGE_SHIFT
-__a2a3_tramphead:
- popl %eax
- andl $0xFFFFFFF8, %eax
- subl $ PAGE_SIZE, %eax
- movl 8(%esp), %ecx // self -> ecx
- movl %ecx, 12(%esp) // ecx -> _cmd
- movl (%eax), %ecx // blockPtr -> ecx
- movl %ecx, 8(%esp) // ecx -> self
- jmp *12(%ecx) // tail to block->invoke
-
-.macro TrampolineEntry
- call __a2a3_tramphead
- nop
- nop
- nop
-.endmacro
-
-.align 5
-__a2a3_firsttramp:
- TrampolineEntry
-__a2a3_nexttramp:
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
-__a2a3_trampend:
-
-#endif
+++ /dev/null
-/*
- * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
-
-#ifdef __x86_64__
-
-#include <mach/vm_param.h>
-
- .text
- .private_extern __a2a3_tramphead
- .private_extern __a2a3_firsttramp
- .private_extern __a2a3_nexttramp
- .private_extern __a2a3_trampend
-
-.align PAGE_SHIFT
-__a2a3_tramphead:
- popq %r10
- andq $0xFFFFFFFFFFFFFFF8, %r10
- subq $ PAGE_SIZE, %r10
- // %rdi -- first arg -- is address of return value's space. Don't mess with it.
- movq %rsi, %rdx // arg2 -> arg3
- movq (%r10), %rsi // block -> arg2
- jmp *16(%rsi)
-
-.macro TrampolineEntry
- callq __a2a3_tramphead
- nop
- nop
- nop
-.endmacro
-
-.align 5
-__a2a3_firsttramp:
- TrampolineEntry
-__a2a3_nexttramp:
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
- TrampolineEntry
-
-__a2a3_trampend:
-
-#endif
--- /dev/null
+/*
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * Copyright (c) 2018 Apple Inc. All Rights Reserved.
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+/********************************************************************
+ *
+ * arm64-asm.h - asm tools for arm64/arm64_32 and ROP/JOP
+ *
+ ********************************************************************/
+
+#if __arm64__
+
+#if __LP64__
+// true arm64
+
+#define SUPPORT_TAGGED_POINTERS 1
+#define PTR .quad
+#define PTRSIZE 8
+#define PTRSHIFT 3 // 1<<PTRSHIFT == PTRSIZE
+// "p" registers are pointer-sized
+#define UXTP UXTX
+#define p0 x0
+#define p1 x1
+#define p2 x2
+#define p3 x3
+#define p4 x4
+#define p5 x5
+#define p6 x6
+#define p7 x7
+#define p8 x8
+#define p9 x9
+#define p10 x10
+#define p11 x11
+#define p12 x12
+#define p13 x13
+#define p14 x14
+#define p15 x15
+#define p16 x16
+#define p17 x17
+
+// true arm64
+#else
+// arm64_32
+
+#define SUPPORT_TAGGED_POINTERS 0
+#define PTR .long
+#define PTRSIZE 4
+#define PTRSHIFT 2 // 1<<PTRSHIFT == PTRSIZE
+// "p" registers are pointer-sized
+#define UXTP UXTW
+#define p0 w0
+#define p1 w1
+#define p2 w2
+#define p3 w3
+#define p4 w4
+#define p5 w5
+#define p6 w6
+#define p7 w7
+#define p8 w8
+#define p9 w9
+#define p10 w10
+#define p11 w11
+#define p12 w12
+#define p13 w13
+#define p14 w14
+#define p15 w15
+#define p16 w16
+#define p17 w17
+
+// arm64_32
+#endif
+
+
+#if __has_feature(ptrauth_returns)
+// ROP
+# define SignLR pacibsp
+# define AuthenticateLR autibsp
+#else
+// not ROP
+# define SignLR
+# define AuthenticateLR
+#endif
+
+#if __has_feature(ptrauth_calls)
+// JOP
+
+.macro TailCallFunctionPointer
+ // $0 = function pointer value
+ braaz $0
+.endmacro
+
+.macro TailCallCachedImp
+ // $0 = cached imp, $1 = address of cached imp
+ brab $0, $1
+.endmacro
+
+.macro TailCallMethodListImp
+ // $0 = method list imp, $1 = address of method list imp
+ braa $0, $1
+.endmacro
+
+.macro TailCallBlockInvoke
+ // $0 = invoke function, $1 = address of invoke function
+ braa $0, $1
+.endmacro
+
+.macro AuthAndResignAsIMP
+ // $0 = cached imp, $1 = address of cached imp
+ autib $0, $1 // authenticate cached imp
+ paciza $0 // resign cached imp as IMP
+.endmacro
+
+// JOP
+#else
+// not JOP
+
+.macro TailCallFunctionPointer
+ // $0 = function pointer value
+ br $0
+.endmacro
+
+.macro TailCallCachedImp
+ // $0 = cached imp, $1 = address of cached imp
+ br $0
+.endmacro
+
+.macro TailCallMethodListImp
+ // $0 = method list imp, $1 = address of method list imp
+ br $0
+.endmacro
+
+.macro TailCallBlockInvoke
+ // $0 = invoke function, $1 = address of invoke function
+ br $0
+.endmacro
+
+.macro AuthAndResignAsIMP
+ // empty
+.endmacro
+
+// not JOP
+#endif
+
+// NOTE(review): this #define appears redundant — TailCallBlockInvoke is
+// already defined as a .macro (with a body identical to TailCallMethodListImp)
+// in both the JOP and non-JOP branches above, and this #define shadows those
+// macro definitions. Expansion is identical either way; confirm intent or remove.
+#define TailCallBlockInvoke TailCallMethodListImp
+
+
+// __arm64__
+#endif
--- /dev/null
+/*
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * Copyright (c) 2018 Apple Inc. All Rights Reserved.
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+/********************************************************************
+ *
+ * isa.h - Definitions of isa fields for C and assembly code.
+ *
+ ********************************************************************/
+
+#ifndef _OBJC_ISA_H_
+#define _OBJC_ISA_H_
+
+#include "objc-config.h"
+
+
+#if (!SUPPORT_NONPOINTER_ISA && !SUPPORT_PACKED_ISA && !SUPPORT_INDEXED_ISA) ||\
+ ( SUPPORT_NONPOINTER_ISA && SUPPORT_PACKED_ISA && !SUPPORT_INDEXED_ISA) ||\
+ ( SUPPORT_NONPOINTER_ISA && !SUPPORT_PACKED_ISA && SUPPORT_INDEXED_ISA)
+ // good config
+#else
+# error bad config
+#endif
+
+
+#if SUPPORT_PACKED_ISA
+
+ // extra_rc must be the MSB-most field (so it matches carry/overflow flags)
+ // nonpointer must be the LSB (fixme or get rid of it)
+ // shiftcls must occupy the same bits that a real class pointer would
+ // bits + RC_ONE is equivalent to extra_rc + 1
+ // RC_HALF is the high bit of extra_rc (i.e. half of its range)
+
+ // future expansion:
+ // uintptr_t fast_rr : 1; // no r/r overrides
+ // uintptr_t lock : 2; // lock for atomic property, @synch
+ // uintptr_t extraBytes : 1; // allocated with extra bytes
+
+# if __arm64__
+# define ISA_MASK 0x0000000ffffffff8ULL
+# define ISA_MAGIC_MASK 0x000003f000000001ULL
+# define ISA_MAGIC_VALUE 0x000001a000000001ULL
+# define ISA_BITFIELD \
+ uintptr_t nonpointer : 1; \
+ uintptr_t has_assoc : 1; \
+ uintptr_t has_cxx_dtor : 1; \
+ uintptr_t shiftcls : 33; /*MACH_VM_MAX_ADDRESS 0x1000000000*/ \
+ uintptr_t magic : 6; \
+ uintptr_t weakly_referenced : 1; \
+ uintptr_t deallocating : 1; \
+ uintptr_t has_sidetable_rc : 1; \
+ uintptr_t extra_rc : 19
+# define RC_ONE (1ULL<<45)
+# define RC_HALF (1ULL<<18)
+
+# elif __x86_64__
+# define ISA_MASK 0x00007ffffffffff8ULL
+# define ISA_MAGIC_MASK 0x001f800000000001ULL
+# define ISA_MAGIC_VALUE 0x001d800000000001ULL
+# define ISA_BITFIELD \
+ uintptr_t nonpointer : 1; \
+ uintptr_t has_assoc : 1; \
+ uintptr_t has_cxx_dtor : 1; \
+ uintptr_t shiftcls : 44; /*MACH_VM_MAX_ADDRESS 0x7fffffe00000*/ \
+ uintptr_t magic : 6; \
+ uintptr_t weakly_referenced : 1; \
+ uintptr_t deallocating : 1; \
+ uintptr_t has_sidetable_rc : 1; \
+ uintptr_t extra_rc : 8
+# define RC_ONE (1ULL<<56)
+# define RC_HALF (1ULL<<7)
+
+# else
+# error unknown architecture for packed isa
+# endif
+
+// SUPPORT_PACKED_ISA
+#endif
+
+
+#if SUPPORT_INDEXED_ISA
+
+# if __ARM_ARCH_7K__ >= 2 || (__arm64__ && !__LP64__)
+ // armv7k or arm64_32
+
+# define ISA_INDEX_IS_NPI_BIT 0
+# define ISA_INDEX_IS_NPI_MASK 0x00000001
+# define ISA_INDEX_MASK 0x0001FFFC
+# define ISA_INDEX_SHIFT 2
+# define ISA_INDEX_BITS 15
+# define ISA_INDEX_COUNT (1 << ISA_INDEX_BITS)
+# define ISA_INDEX_MAGIC_MASK 0x001E0001
+# define ISA_INDEX_MAGIC_VALUE 0x001C0001
+# define ISA_BITFIELD \
+ uintptr_t nonpointer : 1; \
+ uintptr_t has_assoc : 1; \
+ uintptr_t indexcls : 15; \
+ uintptr_t magic : 4; \
+ uintptr_t has_cxx_dtor : 1; \
+ uintptr_t weakly_referenced : 1; \
+ uintptr_t deallocating : 1; \
+ uintptr_t has_sidetable_rc : 1; \
+ uintptr_t extra_rc : 7
+# define RC_ONE (1ULL<<25)
+# define RC_HALF (1ULL<<6)
+
+# else
+# error unknown architecture for indexed isa
+# endif
+
+// SUPPORT_INDEXED_ISA
+#endif
+
+
+// _OBJC_ISA_H_
+#endif
unsigned NXCountMapTable(NXMapTable *table) { return table->count; }
+#if __x86_64__
+extern "C" void __NXMAPTABLE_CORRUPTED__
+(const void *table, const void *buckets, uint64_t count,
+ uint64_t nbBucketsMinusOne, uint64_t badkeys, uint64_t index,
+ uint64_t index2, uint64_t pairIndexes, const void *key1,
+ const void *value1, const void *key2, const void *value2,
+ const void *key3, const void *value3);
+
+static int _mapStrIsEqual(NXMapTable *table, const void *key1, const void *key2);
+
+asm("\n .text"
+ "\n .private_extern ___NXMAPTABLE_CORRUPTED__"
+ "\n ___NXMAPTABLE_CORRUPTED__:"
+ // push a frame for the unwinder to see
+ "\n pushq %rbp"
+ "\n mov %rsp, %rbp"
+ // push register parameters to the stack in reverse order
+ "\n pushq %r9"
+ "\n pushq %r8"
+ "\n pushq %rcx"
+ "\n pushq %rdx"
+ "\n pushq %rsi"
+ "\n pushq %rdi"
+ // pop the pushed register parameters into their destinations
+ "\n popq %rax" // table
+ "\n popq %rbx" // buckets
+ "\n popq %rcx" // count
+ "\n popq %rdx" // nbBucketsMinusOne
+ "\n popq %rdi" // badkeys
+ "\n popq %rsi" // index
+ // read stack parameters into their destinations
+ "\n mov 0*8+16(%rbp), %r8" // index2
+ "\n mov 1*8+16(%rbp), %r9" // pairIndexes
+ "\n mov 2*8+16(%rbp), %r10" // key1
+ "\n mov 3*8+16(%rbp), %r11" // value1
+ "\n mov 4*8+16(%rbp), %r12" // key2
+ "\n mov 5*8+16(%rbp), %r13" // value2
+ "\n mov 6*8+16(%rbp), %r14" // key3
+ "\n mov 7*8+16(%rbp), %r15" // value3
+ "\n ud2");
+#endif
+
+// Look for a particular case of data corruption (rdar://36373000)
+// and investigate it further before crashing.
+static void validateKey(NXMapTable *table, MapPair *pair,
+ unsigned index, unsigned index2)
+{
+#if __x86_64__
+# define BADKEY ((void * _Nonnull)(0xfffffffffffffffeULL))
+ if (pair->key != BADKEY ||
+ table->prototype->isEqual != _mapStrIsEqual)
+ {
+ return;
+ }
+
+ _objc_inform_now_and_on_crash
+ ("NXMapTable %p (%p) has invalid key/value pair %p->%p (%p)",
+ table, table->buckets, pair->key, pair->value, pair);
+ _objc_inform_now_and_on_crash
+ ("table %p, buckets %p, count %u, nbBucketsMinusOne %u, "
+ "prototype %p (hash %p, isEqual %p, free %p)",
+ table, table->buckets, table->count, table->nbBucketsMinusOne,
+ table->prototype, table->prototype->hash, table->prototype->isEqual,
+ table->prototype->free);
+
+ // Count the number of bad keys in the table.
+ MapPair *pairs = (MapPair *)table->buckets;
+ unsigned badKeys = 0;
+ for (unsigned i = 0; i < table->nbBucketsMinusOne+1; i++) {
+ if (pairs[i].key == BADKEY) badKeys++;
+ }
+
+ _objc_inform_now_and_on_crash("%u invalid keys in table", badKeys);
+
+ // Record some additional key pairs for posterity.
+ unsigned pair2Index = nextIndex(table, index);
+ unsigned pair3Index = nextIndex(table, pair2Index);
+ MapPair *pair2 = pairs + pair2Index;
+ MapPair *pair3 = pairs + pair3Index;
+ uint64_t pairIndexes = ((uint64_t)pair2Index << 32) | pair3Index;
+
+ // Save a bunch of values to registers so we can see them in the crash log.
+ __NXMAPTABLE_CORRUPTED__
+ (// rax, rbx, rcx, rdx
+ table, table->buckets, table->count, table->nbBucketsMinusOne,
+ // rdi, rsi, skip rbp, skip rsp
+ badKeys, index,
+ // r8, r9, r10, r11
+ index2, pairIndexes, pair->key, pair->value,
+ // r12, r13, r14, r15
+ pair2->key, pair2->value, pair3->key, pair3->value);
+#endif
+}
+
static INLINE void *_NXMapMember(NXMapTable *table, const void *key, void **value) {
MapPair *pairs = (MapPair *)table->buckets;
unsigned index = bucketOf(table, key);
MapPair *pair = pairs + index;
if (pair->key == NX_MAPNOTAKEY) return NX_MAPNOTAKEY;
+ validateKey(table, pair, index, index);
+
if (isEqual(table, pair->key, key)) {
*value = (void *)pair->value;
return (void *)pair->key;
while ((index2 = nextIndex(table, index2)) != index) {
pair = pairs + index2;
if (pair->key == NX_MAPNOTAKEY) return NX_MAPNOTAKEY;
+ validateKey(table, pair, index, index2);
if (isEqual(table, pair->key, key)) {
*value = (void *)pair->value;
return (void *)pair->key;
while ((index2 = nextIndex(table, index2)) != index) {
pair = pairs + index2;
if (pair->key == NX_MAPNOTAKEY) {
- pair->key = key; pair->value = value;
+ pair->key = key; pair->value = value;
table->count++;
if (table->count * 4 > numBuckets * 3) _NXMapRehash(table);
return NULL;
#include <objc/runtime.h>
#include <objc/message.h>
+/* Linker metadata symbols */
+
+// NSObject was in Foundation/CF on macOS < 10.8.
+#if TARGET_OS_OSX
+#if __OBJC2__
+
+OBJC_EXPORT const char __objc_nsobject_class_10_5
+ __asm__("$ld$hide$os10.5$_OBJC_CLASS_$_NSObject");
+OBJC_EXPORT const char __objc_nsobject_class_10_6
+ __asm__("$ld$hide$os10.6$_OBJC_CLASS_$_NSObject");
+OBJC_EXPORT const char __objc_nsobject_class_10_7
+ __asm__("$ld$hide$os10.7$_OBJC_CLASS_$_NSObject");
+
+OBJC_EXPORT const char __objc_nsobject_metaclass_10_5
+ __asm__("$ld$hide$os10.5$_OBJC_METACLASS_$_NSObject");
+OBJC_EXPORT const char __objc_nsobject_metaclass_10_6
+ __asm__("$ld$hide$os10.6$_OBJC_METACLASS_$_NSObject");
+OBJC_EXPORT const char __objc_nsobject_metaclass_10_7
+ __asm__("$ld$hide$os10.7$_OBJC_METACLASS_$_NSObject");
+
+OBJC_EXPORT const char __objc_nsobject_isa_10_5
+ __asm__("$ld$hide$os10.5$_OBJC_IVAR_$_NSObject.isa");
+OBJC_EXPORT const char __objc_nsobject_isa_10_6
+ __asm__("$ld$hide$os10.6$_OBJC_IVAR_$_NSObject.isa");
+OBJC_EXPORT const char __objc_nsobject_isa_10_7
+ __asm__("$ld$hide$os10.7$_OBJC_IVAR_$_NSObject.isa");
+
+#else
+
+OBJC_EXPORT const char __objc_nsobject_class_10_5
+ __asm__("$ld$hide$os10.5$.objc_class_name_NSObject");
+OBJC_EXPORT const char __objc_nsobject_class_10_6
+ __asm__("$ld$hide$os10.6$.objc_class_name_NSObject");
+OBJC_EXPORT const char __objc_nsobject_class_10_7
+ __asm__("$ld$hide$os10.7$.objc_class_name_NSObject");
+
+#endif
+#endif
+
/* Runtime startup. */
// Old static initializer. Used by old crt1.o and old bug workarounds.
#endif
-#if TARGET_OS_OSX && defined(__x86_64__)
-// objc_msgSend_fixup() is used for vtable-dispatchable call sites.
+#if (TARGET_OS_OSX || TARGET_OS_SIMULATOR) && defined(__x86_64__)
+// objc_msgSend_fixup() was used for vtable-dispatchable call sites.
+// The symbols remain exported on macOS for binary compatibility.
+// The symbols can probably be removed from iOS simulator but we haven't tried.
OBJC_EXPORT void
objc_msgSend_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
OBJC_EXPORT void
objc_msgSend_stret_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
OBJC_EXPORT void
objc_msgSendSuper2_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
OBJC_EXPORT void
objc_msgSendSuper2_stret_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
OBJC_EXPORT void
objc_msgSend_fpret_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
OBJC_EXPORT void
objc_msgSend_fp2ret_fixup(void)
- __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ __OSX_DEPRECATED(10.5, 10.8, "fixup dispatch is no longer optimized");
#endif
/* C++-compatible exception handling. */
#if __OBJC2__
-// fixme these conflict with C++ compiler's internal definitions
-#if !defined(__cplusplus)
-
// Vtable for C++ exception typeinfo for Objective-C types.
-OBJC_EXPORT const void * _Nullableobjc_ehtype_vtable[]
+OBJC_EXPORT const void * _Nullable objc_ehtype_vtable[]
OBJC_AVAILABLE(10.5, 2.0, 9.0, 1.0, 2.0);
// C++ exception typeinfo for type `id`.
OBJC_EXPORT struct objc_typeinfo OBJC_EHTYPE_id
OBJC_AVAILABLE(10.5, 2.0, 9.0, 1.0, 2.0);
-#endif
-
// Exception personality function for Objective-C and Objective-C++ code.
struct _Unwind_Exception;
struct _Unwind_Context;
// Extract class pointer from an isa field.
-#if TARGET_OS_SIMULATOR
+#if TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC
// No simulators use nonpointer isa yet.
#elif __LP64__
OBJC_EXPORT const struct { char c; } objc_absolute_packed_isa_class_mask
OBJC_AVAILABLE(10.12, 10.0, 10.0, 3.0, 2.0);
-#elif __ARM_ARCH_7K__ >= 2
+#elif (__ARM_ARCH_7K__ >= 2 || (__arm64__ && !__LP64__))
# define OBJC_HAVE_NONPOINTER_ISA 1
# define OBJC_HAVE_INDEXED_NONPOINTER_ISA 1
#endif
-// OBJC2
#endif
+
+/* Object class */
+
+// This symbol might be required for binary compatibility, so we
+// declare it here where TAPI will see it.
+#if __OBJC__ && __OBJC2__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wobjc-interface-ivars"
+#if !defined(OBJC_DECLARE_SYMBOLS)
+__OSX_AVAILABLE(10.0)
+__IOS_UNAVAILABLE __TVOS_UNAVAILABLE
+__WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE
+#endif
+OBJC_ROOT_CLASS
+@interface Object {
+ Class isa;
+}
+@end
+#pragma clang diagnostic pop
+#endif
+
+
// _OBJC_ABI_H
#endif
# endif
#endif
-#ifndef __BRIDGEOS_AVAILABLE
-# define __BRIDGEOS_AVAILABLE(v)
-#endif
-#ifndef __BRIDGEOS_DEPRECATED
-# define __BRIDGEOS_DEPRECATED(v1, v2, m)
-#endif
-#ifndef __BRIDGEOS_UNAVAILABLE
-# define __BRIDGEOS_UNAVAILABLE
+#ifndef __APPLE_BLEACH_SDK__
+# if __has_feature(attribute_availability_bridgeos)
+# ifndef __BRIDGEOS_AVAILABLE
+# define __BRIDGEOS_AVAILABLE(_vers) __OS_AVAILABILITY(bridgeos,introduced=_vers)
+# endif
+# ifndef __BRIDGEOS_DEPRECATED
+# define __BRIDGEOS_DEPRECATED(_start, _dep, _msg) __BRIDGEOS_AVAILABLE(_start) __OS_AVAILABILITY_MSG(bridgeos,deprecated=_dep,_msg)
+# endif
+# ifndef __BRIDGEOS_UNAVAILABLE
+# define __BRIDGEOS_UNAVAILABLE __OS_AVAILABILITY(bridgeos,unavailable)
+# endif
+# else
+# ifndef __BRIDGEOS_AVAILABLE
+# define __BRIDGEOS_AVAILABLE(_vers)
+# endif
+# ifndef __BRIDGEOS_DEPRECATED
+# define __BRIDGEOS_DEPRECATED(_start, _dep, _msg)
+# endif
+# ifndef __BRIDGEOS_UNAVAILABLE
+# define __BRIDGEOS_UNAVAILABLE
+# endif
+# endif
#endif
/*
/* OBJC_OLD_DISPATCH_PROTOTYPES == 0 enforces the rule that the dispatch
* functions must be cast to an appropriate function pointer type. */
#if !defined(OBJC_OLD_DISPATCH_PROTOTYPES)
-# define OBJC_OLD_DISPATCH_PROTOTYPES 1
+# if __swift__
+ // Existing Swift code expects IMP to be Comparable.
+ // Variadic IMP is comparable via OpaquePointer; non-variadic IMP isn't.
+# define OBJC_OLD_DISPATCH_PROTOTYPES 1
+# else
+# define OBJC_OLD_DISPATCH_PROTOTYPES 1
+# endif
#endif
};
-#ifndef OBJC_NO_GC
+#if !defined(OBJC_NO_GC) || \
+ (OBJC_DECLARE_SYMBOLS && !defined(OBJC_NO_GC_API))
/* Out-of-line declarations */
* @APPLE_LICENSE_HEADER_END@
*/
+#define OBJC_DECLARE_SYMBOLS 1
#include "objc-private.h"
+#include "objc-auto.h"
// GC is no longer supported.
#else
// No GC but we do need to export GC symbols.
-// These are mostly the same as the OBJC_NO_GC inline versions in objc-auto.h.
# if !SUPPORT_GC_COMPAT
# error inconsistent config settings
# endif
-OBJC_EXPORT void objc_collect(unsigned long options __unused) { }
-OBJC_EXPORT BOOL objc_collectingEnabled(void) { return NO; }
-OBJC_EXPORT void objc_setCollectionThreshold(size_t threshold __unused) { }
-OBJC_EXPORT void objc_setCollectionRatio(size_t ratio __unused) { }
-OBJC_EXPORT void objc_startCollectorThread(void) { }
+void objc_collect(unsigned long options __unused) { }
+BOOL objc_collectingEnabled(void) { return NO; }
+void objc_setCollectionThreshold(size_t threshold __unused) { }
+void objc_setCollectionRatio(size_t ratio __unused) { }
+void objc_startCollectorThread(void) { }
#if TARGET_OS_WIN32
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation)
{ void *original = InterlockedCompareExchangePointer((void * volatile *)objectLocation, (void *)replacement, (void *)predicate); return (original == predicate); }
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation)
{ void *original = InterlockedCompareExchangePointer((void * volatile *)objectLocation, (void *)replacement, (void *)predicate); return (original == predicate); }
#else
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation)
{ return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation); }
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation)
{ return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation); }
#endif
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation)
{ return objc_atomicCompareAndSwapPtr(predicate, replacement, objectLocation); }
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation)
{ return objc_atomicCompareAndSwapPtrBarrier(predicate, replacement, objectLocation); }
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation)
{ return objc_atomicCompareAndSwapPtr(predicate, replacement, objectLocation); }
-OBJC_EXPORT BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation)
+BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation)
{ return objc_atomicCompareAndSwapPtrBarrier(predicate, replacement, objectLocation); }
-OBJC_EXPORT id objc_assign_strongCast(id val, id *dest)
+id objc_assign_strongCast(id val, id *dest)
{ return (*dest = val); }
-OBJC_EXPORT id objc_assign_global(id val, id *dest)
+id objc_assign_global(id val, id *dest)
{ return (*dest = val); }
-OBJC_EXPORT id objc_assign_threadlocal(id val, id *dest)
+id objc_assign_threadlocal(id val, id *dest)
{ return (*dest = val); }
-OBJC_EXPORT id objc_assign_ivar(id val, id dest, ptrdiff_t offset)
+id objc_assign_ivar(id val, id dest, ptrdiff_t offset)
{ return (*(id*)((char *)dest+offset) = val); }
-OBJC_EXPORT id objc_read_weak(id *location)
+id objc_read_weak(id *location)
{ return *location; }
-OBJC_EXPORT id objc_assign_weak(id value, id *location)
+id objc_assign_weak(id value, id *location)
{ return (*location = value); }
-OBJC_EXPORT void *objc_memmove_collectable(void *dst, const void *src, size_t size)
+void *objc_memmove_collectable(void *dst, const void *src, size_t size)
{ return memmove(dst, src, size); }
-OBJC_EXPORT void objc_finalizeOnMainThread(Class cls __unused) { }
-OBJC_EXPORT BOOL objc_is_finalized(void *ptr __unused) { return NO; }
-OBJC_EXPORT void objc_clear_stack(unsigned long options __unused) { }
+void objc_finalizeOnMainThread(Class cls __unused) { }
+BOOL objc_is_finalized(void *ptr __unused) { return NO; }
+void objc_clear_stack(unsigned long options __unused) { }
-OBJC_EXPORT BOOL objc_collecting_enabled(void) { return NO; }
-OBJC_EXPORT void objc_set_collection_threshold(size_t threshold __unused) { }
-OBJC_EXPORT void objc_set_collection_ratio(size_t ratio __unused) { }
-OBJC_EXPORT void objc_start_collector_thread(void) { }
+BOOL objc_collecting_enabled(void) { return NO; }
+void objc_set_collection_threshold(size_t threshold __unused) { }
+void objc_set_collection_ratio(size_t ratio __unused) { }
+void objc_start_collector_thread(void) { }
-OBJC_EXPORT id objc_allocate_object(Class cls, int extra)
+id objc_allocate_object(Class cls, int extra)
{ return class_createInstance(cls, extra); }
-OBJC_EXPORT void objc_registerThreadWithCollector() { }
-OBJC_EXPORT void objc_unregisterThreadWithCollector() { }
-OBJC_EXPORT void objc_assertRegisteredThreadWithCollector() { }
+void objc_registerThreadWithCollector() { }
+void objc_unregisterThreadWithCollector() { }
+void objc_assertRegisteredThreadWithCollector() { }
-OBJC_EXPORT malloc_zone_t* objc_collect_init(int(*callback)() __unused) { return nil; }
-OBJC_EXPORT void* objc_collectableZone() { return nil; }
+malloc_zone_t* objc_collect_init(int(*callback)() __unused) { return nil; }
+malloc_zone_t* objc_collectableZone() { return nil; }
-OBJC_EXPORT BOOL objc_isAuto(id object __unused) { return NO; }
-OBJC_EXPORT BOOL objc_dumpHeap(char *filename __unused, unsigned long length __unused)
+BOOL objc_isAuto(id object __unused) { return NO; }
+BOOL objc_dumpHeap(char *filename __unused, unsigned long length __unused)
{ return NO; }
// not OBJC_NO_GC_API
--- /dev/null
+/*
+ * Copyright (c) 2018 Apple Inc. All Rights Reserved.
+ *
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+
+
+#ifndef _OBJC_TRAMPOLINES_H
+#define _OBJC_TRAMPOLINES_H
+
+/*
+ * WARNING DANGER HAZARD BEWARE EEK
+ *
+ * Everything in this file is for Apple Internal use only.
+ * These will change in arbitrary OS updates and in unpredictable ways.
+ * When your program breaks, you get to keep both pieces.
+ */
+
+/*
+ * objc-block-trampolines.h: Symbols for IMP block trampolines
+ */
+
+#include <objc/objc-api.h>
+
+OBJC_EXPORT const char _objc_blockTrampolineImpl
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
+OBJC_EXPORT const char _objc_blockTrampolineStart
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
+OBJC_EXPORT const char _objc_blockTrampolineLast
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
+
+OBJC_EXPORT const char _objc_blockTrampolineImpl_stret
+OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0)
+ OBJC_ARM64_UNAVAILABLE;
+
+OBJC_EXPORT const char _objc_blockTrampolineStart_stret
+OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0)
+ OBJC_ARM64_UNAVAILABLE;
+
+OBJC_EXPORT const char _objc_blockTrampolineLast_stret
+OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0)
+ OBJC_ARM64_UNAVAILABLE;
+
+#endif
#include <Block.h>
#include <Block_private.h>
#include <mach/mach.h>
+#include <objc/objc-block-trampolines.h>
-// symbols defined in assembly files
-// Don't use the symbols directly; they're thumb-biased on some ARM archs.
-#define TRAMP(tramp) \
- static inline __unused uintptr_t tramp(void) { \
- extern void *_##tramp; \
- return ((uintptr_t)&_##tramp) & ~1UL; \
- }
-// Scalar return
-TRAMP(a1a2_tramphead); // trampoline header code
-TRAMP(a1a2_firsttramp); // first trampoline
-TRAMP(a1a2_trampend); // after the last trampoline
+// fixme C++ compilers don't implement memory_order_consume efficiently.
+// Use memory_order_relaxed and cross our fingers.
+#define MEMORY_ORDER_CONSUME std::memory_order_relaxed
-#if SUPPORT_STRET
-// Struct return
-TRAMP(a2a3_tramphead);
-TRAMP(a2a3_firsttramp);
-TRAMP(a2a3_trampend);
+// 8 bytes of text and data per trampoline on all architectures.
+#define SLOT_SIZE 8
+
+// The trampolines are defined in assembly files in libobjc-trampolines.dylib.
+// We can't link to libobjc-trampolines.dylib directly because
+// for security reasons it isn't in the dyld shared cache.
+
+// Trampoline addresses are lazily looked up.
+// All of them are hidden behind a single atomic pointer for lock-free init.
+
+#ifdef __PTRAUTH_INTRINSICS__
+# define TrampolinePtrauth __ptrauth(ptrauth_key_function_pointer, 1, 0x3af1)
+#else
+# define TrampolinePtrauth
+#endif
+
+class TrampolinePointerWrapper {
+ struct TrampolinePointers {
+ class TrampolineAddress {
+ const void * TrampolinePtrauth storage;
+
+ public:
+ TrampolineAddress(void *dylib, const char *name) {
+#define PREFIX "_objc_blockTrampoline"
+ char symbol[strlen(PREFIX) + strlen(name) + 1];
+ strcpy(symbol, PREFIX);
+ strcat(symbol, name);
+ // dlsym() from a text segment returns a signed pointer
+ // Authenticate it manually and let the compiler re-sign it.
+ storage = ptrauth_auth_data(dlsym(dylib, symbol),
+ ptrauth_key_function_pointer, 0);
+ if (!storage) {
+ _objc_fatal("couldn't dlsym %s", symbol);
+ }
+ }
+
+ uintptr_t address() {
+ return (uintptr_t)(void*)storage;
+ }
+ };
+
+ TrampolineAddress impl; // trampoline header code
+ TrampolineAddress start; // first trampoline
+#if DEBUG
+ // These symbols are only used in assertions.
+ // fixme might be able to move the assertions to libobjc-trampolines itself
+ TrampolineAddress last; // start of the last trampoline
+ // We don't use the address after the last trampoline because that
+ // address might be in a different section, and then dlsym() would not
+ // sign it as a function pointer.
+# if SUPPORT_STRET
+ TrampolineAddress impl_stret;
+ TrampolineAddress start_stret;
+ TrampolineAddress last_stret;
+# endif
+#endif
+
+ uintptr_t textSegment;
+ uintptr_t textSegmentSize;
+
+ void check() {
+#if DEBUG
+ assert(impl.address() == textSegment + PAGE_MAX_SIZE);
+ assert(impl.address() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE
+ assert(impl.address() + PAGE_MAX_SIZE ==
+ last.address() + SLOT_SIZE);
+ assert(last.address()+8 < textSegment + textSegmentSize);
+ assert((last.address() - start.address()) % SLOT_SIZE == 0);
+# if SUPPORT_STRET
+ assert(impl_stret.address() == textSegment + 2*PAGE_MAX_SIZE);
+ assert(impl_stret.address() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE
+ assert(impl_stret.address() + PAGE_MAX_SIZE ==
+ last_stret.address() + SLOT_SIZE);
+ assert(start.address() - impl.address() ==
+ start_stret.address() - impl_stret.address());
+ assert(last_stret.address() + SLOT_SIZE <
+ textSegment + textSegmentSize);
+ assert((last_stret.address() - start_stret.address())
+ % SLOT_SIZE == 0);
+# endif
+#endif
+ }
+
+
+ TrampolinePointers(void *dylib)
+ : impl(dylib, "Impl")
+ , start(dylib, "Start")
+#if DEBUG
+ , last(dylib, "Last")
+# if SUPPORT_STRET
+ , impl_stret(dylib, "Impl_stret")
+ , start_stret(dylib, "Start_stret")
+ , last_stret(dylib, "Last_stret")
+# endif
#endif
+ {
+ const auto *mh =
+ dyld_image_header_containing_address((void *)impl.address());
+ unsigned long size = 0;
+ textSegment = (uintptr_t)
+ getsegmentdata((headerType *)mh, "__TEXT", &size);
+ textSegmentSize = size;
+
+ check();
+ }
+ };
+
+ std::atomic<TrampolinePointers *> trampolines{nil};
+
+ TrampolinePointers *get() {
+ return trampolines.load(MEMORY_ORDER_CONSUME);
+ }
+
+public:
+ void Initialize() {
+ if (get()) return;
+
+ // This code may be called concurrently.
+ // In the worst case we perform extra dyld operations.
+ void *dylib = dlopen("/usr/lib/libobjc-trampolines.dylib",
+ RTLD_NOW | RTLD_LOCAL | RTLD_FIRST);
+ if (!dylib) {
+ _objc_fatal("couldn't dlopen libobjc-trampolines.dylib");
+ }
+
+ auto t = new TrampolinePointers(dylib);
+ TrampolinePointers *old = nil;
+ if (! trampolines.compare_exchange_strong(old, t, memory_order_release))
+ {
+ delete t; // Lost an initialization race.
+ }
+ }
+
+ uintptr_t textSegment() { return get()->textSegment; }
+ uintptr_t textSegmentSize() { return get()->textSegmentSize; }
+
+ uintptr_t impl() { return get()->impl.address(); }
+ uintptr_t start() { return get()->start.address(); }
+};
+
+static TrampolinePointerWrapper Trampolines;
// argument mode identifier
typedef enum {
ArgumentModeCount
} ArgumentMode;
-
// We must take care with our data layout on architectures that support
// multiple page sizes.
//
// The trampoline template in __TEXT is sized and aligned with PAGE_MAX_SIZE.
// On some platforms this requires additional linker flags.
//
-// When we allocate a page pair, we use PAGE_MAX_SIZE size.
+// When we allocate a page group, we use PAGE_MAX_SIZE size.
// This allows trampoline code to find its data by subtracting PAGE_MAX_SIZE.
//
-// When we allocate a page pair, we use the process's page alignment.
+// When we allocate a page group, we use the process's page alignment.
// This simplifies allocation because we don't need to force greater than
// default alignment when running with small pages, but it also means
// the trampoline code MUST NOT look for its data by masking with PAGE_MAX_MASK.
-struct TrampolineBlockPagePair
+struct TrampolineBlockPageGroup
{
- TrampolineBlockPagePair *nextPagePair; // linked list of all pages
- TrampolineBlockPagePair *nextAvailablePage; // linked list of pages with available slots
+ TrampolineBlockPageGroup *nextPageGroup; // linked list of all pages
+ TrampolineBlockPageGroup *nextAvailablePage; // linked list of pages with available slots
uintptr_t nextAvailable; // index of next available slot, endIndex() if no more available
// Payload data: block pointers and free list.
// Bytes parallel with trampoline header code are the fields above or unused
- // uint8_t blocks[ PAGE_MAX_SIZE - sizeof(TrampolineBlockPagePair) ]
-
- // Code: trampoline header followed by trampolines.
- // uint8_t trampolines[PAGE_MAX_SIZE];
+ // uint8_t payloads[PAGE_MAX_SIZE - sizeof(TrampolineBlockPageGroup)]
+
+ // Code: Mach-O header, then trampoline header followed by trampolines.
+ // On platforms with struct return we have non-stret trampolines and
+ // stret trampolines. The stret and non-stret trampolines at a given
+ // index share the same data page.
+ // uint8_t macho[PAGE_MAX_SIZE];
+ // uint8_t trampolines[ArgumentModeCount][PAGE_MAX_SIZE];
// Per-trampoline block data format:
// initial value is 0 while page data is filled sequentially
};
static uintptr_t headerSize() {
- return (uintptr_t) (a1a2_firsttramp() - a1a2_tramphead());
+ return (uintptr_t) (Trampolines.start() - Trampolines.impl());
}
static uintptr_t slotSize() {
- return 8;
+ return SLOT_SIZE;
}
static uintptr_t startIndex() {
return (Payload *)((char *)this + index*slotSize());
}
- IMP trampoline(uintptr_t index) {
+ uintptr_t trampolinesForMode(int aMode) {
+ // Skip over data page and Mach-O page.
+ return (uintptr_t)this + PAGE_MAX_SIZE * (2 + aMode);
+ }
+
+ IMP trampoline(int aMode, uintptr_t index) {
assert(validIndex(index));
- char *imp = (char *)this + index*slotSize() + PAGE_MAX_SIZE;
+ char *base = (char *)trampolinesForMode(aMode);
+ char *imp = base + index*slotSize();
#if __arm__
imp++; // trampoline is Thumb instructions
+#endif
+#if __has_feature(ptrauth_calls)
+ imp = ptrauth_sign_unauthenticated(imp,
+ ptrauth_key_function_pointer, 0);
#endif
return (IMP)imp;
}
- uintptr_t indexForTrampoline(IMP tramp) {
- uintptr_t tramp0 = (uintptr_t)this + PAGE_MAX_SIZE;
- uintptr_t start = tramp0 + headerSize();
- uintptr_t end = tramp0 + PAGE_MAX_SIZE;
- uintptr_t address = (uintptr_t)tramp;
- if (address >= start && address < end) {
- return (uintptr_t)(address - tramp0) / slotSize();
+ uintptr_t indexForTrampoline(uintptr_t tramp) {
+ for (int aMode = 0; aMode < ArgumentModeCount; aMode++) {
+ uintptr_t base = trampolinesForMode(aMode);
+ uintptr_t start = base + startIndex() * slotSize();
+ uintptr_t end = base + endIndex() * slotSize();
+ if (tramp >= start && tramp < end) {
+ return (uintptr_t)(tramp - base) / slotSize();
+ }
}
return 0;
}
static void check() {
- assert(TrampolineBlockPagePair::slotSize() == 8);
- assert(TrampolineBlockPagePair::headerSize() >= sizeof(TrampolineBlockPagePair));
- assert(TrampolineBlockPagePair::headerSize() % TrampolineBlockPagePair::slotSize() == 0);
-
- // _objc_inform("%p %p %p", a1a2_tramphead(), a1a2_firsttramp(),
- // a1a2_trampend());
- assert(a1a2_tramphead() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE
- assert(a1a2_tramphead() + PAGE_MAX_SIZE == a1a2_trampend());
-#if SUPPORT_STRET
- // _objc_inform("%p %p %p", a2a3_tramphead(), a2a3_firsttramp(),
- // a2a3_trampend());
- assert(a2a3_tramphead() % PAGE_SIZE == 0); // not PAGE_MAX_SIZE
- assert(a2a3_tramphead() + PAGE_MAX_SIZE == a2a3_trampend());
-#endif
-
-#if __arm__
- // make sure trampolines are Thumb
- extern void *_a1a2_firsttramp;
- extern void *_a2a3_firsttramp;
- assert(((uintptr_t)&_a1a2_firsttramp) % 2 == 1);
- assert(((uintptr_t)&_a2a3_firsttramp) % 2 == 1);
-#endif
+ assert(TrampolineBlockPageGroup::headerSize() >= sizeof(TrampolineBlockPageGroup));
+ assert(TrampolineBlockPageGroup::headerSize() % TrampolineBlockPageGroup::slotSize() == 0);
}
};
-// two sets of trampoline pages; one for stack returns and one for register returns
-static TrampolineBlockPagePair *headPagePairs[ArgumentModeCount];
+static TrampolineBlockPageGroup *HeadPageGroup;
#pragma mark Utility Functions
-static inline void _lock() {
-#if __OBJC2__
- runtimeLock.write();
-#else
- classLock.lock();
-#endif
-}
-
-static inline void _unlock() {
-#if __OBJC2__
- runtimeLock.unlockWrite();
-#else
- classLock.unlock();
-#endif
-}
-
-static inline void _assert_locked() {
-#if __OBJC2__
- runtimeLock.assertWriting();
-#else
- classLock.assertLocked();
+#if !__OBJC2__
+#define runtimeLock classLock
#endif
-}
#pragma mark Trampoline Management Functions
-static TrampolineBlockPagePair *_allocateTrampolinesAndData(ArgumentMode aMode)
+static TrampolineBlockPageGroup *_allocateTrampolinesAndData()
{
- _assert_locked();
+ runtimeLock.assertLocked();
vm_address_t dataAddress;
- TrampolineBlockPagePair::check();
-
- TrampolineBlockPagePair *headPagePair = headPagePairs[aMode];
-
- assert(headPagePair == nil || headPagePair->nextAvailablePage == nil);
-
+ TrampolineBlockPageGroup::check();
+
+ // Our final mapping will look roughly like this:
+ // r/w data
+ // r/o text mapped from libobjc-trampolines.dylib
+ // with fixed offsets from the text to the data embedded in the text.
+ //
+ // More precisely it will look like this:
+ // 1 page r/w data
+ // 1 page libobjc-trampolines.dylib Mach-O header
+ // N pages trampoline code, one for each ArgumentMode
+ // M pages for the rest of libobjc-trampolines' TEXT segment.
+ // The kernel requires that we remap the entire TEXT segment every time.
+ // We assume that our code begins on the second TEXT page, but are robust
+ // against other additions to the end of the TEXT segment.
+
+ assert(HeadPageGroup == nil || HeadPageGroup->nextAvailablePage == nil);
+
+ auto textSource = Trampolines.textSegment();
+ auto textSourceSize = Trampolines.textSegmentSize();
+ auto dataSize = PAGE_MAX_SIZE;
+
+ // Allocate a single contiguous region big enough to hold data+text.
kern_return_t result;
- result = vm_allocate(mach_task_self(), &dataAddress, PAGE_MAX_SIZE * 2,
+ result = vm_allocate(mach_task_self(), &dataAddress,
+ dataSize + textSourceSize,
VM_FLAGS_ANYWHERE | VM_MAKE_TAG(VM_MEMORY_FOUNDATION));
if (result != KERN_SUCCESS) {
_objc_fatal("vm_allocate trampolines failed (%d)", result);
}
- vm_address_t codeAddress = dataAddress + PAGE_MAX_SIZE;
-
- uintptr_t codePage;
- switch(aMode) {
- case ReturnValueInRegisterArgumentMode:
- codePage = a1a2_tramphead();
- break;
-#if SUPPORT_STRET
- case ReturnValueOnStackArgumentMode:
- codePage = a2a3_tramphead();
- break;
-#endif
- default:
- _objc_fatal("unknown return mode %d", (int)aMode);
- break;
- }
-
+ // Remap libobjc-trampolines' TEXT segment atop all
+ // but the first of the pages we just allocated:
+ vm_address_t textDest = dataAddress + dataSize;
vm_prot_t currentProtection, maxProtection;
- result = vm_remap(mach_task_self(), &codeAddress, PAGE_MAX_SIZE,
+ result = vm_remap(mach_task_self(), &textDest,
+ textSourceSize,
0, VM_FLAGS_FIXED | VM_FLAGS_OVERWRITE,
- mach_task_self(), codePage, TRUE,
+ mach_task_self(), textSource, TRUE,
+ &currentProtection, &maxProtection, VM_INHERIT_SHARE);
if (result != KERN_SUCCESS) {
- // vm_deallocate(mach_task_self(), dataAddress, PAGE_MAX_SIZE * 2);
_objc_fatal("vm_remap trampolines failed (%d)", result);
}
+
+ TrampolineBlockPageGroup *pageGroup = (TrampolineBlockPageGroup *) dataAddress;
+ pageGroup->nextAvailable = pageGroup->startIndex();
+ pageGroup->nextPageGroup = nil;
+ pageGroup->nextAvailablePage = nil;
- TrampolineBlockPagePair *pagePair = (TrampolineBlockPagePair *) dataAddress;
- pagePair->nextAvailable = pagePair->startIndex();
- pagePair->nextPagePair = nil;
- pagePair->nextAvailablePage = nil;
-
- if (headPagePair) {
- TrampolineBlockPagePair *lastPagePair = headPagePair;
- while(lastPagePair->nextPagePair) {
- lastPagePair = lastPagePair->nextPagePair;
+ if (HeadPageGroup) {
+ TrampolineBlockPageGroup *lastPageGroup = HeadPageGroup;
+ while(lastPageGroup->nextPageGroup) {
+ lastPageGroup = lastPageGroup->nextPageGroup;
}
- lastPagePair->nextPagePair = pagePair;
- headPagePairs[aMode]->nextAvailablePage = pagePair;
+ lastPageGroup->nextPageGroup = pageGroup;
+ HeadPageGroup->nextAvailablePage = pageGroup;
} else {
- headPagePairs[aMode] = pagePair;
+ HeadPageGroup = pageGroup;
}
- return pagePair;
+ return pageGroup;
}
-static TrampolineBlockPagePair *
-_getOrAllocatePagePairWithNextAvailable(ArgumentMode aMode)
+static TrampolineBlockPageGroup *
+getOrAllocatePageGroupWithNextAvailable()
{
- _assert_locked();
+ runtimeLock.assertLocked();
- TrampolineBlockPagePair *headPagePair = headPagePairs[aMode];
-
- if (!headPagePair)
- return _allocateTrampolinesAndData(aMode);
+ if (!HeadPageGroup)
+ return _allocateTrampolinesAndData();
// make sure head page is filled first
- if (headPagePair->nextAvailable != headPagePair->endIndex())
- return headPagePair;
+ if (HeadPageGroup->nextAvailable != HeadPageGroup->endIndex())
+ return HeadPageGroup;
- if (headPagePair->nextAvailablePage) // check if there is a page w/a hole
- return headPagePair->nextAvailablePage;
+ if (HeadPageGroup->nextAvailablePage) // check if there is a page w/a hole
+ return HeadPageGroup->nextAvailablePage;
- return _allocateTrampolinesAndData(aMode); // tack on a new one
+ return _allocateTrampolinesAndData(); // tack on a new one
}
-static TrampolineBlockPagePair *
-_pageAndIndexContainingIMP(IMP anImp, uintptr_t *outIndex,
- TrampolineBlockPagePair **outHeadPagePair)
+static TrampolineBlockPageGroup *
+pageAndIndexContainingIMP(IMP anImp, uintptr_t *outIndex)
{
- _assert_locked();
+ runtimeLock.assertLocked();
- for (int arg = 0; arg < ArgumentModeCount; arg++) {
- for (TrampolineBlockPagePair *pagePair = headPagePairs[arg];
- pagePair;
- pagePair = pagePair->nextPagePair)
- {
- uintptr_t index = pagePair->indexForTrampoline(anImp);
- if (index) {
- if (outIndex) *outIndex = index;
- if (outHeadPagePair) *outHeadPagePair = headPagePairs[arg];
- return pagePair;
- }
+ // Authenticate as a function pointer, returning an un-signed address.
+ uintptr_t trampAddress =
+ (uintptr_t)ptrauth_auth_data((const char *)anImp,
+ ptrauth_key_function_pointer, 0);
+
+ for (TrampolineBlockPageGroup *pageGroup = HeadPageGroup;
+ pageGroup;
+ pageGroup = pageGroup->nextPageGroup)
+ {
+ uintptr_t index = pageGroup->indexForTrampoline(trampAddress);
+ if (index) {
+ if (outIndex) *outIndex = index;
+ return pageGroup;
}
}
static ArgumentMode
-_argumentModeForBlock(id block)
+argumentModeForBlock(id block)
{
ArgumentMode aMode = ReturnValueInRegisterArgumentMode;
IMP
_imp_implementationWithBlockNoCopy(id block)
{
- _assert_locked();
+ runtimeLock.assertLocked();
- ArgumentMode aMode = _argumentModeForBlock(block);
+ TrampolineBlockPageGroup *pageGroup =
+ getOrAllocatePageGroupWithNextAvailable();
- TrampolineBlockPagePair *pagePair =
- _getOrAllocatePagePairWithNextAvailable(aMode);
- if (!headPagePairs[aMode])
- headPagePairs[aMode] = pagePair;
-
- uintptr_t index = pagePair->nextAvailable;
- assert(index >= pagePair->startIndex() && index < pagePair->endIndex());
- TrampolineBlockPagePair::Payload *payload = pagePair->payload(index);
+ uintptr_t index = pageGroup->nextAvailable;
+ assert(index >= pageGroup->startIndex() && index < pageGroup->endIndex());
+ TrampolineBlockPageGroup::Payload *payload = pageGroup->payload(index);
uintptr_t nextAvailableIndex = payload->nextAvailable;
if (nextAvailableIndex == 0) {
// If the page is now full this will now be endIndex(), handled below.
nextAvailableIndex = index + 1;
}
- pagePair->nextAvailable = nextAvailableIndex;
- if (nextAvailableIndex == pagePair->endIndex()) {
- // PagePair is now full (free list or wilderness exhausted)
+ pageGroup->nextAvailable = nextAvailableIndex;
+ if (nextAvailableIndex == pageGroup->endIndex()) {
+ // PageGroup is now full (free list or wilderness exhausted)
// Remove from available page linked list
- TrampolineBlockPagePair *iterator = headPagePairs[aMode];
- while(iterator && (iterator->nextAvailablePage != pagePair)) {
+ TrampolineBlockPageGroup *iterator = HeadPageGroup;
+ while(iterator && (iterator->nextAvailablePage != pageGroup)) {
iterator = iterator->nextAvailablePage;
}
if (iterator) {
- iterator->nextAvailablePage = pagePair->nextAvailablePage;
- pagePair->nextAvailablePage = nil;
+ iterator->nextAvailablePage = pageGroup->nextAvailablePage;
+ pageGroup->nextAvailablePage = nil;
}
}
payload->block = block;
- return pagePair->trampoline(index);
+ return pageGroup->trampoline(argumentModeForBlock(block), index);
}
#pragma mark Public API
IMP imp_implementationWithBlock(id block)
{
+ // Block object must be copied outside runtimeLock
+ // because it performs arbitrary work.
block = Block_copy(block);
- _lock();
- IMP returnIMP = _imp_implementationWithBlockNoCopy(block);
- _unlock();
- return returnIMP;
+
+ // Trampolines must be initialized outside runtimeLock
+ // because it calls dlopen().
+ Trampolines.Initialize();
+
+ mutex_locker_t lock(runtimeLock);
+
+ return _imp_implementationWithBlockNoCopy(block);
}
id imp_getBlock(IMP anImp) {
uintptr_t index;
- TrampolineBlockPagePair *pagePair;
+ TrampolineBlockPageGroup *pageGroup;
if (!anImp) return nil;
- _lock();
+ mutex_locker_t lock(runtimeLock);
- pagePair = _pageAndIndexContainingIMP(anImp, &index, nil);
+ pageGroup = pageAndIndexContainingIMP(anImp, &index);
- if (!pagePair) {
- _unlock();
+ if (!pageGroup) {
return nil;
}
- TrampolineBlockPagePair::Payload *payload = pagePair->payload(index);
+ TrampolineBlockPageGroup::Payload *payload = pageGroup->payload(index);
- if (payload->nextAvailable <= TrampolineBlockPagePair::endIndex()) {
+ if (payload->nextAvailable <= TrampolineBlockPageGroup::endIndex()) {
// unallocated
- _unlock();
return nil;
}
- _unlock();
-
return payload->block;
}
BOOL imp_removeBlock(IMP anImp) {
- TrampolineBlockPagePair *pagePair;
- TrampolineBlockPagePair *headPagePair;
- uintptr_t index;
if (!anImp) return NO;
-
- _lock();
- pagePair = _pageAndIndexContainingIMP(anImp, &index, &headPagePair);
-
- if (!pagePair) {
- _unlock();
- return NO;
- }
- TrampolineBlockPagePair::Payload *payload = pagePair->payload(index);
- id block = payload->block;
- // block is released below
+ id block;
- payload->nextAvailable = pagePair->nextAvailable;
- pagePair->nextAvailable = index;
-
- // make sure this page is on available linked list
- TrampolineBlockPagePair *pagePairIterator = headPagePair;
-
- // see if page is the next available page for any existing pages
- while (pagePairIterator->nextAvailablePage &&
- pagePairIterator->nextAvailablePage != pagePair)
{
- pagePairIterator = pagePairIterator->nextAvailablePage;
- }
+ mutex_locker_t lock(runtimeLock);
- if (! pagePairIterator->nextAvailablePage) {
- // if iteration stopped because nextAvail was nil
- // add to end of list.
- pagePairIterator->nextAvailablePage = pagePair;
- pagePair->nextAvailablePage = nil;
+ uintptr_t index;
+ TrampolineBlockPageGroup *pageGroup =
+ pageAndIndexContainingIMP(anImp, &index);
+
+ if (!pageGroup) {
+ return NO;
+ }
+
+ TrampolineBlockPageGroup::Payload *payload = pageGroup->payload(index);
+ block = payload->block;
+ // block is released below, outside the lock
+
+ payload->nextAvailable = pageGroup->nextAvailable;
+ pageGroup->nextAvailable = index;
+
+ // make sure this page is on available linked list
+ TrampolineBlockPageGroup *pageGroupIterator = HeadPageGroup;
+
+ // see if page is the next available page for any existing pages
+ while (pageGroupIterator->nextAvailablePage &&
+ pageGroupIterator->nextAvailablePage != pageGroup)
+ {
+ pageGroupIterator = pageGroupIterator->nextAvailablePage;
+ }
+
+ if (! pageGroupIterator->nextAvailablePage) {
+ // if iteration stopped because nextAvail was nil
+ // add to end of list.
+ pageGroupIterator->nextAvailablePage = pageGroup;
+ pageGroup->nextAvailablePage = nil;
+ }
}
-
- _unlock();
+
+ // do this AFTER dropping the lock
Block_release(block);
return YES;
}
--- /dev/null
+#if __arm__
+
+#include <arm/arch.h>
+#include <mach/vm_param.h>
+
+.syntax unified
+
+.text
+.globl __objc_blockTrampolineImpl
+.globl __objc_blockTrampolineStart
+.globl __objc_blockTrampolineLast
+
+// Trampoline machinery assumes the trampolines are Thumb function pointers
+#if !__thumb2__
+# error sorry
+#endif
+
+.thumb
+
+// Exported symbols are not marked as functions.
+// The trampoline construction code assumes that the Thumb bit is not set.
+.thumb_func L__objc_blockTrampolineImpl_func
+
+.align PAGE_MAX_SHIFT
+__objc_blockTrampolineImpl:
+L__objc_blockTrampolineImpl_func:
+ /*
+ r0 == self
+ r12 == pc of trampoline's first instruction + PC bias
+ lr == original return address
+ */
+
+ mov r1, r0 // _cmd = self
+
+ // Trampoline's data is two pages before the trampoline text
+ // (the r/w data page, then the Mach-O header page).
+ // Also correct PC bias of 4 bytes.
+ sub r12, # 2*PAGE_MAX_SIZE
+ ldr r0, [r12, #-4] // self = block object
+ ldr pc, [r0, #12] // tail call block->invoke
+ // not reached
+
+ // Align trampolines to 8 bytes
+.align 3
+
+.macro TrampolineEntry
+ mov r12, pc
+ b L__objc_blockTrampolineImpl_func
+.align 3
+.endmacro
+
+.macro TrampolineEntryX16
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+.endmacro
+
+.macro TrampolineEntryX256
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+.endmacro
+
+__objc_blockTrampolineStart:
+ // 2048-2 trampolines to fill 16K page
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+__objc_blockTrampolineLast:
+ TrampolineEntry
+ // TrampolineEntry
+ // TrampolineEntry
+
+
+
+.text
+.globl __objc_blockTrampolineImpl_stret
+.globl __objc_blockTrampolineStart_stret
+.globl __objc_blockTrampolineLast_stret
+
+// Trampoline machinery assumes the trampolines are Thumb function pointers
+#if !__thumb2__
+# error sorry
+#endif
+
+.thumb
+
+// Exported symbols are not marked as functions.
+// The trampoline construction code assumes that the Thumb bit is not set.
+.thumb_func L__objc_blockTrampolineImpl_stret_func
+
+.align PAGE_MAX_SHIFT
+__objc_blockTrampolineImpl_stret:
+L__objc_blockTrampolineImpl_stret_func:
+ /*
+ r1 == self
+ r12 == pc of trampoline's first instruction + PC bias
+ lr == original return address
+ */
+
+ mov r2, r1 // _cmd = self
+
+ // Trampoline's data is three pages before the stret trampoline text
+ // (the r/w data page, the Mach-O header page, then the non-stret page).
+ // Also correct PC bias of 4 bytes.
+ sub r12, # 3*PAGE_MAX_SIZE
+ ldr r1, [r12, #-4] // self = block object
+ ldr pc, [r1, #12] // tail call block->invoke
+ // not reached
+
+ // Align trampolines to 8 bytes
+.align 3
+
+.macro TrampolineEntry_stret
+ mov r12, pc
+ b L__objc_blockTrampolineImpl_stret_func
+.align 3
+.endmacro
+
+.macro TrampolineEntryX16_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+.endmacro
+
+.macro TrampolineEntryX256_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+.endmacro
+
+__objc_blockTrampolineStart_stret:
+ // 2048-2 trampolines to fill 16K page
+ TrampolineEntryX256_stret
+ TrampolineEntryX256_stret
+ TrampolineEntryX256_stret
+ TrampolineEntryX256_stret
+
+ TrampolineEntryX256_stret
+ TrampolineEntryX256_stret
+ TrampolineEntryX256_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+ TrampolineEntryX16_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+
+ TrampolineEntry_stret
+__objc_blockTrampolineLast_stret:
+ TrampolineEntry_stret
+ // TrampolineEntry_stret
+ // TrampolineEntry_stret
+
+#endif
--- /dev/null
+#if __arm64__
+
+#include <mach/vm_param.h>
+#include "arm64-asm.h"
+
+// Offset of block->invoke field.
+#if __LP64__
+ // true arm64
+# define BLOCK_INVOKE 16
+#else
+ // arm64_32
+# define BLOCK_INVOKE 12
+#endif
+
+.text
+.globl __objc_blockTrampolineImpl
+.globl __objc_blockTrampolineStart
+.globl __objc_blockTrampolineLast
+
+.align PAGE_MAX_SHIFT
+__objc_blockTrampolineImpl:
+L_objc_blockTrampolineImpl:
+ /*
+ x0 == self
+ x17 == address of called trampoline's data (2 pages before its code)
+ lr == original return address
+ */
+
+ mov x1, x0 // _cmd = self
+ ldr p0, [x17] // self = block object
+ add p15, p0, #BLOCK_INVOKE // x15 = &block->invoke
+ ldr p16, [x15] // x16 = block->invoke
+ TailCallBlockInvoke x16, x15
+
+ // pad up to TrampolineBlockPageGroup header size
+ nop
+
+.macro TrampolineEntry
+ // load address of trampoline data (two pages before this instruction)
+ adr x17, -2*PAGE_MAX_SIZE
+ b L_objc_blockTrampolineImpl
+.endmacro
+
+.macro TrampolineEntryX16
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+.endmacro
+
+.macro TrampolineEntryX256
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+.endmacro
+
+.align 3
+__objc_blockTrampolineStart:
+ // 2048-3 trampolines to fill 16K page
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+
+ TrampolineEntryX256
+ TrampolineEntryX256
+ TrampolineEntryX256
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntryX16
+ TrampolineEntryX16
+ TrampolineEntryX16
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+
+__objc_blockTrampolineLast:
+ TrampolineEntry
+ // TrampolineEntry
+ // TrampolineEntry
+ // TrampolineEntry
+
+#endif
--- /dev/null
+/*
+ * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
+ *
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+
+#ifdef __i386__
+
+#include <mach/vm_param.h>
+
+.text
+.globl __objc_blockTrampolineImpl
+.globl __objc_blockTrampolineStart
+.globl __objc_blockTrampolineLast
+
+.align PAGE_SHIFT
+__objc_blockTrampolineImpl:
+ popl %eax
+ andl $0xFFFFFFF8, %eax
+ subl $ 2*PAGE_SIZE, %eax
+ movl 4(%esp), %ecx // self -> ecx
+ movl %ecx, 8(%esp) // ecx -> _cmd
+ movl (%eax), %ecx // blockPtr -> ecx
+ movl %ecx, 4(%esp) // ecx -> self
+ jmp *12(%ecx) // tail to block->invoke
+
+.macro TrampolineEntry
+ call __objc_blockTrampolineImpl
+ nop
+ nop
+ nop
+.endmacro
+
+.align 5
+__objc_blockTrampolineStart:
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+__objc_blockTrampolineLast:
+ TrampolineEntry
+
+
+.text
+.globl __objc_blockTrampolineImpl_stret
+.globl __objc_blockTrampolineStart_stret
+.globl __objc_blockTrampolineLast_stret
+
+.align PAGE_SHIFT
+__objc_blockTrampolineImpl_stret:
+ popl %eax
+ andl $0xFFFFFFF8, %eax
+ subl $ 3*PAGE_SIZE, %eax
+ movl 8(%esp), %ecx // self -> ecx
+ movl %ecx, 12(%esp) // ecx -> _cmd
+ movl (%eax), %ecx // blockPtr -> ecx
+ movl %ecx, 8(%esp) // ecx -> self
+ jmp *12(%ecx) // tail to block->invoke
+
+.macro TrampolineEntry_stret
+ call __objc_blockTrampolineImpl_stret
+ nop
+ nop
+ nop
+.endmacro
+
+.align 5
+__objc_blockTrampolineStart_stret:
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+__objc_blockTrampolineLast_stret:
+ TrampolineEntry_stret
+
+#endif
--- /dev/null
+/*
+ * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
+ *
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+
+#ifdef __x86_64__
+
+#include <mach/vm_param.h>
+
+.text
+.globl __objc_blockTrampolineImpl
+.globl __objc_blockTrampolineStart
+.globl __objc_blockTrampolineLast
+
+.align PAGE_SHIFT
+__objc_blockTrampolineImpl:
+ popq %r10
+ andq $0xFFFFFFFFFFFFFFF8, %r10
+ subq $ 2*PAGE_SIZE, %r10
+ movq %rdi, %rsi // arg1 -> arg2
+ movq (%r10), %rdi // block -> arg1
+ jmp *16(%rdi)
+
+.macro TrampolineEntry
+ callq __objc_blockTrampolineImpl
+ nop
+ nop
+ nop
+.endmacro
+
+.align 5
+__objc_blockTrampolineStart:
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+ TrampolineEntry
+__objc_blockTrampolineLast:
+ TrampolineEntry
+
+
+.text
+.globl __objc_blockTrampolineImpl_stret
+.globl __objc_blockTrampolineStart_stret
+.globl __objc_blockTrampolineLast_stret
+
+.align PAGE_SHIFT
+__objc_blockTrampolineImpl_stret:
+ popq %r10
+ andq $0xFFFFFFFFFFFFFFF8, %r10
+ subq $ 3*PAGE_SIZE, %r10
+ // %rdi -- first arg -- is address of return value's space. Don't mess with it.
+ movq %rsi, %rdx // arg2 -> arg3
+ movq (%r10), %rsi // block -> arg2
+ jmp *16(%rsi)
+
+.macro TrampolineEntry_stret
+ callq __objc_blockTrampolineImpl_stret
+ nop
+ nop
+ nop
+.endmacro
+
+.align 5
+__objc_blockTrampolineStart_stret:
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+ TrampolineEntry_stret
+__objc_blockTrampolineLast_stret:
+ TrampolineEntry_stret
+
+#endif
* reading function is in progress because it might still be using
* the garbage memory.
**********************************************************************/
-OBJC_EXPORT uintptr_t objc_entryPoints[];
-OBJC_EXPORT uintptr_t objc_exitPoints[];
+extern "C" uintptr_t objc_entryPoints[];
+extern "C" uintptr_t objc_exitPoints[];
static int _collecting_in_critical(void)
{
#if __arm64__
+// Pointer-size register prefix for inline asm
+# if __LP64__
+# define p "x" // true arm64
+# else
+# define p "w" // arm64_32
+# endif
+
// Use atomic double-word instructions to update cache entries.
// This requires cache buckets not cross cache line boundaries.
-#define stp(onep, twop, destp) \
- __asm__ ("stp %[one], %[two], [%[dest]]" \
- : "=m" (((uint64_t *)(destp))[0]), \
- "=m" (((uint64_t *)(destp))[1]) \
- : [one] "r" (onep), \
- [two] "r" (twop), \
- [dest] "r" (destp) \
- : /* no clobbers */ \
- )
-#define ldp(onep, twop, srcp) \
- __asm__ ("ldp %[one], %[two], [%[src]]" \
- : [one] "=r" (onep), \
- [two] "=r" (twop) \
- : "m" (((uint64_t *)(srcp))[0]), \
- "m" (((uint64_t *)(srcp))[1]), \
- [src] "r" (srcp) \
- : /* no clobbers */ \
- )
+static ALWAYS_INLINE void
+stp(uintptr_t onep, uintptr_t twop, void *destp)
+{
+ __asm__ ("stp %" p "[one], %" p "[two], [%x[dest]]"
+ : "=m" (((uintptr_t *)(destp))[0]),
+ "=m" (((uintptr_t *)(destp))[1])
+ : [one] "r" (onep),
+ [two] "r" (twop),
+ [dest] "r" (destp)
+ : /* no clobbers */
+ );
+}
+
+static ALWAYS_INLINE void __unused
+ldp(uintptr_t& onep, uintptr_t& twop, const void *srcp)
+{
+ __asm__ ("ldp %" p "[one], %" p "[two], [%x[src]]"
+ : [one] "=r" (onep),
+ [two] "=r" (twop)
+ : "m" (((const uintptr_t *)(srcp))[0]),
+ "m" (((const uintptr_t *)(srcp))[1]),
+ [src] "r" (srcp)
+ : /* no clobbers */
+ );
+}
+#undef p
#endif
{
assert(_key == 0 || _key == newKey);
- // LDP/STP guarantees that all observers get
- // either key/imp or newKey/newImp
- stp(newKey, newImp, this);
+ static_assert(offsetof(bucket_t,_imp) == 0 && offsetof(bucket_t,_key) == sizeof(void *),
+ "bucket_t doesn't match arm64 bucket_t::set()");
+
+#if __has_feature(ptrauth_calls)
+ // Authenticate as a C function pointer and re-sign for the cache bucket.
+ uintptr_t signedImp = _imp.prepareWrite(newImp);
+#else
+ // No function pointer signing.
+ uintptr_t signedImp = (uintptr_t)newImp;
+#endif
+
+ // Write to the bucket.
+ // LDP/STP guarantees that all observers get
+ // either imp/key or newImp/newKey
+ stp(signedImp, newKey, this);
}
#else
arm_thread_state64_t state;
unsigned int count = ARM_THREAD_STATE64_COUNT;
kern_return_t okay = thread_get_state (thread, ARM_THREAD_STATE64, (thread_state_t)&state, &count);
- return (okay == KERN_SUCCESS) ? state.__pc : PC_SENTINEL;
+ return (okay == KERN_SUCCESS) ? arm_thread_state64_get_pc(state) : PC_SENTINEL;
}
#else
{
* reading function is in progress because it might still be using
* the garbage memory.
**********************************************************************/
-OBJC_EXPORT uintptr_t objc_entryPoints[];
-OBJC_EXPORT uintptr_t objc_exitPoints[];
+extern "C" uintptr_t objc_entryPoints[];
+extern "C" uintptr_t objc_exitPoints[];
static int _collecting_in_critical(void)
{
// Mach-O bundles are fixed up in place.
// This prevents leaks when a bundle is unloaded.
}
- sel_lock();
+ mutex_locker_t lock(selLock);
for ( i = 0; i < mlist->method_count; i += 1 ) {
method = &mlist->method_list[i];
method->method_name =
sel_registerNameNoLock((const char *)method->method_name, isBundle); // Always copy selector data from bundles.
}
- sel_unlock();
mlist->obsolete = fixed_up_method_list;
}
return mlist;
cls->ivar_layout = ustrdupMaybeNil(layout);
}
-// SPI: Instance-specific object layout.
-
-void _class_setIvarLayoutAccessor(Class cls, const uint8_t* (*accessor) (id object)) {
- if (!cls) return;
-
- if (! (cls->info & CLS_EXT)) {
- _objc_inform("class '%s' needs to be recompiled", cls->name);
- return;
- }
-
- // fixme leak
- cls->ivar_layout = (const uint8_t *)accessor;
- cls->setInfo(CLS_HAS_INSTANCE_SPECIFIC_LAYOUT);
-}
-
-const uint8_t *_object_getIvarLayout(Class cls, id object) {
- if (cls && (cls->info & CLS_EXT)) {
- const uint8_t* layout = cls->ivar_layout;
- if (cls->info & CLS_HAS_INSTANCE_SPECIFIC_LAYOUT) {
- const uint8_t* (*accessor) (id object) = (const uint8_t* (*)(id))layout;
- layout = accessor(object);
- }
- return layout;
- } else {
- return nil;
- }
-}
/***********************************************************************
* class_setWeakIvarLayout
* If methods are removed between calls to class_nextMethodList(), it may
* omit surviving method lists or simply crash.
**********************************************************************/
-OBJC_EXPORT struct objc_method_list *class_nextMethodList(Class cls, void **it)
+struct objc_method_list *class_nextMethodList(Class cls, void **it)
{
OBJC_WARN_DEPRECATED;
*
* Formerly class_addInstanceMethods ()
**********************************************************************/
-OBJC_EXPORT void class_addMethods(Class cls, struct objc_method_list *meths)
+void class_addMethods(Class cls, struct objc_method_list *meths)
{
OBJC_WARN_DEPRECATED;
/***********************************************************************
* class_removeMethods.
**********************************************************************/
-OBJC_EXPORT void class_removeMethods(Class cls, struct objc_method_list *meths)
+void class_removeMethods(Class cls, struct objc_method_list *meths)
{
OBJC_WARN_DEPRECATED;
return encoding_getSizeOfArguments(method_getTypeEncoding(m));
}
+// This function was accidentally un-exported beginning in macOS 10.9.
+// As of macOS 10.13 nobody had complained.
+/*
unsigned int method_getArgumentInfo(Method m, int arg,
const char **type, int *offset)
{
return encoding_getArgumentInfo(method_getTypeEncoding(m),
arg, type, offset);
}
+*/
spinlock_t impLock;
#include "objc-abi.h"
#include <objc/message.h>
-
-/* overriding the default object allocation and error handling routines */
-
-OBJC_EXPORT id (*_alloc)(Class, size_t);
-OBJC_EXPORT id (*_copy)(id, size_t);
-OBJC_EXPORT id (*_realloc)(id, size_t);
-OBJC_EXPORT id (*_dealloc)(id);
-OBJC_EXPORT id (*_zoneAlloc)(Class, size_t, void *);
-OBJC_EXPORT id (*_zoneRealloc)(id, size_t, void *);
-OBJC_EXPORT id (*_zoneCopy)(id, size_t, void *);
-
-
/***********************************************************************
* Information about multi-thread support:
*
#include <TargetConditionals.h>
+// Define __OBJC2__ for the benefit of our asm files.
+#ifndef __OBJC2__
+# if TARGET_OS_OSX && !TARGET_OS_IOSMAC && __i386__
+ // old ABI
+# else
+# define __OBJC2__ 1
+# endif
+#endif
+
// Avoid the !NDEBUG double negative.
#if !NDEBUG
# define DEBUG 1
#endif
// Define SUPPORT_ZONES=1 to enable malloc zone support in NXHashTable.
-#if !TARGET_OS_OSX
+#if !(TARGET_OS_OSX || TARGET_OS_IOSMAC)
# define SUPPORT_ZONES 0
#else
# define SUPPORT_ZONES 1
// Define SUPPORT_MSB_TAGGED_POINTERS to use the MSB
// as the tagged pointer marker instead of the LSB.
// Be sure to edit tagged pointer SPI in objc-internal.h as well.
-#if !SUPPORT_TAGGED_POINTERS || !TARGET_OS_IPHONE
+#if !SUPPORT_TAGGED_POINTERS || (TARGET_OS_OSX || TARGET_OS_IOSMAC)
# define SUPPORT_MSB_TAGGED_POINTERS 0
#else
# define SUPPORT_MSB_TAGGED_POINTERS 1
// field as an index into a class table.
// Note, keep this in sync with any .s files which also define it.
// Be sure to edit objc-abi.h as well.
-#if __ARM_ARCH_7K__ >= 2
+#if __ARM_ARCH_7K__ >= 2 || (__arm64__ && !__LP64__)
# define SUPPORT_INDEXED_ISA 1
#else
# define SUPPORT_INDEXED_ISA 0
// Define SUPPORT_PACKED_ISA=1 on platforms that store the class in the isa
// field as a maskable pointer with other data around it.
-#if (!__LP64__ || TARGET_OS_WIN32 || TARGET_OS_SIMULATOR)
+#if (!__LP64__ || TARGET_OS_WIN32 || \
+ (TARGET_OS_SIMULATOR && !TARGET_OS_IOSMAC))
# define SUPPORT_PACKED_ISA 0
#else
# define SUPPORT_PACKED_ISA 1
// Define SUPPORT_ALT_HANDLERS if you're using zero-cost exceptions
// but also need to support AppKit's alt-handler scheme
// Be sure to edit objc-exception.h as well (objc_add/removeExceptionHandler)
-#if !SUPPORT_ZEROCOST_EXCEPTIONS || TARGET_OS_IPHONE || TARGET_OS_EMBEDDED
+#if !SUPPORT_ZEROCOST_EXCEPTIONS || !TARGET_OS_OSX
# define SUPPORT_ALT_HANDLERS 0
#else
# define SUPPORT_ALT_HANDLERS 1
#endif
// Define SUPPORT_MESSAGE_LOGGING to enable NSObjCMessageLoggingEnabled
-#if TARGET_OS_WIN32 || TARGET_OS_EMBEDDED
+#if !TARGET_OS_OSX
# define SUPPORT_MESSAGE_LOGGING 0
#else
# define SUPPORT_MESSAGE_LOGGING 1
#endif
-// Define SUPPORT_QOS_HACK to work around deadlocks due to QoS bugs.
-#if !__OBJC2__ || TARGET_OS_WIN32
-# define SUPPORT_QOS_HACK 0
-#else
-# define SUPPORT_QOS_HACK 1
-#endif
-
// OBJC_INSTRUMENTED controls whether message dispatching is dynamically
// monitored. Monitoring introduces substantial overhead.
// NOTE: To define this condition, do so in the build command, NOT by
OPTION( DisableVtables, OBJC_DISABLE_VTABLES, "disable vtable dispatch")
OPTION( DisablePreopt, OBJC_DISABLE_PREOPTIMIZATION, "disable preoptimization courtesy of dyld shared cache")
OPTION( DisableTaggedPointers, OBJC_DISABLE_TAGGED_POINTERS, "disable tagged pointer optimization of NSNumber et al.")
+OPTION( DisableTaggedPointerObfuscation, OBJC_DISABLE_TAG_OBFUSCATION, "disable obfuscation of tagged pointers")
OPTION( DisableNonpointerIsa, OBJC_DISABLE_NONPOINTER_ISA, "disable non-pointer isa fields")
OPTION( DisableInitializeForkSafety, OBJC_DISABLE_INITIALIZE_FORK_SAFETY, "disable safety checks for +initialize after fork")
#include <_simple.h>
-OBJC_EXPORT void (*_error)(id, const char *, va_list);
-
// Return true if c is a UTF8 continuation byte
static bool isUTF8Continuation(char c)
{
**********************************************************************/
#include "objc-private.h"
+#include <objc/objc-abi.h>
#include <objc/objc-exception.h>
#include <objc/NSObject.h>
#include <execinfo.h>
struct objc_typeinfo {
// Position of vtable and name fields must match C++ typeinfo object
- const void **vtable; // always objc_ehtype_vtable+2
+ const void ** __ptrauth_cxx_vtable_pointer vtable; // objc_ehtype_vtable+2
const char *name; // c++ typeinfo string
Class cls_unremapped;
};
-static void _objc_exception_noop(void) { }
-static bool _objc_exception_false(void) { return 0; }
-// static bool _objc_exception_true(void) { return 1; }
-static void _objc_exception_abort1(void) {
+extern "C" void _objc_exception_noop(void) { }
+extern "C" bool _objc_exception_false(void) { return 0; }
+// extern "C" bool _objc_exception_true(void) { return 1; }
+extern "C" void _objc_exception_abort1(void) {
_objc_fatal("unexpected call into objc exception typeinfo vtable %d", 1);
}
-static void _objc_exception_abort2(void) {
+extern "C" void _objc_exception_abort2(void) {
_objc_fatal("unexpected call into objc exception typeinfo vtable %d", 2);
}
-static void _objc_exception_abort3(void) {
+extern "C" void _objc_exception_abort3(void) {
_objc_fatal("unexpected call into objc exception typeinfo vtable %d", 3);
}
-static void _objc_exception_abort4(void) {
+extern "C" void _objc_exception_abort4(void) {
_objc_fatal("unexpected call into objc exception typeinfo vtable %d", 4);
}
-static bool _objc_exception_do_catch(struct objc_typeinfo *catch_tinfo,
- struct objc_typeinfo *throw_tinfo,
- void **throw_obj_p,
- unsigned outer);
+extern "C" bool _objc_exception_do_catch(struct objc_typeinfo *catch_tinfo,
+ struct objc_typeinfo *throw_tinfo,
+ void **throw_obj_p,
+ unsigned outer);
+
+// C++ pointers to vtables are signed with no extra data.
+// C++ vtable entries are signed with a number derived from the function name.
+// For this fake vtable, we hardcode number as deciphered from the
+// assembly output during libc++abi's build.
+#if __has_feature(ptrauth_calls)
+# define VTABLE_PTR_AUTH "@AUTH(da, 0)"
+# define VTABLE_ENTRY_AUTH(x) "@AUTH(ia," #x ",addr)"
+#else
+# define VTABLE_PTR_AUTH ""
+# define VTABLE_ENTRY_AUTH(x) ""
+#endif
-// forward declaration
-OBJC_EXPORT struct objc_typeinfo OBJC_EHTYPE_id;
-
-OBJC_EXPORT
-const void *objc_ehtype_vtable[] = {
- nil, // typeinfo's vtable? - fixme
- (void*)&OBJC_EHTYPE_id, // typeinfo's typeinfo - hack
- (void*)_objc_exception_noop, // in-place destructor?
- (void*)_objc_exception_noop, // destructor?
- (void*)_objc_exception_false, // OLD __is_pointer_p
- (void*)_objc_exception_false, // OLD __is_function_p
- (void*)_objc_exception_do_catch, // OLD __do_catch, NEW can_catch
- (void*)_objc_exception_false, // OLD __do_upcast, NEW search_above_dst
- (void*)_objc_exception_false, // NEW search_below_dst
- (void*)_objc_exception_abort1, // paranoia: blow up if libc++abi
- (void*)_objc_exception_abort2, // adds something new
- (void*)_objc_exception_abort3,
- (void*)_objc_exception_abort4,
-};
+#if __LP64__
+# define PTR ".quad "
+# define TWOPTRSIZE "16"
+#else
+# define PTR ".long "
+# define TWOPTRSIZE "8"
+#endif
-OBJC_EXPORT
-struct objc_typeinfo OBJC_EHTYPE_id = {
- objc_ehtype_vtable+2,
- "id",
- nil
-};
+// Hand-built vtable for objc exception typeinfo.
+// "OLD" is GNU libcpp, "NEW" is libc++abi.
+
+asm(
+ "\n .cstring"
+ "\n l_.id_str: .asciz \"id\""
+
+ "\n .section __DATA,__const"
+ "\n .globl _OBJC_EHTYPE_id"
+ "\n .globl _objc_ehtype_vtable"
+ "\n .p2align 4"
+
+ "\n _OBJC_EHTYPE_id:"
+ "\n " PTR "(_objc_ehtype_vtable+" TWOPTRSIZE ") " VTABLE_PTR_AUTH
+ "\n " PTR "l_.id_str"
+ "\n " PTR "0"
+
+ "\n _objc_ehtype_vtable:"
+ "\n " PTR "0"
+ // typeinfo's typeinfo - fixme hack
+ "\n " PTR "_OBJC_EHTYPE_id"
+ // destructor and in-place destructor
+ "\n " PTR "__objc_exception_noop" VTABLE_ENTRY_AUTH(52634)
+ "\n " PTR "__objc_exception_noop" VTABLE_ENTRY_AUTH(10344)
+ // OLD __is_pointer_p
+ "\n " PTR "__objc_exception_noop" VTABLE_ENTRY_AUTH(6889)
+ // OLD __is_function_p
+ "\n " PTR "__objc_exception_noop" VTABLE_ENTRY_AUTH(23080)
+ // OLD __do_catch, NEW can_catch
+ "\n " PTR "__objc_exception_do_catch" VTABLE_ENTRY_AUTH(27434)
+ // OLD __do_upcast, NEW search_above_dst
+ "\n " PTR "__objc_exception_false" VTABLE_ENTRY_AUTH(48481)
+ // NEW search_below_dst
+ "\n " PTR "__objc_exception_false" VTABLE_ENTRY_AUTH(41165)
+ // NEW has_unambiguous_public_base (fixme need this?)
+ "\n " PTR "__objc_exception_abort1" VTABLE_ENTRY_AUTH(14357)
+ // paranoia: die if libcxxabi adds anything else
+ "\n " PTR "__objc_exception_abort2"
+ "\n " PTR "__objc_exception_abort3"
+ "\n " PTR "__objc_exception_abort4"
+ );
// `outer` is not passed by the new libcxxabi
-static bool _objc_exception_do_catch(struct objc_typeinfo *catch_tinfo,
- struct objc_typeinfo *throw_tinfo,
- void **throw_obj_p,
- unsigned outer UNAVAILABLE_ATTRIBUTE)
+bool _objc_exception_do_catch(struct objc_typeinfo *catch_tinfo,
+ struct objc_typeinfo *throw_tinfo,
+ void **throw_obj_p,
+ unsigned outer UNAVAILABLE_ATTRIBUTE)
{
id exception;
extern Class *_getObjcClassRefs(const header_info *hi, size_t *nclasses);
extern const char *_getObjcClassNames(const header_info *hi, size_t *size);
-using Initializer = void(*)(void);
-extern Initializer* getLibobjcInitializers(const headerType *mhdr, size_t *count);
+using UnsignedInitializer = void(*)(void);
+extern UnsignedInitializer* getLibobjcInitializers(const headerType *mhdr, size_t *count);
__END_DECLS
GETSECT(_getObjcClassRefs, Class, "__OBJC", "__cls_refs");
GETSECT(_getObjcClassNames, const char, "__OBJC", "__class_names");
// __OBJC,__class_names section only emitted by CodeWarrior rdar://4951638
-GETSECT(getLibobjcInitializers, Initializer, "__DATA", "__objc_init_func");
+GETSECT(getLibobjcInitializers, UnsignedInitializer, "__DATA", "__objc_init_func");
objc_image_info *
extern category_t **_getObjc2NonlazyCategoryList(const header_info *hi, size_t *count);
extern protocol_t **_getObjc2ProtocolList(const header_info *hi, size_t *count);
extern protocol_t **_getObjc2ProtocolRefs(const header_info *hi, size_t *count);
-using Initializer = void(*)(void);
-extern Initializer* getLibobjcInitializers(const header_info *hi, size_t *count);
+
+// FIXME: rdar://29241917&33734254 clang doesn't sign static initializers.
+struct UnsignedInitializer {
+private:
+ uintptr_t storage;
+public:
+ void operator () () const {
+ using Initializer = void(*)();
+ Initializer init =
+ ptrauth_sign_unauthenticated((Initializer)storage,
+ ptrauth_key_function_pointer, 0);
+ init();
+ }
+};
+
+extern UnsignedInitializer *getLibobjcInitializers(const header_info *hi, size_t *count);
extern classref_t *_getObjc2NonlazyClassList(const headerType *mhdr, size_t *count);
extern category_t **_getObjc2NonlazyCategoryList(const headerType *mhdr, size_t *count);
-extern Initializer* getLibobjcInitializers(const headerType *mhdr, size_t *count);
+extern UnsignedInitializer *getLibobjcInitializers(const headerType *mhdr, size_t *count);
+
+static inline void
+foreach_data_segment(const headerType *mhdr,
+ std::function<void(const segmentType *, intptr_t slide)> code)
+{
+ intptr_t slide = 0;
+
+ // compute VM slide
+ const segmentType *seg = (const segmentType *) (mhdr + 1);
+ for (unsigned long i = 0; i < mhdr->ncmds; i++) {
+ if (seg->cmd == SEGMENT_CMD &&
+ segnameEquals(seg->segname, "__TEXT"))
+ {
+ slide = (char *)mhdr - (char *)seg->vmaddr;
+ break;
+ }
+ seg = (const segmentType *)((char *)seg + seg->cmdsize);
+ }
+
+ // enumerate __DATA* segments
+ seg = (const segmentType *) (mhdr + 1);
+ for (unsigned long i = 0; i < mhdr->ncmds; i++) {
+ if (seg->cmd == SEGMENT_CMD &&
+ segnameStartsWith(seg->segname, "__DATA"))
+ {
+ code(seg, slide);
+ }
+ seg = (const segmentType *)((char *)seg + seg->cmdsize);
+ }
+}
#endif
GETSECT(_getObjc2NonlazyCategoryList, category_t *, "__objc_nlcatlist");
GETSECT(_getObjc2ProtocolList, protocol_t *, "__objc_protolist");
GETSECT(_getObjc2ProtocolRefs, protocol_t *, "__objc_protorefs");
-GETSECT(getLibobjcInitializers, Initializer, "__objc_init_func");
+GETSECT(getLibobjcInitializers, UnsignedInitializer, "__objc_init_func");
objc_image_info *
outBytes, nil);
}
-
-static const segmentType *
-getsegbynamefromheader(const headerType *mhdr, const char *segname)
-{
- const segmentType *seg = (const segmentType *) (mhdr + 1);
- for (unsigned long i = 0; i < mhdr->ncmds; i++){
- if (seg->cmd == SEGMENT_CMD && segnameEquals(seg->segname, segname)) {
- return seg;
- }
- seg = (const segmentType *)((char *)seg + seg->cmdsize);
- }
- return nil;
-}
-
// Look for an __objc* section other than __objc_imageinfo
static bool segmentHasObjcContents(const segmentType *seg)
{
- if (seg) {
- for (uint32_t i = 0; i < seg->nsects; i++) {
- const sectionType *sect = ((const sectionType *)(seg+1))+i;
- if (sectnameStartsWith(sect->sectname, "__objc_") &&
- !sectnameEquals(sect->sectname, "__objc_imageinfo"))
- {
- return true;
- }
+ for (uint32_t i = 0; i < seg->nsects; i++) {
+ const sectionType *sect = ((const sectionType *)(seg+1))+i;
+ if (sectnameStartsWith(sect->sectname, "__objc_") &&
+ !sectnameEquals(sect->sectname, "__objc_imageinfo"))
+ {
+ return true;
}
}
bool
_hasObjcContents(const header_info *hi)
{
- const segmentType *data =
- getsegbynamefromheader(hi->mhdr(), "__DATA");
- const segmentType *data_const =
- getsegbynamefromheader(hi->mhdr(), "__DATA_CONST");
- const segmentType *data_dirty =
- getsegbynamefromheader(hi->mhdr(), "__DATA_DIRTY");
+ bool foundObjC = false;
+
+ foreach_data_segment(hi->mhdr(), [&](const segmentType *seg, intptr_t slide)
+ {
+ if (segmentHasObjcContents(seg)) foundObjC = true;
+ });
+
+ return foundObjC;
- return segmentHasObjcContents(data)
- || segmentHasObjcContents(data_const)
- || segmentHasObjcContents(data_dirty);
}
#ifdef __APPLE_API_PRIVATE
-#define _OBJC_PRIVATE_H_
+#ifndef _OBJC_PRIVATE_H_
+# define _OBJC_PRIVATE_H_
+#endif
#include <stdint.h>
#include <objc/hashtable.h>
#include <objc/maptable.h>
OBJC_EXPORT uintptr_t objc_debug_taggedpointer_mask
OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0);
+// tagged pointers are obfuscated by XORing with a random value
+// decoded_obj = (obj ^ obfuscator)
+OBJC_EXPORT uintptr_t objc_debug_taggedpointer_obfuscator
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
+
// tag_slot = (obj >> slot_shift) & slot_mask
OBJC_EXPORT unsigned int objc_debug_taggedpointer_slot_shift
OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0);
OBJC_EXPORT Class _Nullable objc_debug_taggedpointer_classes[]
OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0);
-// payload = (obj << payload_lshift) >> payload_rshift
+// payload = (decoded_obj << payload_lshift) >> payload_rshift
// Payload signedness is determined by the signedness of the right-shift.
OBJC_EXPORT unsigned int objc_debug_taggedpointer_payload_lshift
OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0);
// tagged pointer scheme alone, it will appear to have an isa
// that is either nil or class __NSUnrecognizedTaggedPointer.
-// if (ext_mask != 0 && (obj & ext_mask) == ext_mask)
+// if (ext_mask != 0 && (decoded_obj & ext_mask) == ext_mask)
// obj is a ext tagged pointer object
OBJC_EXPORT uintptr_t objc_debug_taggedpointer_ext_mask
OBJC_AVAILABLE(10.12, 10.0, 10.0, 3.0, 2.0);
OBJC_EXPORT Class _Nullable objc_debug_taggedpointer_ext_classes[]
OBJC_AVAILABLE(10.12, 10.0, 10.0, 3.0, 2.0);
-// payload = (obj << ext_payload_lshift) >> ext_payload_rshift
+// payload = (decoded_obj << ext_payload_lshift) >> ext_payload_rshift
// Payload signedness is determined by the signedness of the right-shift.
OBJC_EXPORT unsigned int objc_debug_taggedpointer_ext_payload_lshift
OBJC_AVAILABLE(10.12, 10.0, 10.0, 3.0, 2.0);
#endif
-
-/***********************************************************************
-* Breakpoints in objc_msgSend for debugger stepping.
-* The array is a {0,0} terminated list of addresses.
-* Each address is one of the following:
-* OBJC_MESSENGER_START: Address is the start of a messenger function.
-* OBJC_MESSENGER_END_FAST: Address is a jump insn that calls an IMP.
-* OBJC_MESSENGER_END_SLOW: Address is some insn in the slow lookup path.
-* OBJC_MESSENGER_END_NIL: Address is a return insn for messages to nil.
-*
-* Every path from OBJC_MESSENGER_START should reach some OBJC_MESSENGER_END.
-* At all ENDs, the stack and parameter register state is the same as START.
-*
-* In some cases, the END_FAST case jumps to something other than the
-* method's implementation. In those cases the jump's destination will
-* be another function that is marked OBJC_MESSENGER_START.
-**********************************************************************/
-#if __OBJC2__
-
-#define OBJC_MESSENGER_START 1
-#define OBJC_MESSENGER_END_FAST 2
-#define OBJC_MESSENGER_END_SLOW 3
-#define OBJC_MESSENGER_END_NIL 4
-
-struct objc_messenger_breakpoint {
- uintptr_t address;
- uintptr_t kind;
-};
-
-OBJC_EXPORT struct objc_messenger_breakpoint
-gdb_objc_messenger_breakpoints[]
- OBJC_AVAILABLE(10.9, 7.0, 9.0, 1.0, 2.0);
-
-#endif
-
-
__END_DECLS
+// APPLE_API_PRIVATE
#endif
+// _OBJC_GDB_H
#endif
#include <objc/runtime.h>
#include <Availability.h>
#include <malloc/malloc.h>
+#include <mach-o/loader.h>
#include <dispatch/dispatch.h>
-__BEGIN_DECLS
// Termination reasons in the OS_REASON_OBJC namespace.
#define OBJC_EXIT_REASON_UNSPECIFIED 1
// The runtime's class structure will never grow beyond this.
#define OBJC_MAX_CLASS_SIZE (32*sizeof(void*))
+
+__BEGIN_DECLS
+
// In-place construction of an Objective-C class.
// cls and metacls must each be OBJC_MAX_CLASS_SIZE bytes.
// Returns nil if a class with the same name already exists.
__IOS_UNAVAILABLE __TVOS_UNAVAILABLE
__WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+// GC debugging
+OBJC_EXPORT BOOL
+objc_dumpHeap(char * _Nonnull filename, unsigned long length)
+ __OSX_DEPRECATED(10.4, 10.8, "it always returns NO")
+ __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
+ __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+
// GC startup callback from Foundation
OBJC_EXPORT malloc_zone_t * _Nullable
objc_collect_init(int (* _Nonnull callback)(void))
_objc_setClassLoader(BOOL (* _Nonnull newClassLoader)(const char * _Nonnull))
OBJC2_UNAVAILABLE;
+#if !(TARGET_OS_OSX && !TARGET_OS_IOSMAC && __i386__)
+OBJC_EXPORT void
+_objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
+ (Class _Nonnull oldClass, Class _Nonnull newClass));
+// fixme work around bug in Swift
+// OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0)
+#endif
+
// Install handler for allocation failures.
// Handler may abort, or throw, or provide an object to return.
OBJC_EXPORT void
(Class _Nullable isa))
OBJC_AVAILABLE(10.8, 6.0, 9.0, 1.0, 2.0);
-// This can go away when AppKit stops calling it (rdar://7811851)
-#if __OBJC2__
-OBJC_EXPORT void
-objc_setMultithreaded (BOOL flag)
- __OSX_DEPRECATED(10.0, 10.5, "multithreading is always available")
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
-#endif
-
// Used by ExceptionHandling.framework
#if !__OBJC2__
OBJC_EXPORT void
#endif
+/**
+ * Returns the names of all the classes within a library.
+ *
+ * @param mh The mach header for the library or framework you are inquiring about.
+ * @param outCount The number of class names returned.
+ *
+ * @return An array of C strings representing the class names.
+ */
+OBJC_EXPORT const char * _Nonnull * _Nullable
+objc_copyClassNamesForImageHeader(const struct mach_header * _Nonnull mh,
+ unsigned int * _Nullable outCount)
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
// Tagged pointer objects.
#if __LP64__
enum
#endif
{
+ // 60-bit payloads
OBJC_TAG_NSAtom = 0,
OBJC_TAG_1 = 1,
OBJC_TAG_NSString = 2,
OBJC_TAG_NSNumber = 3,
OBJC_TAG_NSIndexPath = 4,
OBJC_TAG_NSManagedObjectID = 5,
- OBJC_TAG_NSDate = 6,
+ OBJC_TAG_NSDate = 6,
+
+ // 60-bit reserved
OBJC_TAG_RESERVED_7 = 7,
+ // 52-bit payloads
+ OBJC_TAG_Photos_1 = 8,
+ OBJC_TAG_Photos_2 = 9,
+ OBJC_TAG_Photos_3 = 10,
+ OBJC_TAG_Photos_4 = 11,
+ OBJC_TAG_XPC_1 = 12,
+ OBJC_TAG_XPC_2 = 13,
+ OBJC_TAG_XPC_3 = 14,
+ OBJC_TAG_XPC_4 = 15,
+
OBJC_TAG_First60BitPayload = 0,
OBJC_TAG_Last60BitPayload = 6,
OBJC_TAG_First52BitPayload = 8,
// Don't use the values below. Use the declarations above.
-#if TARGET_OS_OSX && __x86_64__
+#if (TARGET_OS_OSX || TARGET_OS_IOSMAC) && __x86_64__
// 64-bit Mac - tag bit is LSB
# define OBJC_MSB_TAGGED_POINTERS 0
#else
# define _OBJC_TAG_EXT_PAYLOAD_RSHIFT 12
#endif
+extern uintptr_t objc_debug_taggedpointer_obfuscator;
+
+// Obfuscate a raw tagged-pointer bit pattern by XORing it with the
+// per-process value objc_debug_taggedpointer_obfuscator (declared above),
+// presumably so raw tagged-pointer contents are not directly visible or
+// forgeable in memory — TODO confirm intent against the runtime docs.
+static inline void * _Nonnull
+_objc_encodeTaggedPointer(uintptr_t ptr)
+{
+    return (void *)(objc_debug_taggedpointer_obfuscator ^ ptr);
+}
+
+// Inverse of _objc_encodeTaggedPointer(). XOR is self-inverting, so
+// applying the same obfuscator recovers the original tag/payload bits.
+static inline uintptr_t
+_objc_decodeTaggedPointer(const void * _Nullable ptr)
+{
+    return (uintptr_t)ptr ^ objc_debug_taggedpointer_obfuscator;
+}
+
static inline bool
_objc_taggedPointersEnabled(void)
{
// assert(_objc_taggedPointersEnabled());
if (tag <= OBJC_TAG_Last60BitPayload) {
// assert(((value << _OBJC_TAG_PAYLOAD_RSHIFT) >> _OBJC_TAG_PAYLOAD_LSHIFT) == value);
- return (void *)
+ uintptr_t result =
(_OBJC_TAG_MASK |
((uintptr_t)tag << _OBJC_TAG_INDEX_SHIFT) |
((value << _OBJC_TAG_PAYLOAD_RSHIFT) >> _OBJC_TAG_PAYLOAD_LSHIFT));
+ return _objc_encodeTaggedPointer(result);
} else {
// assert(tag >= OBJC_TAG_First52BitPayload);
// assert(tag <= OBJC_TAG_Last52BitPayload);
// assert(((value << _OBJC_TAG_EXT_PAYLOAD_RSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_LSHIFT) == value);
- return (void *)
+ uintptr_t result =
(_OBJC_TAG_EXT_MASK |
((uintptr_t)(tag - OBJC_TAG_First52BitPayload) << _OBJC_TAG_EXT_INDEX_SHIFT) |
((value << _OBJC_TAG_EXT_PAYLOAD_RSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_LSHIFT));
+ return _objc_encodeTaggedPointer(result);
}
}
static inline bool
-_objc_isTaggedPointer(const void * _Nullable ptr)
+_objc_isTaggedPointer(const void * _Nullable ptr)
{
return ((uintptr_t)ptr & _OBJC_TAG_MASK) == _OBJC_TAG_MASK;
}
_objc_getTaggedPointerTag(const void * _Nullable ptr)
{
// assert(_objc_isTaggedPointer(ptr));
- uintptr_t basicTag = ((uintptr_t)ptr >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
- uintptr_t extTag = ((uintptr_t)ptr >> _OBJC_TAG_EXT_INDEX_SHIFT) & _OBJC_TAG_EXT_INDEX_MASK;
+ uintptr_t value = _objc_decodeTaggedPointer(ptr);
+ uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
+ uintptr_t extTag = (value >> _OBJC_TAG_EXT_INDEX_SHIFT) & _OBJC_TAG_EXT_INDEX_MASK;
if (basicTag == _OBJC_TAG_INDEX_MASK) {
return (objc_tag_index_t)(extTag + OBJC_TAG_First52BitPayload);
} else {
_objc_getTaggedPointerValue(const void * _Nullable ptr)
{
// assert(_objc_isTaggedPointer(ptr));
- uintptr_t basicTag = ((uintptr_t)ptr >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
+ uintptr_t value = _objc_decodeTaggedPointer(ptr);
+ uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
if (basicTag == _OBJC_TAG_INDEX_MASK) {
- return ((uintptr_t)ptr << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
+ return (value << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
} else {
- return ((uintptr_t)ptr << _OBJC_TAG_PAYLOAD_LSHIFT) >> _OBJC_TAG_PAYLOAD_RSHIFT;
+ return (value << _OBJC_TAG_PAYLOAD_LSHIFT) >> _OBJC_TAG_PAYLOAD_RSHIFT;
}
}
_objc_getTaggedPointerSignedValue(const void * _Nullable ptr)
{
// assert(_objc_isTaggedPointer(ptr));
- uintptr_t basicTag = ((uintptr_t)ptr >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
+ uintptr_t value = _objc_decodeTaggedPointer(ptr);
+ uintptr_t basicTag = (value >> _OBJC_TAG_INDEX_SHIFT) & _OBJC_TAG_INDEX_MASK;
if (basicTag == _OBJC_TAG_INDEX_MASK) {
- return ((intptr_t)ptr << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
+ return ((intptr_t)value << _OBJC_TAG_EXT_PAYLOAD_LSHIFT) >> _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
} else {
- return ((intptr_t)ptr << _OBJC_TAG_PAYLOAD_LSHIFT) >> _OBJC_TAG_PAYLOAD_RSHIFT;
+ return ((intptr_t)value << _OBJC_TAG_PAYLOAD_LSHIFT) >> _OBJC_TAG_PAYLOAD_RSHIFT;
}
}
OBJC_ARM64_UNAVAILABLE;
-// Instance-specific instance variable layout.
+/**
+ * Adds multiple methods to a class in bulk. This amortizes overhead that can be
+ * expensive when adding methods one by one with class_addMethod.
+ *
+ * @param cls The class to which to add the methods.
+ * @param names An array of selectors for the methods to add.
+ * @param imps An array of functions which implement the new methods.
+ * @param types An array of strings that describe the types of each method's
+ * arguments.
+ * @param count The number of items in the names, imps, and types arrays.
+ * @param outFailedCount Upon return, contains the number of failed selectors in
+ * the returned array.
+ *
+ * @return A NULL-terminated C array of selectors which could not be added. A
+ * method cannot be added when a method of that name already exists on that
+ * class. When no failures occur, the return value is \c NULL. When a non-NULL
+ * value is returned, the caller must free the array with \c free().
+ *
+ */
+#if __OBJC2__
+OBJC_EXPORT _Nullable SEL * _Nullable
+class_addMethodsBulk(_Nullable Class cls, _Nonnull const SEL * _Nonnull names,
+ _Nonnull const IMP * _Nonnull imps,
+ const char * _Nonnull * _Nonnull types, uint32_t count,
+ uint32_t * _Nullable outFailedCount)
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+#endif
+
+/**
+ * Replaces multiple methods in a class in bulk. This amortizes overhead that
+ * can be expensive when adding methods one by one with class_replaceMethod.
+ *
+ * @param cls The class to modify.
+ * @param names An array of selectors for the methods to replace.
+ * @param imps An array of functions that will be the new method implementations.
+ * @param types An array of strings that describe the types of each method's
+ * arguments.
+ * @param count The number of items in the names, imps, and types arrays.
+ */
+#if __OBJC2__
+OBJC_EXPORT void
+class_replaceMethodsBulk(_Nullable Class cls,
+ _Nonnull const SEL * _Nonnull names,
+ _Nonnull const IMP * _Nonnull imps,
+ const char * _Nonnull * _Nonnull types,
+ uint32_t count)
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+#endif
+
+
+// Instance-specific instance variable layout. This is no longer implemented.
OBJC_EXPORT void
_class_setIvarLayoutAccessor(Class _Nullable cls,
const uint8_t* _Nullable (* _Nonnull accessor)
(id _Nullable object))
- __OSX_AVAILABLE(10.7)
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ UNAVAILABLE_ATTRIBUTE;
OBJC_EXPORT const uint8_t * _Nullable
_object_getIvarLayout(Class _Nullable cls, id _Nullable object)
- __OSX_AVAILABLE(10.7)
- __IOS_UNAVAILABLE __TVOS_UNAVAILABLE
- __WATCHOS_UNAVAILABLE __BRIDGEOS_UNAVAILABLE;
+ UNAVAILABLE_ATTRIBUTE;
+
/*
"Unknown" includes non-object ivars and non-ARC non-__weak ivars
extern void lockdebug_setInForkPrepare(bool);
extern void lockdebug_lock_precedes_lock(const void *oldlock, const void *newlock);
#else
-static inline void lockdebug_assert_all_locks_locked() { }
-static inline void lockdebug_assert_no_locks_locked() { }
-static inline void lockdebug_setInForkPrepare(bool) { }
-static inline void lockdebug_lock_precedes_lock(const void *, const void *) { }
+static constexpr inline void lockdebug_assert_all_locks_locked() { }
+static constexpr inline void lockdebug_assert_no_locks_locked() { }
+static constexpr inline void lockdebug_setInForkPrepare(bool) { }
+static constexpr inline void lockdebug_lock_precedes_lock(const void *, const void *) { }
#endif
extern void lockdebug_remember_mutex(mutex_tt<true> *lock);
extern void lockdebug_mutex_assert_locked(mutex_tt<true> *lock);
extern void lockdebug_mutex_assert_unlocked(mutex_tt<true> *lock);
-static inline void lockdebug_remember_mutex(mutex_tt<false> *lock) { }
-static inline void lockdebug_mutex_lock(mutex_tt<false> *lock) { }
-static inline void lockdebug_mutex_try_lock(mutex_tt<false> *lock) { }
-static inline void lockdebug_mutex_unlock(mutex_tt<false> *lock) { }
-static inline void lockdebug_mutex_assert_locked(mutex_tt<false> *lock) { }
-static inline void lockdebug_mutex_assert_unlocked(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_remember_mutex(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_mutex_lock(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_mutex_try_lock(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_mutex_unlock(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_mutex_assert_locked(mutex_tt<false> *lock) { }
+static constexpr inline void lockdebug_mutex_assert_unlocked(mutex_tt<false> *lock) { }
extern void lockdebug_remember_monitor(monitor_tt<true> *lock);
extern void lockdebug_monitor_assert_locked(monitor_tt<true> *lock);
extern void lockdebug_monitor_assert_unlocked(monitor_tt<true> *lock);
-static inline void lockdebug_remember_monitor(monitor_tt<false> *lock) { }
-static inline void lockdebug_monitor_enter(monitor_tt<false> *lock) { }
-static inline void lockdebug_monitor_leave(monitor_tt<false> *lock) { }
-static inline void lockdebug_monitor_wait(monitor_tt<false> *lock) { }
-static inline void lockdebug_monitor_assert_locked(monitor_tt<false> *lock) { }
-static inline void lockdebug_monitor_assert_unlocked(monitor_tt<false> *lock) {}
+static constexpr inline void lockdebug_remember_monitor(monitor_tt<false> *lock) { }
+static constexpr inline void lockdebug_monitor_enter(monitor_tt<false> *lock) { }
+static constexpr inline void lockdebug_monitor_leave(monitor_tt<false> *lock) { }
+static constexpr inline void lockdebug_monitor_wait(monitor_tt<false> *lock) { }
+static constexpr inline void lockdebug_monitor_assert_locked(monitor_tt<false> *lock) { }
+static constexpr inline void lockdebug_monitor_assert_unlocked(monitor_tt<false> *lock) {}
extern void
extern void
lockdebug_recursive_mutex_assert_unlocked(recursive_mutex_tt<true> *lock);
-static inline void
+static constexpr inline void
lockdebug_remember_recursive_mutex(recursive_mutex_tt<false> *lock) { }
-static inline void
+static constexpr inline void
lockdebug_recursive_mutex_lock(recursive_mutex_tt<false> *lock) { }
-static inline void
+static constexpr inline void
lockdebug_recursive_mutex_unlock(recursive_mutex_tt<false> *lock) { }
-static inline void
+static constexpr inline void
lockdebug_recursive_mutex_assert_locked(recursive_mutex_tt<false> *lock) { }
-static inline void
+static constexpr inline void
lockdebug_recursive_mutex_assert_unlocked(recursive_mutex_tt<false> *lock) { }
-
-
-extern void lockdebug_remember_rwlock(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_read(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_try_read_success(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_unlock_read(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_write(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_try_write_success(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_unlock_write(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_assert_reading(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_assert_writing(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_assert_locked(rwlock_tt<true> *lock);
-extern void lockdebug_rwlock_assert_unlocked(rwlock_tt<true> *lock);
-
-static inline void lockdebug_remember_rwlock(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_read(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_try_read_success(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_unlock_read(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_write(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_try_write_success(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_unlock_write(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_assert_reading(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_assert_writing(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_assert_locked(rwlock_tt<false> *) { }
-static inline void lockdebug_rwlock_assert_unlocked(rwlock_tt<false> *) { }
setLock(AllLocks(), lock, MONITOR);
}
-void
-lockdebug_remember_rwlock(rwlock_t *lock)
-{
- setLock(AllLocks(), lock, WRLOCK);
-}
-
void
lockdebug_assert_all_locks_locked()
{
}
}
-
-/***********************************************************************
-* rwlock checking
-**********************************************************************/
-
-void
-lockdebug_rwlock_read(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (hasLock(locks, lock, RDLOCK)) {
- // Recursive rwlock read is bad (may deadlock vs pending writer)
- _objc_fatal("recursive rwlock read");
- }
- if (hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("deadlock: read after write for rwlock");
- }
- setLock(locks, lock, RDLOCK);
-}
-
-// try-read success is the only case with lockdebug effects.
-// try-read when already reading is OK (won't deadlock)
-// try-read when already writing is OK (will fail)
-// try-read failure does nothing.
-void
-lockdebug_rwlock_try_read_success(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
- setLock(locks, lock, RDLOCK);
-}
-
-void
-lockdebug_rwlock_unlock_read(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (!hasLock(locks, lock, RDLOCK)) {
- _objc_fatal("un-reading unowned rwlock");
- }
- clearLock(locks, lock, RDLOCK);
-}
-
-
-void
-lockdebug_rwlock_write(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (hasLock(locks, lock, RDLOCK)) {
- // Lock promotion not allowed (may deadlock)
- _objc_fatal("deadlock: write after read for rwlock");
- }
- if (hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("recursive rwlock write");
- }
- setLock(locks, lock, WRLOCK);
-}
-
-// try-write success is the only case with lockdebug effects.
-// try-write when already reading is OK (will fail)
-// try-write when already writing is OK (will fail)
-// try-write failure does nothing.
-void
-lockdebug_rwlock_try_write_success(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
- setLock(locks, lock, WRLOCK);
-}
-
-void
-lockdebug_rwlock_unlock_write(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (!hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("un-writing unowned rwlock");
- }
- clearLock(locks, lock, WRLOCK);
-}
-
-
-void
-lockdebug_rwlock_assert_reading(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (!hasLock(locks, lock, RDLOCK)) {
- _objc_fatal("rwlock incorrectly not reading");
- }
-}
-
-void
-lockdebug_rwlock_assert_writing(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (!hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("rwlock incorrectly not writing");
- }
-}
-
-void
-lockdebug_rwlock_assert_locked(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (!hasLock(locks, lock, RDLOCK) && !hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("rwlock incorrectly neither reading nor writing");
- }
-}
-
-void
-lockdebug_rwlock_assert_unlocked(rwlock_t *lock)
-{
- auto& locks = ownedLocks();
-
- if (hasLock(locks, lock, RDLOCK) || hasLock(locks, lock, WRLOCK)) {
- _objc_fatal("rwlock incorrectly not unlocked");
- }
-}
-
-
#endif
// fork() safety requires careful tracking of all locks used in the runtime.
// Thou shalt not declare any locks outside this file.
-extern rwlock_t runtimeLock;
+extern mutex_t runtimeLock;
extern mutex_t DemangleCacheLock;
#endif
// and is enforced by lockdebug.
extern monitor_t classInitLock;
-extern rwlock_t selLock;
+extern mutex_t selLock;
extern mutex_t cacheUpdateLock;
extern recursive_mutex_t loadMethodLock;
extern mutex_t crashlog_lock;
inline bool
objc_object::isExtTaggedPointer()
{
- return ((uintptr_t)this & _OBJC_TAG_EXT_MASK) == _OBJC_TAG_EXT_MASK;
+ uintptr_t ptr = _objc_decodeTaggedPointer(this);
+ return (ptr & _OBJC_TAG_EXT_MASK) == _OBJC_TAG_EXT_MASK;
}
return nil;
}
+// NOTE(review): this appears to be the no-preoptimization variant (it sits
+// beside preoptimizedHinfoForHeader() returning nil) — with no shared-cache
+// range tracked, no pointer is ever reported as inside the shared region.
+bool sharedRegionContains(const void *ptr)
+{
+    return false;
+}
+
header_info *preoptimizedHinfoForHeader(const headerType *mhdr)
{
return nil;
// opt is initialized to ~0 to detect incorrect use before preopt_init()
static const objc_opt_t *opt = (objc_opt_t *)~0;
+static uintptr_t shared_cache_start;
+static uintptr_t shared_cache_end;
static bool preoptimized;
extern const objc_opt_t _objc_opt_data; // in __TEXT, __objc_opt_ro
return nil;
}
+/***********************************************************************
+* Return YES if the given pointer lies within the shared cache.
+* If the shared cache is not set up or is not valid, preopt_init()
+* leaves shared_cache_start == shared_cache_end == 0, so this
+* returns NO for every pointer.
+**********************************************************************/
+bool sharedRegionContains(const void *ptr)
+{
+    uintptr_t address = (uintptr_t)ptr;
+    return shared_cache_start <= address && address < shared_cache_end;
+}
+
namespace objc_opt {
struct objc_headeropt_ro_t {
uint32_t count;
void preopt_init(void)
{
+ // Get the memory region occupied by the shared cache.
+ size_t length;
+ const void *start = _dyld_get_shared_cache_range(&length);
+ if (start) {
+ shared_cache_start = (uintptr_t)start;
+ shared_cache_end = shared_cache_start + length;
+ } else {
+ shared_cache_start = shared_cache_end = 0;
+ }
+
// `opt` not set at compile time in order to detect too-early usage
const char *failure = nil;
opt = &_objc_opt_data;
// One of the tables is missing.
failure = "(dyld shared cache is absent or out of date)";
}
-
+
if (failure) {
// All preoptimized selector references are invalid.
preoptimized = NO;
nocopy_t(const nocopy_t&) = delete;
const nocopy_t& operator=(const nocopy_t&) = delete;
protected:
- nocopy_t() { }
- ~nocopy_t() { }
+ constexpr nocopy_t() = default;
+ ~nocopy_t() = default;
};
#if __arm64__
+// Pointer-size register prefix for inline asm
+# if __LP64__
+# define p "x" // true arm64
+# else
+# define p "w" // arm64_32
+# endif
+
static ALWAYS_INLINE
uintptr_t
LoadExclusive(uintptr_t *src)
{
uintptr_t result;
- asm("ldxr %x0, [%x1]"
+ asm("ldxr %" p "0, [%x1]"
: "=r" (result)
: "r" (src), "m" (*src));
return result;
StoreExclusive(uintptr_t *dst, uintptr_t oldvalue __unused, uintptr_t value)
{
uint32_t result;
- asm("stxr %w0, %x2, [%x3]"
- : "=r" (result), "=m" (*dst)
+ asm("stxr %w0, %" p "2, [%x3]"
+ : "=&r" (result), "=m" (*dst)
: "r" (value), "r" (dst));
return !result;
}
StoreReleaseExclusive(uintptr_t *dst, uintptr_t oldvalue __unused, uintptr_t value)
{
uint32_t result;
- asm("stlxr %w0, %x2, [%x3]"
- : "=r" (result), "=m" (*dst)
+ asm("stlxr %w0, %" p "2, [%x3]"
+ : "=&r" (result), "=m" (*dst)
: "r" (value), "r" (dst));
return !result;
}
asm("clrex" : "=m" (*dst));
}
+#undef p
#elif __arm__
# if SUPPORT_RETURN_AUTORELEASE
# define RETURN_DISPOSITION_KEY ((tls_key_t)__PTK_FRAMEWORK_OBJC_KEY4)
# endif
-# if SUPPORT_QOS_HACK
-# define QOS_KEY ((tls_key_t)__PTK_FRAMEWORK_OBJC_KEY5)
-# endif
#else
# define SUPPORT_DIRECT_THREAD_KEYS 0
#endif
}
-// fixme no rwlock yet
-
-
typedef IMAGE_DOS_HEADER headerType;
// fixme YES bundle? NO bundle? sometimes?
#define headerIsBundle(hi) YES
|| k == AUTORELEASE_POOL_KEY
# if SUPPORT_RETURN_AUTORELEASE
|| k == RETURN_DISPOSITION_KEY
-# endif
-# if SUPPORT_QOS_HACK
- || k == QOS_KEY
# endif
);
}
_pthread_getspecific_direct(_PTHREAD_TSD_SLOT_MACH_THREAD_SELF);
}
-#if SUPPORT_QOS_HACK
-static inline pthread_priority_t pthread_self_priority_direct()
-{
- pthread_priority_t pri = (pthread_priority_t)
- _pthread_getspecific_direct(_PTHREAD_TSD_SLOT_PTHREAD_QOS_CLASS);
- return pri & ~_PTHREAD_PRIORITY_FLAGS_MASK;
-}
-#endif
-
template <bool Debug> class mutex_tt;
template <bool Debug> class monitor_tt;
-template <bool Debug> class rwlock_tt;
template <bool Debug> class recursive_mutex_tt;
#if DEBUG
using spinlock_t = mutex_tt<LOCKDEBUG>;
using mutex_t = mutex_tt<LOCKDEBUG>;
using monitor_t = monitor_tt<LOCKDEBUG>;
-using rwlock_t = rwlock_tt<LOCKDEBUG>;
using recursive_mutex_t = recursive_mutex_tt<LOCKDEBUG>;
// Use fork_unsafe_lock to get a lock that isn't
// acquired and released around fork().
// All fork-safe locks are checked in debug builds.
-struct fork_unsafe_lock_t { };
+struct fork_unsafe_lock_t {
+ constexpr fork_unsafe_lock_t() = default;
+};
extern const fork_unsafe_lock_t fork_unsafe_lock;
#include "objc-lockdebug.h"
class mutex_tt : nocopy_t {
os_unfair_lock mLock;
public:
- mutex_tt() : mLock(OS_UNFAIR_LOCK_INIT) {
+ constexpr mutex_tt() : mLock(OS_UNFAIR_LOCK_INIT) {
lockdebug_remember_mutex(this);
}
- mutex_tt(const fork_unsafe_lock_t unsafe) : mLock(OS_UNFAIR_LOCK_INIT) { }
+ constexpr mutex_tt(const fork_unsafe_lock_t unsafe) : mLock(OS_UNFAIR_LOCK_INIT) { }
void lock() {
lockdebug_mutex_lock(this);
: lock(newLock) { lock.lock(); }
~locker() { lock.unlock(); }
};
+
+    // Either scoped lock and unlock, or NOP.
+    // The decision is made once at construction (shouldLock) and cached
+    // in didLock, so the destructor only unlocks what the constructor
+    // actually locked — even if the caller's condition changes later.
+    class conditional_locker : nocopy_t {
+        mutex_tt& lock;       // the guarded mutex (held only if didLock)
+        bool didLock;         // true iff the constructor took the lock
+      public:
+        conditional_locker(mutex_tt& newLock, bool shouldLock)
+            : lock(newLock), didLock(shouldLock)
+        {
+            if (shouldLock) lock.lock();
+        }
+        ~conditional_locker() { if (didLock) lock.unlock(); }
+    };
};
using mutex_locker_t = mutex_tt<LOCKDEBUG>::locker;
+using conditional_mutex_locker_t = mutex_tt<LOCKDEBUG>::conditional_locker;
template <bool Debug>
class recursive_mutex_tt : nocopy_t {
- pthread_mutex_t mLock;
+ os_unfair_recursive_lock mLock;
public:
- recursive_mutex_tt() : mLock(PTHREAD_RECURSIVE_MUTEX_INITIALIZER) {
+ constexpr recursive_mutex_tt() : mLock(OS_UNFAIR_RECURSIVE_LOCK_INIT) {
lockdebug_remember_recursive_mutex(this);
}
- recursive_mutex_tt(const fork_unsafe_lock_t unsafe)
- : mLock(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
+ constexpr recursive_mutex_tt(const fork_unsafe_lock_t unsafe)
+ : mLock(OS_UNFAIR_RECURSIVE_LOCK_INIT)
{ }
void lock()
{
lockdebug_recursive_mutex_lock(this);
-
- int err = pthread_mutex_lock(&mLock);
- if (err) _objc_fatal("pthread_mutex_lock failed (%d)", err);
+ os_unfair_recursive_lock_lock(&mLock);
}
void unlock()
{
lockdebug_recursive_mutex_unlock(this);
- int err = pthread_mutex_unlock(&mLock);
- if (err) _objc_fatal("pthread_mutex_unlock failed (%d)", err);
+ os_unfair_recursive_lock_unlock(&mLock);
}
void forceReset()
lockdebug_recursive_mutex_unlock(this);
bzero(&mLock, sizeof(mLock));
- mLock = pthread_mutex_t PTHREAD_RECURSIVE_MUTEX_INITIALIZER;
+ mLock = os_unfair_recursive_lock OS_UNFAIR_RECURSIVE_LOCK_INIT;
}
bool tryUnlock()
{
- int err = pthread_mutex_unlock(&mLock);
- if (err == 0) {
+ if (os_unfair_recursive_lock_tryunlock4objc(&mLock)) {
lockdebug_recursive_mutex_unlock(this);
return true;
- } else if (err == EPERM) {
- return false;
- } else {
- _objc_fatal("pthread_mutex_unlock failed (%d)", err);
}
+ return false;
}
-
void assertLocked() {
lockdebug_recursive_mutex_assert_locked(this);
}
pthread_cond_t cond;
public:
- monitor_tt()
+ constexpr monitor_tt()
: mutex(PTHREAD_MUTEX_INITIALIZER), cond(PTHREAD_COND_INITIALIZER)
{
lockdebug_remember_monitor(this);
}
-#if SUPPORT_QOS_HACK
-// Override QOS class to avoid priority inversion in rwlocks
-// <rdar://17697862> do a qos override before taking rw lock in objc
-
-#include <pthread/workqueue_private.h>
-extern pthread_priority_t BackgroundPriority;
-extern pthread_priority_t MainPriority;
-
-static inline void qosStartOverride()
-{
- uintptr_t overrideRefCount = (uintptr_t)tls_get_direct(QOS_KEY);
- if (overrideRefCount > 0) {
- // If there is a qos override, increment the refcount and continue
- tls_set_direct(QOS_KEY, (void *)(overrideRefCount + 1));
- }
- else {
- pthread_priority_t currentPriority = pthread_self_priority_direct();
- // Check if override is needed. Only override if we are background qos
- if (currentPriority != 0 && currentPriority <= BackgroundPriority) {
- int res __unused = _pthread_override_qos_class_start_direct(mach_thread_self_direct(), MainPriority);
- assert(res == 0);
- // Once we override, we set the reference count in the tsd
- // to know when to end the override
- tls_set_direct(QOS_KEY, (void *)1);
- }
- }
-}
-
-static inline void qosEndOverride()
-{
- uintptr_t overrideRefCount = (uintptr_t)tls_get_direct(QOS_KEY);
- if (overrideRefCount == 0) return;
-
- if (overrideRefCount == 1) {
- // end the override
- int res __unused = _pthread_override_qos_class_end_direct(mach_thread_self_direct());
- assert(res == 0);
- }
-
- // decrement refcount
- tls_set_direct(QOS_KEY, (void *)(overrideRefCount - 1));
-}
-
-// SUPPORT_QOS_HACK
-#else
-// not SUPPORT_QOS_HACK
-
-static inline void qosStartOverride() { }
-static inline void qosEndOverride() { }
-
-// not SUPPORT_QOS_HACK
-#endif
-
-
-template <bool Debug>
-class rwlock_tt : nocopy_t {
- pthread_rwlock_t mLock;
-
- public:
- rwlock_tt() : mLock(PTHREAD_RWLOCK_INITIALIZER) {
- lockdebug_remember_rwlock(this);
- }
-
- rwlock_tt(const fork_unsafe_lock_t unsafe)
- : mLock(PTHREAD_RWLOCK_INITIALIZER)
- { }
-
- void read()
- {
- lockdebug_rwlock_read(this);
-
- qosStartOverride();
- int err = pthread_rwlock_rdlock(&mLock);
- if (err) _objc_fatal("pthread_rwlock_rdlock failed (%d)", err);
- }
-
- void unlockRead()
- {
- lockdebug_rwlock_unlock_read(this);
-
- int err = pthread_rwlock_unlock(&mLock);
- if (err) _objc_fatal("pthread_rwlock_unlock failed (%d)", err);
- qosEndOverride();
- }
-
- bool tryRead()
- {
- qosStartOverride();
- int err = pthread_rwlock_tryrdlock(&mLock);
- if (err == 0) {
- lockdebug_rwlock_try_read_success(this);
- return true;
- } else if (err == EBUSY) {
- qosEndOverride();
- return false;
- } else {
- _objc_fatal("pthread_rwlock_tryrdlock failed (%d)", err);
- }
- }
-
- void write()
- {
- lockdebug_rwlock_write(this);
-
- qosStartOverride();
- int err = pthread_rwlock_wrlock(&mLock);
- if (err) _objc_fatal("pthread_rwlock_wrlock failed (%d)", err);
- }
-
- void unlockWrite()
- {
- lockdebug_rwlock_unlock_write(this);
-
- int err = pthread_rwlock_unlock(&mLock);
- if (err) _objc_fatal("pthread_rwlock_unlock failed (%d)", err);
- qosEndOverride();
- }
-
- bool tryWrite()
- {
- qosStartOverride();
- int err = pthread_rwlock_trywrlock(&mLock);
- if (err == 0) {
- lockdebug_rwlock_try_write_success(this);
- return true;
- } else if (err == EBUSY) {
- qosEndOverride();
- return false;
- } else {
- _objc_fatal("pthread_rwlock_trywrlock failed (%d)", err);
- }
- }
-
- void forceReset()
- {
- lockdebug_rwlock_unlock_write(this);
-
- bzero(&mLock, sizeof(mLock));
- mLock = pthread_rwlock_t PTHREAD_RWLOCK_INITIALIZER;
- }
-
-
- void assertReading() {
- lockdebug_rwlock_assert_reading(this);
- }
-
- void assertWriting() {
- lockdebug_rwlock_assert_writing(this);
- }
-
- void assertLocked() {
- lockdebug_rwlock_assert_locked(this);
- }
-
- void assertUnlocked() {
- lockdebug_rwlock_assert_unlocked(this);
- }
-};
-
-
#ifndef __LP64__
typedef struct mach_header headerType;
typedef struct segment_command segmentType;
static void static_init()
{
size_t count;
- Initializer *inits = getLibobjcInitializers(&_mh_dylib_header, &count);
+ auto inits = getLibobjcInitializers(&_mh_dylib_header, &count);
for (size_t i = 0; i < count; i++) {
inits[i]();
}
SideTableLockAll();
classInitLock.enter();
#if __OBJC2__
- runtimeLock.write();
+ runtimeLock.lock();
DemangleCacheLock.lock();
#else
methodListLock.lock();
NXUniqueStringLock.lock();
impLock.lock();
#endif
- selLock.write();
+ selLock.lock();
cacheUpdateLock.lock();
objcMsgLogLock.lock();
AltHandlerDebugLock.lock();
crashlog_lock.unlock();
loadMethodLock.unlock();
cacheUpdateLock.unlock();
- selLock.unlockWrite();
+ selLock.unlock();
SideTableUnlockAll();
#if __OBJC2__
DemangleCacheLock.unlock();
- runtimeLock.unlockWrite();
+ runtimeLock.unlock();
#else
impLock.unlock();
NXUniqueStringLock.unlock();
* @APPLE_LICENSE_HEADER_END@
*/
/*
- * objc-private.h
- * Copyright 1988-1996, NeXT Software, Inc.
+ * objc-private.h
+ * Copyright 1988-1996, NeXT Software, Inc.
*/
#ifndef _OBJC_PRIVATE_H_
#endif
#define OBJC_TYPES_DEFINED 1
+#undef OBJC_OLD_DISPATCH_PROTOTYPES
#define OBJC_OLD_DISPATCH_PROTOTYPES 0
#include <cstddef> // for nullptr_t
struct SideTable;
};
+#include "isa.h"
-#if (!SUPPORT_NONPOINTER_ISA && !SUPPORT_PACKED_ISA && !SUPPORT_INDEXED_ISA) ||\
- ( SUPPORT_NONPOINTER_ISA && SUPPORT_PACKED_ISA && !SUPPORT_INDEXED_ISA) ||\
- ( SUPPORT_NONPOINTER_ISA && !SUPPORT_PACKED_ISA && SUPPORT_INDEXED_ISA)
- // good config
-#else
-# error bad config
-#endif
-
-
-union isa_t
-{
+union isa_t {
isa_t() { }
isa_t(uintptr_t value) : bits(value) { }
Class cls;
uintptr_t bits;
-
-#if SUPPORT_PACKED_ISA
-
- // extra_rc must be the MSB-most field (so it matches carry/overflow flags)
- // nonpointer must be the LSB (fixme or get rid of it)
- // shiftcls must occupy the same bits that a real class pointer would
- // bits + RC_ONE is equivalent to extra_rc + 1
- // RC_HALF is the high bit of extra_rc (i.e. half of its range)
-
- // future expansion:
- // uintptr_t fast_rr : 1; // no r/r overrides
- // uintptr_t lock : 2; // lock for atomic property, @synch
- // uintptr_t extraBytes : 1; // allocated with extra bytes
-
-# if __arm64__
-# define ISA_MASK 0x0000000ffffffff8ULL
-# define ISA_MAGIC_MASK 0x000003f000000001ULL
-# define ISA_MAGIC_VALUE 0x000001a000000001ULL
- struct {
- uintptr_t nonpointer : 1;
- uintptr_t has_assoc : 1;
- uintptr_t has_cxx_dtor : 1;
- uintptr_t shiftcls : 33; // MACH_VM_MAX_ADDRESS 0x1000000000
- uintptr_t magic : 6;
- uintptr_t weakly_referenced : 1;
- uintptr_t deallocating : 1;
- uintptr_t has_sidetable_rc : 1;
- uintptr_t extra_rc : 19;
-# define RC_ONE (1ULL<<45)
-# define RC_HALF (1ULL<<18)
- };
-
-# elif __x86_64__
-# define ISA_MASK 0x00007ffffffffff8ULL
-# define ISA_MAGIC_MASK 0x001f800000000001ULL
-# define ISA_MAGIC_VALUE 0x001d800000000001ULL
+#if defined(ISA_BITFIELD)
struct {
- uintptr_t nonpointer : 1;
- uintptr_t has_assoc : 1;
- uintptr_t has_cxx_dtor : 1;
- uintptr_t shiftcls : 44; // MACH_VM_MAX_ADDRESS 0x7fffffe00000
- uintptr_t magic : 6;
- uintptr_t weakly_referenced : 1;
- uintptr_t deallocating : 1;
- uintptr_t has_sidetable_rc : 1;
- uintptr_t extra_rc : 8;
-# define RC_ONE (1ULL<<56)
-# define RC_HALF (1ULL<<7)
+ ISA_BITFIELD; // defined in isa.h
};
-
-# else
-# error unknown architecture for packed isa
-# endif
-
-// SUPPORT_PACKED_ISA
-#endif
-
-
-#if SUPPORT_INDEXED_ISA
-
-# if __ARM_ARCH_7K__ >= 2
-
-# define ISA_INDEX_IS_NPI 1
-# define ISA_INDEX_MASK 0x0001FFFC
-# define ISA_INDEX_SHIFT 2
-# define ISA_INDEX_BITS 15
-# define ISA_INDEX_COUNT (1 << ISA_INDEX_BITS)
-# define ISA_INDEX_MAGIC_MASK 0x001E0001
-# define ISA_INDEX_MAGIC_VALUE 0x001C0001
- struct {
- uintptr_t nonpointer : 1;
- uintptr_t has_assoc : 1;
- uintptr_t indexcls : 15;
- uintptr_t magic : 4;
- uintptr_t has_cxx_dtor : 1;
- uintptr_t weakly_referenced : 1;
- uintptr_t deallocating : 1;
- uintptr_t has_sidetable_rc : 1;
- uintptr_t extra_rc : 7;
-# define RC_ONE (1ULL<<25)
-# define RC_HALF (1ULL<<6)
- };
-
-# else
-# error unknown architecture for indexed isa
-# endif
-
-// SUPPORT_INDEXED_ISA
#endif
-
};
// Private headers
+#include "objc-ptrauth.h"
+
#if __OBJC2__
#include "objc-runtime-new.h"
#else
/* selectors */
extern void sel_init(size_t selrefCount);
extern SEL sel_registerNameNoLock(const char *str, bool copy);
-extern void sel_lock(void);
-extern void sel_unlock(void);
extern SEL SEL_load;
extern SEL SEL_initialize;
extern Class getPreoptimizedClass(const char *name);
extern Class* copyPreoptimizedClasses(const char *name, int *outCount);
+extern bool sharedRegionContains(const void *ptr);
+
extern Class _calloc_class(size_t size);
/* method lookup */
~recursive_mutex_locker_t() { lock.unlock(); }
};
-class rwlock_reader_t : nocopy_t {
- rwlock_t& lock;
- public:
- rwlock_reader_t(rwlock_t& newLock) : lock(newLock) { lock.read(); }
- ~rwlock_reader_t() { lock.unlockRead(); }
-};
-
-class rwlock_writer_t : nocopy_t {
- rwlock_t& lock;
- public:
- rwlock_writer_t(rwlock_t& newLock) : lock(newLock) { lock.write(); }
- ~rwlock_writer_t() { lock.unlockWrite(); }
-};
-
/* Exceptions */
struct alt_handler_list;
extern void unmap_image_nolock(const struct mach_header *mh);
extern void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClass);
extern void _unload_image(header_info *hi);
-extern const char ** _objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount);
-
extern const header_info *_headerForClass(Class cls);
static __inline uint32_t _objc_strhash(const char *s) {
uint32_t hash = 0;
for (;;) {
- int a = *s++;
- if (0 == a) break;
- hash += (hash << 8) + a;
+ int a = *s++;
+ if (0 == a) break;
+ hash += (hash << 8) + a;
}
return hash;
}
}
};
+enum { CacheLineSize = 64 };
// StripedMap<T> is a map of void* -> T, sized appropriately
// for cache-friendly lock striping.
// or as StripedMap<SomeStruct> where SomeStruct stores a spin lock.
template<typename T>
class StripedMap {
-
- enum { CacheLineSize = 64 };
-
-#if TARGET_OS_EMBEDDED
+#if TARGET_OS_IPHONE && !TARGET_OS_SIMULATOR
enum { StripeCount = 8 };
#else
enum { StripeCount = 64 };
assert(delta % CacheLineSize == 0);
assert(base % CacheLineSize == 0);
}
+#else
+ constexpr StripedMap() {}
#endif
};
}
+// Storage for a thread-safe chained hook function.
+// get() returns the value for calling.
+// set() installs a new function and returns the old one for chaining.
+// More precisely, set() writes the old value to a variable supplied by
+// the caller. get() and set() use appropriate barriers so that the
+// old value is safely written to the variable before the new value is
+// called to use it.
+//
+// T1: store to old variable; store-release to hook variable
+// T2: load-acquire from hook variable; call it; called hook loads old variable
+
+template <typename Fn>
+class ChainedHookFunction {
+ std::atomic<Fn> hook{nil};
+
+public:
+ ChainedHookFunction(Fn f) : hook{f} { };
+
+ Fn get() {
+ return hook.load(std::memory_order_acquire);
+ }
+
+ void set(Fn newValue, Fn *oldVariable)
+ {
+ Fn oldValue = hook.load(std::memory_order_relaxed);
+ do {
+ *oldVariable = oldValue;
+ } while (!hook.compare_exchange_weak(oldValue, newValue,
+ std::memory_order_release,
+ std::memory_order_relaxed));
+ }
+};
+
+
// Pointer hash function.
// This is not a terrific hash, but it is fast
// and not outrageously flawed for our purposes.
--- /dev/null
+/*
+ * Copyright (c) 2017 Apple Inc. All Rights Reserved.
+ *
+ * @APPLE_LICENSE_HEADER_START@
+ *
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this
+ * file.
+ *
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
+ * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
+ *
+ * @APPLE_LICENSE_HEADER_END@
+ */
+
+#ifndef _OBJC_PTRAUTH_H_
+#define _OBJC_PTRAUTH_H_
+
+#include <objc/objc.h>
+
+// On some architectures, method lists and method caches store signed IMPs.
+
+// StorageSignedFunctionPointer is declared by libclosure.
+#include <Block_private.h>
+
+// fixme simply include ptrauth.h once all build trains have it
+#if __has_include (<ptrauth.h>)
+#include <ptrauth.h>
+#else
+#define ptrauth_strip(__value, __key) __value
+#define ptrauth_blend_discriminator(__pointer, __integer) ((uintptr_t)0)
+#define ptrauth_sign_constant(__value, __key, __data) __value
+#define ptrauth_sign_unauthenticated(__value, __key, __data) __value
+#define ptrauth_auth_and_resign(__value, __old_key, __old_data, __new_key, __new_data) __value
+#define ptrauth_auth_function(__value, __old_key, __old_data) __value
+#define ptrauth_auth_data(__value, __old_key, __old_data) __value
+#define ptrauth_string_discriminator(__string) ((int)0)
+#define ptrauth_sign_generic_data(__value, __data) ((ptrauth_generic_signature_t)0)
+
+#define __ptrauth_function_pointer
+#define __ptrauth_return_address
+#define __ptrauth_block_invocation_pointer
+#define __ptrauth_block_copy_helper
+#define __ptrauth_block_destroy_helper
+#define __ptrauth_block_byref_copy_helper
+#define __ptrauth_block_byref_destroy_helper
+#define __ptrauth_objc_method_list_imp
+#define __ptrauth_cxx_vtable_pointer
+#define __ptrauth_cxx_vtt_vtable_pointer
+#define __ptrauth_swift_heap_object_destructor
+#define __ptrauth_cxx_virtual_function_pointer(__declkey)
+#define __ptrauth_swift_function_pointer(__typekey)
+#define __ptrauth_swift_class_method_pointer(__declkey)
+#define __ptrauth_swift_protocol_witness_function_pointer(__declkey)
+#define __ptrauth_swift_value_witness_function_pointer(__key)
+#endif
+
+
+#if __has_feature(ptrauth_calls)
+
+// Method lists use process-independent signature for compatibility.
+// Method caches use process-dependent signature for extra protection.
+// (fixme not yet __ptrauth(...) because of `stp` inline asm in objc-cache.mm)
+using MethodListIMP = IMP __ptrauth_objc_method_list_imp;
+using MethodCacheIMP =
+ StorageSignedFunctionPointer<IMP, ptrauth_key_process_dependent_code>;
+
+#else
+
+using MethodListIMP = IMP;
+using MethodCacheIMP = IMP;
+
+#endif
+
+// _OBJC_PTRAUTH_H_
+#endif
struct bucket_t {
private:
+ // IMP-first is better for arm64e ptrauth and no worse for arm64.
+ // SEL-first is better for armv7* and i386 and x86_64.
+#if __arm64__
+ MethodCacheIMP _imp;
cache_key_t _key;
- IMP _imp;
+#else
+ cache_key_t _key;
+ MethodCacheIMP _imp;
+#endif
public:
inline cache_key_t key() const { return _key; }
}
size_t byteSize() const {
- return sizeof(*this) + (count-1)*entsize();
+ return byteSize(entsize(), count);
+ }
+
+ static size_t byteSize(uint32_t entsize, uint32_t count) {
+ return sizeof(entsize_list_tt) + (count-1)*entsize;
}
List *duplicate() const {
- return (List *)memdup(this, this->byteSize());
+ auto *dup = (List *)calloc(this->byteSize(), 1);
+ dup->entsizeAndFlags = this->entsizeAndFlags;
+ dup->count = this->count;
+ std::copy(begin(), end(), dup->begin());
+ return dup;
}
struct iterator;
struct method_t {
SEL name;
const char *types;
- IMP imp;
+ MethodListIMP imp;
struct SortBySELAddress :
public std::binary_function<const method_t&,
#if SUPPORT_NONPOINTER_ISA
#define RW_REQUIRES_RAW_ISA (1<<15)
#endif
-
-// class is a Swift class
-#define FAST_IS_SWIFT (1UL<<0)
// class or superclass has default retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
-#define FAST_HAS_DEFAULT_RR (1UL<<1)
+#define RW_HAS_DEFAULT_RR (1<<14)
+
+// class is a Swift class from the pre-stable Swift ABI
+#define FAST_IS_SWIFT_LEGACY (1UL<<0)
+// class is a Swift class from the stable Swift ABI
+#define FAST_IS_SWIFT_STABLE (1UL<<1)
// data pointer
#define FAST_DATA_MASK 0xfffffffcUL
// class or superclass has default alloc/allocWithZone: implementation
// Note this is is stored in the metaclass.
#define RW_HAS_DEFAULT_AWZ (1<<16)
+// class's instances require raw isa
+#define RW_REQUIRES_RAW_ISA (1<<15)
-// class is a Swift class
-#define FAST_IS_SWIFT (1UL<<0)
+// class is a Swift class from the pre-stable Swift ABI
+#define FAST_IS_SWIFT_LEGACY (1UL<<0)
+// class is a Swift class from the stable Swift ABI
+#define FAST_IS_SWIFT_STABLE (1UL<<1)
// class or superclass has default retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
-#define FAST_HAS_DEFAULT_RR (1UL<<1)
-// class's instances requires raw isa
-#define FAST_REQUIRES_RAW_ISA (1UL<<2)
+#define FAST_HAS_DEFAULT_RR (1UL<<2)
// data pointer
#define FAST_DATA_MASK 0x00007ffffffffff8UL
#else
// Leaks-incompatible version that steals lots of bits.
-// class is a Swift class
-#define FAST_IS_SWIFT (1UL<<0)
-// class's instances requires raw isa
-#define FAST_REQUIRES_RAW_ISA (1UL<<1)
-// class or superclass has .cxx_destruct implementation
-// This bit is aligned with isa_t->hasCxxDtor to save an instruction.
-#define FAST_HAS_CXX_DTOR (1UL<<2)
+// class is a Swift class from the pre-stable Swift ABI
+#define FAST_IS_SWIFT_LEGACY (1UL<<0)
+// class is a Swift class from the stable Swift ABI
+#define FAST_IS_SWIFT_STABLE (1UL<<1)
+// summary bit for fast alloc path: !hasCxxCtor and
+// !instancesRequireRawIsa and instanceSize fits into shiftedSize
+#define FAST_ALLOC (1UL<<2)
// data pointer
#define FAST_DATA_MASK 0x00007ffffffffff8UL
// class or superclass has .cxx_construct implementation
// class or superclass has default retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
#define FAST_HAS_DEFAULT_RR (1UL<<49)
-// summary bit for fast alloc path: !hasCxxCtor and
-// !instancesRequireRawIsa and instanceSize fits into shiftedSize
-#define FAST_ALLOC (1UL<<50)
+// class's instances require raw isa
+// This bit is aligned with isa_t->hasCxxDtor to save an instruction.
+#define FAST_REQUIRES_RAW_ISA (1UL<<50)
+// class or superclass has .cxx_destruct implementation
+#define FAST_HAS_CXX_DTOR (1UL<<51)
// instance size in units of 16 bytes
// or 0 if the instance size is too big in this field
// This field must be LAST
-#define FAST_SHIFTED_SIZE_SHIFT 51
+#define FAST_SHIFTED_SIZE_SHIFT 52
// FAST_ALLOC means
// FAST_HAS_CXX_CTOR is set
#endif
+// The Swift ABI requires that these bits be defined like this on all platforms.
+static_assert(FAST_IS_SWIFT_LEGACY == 1, "resistance is futile");
+static_assert(FAST_IS_SWIFT_STABLE == 2, "resistance is futile");
+
struct class_ro_t {
uint32_t flags;
bits = newBits;
}
+#if FAST_HAS_DEFAULT_RR
bool hasDefaultRR() {
return getBit(FAST_HAS_DEFAULT_RR);
}
void setHasCustomRR() {
clearBits(FAST_HAS_DEFAULT_RR);
}
+#else
+ bool hasDefaultRR() {
+ return data()->flags & RW_HAS_DEFAULT_RR;
+ }
+ void setHasDefaultRR() {
+ data()->setFlags(RW_HAS_DEFAULT_RR);
+ }
+ void setHasCustomRR() {
+ data()->clearFlags(RW_HAS_DEFAULT_RR);
+ }
+#endif
#if FAST_HAS_DEFAULT_AWZ
bool hasDefaultAWZ() {
#endif
}
- bool isSwift() {
- return getBit(FAST_IS_SWIFT);
+ bool isAnySwift() {
+ return isSwiftStable() || isSwiftLegacy();
}
- void setIsSwift() {
- setBits(FAST_IS_SWIFT);
+ bool isSwiftStable() {
+ return getBit(FAST_IS_SWIFT_STABLE);
+ }
+ void setIsSwiftStable() {
+ setBits(FAST_IS_SWIFT_STABLE);
+ }
+
+ bool isSwiftLegacy() {
+ return getBit(FAST_IS_SWIFT_LEGACY);
+ }
+ void setIsSwiftLegacy() {
+ setBits(FAST_IS_SWIFT_LEGACY);
}
};
}
- bool isSwift() {
- return bits.isSwift();
+ bool isSwiftStable() {
+ return bits.isSwiftStable();
+ }
+
+ bool isSwiftLegacy() {
+ return bits.isSwiftLegacy();
+ }
+
+ bool isAnySwift() {
+ return bits.isAnySwift();
}
foreach_realized_class_and_subclass_2(Class top, unsigned& count,
std::function<bool (Class)> code)
{
- // runtimeLock.assertWriting();
+ // runtimeLock.assertLocked();
assert(top);
Class cls = top;
while (1) {
static void updateCustomRR_AWZ(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
static void flushCaches(Class cls);
+static void initializeTaggedPointerObfuscator(void);
#if SUPPORT_FIXUP
static void fixupMessageRef(message_ref_t *msg);
#endif
/***********************************************************************
* Lock management
**********************************************************************/
-rwlock_t runtimeLock;
-rwlock_t selLock;
+mutex_t runtimeLock;
+mutex_t selLock;
mutex_t cacheUpdateLock;
recursive_mutex_t loadMethodLock;
-#if SUPPORT_QOS_HACK
-pthread_priority_t BackgroundPriority = 0;
-pthread_priority_t MainPriority = 0;
-# if DEBUG
-static __unused void destroyQOSKey(void *arg) {
- _objc_fatal("QoS override level at thread exit is %zu instead of zero",
- (size_t)(uintptr_t)arg);
-}
-# endif
-#endif
-
void lock_init(void)
{
-#if SUPPORT_QOS_HACK
- BackgroundPriority = _pthread_qos_class_encode(QOS_CLASS_BACKGROUND, 0, 0);
- MainPriority = _pthread_qos_class_encode(qos_class_main(), 0, 0);
-# if DEBUG
- pthread_key_init_np(QOS_KEY, &destroyQOSKey);
-# endif
-#endif
}
#endif
+/***********************************************************************
+* allocatedClasses
+* A table of all classes (and metaclasses) which have been allocated
+* with objc_allocateClassPair.
+**********************************************************************/
+static NXHashTable *allocatedClasses = nil;
+
+
typedef locstamped_category_list_t category_list;
}
void method_list_t::setFixedUp() {
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(!isFixedUp());
entsizeAndFlags = entsize() | fixed_up_method_list;
}
}
void protocol_t::setFixedUp() {
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(!isFixedUp());
flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}
}
+static void (*classCopyFixupHandler)(Class _Nonnull oldClass,
+ Class _Nonnull newClass);
+
+void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
+ (Class _Nonnull oldClass, Class _Nonnull newClass)) {
+ classCopyFixupHandler = newFixupHandler;
+}
+
static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
- if (!supercls || !supercls->isSwift()) {
+ if (!supercls || !supercls->isAnySwift()) {
return _calloc_class(sizeof(objc_class) + extraBytes);
}
bzero(swcls, sizeof(objc_class));
swcls->description = nil;
+ if (classCopyFixupHandler) {
+ classCopyFixupHandler(supercls, (Class)swcls);
+ }
+
// Mark this class as Swift-enhanced.
- swcls->bits.setIsSwift();
+ if (supercls->isSwiftStable()) {
+ swcls->bits.setIsSwiftStable();
+ }
+ if (supercls->isSwiftLegacy()) {
+ swcls->bits.setIsSwiftLegacy();
+ }
return (Class)swcls;
}
if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();
Class cls = (Class)obj;
- if (!cls->isSwift()) return base + sizeof(objc_class);
+ if (!cls->isAnySwift()) return base + sizeof(objc_class);
swift_class_t *swcls = (swift_class_t *)cls;
return base - swcls->classAddressOffset + word_align(swcls->classSize);
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (rw->flags & RW_COPIED_RO) {
// already writeable, do nothing
**********************************************************************/
static NXMapTable *unattachedCategories(void)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
static NXMapTable *category_map = nil;
}
+/***********************************************************************
+* dataSegmentsContain
+* Returns true if the given address lies within a data segment in any
+* loaded image.
+*
+* This is optimized for use where the return value is expected to be
+* true. A call where the return value is false always results in a
+* slow linear search of all loaded images. A call where the return
+* value is true will often be fast due to caching.
+**********************************************************************/
+static bool dataSegmentsContain(const void *ptr) {
+ struct Range {
+ uintptr_t start, end;
+ bool contains(uintptr_t ptr) {
+ return start <= ptr && ptr <= end;
+ }
+ };
+
+ // This is a really simple linear searched cache. On a cache hit,
+ // the hit entry is moved to the front of the array. On a cache
+ // miss where a range is successfully found on the slow path, the
+ // found range is inserted at the beginning of the cache. This gives
+ // us fast access to the most recently used elements, and LRU
+ // eviction.
+ enum { cacheCount = 16 };
+ static Range cache[cacheCount];
+
+ uintptr_t addr = (uintptr_t)ptr;
+
+ // Special case a hit on the first entry of the cache. No
+ // bookkeeping is required at all in this case.
+ if (cache[0].contains(addr)) {
+ return true;
+ }
+
+ // Search the rest of the cache.
+ for (unsigned i = 1; i < cacheCount; i++) {
+ if (cache[i].contains(addr)) {
+ // Cache hit. Move all preceding entries down one element,
+ // then place this entry at the front.
+ Range r = cache[i];
+ memmove(&cache[1], &cache[0], i * sizeof(cache[0]));
+ cache[0] = r;
+ return true;
+ }
+ }
+
+ // Cache miss. Find the image header containing the given address.
+ // If there isn't one, then we're definitely not in any image,
+ // so return false.
+ Range found = { 0, 0 };
+ auto *h = (headerType *)dyld_image_header_containing_address(ptr);
+ if (h == nullptr)
+ return false;
+
+ // Iterate over the data segments in the found image. If the address
+ // lies within one, note the data segment range in `found`.
+ // TODO: this is more work than we'd like to do. All we really need
+ // is the full range of the image. Addresses within the TEXT segment
+ // would also be acceptable for our use case. If possible, we should
+ // change this to work with the full address range of the found
+ // image header. Another possibility would be to use the range
+ // from `h` to the end of the page containing `addr`.
+ foreach_data_segment(h, [&](const segmentType *seg, intptr_t slide) {
+ Range r;
+ r.start = seg->vmaddr + slide;
+ r.end = r.start + seg->vmsize;
+ if (r.contains(addr))
+ found = r;
+ });
+
+ if (found.start != 0) {
+ memmove(&cache[1], &cache[0], (cacheCount - 1) * sizeof(cache[0]));
+ cache[0] = found;
+ return true;
+ }
+
+ return false;
+}
+
+
+/***********************************************************************
+* isKnownClass
+* Return true if the class is known to the runtime (located within the
+* shared cache, within the data segment of a loaded image, or has been
+* allocated with objc_allocateClassPair).
+**********************************************************************/
+static bool isKnownClass(Class cls) {
+ // The order of conditionals here is important for speed. We want to
+ // put the most common cases first, but also the fastest cases
+ // first. Checking the shared region is both fast and common.
+ // Checking allocatedClasses is fast, but may not be common,
+ // depending on what the program is doing. Checking if data segments
+ // contain the address is slow, so do it last.
+ return (sharedRegionContains(cls) ||
+ NXHashMember(allocatedClasses, cls) ||
+ dataSegmentsContain(cls));
+}
+
+
+/***********************************************************************
+* addClassTableEntry
+* Add a class to the table of all classes. If addMeta is true,
+* automatically adds the metaclass of the class as well.
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+static void addClassTableEntry(Class cls, bool addMeta = true) {
+ runtimeLock.assertLocked();
+
+ // This class is allowed to be a known class via the shared cache or via
+ // data segments, but it is not allowed to be in the dynamic table already.
+ assert(!NXHashMember(allocatedClasses, cls));
+
+ if (!isKnownClass(cls))
+ NXHashInsert(allocatedClasses, cls);
+ if (addMeta)
+ addClassTableEntry(cls->ISA(), false);
+}
+
+
+/***********************************************************************
+* checkIsKnownClass
+* Checks the given class against the list of all known classes. Dies
+* with a fatal error if the class is not known.
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+static void checkIsKnownClass(Class cls)
+{
+ if (!isKnownClass(cls))
+ _objc_fatal("Attempt to use unknown class %p.", cls);
+}
+
+
/***********************************************************************
* addUnattachedCategoryForClass
* Records an unattached category.
static void addUnattachedCategoryForClass(category_t *cat, Class cls,
header_info *catHeader)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
// DO NOT use cat->cls! cls may be cat->cls->isa instead
NXMapTable *cats = unattachedCategories();
**********************************************************************/
static void removeUnattachedCategoryForClass(category_t *cat, Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
// DO NOT use cat->cls! cls may be cat->cls->isa instead
NXMapTable *cats = unattachedCategories();
static category_list *
unattachedCategoriesForClass(Class cls, bool realizing)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
return (category_list *)NXMapRemove(unattachedCategories(), cls);
}
**********************************************************************/
static void removeAllUnattachedCategoriesForClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
void *list = NXMapRemove(unattachedCategories(), cls);
if (list) free(list);
static void
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(!mlist->isFixedUp());
// fixme lock less in attachMethodLists ?
- sel_lock();
+ {
+ mutex_locker_t lock(selLock);
- // Unique selectors in list.
- for (auto& meth : *mlist) {
- const char *name = sel_cname(meth.name);
- meth.name = sel_registerNameNoLock(name, bundleCopy);
+ // Unique selectors in list.
+ for (auto& meth : *mlist) {
+ const char *name = sel_cname(meth.name);
+ meth.name = sel_registerNameNoLock(name, bundleCopy);
+ }
}
-
- sel_unlock();
// Sort by selector address.
if (sort) {
prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
bool baseMethods, bool methodsFromBundle)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (addedCount == 0) return;
**********************************************************************/
static void methodizeClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
bool isMeta = cls->isMetaClass();
auto rw = cls->data();
category_list *cats;
bool isMeta;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
isMeta = cls->isMetaClass();
**********************************************************************/
static void addNonMetaClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
void *old;
old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
static void removeNonMetaClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
NXMapRemove(nonMetaClasses(), cls->ISA());
}
**********************************************************************/
static void addNamedClass(Class cls, const char *name, Class replacing = nil)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
Class old;
if ((old = getClass(name)) && old != replacing) {
inform_duplicate(name, old, cls);
**********************************************************************/
static void removeNamedClass(Class cls, const char *name)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(!(cls->data()->flags & RO_META));
if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
NXMapRemove(gdb_objc_realized_classes, name);
static NXMapTable *future_named_class_map = nil;
static NXMapTable *futureNamedClasses()
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (future_named_class_map) return future_named_class_map;
{
void *old;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (PrintFuture) {
_objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
**********************************************************************/
static Class popFutureNamedClass(const char *name)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
Class cls = nil;
**********************************************************************/
static void addRemappedClass(Class oldcls, Class newcls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (PrintFuture) {
_objc_inform("FUTURE: using %p instead of %p for %s",
Class _class_remap(Class cls)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return remapClass(cls);
}
**********************************************************************/
Class _class_getNonMetaClass(Class cls, id obj)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
cls = getNonMetaClass(cls, obj);
assert(cls->isRealized());
return cls;
static void addRootClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(cls->isRealized());
cls->data()->nextSiblingClass = _firstRealizedClass;
static void removeRootClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
Class *classp;
for (classp = &_firstRealizedClass;
**********************************************************************/
static void addSubclass(Class supercls, Class subcls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (supercls && subcls) {
assert(supercls->isRealized());
**********************************************************************/
static void removeSubclass(Class supercls, Class subcls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(supercls->isRealized());
assert(subcls->isRealized());
assert(subcls->superclass == supercls);
**********************************************************************/
static void moveIvars(class_ro_t *ro, uint32_t superSize)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
uint32_t diff;
**********************************************************************/
static Class realizeClass(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
const class_ro_t *ro;
class_rw_t *rw;
**********************************************************************/
static void realizeAllClassesInImage(header_info *hi)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
size_t count, i;
classref_t *classlist;
**********************************************************************/
static void realizeAllClasses(void)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
header_info *hi;
for (hi = FirstHeader; hi; hi = hi->getNext()) {
**********************************************************************/
Class _objc_allocateFutureClass(const char *name)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
Class cls;
NXMapTable *map = futureNamedClasses();
**********************************************************************/
static void flushCaches(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
mutex_locker_t lock(cacheUpdateLock);
void _objc_flush_caches(Class cls)
{
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
flushCaches(cls);
if (cls && cls->superclass && cls != cls->getIsa()) {
flushCaches(cls->getIsa());
map_images(unsigned count, const char * const paths[],
const struct mach_header * const mhdrs[])
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return map_images_nolock(count, paths, mhdrs);
}
// Discover load methods
{
- rwlock_writer_t lock2(runtimeLock);
+ mutex_locker_t lock2(runtimeLock);
prepare_load_methods((const headerType *)mh);
}
unmap_image(const char *path __unused, const struct mach_header *mh)
{
recursive_mutex_locker_t lock(loadMethodLock);
- rwlock_writer_t lock2(runtimeLock);
+ mutex_locker_t lock2(runtimeLock);
unmap_image_nolock(mh);
}
// Copy objc_class to future class's struct.
// Preserve future's rw data block.
- if (newCls->isSwift()) {
+ if (newCls->isAnySwift()) {
_objc_fatal("Can't complete future class request for '%s' "
"because the real class is too big.",
cls->nameForLogging());
assert(getClass(mangledName));
} else {
addNamedClass(cls, mangledName, replacing);
+ addClassTableEntry(cls);
}
-
+
// for future reference: shared cache never contains MH_BUNDLEs
if (headerIsBundle) {
cls->data()->flags |= RO_FROM_BUNDLE;
static bool doneOnce;
TimeLogger ts(PrintImageTimes);
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
#define EACH_HEADER \
hIndex = 0; \
disableTaggedPointers();
}
+ initializeTaggedPointerObfuscator();
+
if (PrintConnecting) {
_objc_inform("CLASS: found %d classes during launch", totalClasses);
}
(isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
gdb_objc_realized_classes =
NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
-
+
+ allocatedClasses = NXCreateHashTable(NXPtrPrototype, 0, nil);
+
ts.log("IMAGE TIMES: first time tasks");
}
// Discover classes. Fix up unresolved future classes. Mark bundle classes.
for (EACH_HEADER) {
+ classref_t *classlist = _getObjc2ClassList(hi, &count);
+
if (! mustReadClasses(hi)) {
// Image is sufficiently optimized that we need not call readClass()
continue;
bool headerIsBundle = hi->isBundle();
bool headerIsPreoptimized = hi->isPreoptimized();
- classref_t *classlist = _getObjc2ClassList(hi, &count);
for (i = 0; i < count; i++) {
Class cls = (Class)classlist[i];
Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
// Fix up @selector references
static size_t UnfixedSelectors;
- sel_lock();
- for (EACH_HEADER) {
- if (hi->isPreoptimized()) continue;
-
- bool isBundle = hi->isBundle();
- SEL *sels = _getObjc2SelectorRefs(hi, &count);
- UnfixedSelectors += count;
- for (i = 0; i < count; i++) {
- const char *name = sel_cname(sels[i]);
- sels[i] = sel_registerNameNoLock(name, isBundle);
+ {
+ mutex_locker_t lock(selLock);
+ for (EACH_HEADER) {
+ if (hi->isPreoptimized()) continue;
+
+ bool isBundle = hi->isBundle();
+ SEL *sels = _getObjc2SelectorRefs(hi, &count);
+ UnfixedSelectors += count;
+ for (i = 0; i < count; i++) {
+ const char *name = sel_cname(sels[i]);
+ sels[i] = sel_registerNameNoLock(name, isBundle);
+ }
}
}
- sel_unlock();
ts.log("IMAGE TIMES: fix up selector references");
cls->ISA()->cache._occupied = 0;
}
#endif
-
+
+ addClassTableEntry(cls);
realizeClass(cls);
}
}
{
size_t count, i;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
classref_t *classlist =
_getObjc2NonlazyClassList(mhdr, &count);
size_t count, i;
loadMethodLock.assertLocked();
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
// Unload unattached categories and categories waiting for +load.
static IMP
_method_setImplementation(Class cls, method_t *m, IMP imp)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (!m) return nil;
if (!imp) return nil;
{
// Don't know the class - will be slow if RR/AWZ are affected
// fixme build list of classes whose Methods are known externally?
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return _method_setImplementation(Nil, m, imp);
}
{
if (!m1 || !m2) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
IMP m1_imp = m1->imp;
m1->imp = m2->imp;
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return copyPropertyAttributeList(prop->attributes,outCount);
}
{
if (!prop || !name || *name == '\0') return nil;
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return copyPropertyAttributeValue(prop->attributes, name);
}
fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
bool required, bool instance)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (!mlist) return;
if (mlist->isFixedUp()) return;
static void
fixupProtocol(protocol_t *proto)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (proto->protocols) {
for (uintptr_t i = 0; i < proto->protocols->count; i++) {
assert(proto);
if (!proto->isFixedUp()) {
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
fixupProtocol(proto);
}
}
if (!proto) return nil;
fixupProtocolIfNeeded(proto);
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
isInstanceMethod, recursive);
}
* protocol_getMethodTypeEncoding_nolock
* Return the @encode string for the requested protocol method.
* Returns nil if the compiler did not emit any extended @encode data.
-* Locking: runtimeLock must be held for writing by the caller
+* Locking: runtimeLock must be held by the caller
**********************************************************************/
const char *
protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
if (!proto) return nil;
fixupProtocolIfNeeded(proto);
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return protocol_getMethodTypeEncoding_nolock(proto, sel,
isRequiredMethod,
isInstanceMethod);
**********************************************************************/
BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return protocol_conformsToProtocol_nolock(newprotocol(self),
newprotocol(other));
}
fixupProtocolIfNeeded(proto);
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
method_list_t *mlist =
getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
{
if (!p || !name) return nil;
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return (objc_property_t)
protocol_getProperty_nolock(newprotocol(p), name,
isRequiredProperty, isInstanceProperty);
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
property_list_t *plist = isInstanceProperty
? newprotocol(proto)->instanceProperties
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
if (proto->protocols) {
count = (unsigned int)proto->protocols->count;
Protocol *
objc_allocateProtocol(const char *name)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
if (getProtocol(name)) {
return nil;
{
protocol_t *proto = newprotocol(proto_gen);
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
extern objc_class OBJC_CLASS_$___IncompleteProtocol;
Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
if (!proto_gen) return;
if (!addition_gen) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
if (proto->ISA() != cls) {
_objc_inform("protocol_addProtocol: modified protocol '%s' is not "
if (!proto_gen) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
if (proto->ISA() != cls) {
_objc_inform("protocol_addMethodDescription: protocol '%s' is not "
if (!proto) return;
if (!name) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
if (proto->ISA() != cls) {
_objc_inform("protocol_addProperty: protocol '%s' is not "
int
objc_getClassList(Class *buffer, int bufferLen)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
realizeAllClasses();
Class *
objc_copyClassList(unsigned int *outCount)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
realizeAllClasses();
Protocol * __unsafe_unretained *
objc_copyProtocolList(unsigned int *outCount)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
NXMapTable *protocol_map = protocols();
**********************************************************************/
Protocol *objc_getProtocol(const char *name)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return getProtocol(name);
}
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
assert(cls->isRealized());
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
assert(cls->isRealized());
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
assert(cls->isRealized());
+
auto rw = cls->data();
property_t **result = nil;
Class
_category_getClass(Category cat)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
Class result = remapClass(cat->cls);
assert(result->isRealized()); // ok for call_category_loads' usage
return result;
return nil;
}
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+
+ checkIsKnownClass(cls);
assert(cls->isRealized());
/***********************************************************************
-* _objc_copyClassNamesForImage
-* fixme
-* Locking: write-locks runtimeLock
+* objc_copyImageNames
+* Copies names of loaded images with ObjC contents.
+*
+* Returns a NULL-terminated, malloc'd array the caller must free, or
+* nil when no images are loaded. *outCount (if provided) receives the
+* number of entries.
+*
+* Locking: acquires runtimeLock
**********************************************************************/
-const char **
-_objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
+const char **objc_copyImageNames(unsigned int *outCount)
{
- size_t count, i, shift;
- classref_t *classlist;
- const char **names;
+ mutex_locker_t lock(runtimeLock);
- // Need to write-lock in case demangledName() needs to realize a class.
- rwlock_writer_t lock(runtimeLock);
-
- classlist = _getObjc2ClassList(hi, &count);
- names = (const char **)malloc((count+1) * sizeof(const char *));
+ // Buffer sized from HeaderCount, +1 for the NULL terminator.
+ // malloc result is not checked here, matching surrounding runtime style.
+#if TARGET_OS_WIN32
+ const TCHAR **names = (const TCHAR **)
+ malloc((HeaderCount+1) * sizeof(TCHAR *));
+#else
+ const char **names = (const char **)
+ malloc((HeaderCount+1) * sizeof(char *));
+#endif
+
+ unsigned int count = 0;
+ // NOTE(review): this walk is not bounded by HeaderCount (the legacy
+ // variant below guards with `count < max`); assumes HeaderCount always
+ // matches the FirstHeader list length while runtimeLock is held -- confirm.
+ for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
+#if TARGET_OS_WIN32
+ if (hi->moduleName) {
+ names[count++] = hi->moduleName;
+ }
+#else
+ const char *fname = hi->fname();
+ if (fname) {
+ names[count++] = fname;
+ }
+#endif
+ }
+ names[count] = nil;
- shift = 0;
- for (i = 0; i < count; i++) {
+ if (count == 0) {
+ // Return nil instead of empty list if there are no images
+ free((void *)names);
+ names = nil;
+ }
+
+ if (outCount) *outCount = count;
+ return names;
+}
+
+
+/***********************************************************************
+* copyClassNamesForImage_nolock
+* Copies class names from the given image.
+* Missing weak-import classes are omitted.
+* Swift class names are demangled.
+*
+* Locking: runtimeLock must be held by the caller
+**********************************************************************/
+const char **
+copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
+{
+ runtimeLock.assertLocked();
+ assert(hi);
+
+ size_t count;
+ classref_t *classlist = _getObjc2ClassList(hi, &count);
+ const char **names = (const char **)
+ malloc((count+1) * sizeof(const char *));
+
+ size_t shift = 0;
+ for (size_t i = 0; i < count; i++) {
Class cls = remapClass(classlist[i]);
if (cls) {
names[i-shift] = cls->demangledName(true/*realize*/);
}
+
+/***********************************************************************
+* objc_copyClassNamesForImage
+* Copies class names from the named image.
+* The image name must be identical to dladdr's dli_fname value.
+* Missing weak-import classes are omitted.
+* Swift class names are demangled.
+*
+* Returns nil (and sets *outCount to 0) when image is nil or not found.
+*
+* Locking: acquires runtimeLock
+**********************************************************************/
+const char **
+objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
+{
+ if (!image) {
+ if (outCount) *outCount = 0;
+ return nil;
+ }
+
+ mutex_locker_t lock(runtimeLock);
+
+ // Find the image by exact name match against each loaded header.
+ header_info *hi;
+ for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
+#if TARGET_OS_WIN32
+ // NOTE(review): assumes callers on Win32 actually pass a TCHAR
+ // string through the const char * parameter -- confirm.
+ if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
+#else
+ if (0 == strcmp(image, hi->fname())) break;
+#endif
+ }
+
+ if (!hi) {
+ if (outCount) *outCount = 0;
+ return nil;
+ }
+
+ // Lock is still held, as copyClassNamesForImage_nolock requires.
+ return copyClassNamesForImage_nolock(hi, outCount);
+}
+
+
+/***********************************************************************
+* objc_copyClassNamesForImageHeader
+* Copies class names from the given image.
+* Missing weak-import classes are omitted.
+* Swift class names are demangled.
+*
+* Returns nil (and sets *outCount to 0) when mh is nil or does not
+* match any loaded ObjC image.
+*
+* Locking: acquires runtimeLock
+**********************************************************************/
+const char **
+objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
+{
+ if (!mh) {
+ if (outCount) *outCount = 0;
+ return nil;
+ }
+
+ mutex_locker_t lock(runtimeLock);
+
+ // Find the image by mach_header identity (pointer comparison).
+ header_info *hi;
+ for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
+ if (hi->mhdr() == (const headerType *)mh) break;
+ }
+
+ if (!hi) {
+ if (outCount) *outCount = 0;
+ return nil;
+ }
+
+ // Lock is still held, as copyClassNamesForImage_nolock requires.
+ return copyClassNamesForImage_nolock(hi, outCount);
+}
+
+
/***********************************************************************
* saveTemporaryString
* Save a string in a thread-local FIFO buffer.
/***********************************************************************
* objc_class::demangledName
* If realize=false, the class must already be realized or future.
-* Locking: If realize=true, runtimeLock must be held for writing by the caller.
+* Locking: If realize=true, runtimeLock must be held by the caller.
**********************************************************************/
mutex_t DemangleCacheLock;
static NXHashTable *DemangleCache;
if (isRealized() || isFuture()) {
// Class is already realized or future.
// Save demangling result in rw data.
- // We may not own rwlock for writing so use an atomic operation instead.
+ // We may not own runtimeLock so use an atomic operation instead.
if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
(void**)&data()->demangledName))
{
// fixme lldb's calls to class_getName() can also get here when
// interrogating the dyld shared cache. (rdar://27258517)
- // fixme runtimeLock.assertWriting();
+ // fixme runtimeLock.assertLocked();
// fixme assert(realize);
if (realize) {
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
realizeClass((Class)this);
data()->demangledName = de;
return de;
**********************************************************************/
static Method _class_getMethod(Class cls, SEL sel)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return getMethod_nolock(cls, sel);
}
// the cache was re-filled with the old value after the cache flush on
// behalf of the category.
- runtimeLock.read();
+ runtimeLock.lock();
+ checkIsKnownClass(cls);
if (!cls->isRealized()) {
- // Drop the read-lock and acquire the write-lock.
- // realizeClass() checks isRealized() again to prevent
- // a race while the lock is down.
- runtimeLock.unlockRead();
- runtimeLock.write();
-
realizeClass(cls);
-
- runtimeLock.unlockWrite();
- runtimeLock.read();
}
if (initialize && !cls->isInitialized()) {
- runtimeLock.unlockRead();
+ runtimeLock.unlock();
_class_initialize (_class_getNonMetaClass(cls, inst));
- runtimeLock.read();
+ runtimeLock.lock();
// If sel == initialize, _class_initialize will send +initialize and
// then the messenger will send +initialize again after this
// procedure finishes. Of course, if this is not being called
retry:
- runtimeLock.assertReading();
+ runtimeLock.assertLocked();
// Try this class's cache.
// No implementation found. Try method resolver once.
if (resolver && !triedResolver) {
- runtimeLock.unlockRead();
+ runtimeLock.unlock();
_class_resolveMethod(cls, sel, inst);
- runtimeLock.read();
+ runtimeLock.lock();
// Don't cache the result; we don't hold the lock so it may have
// changed already. Re-do the search from scratch instead.
triedResolver = YES;
cache_fill(cls, sel, imp, inst);
done:
- runtimeLock.unlockRead();
+ runtimeLock.unlock();
return imp;
}
// Cache miss. Search method list.
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
meth = getMethodNoSuper_nolock(cls, sel);
{
if (!cls || !name) return nil;
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
+
assert(cls->isRealized());
for ( ; cls; cls = cls->superclass) {
cls = (Class)this;
metacls = cls->ISA();
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
// Scan metaclass for custom AWZ.
// Scan metaclass for custom RR.
void objc_class::setHasCustomRR(bool inherited)
{
Class cls = (Class)this;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (hasCustomRR()) return;
void objc_class::setHasCustomAWZ(bool inherited)
{
Class cls = (Class)this;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (hasCustomAWZ()) return;
void objc_class::setInstancesRequireRawIsa(bool inherited)
{
Class cls = (Class)this;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (instancesRequireRawIsa()) return;
{
#if SUPPORT_INDEXED_ISA
Class cls = (Class)this;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
// No more indexes available.
{
if (!cls) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
+
// Can only change layout of in-construction classes.
// note: if modifications to post-construction classes were
// allowed, there would be a race below (us vs. concurrent object_setIvar)
ro_w->ivarLayout = ustrdupMaybeNil(layout);
}
-// SPI: Instance-specific object layout.
-
-void
-_class_setIvarLayoutAccessor(Class cls, const uint8_t* (*accessor) (id object)) {
- if (!cls) return;
-
- rwlock_writer_t lock(runtimeLock);
-
- class_ro_t *ro_w = make_ro_writeable(cls->data());
-
- // FIXME: this really isn't safe to free if there are instances of this class already.
- if (!(cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT)) try_free(ro_w->ivarLayout);
- ro_w->ivarLayout = (uint8_t *)accessor;
- cls->setInfo(RW_HAS_INSTANCE_SPECIFIC_LAYOUT);
-}
-
-const uint8_t *
-_object_getIvarLayout(Class cls, id object)
-{
- if (cls) {
- const uint8_t* layout = cls->data()->ro->ivarLayout;
- if (cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT) {
- const uint8_t* (*accessor) (id object) = (const uint8_t* (*)(id))layout;
- layout = accessor(object);
- }
- return layout;
- }
- return nil;
-}
/***********************************************************************
* class_setWeakIvarLayout
{
if (!cls) return;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
+
// Can only change layout of in-construction classes.
// note: if modifications to post-construction classes were
// allowed, there would be a race below (us vs. concurrent object_setIvar)
**********************************************************************/
Class _class_getClassForIvar(Class cls, Ivar ivar)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
for ( ; cls; cls = cls->superclass) {
if (auto ivars = cls->data()->ro->ivars) {
Ivar
_class_getVariable(Class cls, const char *name)
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
for ( ; cls; cls = cls->superclass) {
ivar_t *ivar = getIvar(cls, name);
if (!cls) return NO;
if (!proto_gen) return NO;
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
+
assert(cls->isRealized());
-
+
for (const auto& proto_ref : cls->data()->protocols) {
protocol_t *p = remapProtocol(proto_ref);
if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
{
IMP result = nil;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
+ checkIsKnownClass(cls);
+
assert(types);
assert(cls->isRealized());
return result;
}
+/**********************************************************************
+* addMethods
+* Add the given methods to a class in bulk.
+* Returns the selectors which could not be added, when replace == NO and a
+* method already exists. The returned selectors are NULL terminated and must be
+* freed by the caller. They are NULL if no failures occurred.
+* Locking: runtimeLock must be held by the caller
+**********************************************************************/
+static SEL *
+addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
+ uint32_t count, bool replace, uint32_t *outFailedCount)
+{
+ runtimeLock.assertLocked();
+
+ assert(names);
+ assert(imps);
+ assert(types);
+ assert(cls->isRealized());
+
+ // Allocate a single method list sized for the worst case (all `count`
+ // methods are new). calloc zeroes the storage; count starts at 0 and
+ // grows only for methods that don't already exist.
+ method_list_t *newlist;
+ size_t newlistSize = method_list_t::byteSize(sizeof(method_t), count);
+ newlist = (method_list_t *)calloc(newlistSize, 1);
+ newlist->entsizeAndFlags =
+ (uint32_t)sizeof(method_t) | fixed_up_method_list;
+ newlist->count = 0;
+
+ method_t *newlistMethods = &newlist->first;
+
+ // Failure list is allocated lazily, only on the first collision.
+ SEL *failedNames = nil;
+ uint32_t failedCount = 0;
+
+ for (uint32_t i = 0; i < count; i++) {
+ method_t *m;
+ if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
+ // already exists
+ if (!replace) {
+ // report failure
+ if (failedNames == nil) {
+ // allocate an extra entry for a trailing NULL in case
+ // every method fails
+ failedNames = (SEL *)calloc(sizeof(*failedNames),
+ count + 1);
+ }
+ failedNames[failedCount] = m->name;
+ failedCount++;
+ } else {
+ _method_setImplementation(cls, m, imps[i]);
+ }
+ } else {
+ // New method: append to the pending list.
+ method_t *newmethod = &newlistMethods[newlist->count];
+ newmethod->name = names[i];
+ newmethod->types = strdupIfMutable(types[i]);
+ newmethod->imp = imps[i];
+ newlist->count++;
+ }
+ }
+
+ if (newlist->count > 0) {
+ // fixme resize newlist because it may have been over-allocated above.
+ // Note that realloc() alone doesn't work due to ptrauth.
+
+ method_t::SortBySELAddress sorter;
+ std::stable_sort(newlist->begin(), newlist->end(), sorter);
+
+ prepareMethodLists(cls, &newlist, 1, NO, NO);
+ cls->data()->methods.attachLists(&newlist, 1);
+ // Invalidate caches so the new IMPs are found.
+ flushCaches(cls);
+ } else {
+ // Attaching the method list to the class consumes it. If we don't
+ // do that, we have to free the memory ourselves.
+ free(newlist);
+ }
+
+ if (outFailedCount) *outFailedCount = failedCount;
+
+ return failedNames;
+}
+
BOOL
class_addMethod(Class cls, SEL name, IMP imp, const char *types)
{
if (!cls) return NO;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return ! addMethod(cls, name, imp, types ?: "", NO);
}
{
if (!cls) return nil;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return addMethod(cls, name, imp, types ?: "", YES);
}
+// class_addMethodsBulk
+// Adds `count` methods to cls without replacing existing ones.
+// Returns a malloc'd, NULL-terminated array of the selectors that could
+// not be added (caller frees), or nil on full success. When cls is nil,
+// every selector is reported as failed.
+// Locking: acquires runtimeLock.
+SEL *
+class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
+ const char **types, uint32_t count,
+ uint32_t *outFailedCount)
+{
+ if (!cls) {
+ if (outFailedCount) *outFailedCount = count;
+ // Copy of names doubles as "everything failed" result.
+ return (SEL *)memdup(names, count * sizeof(*names));
+ }
+
+ mutex_locker_t lock(runtimeLock);
+ return addMethods(cls, names, imps, types, count, NO, outFailedCount);
+}
+
+// class_replaceMethodsBulk
+// Adds or replaces `count` methods on cls in one pass (replace == YES,
+// so no failure list is produced). No-op when cls is nil.
+// Locking: acquires runtimeLock.
+void
+class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
+ const char **types, uint32_t count)
+{
+ if (!cls) return;
+
+ mutex_locker_t lock(runtimeLock);
+ addMethods(cls, names, imps, types, count, YES, nil);
+}
+
+
/***********************************************************************
* class_addIvar
* Adds an ivar to a class.
if (!type) type = "";
if (name && 0 == strcmp(name, "")) name = nil;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+ checkIsKnownClass(cls);
assert(cls->isRealized());
// No class variables
if (!cls) return NO;
if (class_conformsToProtocol(cls, protocol_gen)) return NO;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
assert(cls->isRealized());
}
else if (prop) {
// replace existing
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
try_free(prop->attributes);
prop->attributes = copyPropertyAttributeString(attrs, count);
return YES;
}
else {
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
assert(cls->isRealized());
Class result;
bool unrealized;
{
- rwlock_reader_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
result = getClass(name);
unrealized = result && !result->isRealized();
}
if (unrealized) {
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
realizeClass(result);
}
return result;
{
Class duplicate;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
+
+ checkIsKnownClass(original);
assert(original->isRealized());
assert(!original->isMetaClass());
// Don't methodize class - construction above is correct
addNamedClass(duplicate, duplicate->data()->ro->name);
-
+ addClassTableEntry(duplicate, /*addMeta=*/false);
+
if (PrintConnecting) {
_objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
name, original->nameForLogging(),
static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
class_ro_t *cls_ro_w, *meta_ro_w;
cls->cache.initializeToEmpty();
meta->cache.initializeToEmpty();
+
+ addClassTableEntry(cls);
}
**********************************************************************/
Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
// Fail if the class name is in use.
// Fail if the superclass isn't kosher.
{
Class cls, meta;
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
// Fail if the class name is in use.
// Fail if the superclass isn't kosher.
**********************************************************************/
void objc_registerClassPair(Class cls)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
- if ((cls->data()->flags & RW_CONSTRUCTED) ||
+ checkIsKnownClass(cls);
+
+ if ((cls->data()->flags & RW_CONSTRUCTED) ||
(cls->ISA()->data()->flags & RW_CONSTRUCTED))
{
_objc_inform("objc_registerClassPair: class '%s' was already "
**********************************************************************/
Class objc_readClassPair(Class bits, const struct objc_image_info *info)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
// No info bits are significant yet.
(void)info;
- // Fail if the class name is in use.
// Fail if the superclass isn't kosher.
- const char *name = bits->mangledName();
bool rootOK = bits->data()->flags & RO_ROOT;
- if (getClass(name) || !verifySuperclass(bits->superclass, rootOK)){
+ if (!verifySuperclass(bits->superclass, rootOK)){
return nil;
}
+ // Duplicate classes are allowed, just like they are for image loading.
+ // readClass will complain about the duplicate.
+
Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
if (cls != bits) {
// This function isn't allowed to remap anything.
**********************************************************************/
static void detach_class(Class cls, bool isMeta)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
// categories not yet attached to this class
removeAllUnattachedCategoriesForClass(cls);
if (!isMeta) {
removeNamedClass(cls, cls->mangledName());
}
+ NXHashRemove(allocatedClasses, cls);
}
**********************************************************************/
static void free_class(Class cls)
{
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
if (! cls->isRealized()) return;
void objc_disposeClassPair(Class cls)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
- if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
+ checkIsKnownClass(cls);
+
+ if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
!(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
{
// class not allocated with objc_allocateClassPair
}
-#if !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE)
+#if SUPPORT_ZONES
/***********************************************************************
* class_createInstanceFromZone
#if !SUPPORT_TAGGED_POINTERS
// These variables are always provided for debuggers.
+uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
static void
disableTaggedPointers() { }
+static void
+initializeTaggedPointerObfuscator(void) { }
+
#else
// The "slot" used in the class table and given to the debugger
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.
+uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
+ uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
+ >> _OBJC_TAG_INDEX_SHIFT)
+ & _OBJC_TAG_INDEX_MASK);
+ uintptr_t obfuscatedTag = tag ^ tagObfuscator;
// Array index in objc_tag_classes includes the tagged bit itself
#if SUPPORT_MSB_TAGGED_POINTERS
- return &objc_tag_classes[0x8 | tag];
+ return &objc_tag_classes[0x8 | obfuscatedTag];
#else
- return &objc_tag_classes[(tag << 1) | 1];
+ return &objc_tag_classes[(obfuscatedTag << 1) | 1];
#endif
}
}
if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
- return &objc_tag_ext_classes[tag - OBJC_TAG_First52BitPayload];
+ int index = tag - OBJC_TAG_First52BitPayload;
+ uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
+ >> _OBJC_TAG_EXT_INDEX_SHIFT)
+ & _OBJC_TAG_EXT_INDEX_MASK);
+ return &objc_tag_ext_classes[index ^ tagObfuscator];
}
return nil;
}
+/***********************************************************************
+* initializeTaggedPointerObfuscator
+* Initialize objc_debug_taggedpointer_obfuscator with randomness.
+*
+* The tagged pointer obfuscator is intended to make it more difficult
+* for an attacker to construct a particular object as a tagged pointer,
+* in the presence of a buffer overflow or other write control over some
+* memory. The obfuscator is XORed with the tagged pointers when setting
+* or retrieving payload values. They are filled with randomness on first
+* use.
+**********************************************************************/
+static void
+initializeTaggedPointerObfuscator(void)
+{
+ if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) ||
+ // Set the obfuscator to zero for apps linked against older SDKs,
+ // in case they're relying on the tagged pointer representation.
+ DisableTaggedPointerObfuscation) {
+ // XOR with zero is the identity, so old behavior is preserved.
+ objc_debug_taggedpointer_obfuscator = 0;
+ } else {
+ // Pull random data into the variable, then shift away all non-payload bits.
+ arc4random_buf(&objc_debug_taggedpointer_obfuscator,
+ sizeof(objc_debug_taggedpointer_obfuscator));
+ // Clearing the tag bits keeps the obfuscator from flipping the
+ // tagged/non-tagged discriminator itself.
+ objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;
+ }
+}
+
/***********************************************************************
* _objc_registerTaggedPointerClass
{
Class oldSuper;
- runtimeLock.assertWriting();
+ runtimeLock.assertLocked();
assert(cls->isRealized());
assert(newSuper->isRealized());
Class class_setSuperclass(Class cls, Class newSuper)
{
- rwlock_writer_t lock(runtimeLock);
+ mutex_locker_t lock(runtimeLock);
return setSuperclass(cls, newSuper);
}
#define CLS_LEAF 0x800000
// class instances may have associative references
#define CLS_INSTANCES_HAVE_ASSOCIATED_OBJECTS 0x1000000
-// class has instance-specific GC layout
-#define CLS_HAS_INSTANCE_SPECIFIC_LAYOUT 0x2000000
+// available for use; was CLS_HAS_INSTANCE_SPECIFIC_LAYOUT
+#define CLS_2000000 0x2000000
// class compiled with ARC
#define CLS_IS_ARC 0x4000000
// class is not ARC but has ARC-style weak ivar layout
if (!sels) return;
- sel_lock();
+ mutex_locker_t lock(selLock);
// Process each selector
for (index = 0; index < count; index += 1)
sels[index] = sel;
}
}
-
- sel_unlock();
}
if (!methods) return;
- sel_lock();
+ mutex_locker_t lock(selLock);
// Process each method
for (index = 0; index < methods->count; index += 1)
if (method->name != sel)
method->name = sel;
}
-
- sel_unlock();
}
}
-const char **
+// objc_copyImageNames (legacy runtime variant)
+// Copies names of loaded images with ObjC contents.
+// Returns a NULL-terminated, calloc'd array the caller must free, or
+// NULL when no images are loaded. *outCount (if provided) receives the
+// number of entries.
+// NOTE(review): no lock is taken here, unlike the __OBJC2__ variant --
+// confirm the legacy header list is safe to walk unlocked.
+const char **objc_copyImageNames(unsigned int *outCount)
+{
+ header_info *hi;
+ int count = 0;
+ int max = HeaderCount;
+#if TARGET_OS_WIN32
+ const TCHAR **names = (const TCHAR **)calloc(max+1, sizeof(TCHAR *));
+#else
+ const char **names = (const char **)calloc(max+1, sizeof(char *));
+#endif
+
+ // `count < max` bounds the walk to the buffer size.
+ for (hi = FirstHeader; hi != NULL && count < max; hi = hi->getNext()) {
+#if TARGET_OS_WIN32
+ if (hi->moduleName) {
+ names[count++] = hi->moduleName;
+ }
+#else
+ const char *fname = hi->fname();
+ if (fname) {
+ names[count++] = fname;
+ }
+#endif
+ }
+ names[count] = NULL;
+
+ if (count == 0) {
+ // Return NULL instead of empty list if there are no images
+ free((void *)names);
+ names = NULL;
+ }
+
+ if (outCount) *outCount = count;
+ return names;
+}
+
+
+static const char **
_objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
{
Module mods;
return list;
}
+
+/**********************************************************************
+* objc_copyClassNamesForImage (legacy runtime variant)
+* Copies class names from the named image; delegates the actual copy
+* to _objc_copyClassNamesForImage. Returns NULL (and sets *outCount
+* to 0) when image is NULL or not found.
+**********************************************************************/
+const char **
+objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
+{
+ header_info *hi;
+
+ if (!image) {
+ if (outCount) *outCount = 0;
+ return NULL;
+ }
+
+ // Find the image.
+ for (hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
+#if TARGET_OS_WIN32
+ // NOTE(review): assumes callers on Win32 actually pass a TCHAR
+ // string through the const char * parameter -- confirm.
+ if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
+#else
+ if (0 == strcmp(image, hi->fname())) break;
+#endif
+ }
+
+ if (!hi) {
+ if (outCount) *outCount = 0;
+ return NULL;
+ }
+
+ return _objc_copyClassNamesForImage(hi, outCount);
+}
+
+
+
+/**********************************************************************
+* objc_copyClassNamesForImageHeader (legacy runtime variant)
+* Copies class names from the image identified by its mach_header;
+* delegates the actual copy to _objc_copyClassNamesForImage. Returns
+* NULL (and sets *outCount to 0) when mh is NULL or not found.
+**********************************************************************/
+const char **
+objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
+{
+ header_info *hi;
+
+ if (!mh) {
+ if (outCount) *outCount = 0;
+ return NULL;
+ }
+
+ // Find the image by mach_header identity (pointer comparison).
+ for (hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
+ if (hi->mhdr() == (const headerType *)mh) break;
+ }
+
+ if (!hi) {
+ if (outCount) *outCount = 0;
+ return NULL;
+ }
+
+ return _objc_copyClassNamesForImage(hi, outCount);
+}
+
+
Class gdb_class_getClass(Class cls)
{
const char *className = cls->name;
}
+/***********************************************************************
+* objc_setMultithreaded.
+* Deprecated no-op kept for binary compatibility; the flag parameter
+* is intentionally ignored.
+**********************************************************************/
+void objc_setMultithreaded (BOOL flag)
+{
+ OBJC_WARN_DEPRECATED;
+
+ // Nothing here. Thread synchronization in the runtime is always active.
+}
+
+
/***********************************************************************
* Lock management
**********************************************************************/
-rwlock_t selLock;
+mutex_t selLock;
mutex_t classLock;
mutex_t methodListLock;
mutex_t cacheUpdateLock;
#include "objc-loadmethod.h"
#include "message.h"
-OBJC_EXPORT Class getOriginalClassForPosingClass(Class);
-
-
/***********************************************************************
* Exports.
**********************************************************************/
+/* Linker metadata symbols */
+
+// NSObject was in Foundation/CF on macOS < 10.8.
+// NOTE(review): these zero-valued char symbols appear to exist only so
+// the linker can resolve per-OS-version references -- they are never
+// read at runtime in this file; confirm against the build's export list.
+#if TARGET_OS_OSX
+#if __OBJC2__
+
+const char __objc_nsobject_class_10_5 = 0;
+const char __objc_nsobject_class_10_6 = 0;
+const char __objc_nsobject_class_10_7 = 0;
+
+const char __objc_nsobject_metaclass_10_5 = 0;
+const char __objc_nsobject_metaclass_10_6 = 0;
+const char __objc_nsobject_metaclass_10_7 = 0;
+
+const char __objc_nsobject_isa_10_5 = 0;
+const char __objc_nsobject_isa_10_6 = 0;
+const char __objc_nsobject_isa_10_7 = 0;
+
+#else
+
+const char __objc_nsobject_class_10_5 = 0;
+const char __objc_nsobject_class_10_6 = 0;
+const char __objc_nsobject_class_10_7 = 0;
+
+#endif
+#endif
+
+
// Settings from environment variables
#define OPTION(var, env, help) bool var = false;
#include "objc-env.h"
}
-
-/***********************************************************************
-* objc_setMultithreaded.
-**********************************************************************/
-void objc_setMultithreaded (BOOL flag)
-{
- OBJC_WARN_DEPRECATED;
-
- // Nothing here. Thread synchronization in the runtime is always active.
-}
-
-
/***********************************************************************
* _objc_fetch_pthread_data
* Fetch objc's pthread data for this thread.
// GrP fixme
extern "C" Class _objc_getOrigClass(const char *name);
#endif
-const char *class_getImageName(Class cls)
-{
-#if TARGET_OS_WIN32
- TCHAR *szFileName;
- DWORD charactersCopied;
- Class origCls;
- HMODULE classModule;
- bool res;
-#endif
- if (!cls) return NULL;
+static BOOL internal_class_getImageName(Class cls, const char **outName)
+{
#if !__OBJC2__
cls = _objc_getOrigClass(cls->demangledName());
#endif
-#if TARGET_OS_WIN32
- charactersCopied = 0;
- szFileName = malloc(MAX_PATH * sizeof(TCHAR));
-
- origCls = objc_getOrigClass(cls->demangledName());
- classModule = NULL;
- res = GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS, (LPCTSTR)origCls, &classModule);
- if (res && classModule) {
- charactersCopied = GetModuleFileName(classModule, szFileName, MAX_PATH * sizeof(TCHAR));
- }
- if (classModule) FreeLibrary(classModule);
- if (charactersCopied) {
- return (const char *)szFileName;
- } else {
- free(szFileName);
- }
- return NULL;
-#else
- return dyld_image_path_containing_address(cls);
-#endif
+ auto result = dyld_image_path_containing_address(cls);
+ *outName = result;
+ return (result != nil);
}
-const char **objc_copyImageNames(unsigned int *outCount)
-{
- header_info *hi;
- int count = 0;
- int max = HeaderCount;
-#if TARGET_OS_WIN32
- const TCHAR **names = (const TCHAR **)calloc(max+1, sizeof(TCHAR *));
-#else
- const char **names = (const char **)calloc(max+1, sizeof(char *));
-#endif
-
- for (hi = FirstHeader; hi != NULL && count < max; hi = hi->getNext()) {
-#if TARGET_OS_WIN32
- if (hi->moduleName) {
- names[count++] = hi->moduleName;
- }
-#else
- const char *fname = hi->fname();
- if (fname) {
- names[count++] = fname;
- }
-#endif
- }
- names[count] = NULL;
-
- if (count == 0) {
- // Return NULL instead of empty list if there are no images
- free((void *)names);
- names = NULL;
- }
+static ChainedHookFunction<objc_hook_getImageName>
+GetImageNameHook{internal_class_getImageName};
- if (outCount) *outCount = count;
- return names;
+void objc_setHook_getImageName(objc_hook_getImageName newValue,
+ objc_hook_getImageName *outOldValue)
+{
+ GetImageNameHook.set(newValue, outOldValue);
}
-
-/**********************************************************************
-*
-**********************************************************************/
-const char **
-objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
+const char *class_getImageName(Class cls)
{
- header_info *hi;
-
- if (!image) {
- if (outCount) *outCount = 0;
- return NULL;
- }
+ if (!cls) return nil;
- // Find the image.
- for (hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
-#if TARGET_OS_WIN32
- if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
-#else
- if (0 == strcmp(image, hi->fname())) break;
-#endif
- }
-
- if (!hi) {
- if (outCount) *outCount = 0;
- return NULL;
- }
-
- return _objc_copyClassNamesForImage(hi, outCount);
+ const char *name;
+ if (GetImageNameHook.get()(cls, &name)) return name;
+ else return nil;
}
-
+
/**********************************************************************
* Fast Enumeration Support
sel = _objc_search_builtins((const char *)name);
if (sel) return YES;
- rwlock_reader_t lock(selLock);
+ mutex_locker_t lock(selLock);
if (_objc_selectors) {
sel = __objc_sel_set_get(_objc_selectors, name);
}
return bool(sel);
}
-static SEL __sel_registerName(const char *name, int lock, int copy)
+static SEL __sel_registerName(const char *name, bool shouldLock, bool copy)
{
SEL result = 0;
- if (lock) selLock.assertUnlocked();
- else selLock.assertWriting();
+ if (shouldLock) selLock.assertUnlocked();
+ else selLock.assertLocked();
if (!name) return (SEL)0;
result = _objc_search_builtins(name);
if (result) return result;
-
- if (lock) selLock.read();
+
+ conditional_mutex_locker_t lock(selLock, shouldLock);
if (_objc_selectors) {
result = __objc_sel_set_get(_objc_selectors, (SEL)name);
}
- if (lock) selLock.unlockRead();
if (result) return result;
// No match. Insert.
- if (lock) selLock.write();
-
if (!_objc_selectors) {
_objc_selectors = __objc_sel_set_create(SelrefCount);
}
- if (lock) {
- // Rescan in case it was added while we dropped the lock
- result = __objc_sel_set_get(_objc_selectors, (SEL)name);
- }
if (!result) {
result = (SEL)(copy ? strdup(name) : name);
__objc_sel_set_add(_objc_selectors, result);
#endif
}
- if (lock) selLock.unlockWrite();
return result;
}
return __sel_registerName(name, 0, copy); // NO lock, maybe copy
}
-void sel_lock(void)
-{
- selLock.write();
-}
-
-void sel_unlock(void)
-{
- selLock.unlockWrite();
-}
-
// 2001/1/24
// the majority of uses of this function (which used to return NULL if not found)
#define s(x) SEL_##x = sel_registerNameNoLock(#x, NO)
#define t(x,y) SEL_##y = sel_registerNameNoLock(#x, NO)
- sel_lock();
+ mutex_locker_t lock(selLock);
s(load);
s(initialize);
extern SEL FwdSel;
FwdSel = sel_registerNameNoLock("forward::", NO);
- sel_unlock();
-
#undef s
#undef t
}
sset->_count = 0;
// heuristic to convert executable's selrefs count to table size
-#if TARGET_OS_IPHONE
+#if TARGET_OS_IPHONE && !TARGET_OS_IOSMAC
for (idx = 0; __objc_sel_set_capacities[idx] < selrefs; idx++);
if (idx > 0 && selrefs < 1536) idx--;
#else
.long 0 /* table.headeropt_rw_offset */
.space PAGE_MAX_SIZE-28
-/* space for selopt, smax/capacity=524288, blen/mask=262143+1 */
+/* space for selopt, smax/capacity=1048576, blen/mask=524287+1 */
.space 4*(8+256) /* header and scramble */
-.space 262144 /* mask tab */
-.space 524288 /* checkbytes */
-.space 524288*4 /* offsets */
+.space 524288 /* mask tab */
+.space 1048576 /* checkbytes */
+.space 1048576*4 /* offsets */
-/* space for clsopt, smax/capacity=65536, blen/mask=16383+1 */
+/* space for clsopt, smax/capacity=131072, blen/mask=32767+1 */
.space 4*(8+256) /* header and scramble */
-.space 16384 /* mask tab */
-.space 65536 /* checkbytes */
-.space 65536*12 /* offsets to name and class and header_info */
+.space 32768 /* mask tab */
+.space 131072 /* checkbytes */
+.space 131072*12 /* offsets to name and class and header_info */
.space 512*8 /* some duplicate classes */
/* space for some demangled protocol names */
.space 16384 /* checkbytes */
.space 16384*8 /* offsets */
-/* space for header_info (RO) structures */
-.space 16384
+/* space for 2048 header_info (RO) structures */
+.space 8 + (2048*16)
.section __DATA,__objc_opt_rw
.align 3
.private_extern __objc_opt_rw_data
__objc_opt_rw_data:
-/* space for header_info (RW) structures */
-.space 16384
+
+/* space for 2048 header_info (RW) structures */
+.space 8 + (2048*8)
/* space for 16384 protocols */
#if __LP64__
#define s(x) SEL_##x = sel_registerNameNoLock(#x, NO)
#define t(x,y) SEL_##y = sel_registerNameNoLock(#x, NO)
- sel_lock();
+ mutex_locker_t lock(selLock);
s(load);
s(initialize);
s(retainWeakReference);
s(allowsWeakReference);
- sel_unlock();
-
#undef s
#undef t
}
static SEL sel_alloc(const char *name, bool copy)
{
- selLock.assertWriting();
+ selLock.assertLocked();
return (SEL)(copy ? strdupIfMutable(name) : name);
}
if (sel == search_builtins(name)) return YES;
- rwlock_reader_t lock(selLock);
+ mutex_locker_t lock(selLock);
if (namedSelectors) {
return (sel == (SEL)NXMapGet(namedSelectors, name));
}
}
-static SEL __sel_registerName(const char *name, int lock, int copy)
+static SEL __sel_registerName(const char *name, bool shouldLock, bool copy)
{
SEL result = 0;
- if (lock) selLock.assertUnlocked();
- else selLock.assertWriting();
+ if (shouldLock) selLock.assertUnlocked();
+ else selLock.assertLocked();
if (!name) return (SEL)0;
result = search_builtins(name);
if (result) return result;
- if (lock) selLock.read();
+ conditional_mutex_locker_t lock(selLock, shouldLock);
if (namedSelectors) {
result = (SEL)NXMapGet(namedSelectors, name);
}
- if (lock) selLock.unlockRead();
if (result) return result;
// No match. Insert.
- if (lock) selLock.write();
-
if (!namedSelectors) {
namedSelectors = NXCreateMapTable(NXStrValueMapPrototype,
(unsigned)SelrefCount);
}
- if (lock) {
- // Rescan in case it was added while we dropped the lock
- result = (SEL)NXMapGet(namedSelectors, name);
- }
if (!result) {
result = sel_alloc(name, copy);
// fixme choose a better container (hash not map for starters)
NXMapInsert(namedSelectors, sel_getName(result), result);
}
- if (lock) selLock.unlockWrite();
return result;
}
return __sel_registerName(name, 0, copy); // NO lock, maybe copy
}
-void sel_lock(void)
-{
- selLock.write();
-}
-
-void sel_unlock(void)
-{
- selLock.unlockWrite();
-}
-
// 2001/1/24
// the majority of uses of this function (which used to return NULL if not found)
//
-typedef struct SyncData {
+typedef struct alignas(CacheLineSize) SyncData {
struct SyncData* nextData;
DisguisedPtr<objc_object> object;
int32_t threadCount; // number of THREADS using this block
SyncData *data;
spinlock_t lock;
- SyncList() : data(nil), lock(fork_unsafe_lock) { }
+ constexpr SyncList() : data(nil), lock(fork_unsafe_lock) { }
};
// Use multiple parallel lists to decrease contention among unrelated objects.
}
}
- // malloc a new SyncData and add to list.
- // XXX calling malloc with a global lock held is bad practice,
- // might be worth releasing the lock, mallocing, and searching again.
- // But since we never free these guys we won't be stuck in malloc very often.
- result = (SyncData*)calloc(sizeof(SyncData), 1);
+ // Allocate a new SyncData and add to list.
+ // XXX allocating memory with a global lock held is bad practice,
+ // might be worth releasing the lock, allocating, and searching again.
+ // But since we never free these guys we won't be stuck in allocation very often.
+ posix_memalign((void **)&result, alignof(SyncData), sizeof(SyncData));
result->object = (objc_object *)object;
result->threadCount = 1;
new (&result->mutex) recursive_mutex_t(fork_unsafe_lock);
# endif
#else
// __OBJC_BOOL_IS_BOOL not set.
-# if TARGET_OS_OSX || (TARGET_OS_IOS && !__LP64__ && !__ARM_ARCH_7K)
+# if TARGET_OS_OSX || TARGET_OS_IOSMAC || (TARGET_OS_IOS && !__LP64__ && !__ARM_ARCH_7K)
# define OBJC_BOOL_IS_BOOL 0
# else
# define OBJC_BOOL_IS_BOOL 1
// Obsolete ARC conversions.
OBJC_EXPORT id _Nullable objc_retainedObject(objc_objectptr_t _Nullable obj)
- OBJC_UNAVAILABLE("use CFBridgingRelease() or a (__bridge_transfer id) cast instead");
+#if !OBJC_DECLARE_SYMBOLS
+ OBJC_UNAVAILABLE("use CFBridgingRelease() or a (__bridge_transfer id) cast instead")
+#endif
+ ;
OBJC_EXPORT id _Nullable objc_unretainedObject(objc_objectptr_t _Nullable obj)
- OBJC_UNAVAILABLE("use a (__bridge id) cast instead");
+#if !OBJC_DECLARE_SYMBOLS
+ OBJC_UNAVAILABLE("use a (__bridge id) cast instead")
+#endif
+ ;
OBJC_EXPORT objc_objectptr_t _Nullable objc_unretainedPointer(id _Nullable obj)
- OBJC_UNAVAILABLE("use a __bridge cast instead");
+#if !OBJC_DECLARE_SYMBOLS
+ OBJC_UNAVAILABLE("use a __bridge cast instead")
+#endif
+ ;
#if !__OBJC2__
/**
* Returns the name of a protocol.
*
- * @param p A protocol.
+ * @param proto A protocol.
*
* @return The name of the protocol \e p as a C string.
*/
/**
* Returns a method description structure for a specified method of a given protocol.
*
- * @param p A protocol.
+ * @param proto A protocol.
* @param aSel A selector.
* @param isRequiredMethod A Boolean value that indicates whether aSel is a required method.
* @param isInstanceMethod A Boolean value that indicates whether aSel is an instance method.
/**
* Returns an array of method descriptions of methods meeting a given specification for a given protocol.
*
- * @param p A protocol.
+ * @param proto A protocol.
* @param isRequiredMethod A Boolean value that indicates whether returned methods should
* be required methods (pass YES to specify required methods).
* @param isInstanceMethod A Boolean value that indicates whether returned methods should
OBJC_AVAILABLE(10.6, 3.1, 9.0, 1.0, 2.0);
+/* Hooks for Swift */
+
+/**
+ * Function type for a hook that intercepts class_getImageName().
+ *
+ * @param cls The class whose image name is being looked up.
+ * @param outImageName On return, the result of the image name lookup.
+ * @return YES if an image name for this class was found, NO otherwise.
+ *
+ * @see class_getImageName
+ * @see objc_setHook_getImageName
+ */
+typedef BOOL (*objc_hook_getImageName)(Class _Nonnull cls, const char * _Nullable * _Nonnull outImageName);
+
+/**
+ * Install a hook for class_getImageName().
+ *
+ * @param newValue The hook function to install.
+ * @param outOldValue The address of a function pointer variable. On return,
+ * the old hook function is stored in the variable.
+ *
+ * @note The store to *outOldValue is thread-safe: the variable will be
+ * updated before class_getImageName() calls your new hook to read it,
+ * even if your new hook is called from another thread before this
+ * setter completes.
+ * @note The first hook in the chain is the native implementation of
+ * class_getImageName(). Your hook should call the previous hook for
+ * classes that you do not recognize.
+ *
+ * @see class_getImageName
+ * @see objc_hook_getImageName
+ */
+OBJC_EXPORT void objc_setHook_getImageName(objc_hook_getImageName _Nonnull newValue,
+ objc_hook_getImageName _Nullable * _Nonnull outOldValue)
+ OBJC_AVAILABLE(10.14, 12.0, 12.0, 5.0, 3.0);
+
+
#define _C_ID '@'
#define _C_CLASS '#'
#define _C_SEL ':'
method_getArgumentInfo(struct objc_method * _Nonnull m, int arg,
const char * _Nullable * _Nonnull type,
int * _Nonnull offset)
+ UNAVAILABLE_ATTRIBUTE // This function was accidentally deleted in 10.9.
OBJC2_UNAVAILABLE;
OBJC_EXPORT Class _Nullable