#include <map>
#include "Options.h"
+#include "MachOFileAbstraction.hpp"
#include "ld.hpp"
#include "make_stubs.h"
#include "stub_x86_classic.hpp"
#include "stub_arm.hpp"
#include "stub_arm_classic.hpp"
-#include "stub_ppc_classic.hpp"
-
-
+#if SUPPORT_ARCH_arm64
+#include "stub_arm64.hpp"
+#endif
Pass::Pass(const Options& opts)
: compressedHelperHelper(NULL),
if ( fixup->binding == ld::Fixup::bindingsIndirectlyBound ) {
const ld::Atom* target = state.indirectBindingTable[fixup->u.bindingIndex];
switch ( fixup->kind ) {
- case ld::Fixup::kindStoreTargetAddressPPCBranch24:
case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
case ld::Fixup::kindStoreTargetAddressARMBranch24:
case ld::Fixup::kindStoreTargetAddressThumbBranch22:
+#if SUPPORT_ARCH_arm64
+ case ld::Fixup::kindStoreTargetAddressARM64Branch26:
+#endif
+ assert(target != NULL);
// create stub if target is in a dylib
if ( target->definition() == ld::Atom::definitionProxy )
return target;
if ( (target->definition() == ld::Atom::definitionRegular)
&& (target->combine() == ld::Atom::combineByName)
&& ((target->symbolTableInclusion() == ld::Atom::symbolTableIn)
- || (target->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)) )
+ || (target->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)) ) {
+ // don't make stubs for auto-hide symbols
+ if ( target->autoHide() && (!_options.hasExportMaskList() || !_options.shouldExport(target->name())) )
+ return NULL;
return target;
+ }
// create stub if target is interposable
if ( _options.interposable(target->name()) )
return target;
// Factory: build the architecture- and output-kind-specific stub atom for `target`.
// NOTE(review): this span is a diff hunk — '-' lines are removed by the patch, '+' lines added,
// unprefixed lines are shared context. The patch drops PowerPC stubs, guards each arch with
// SUPPORT_ARCH_* macros, and adds arm64 and kext-bundle stub flavors.
ld::Atom* Pass::makeStub(const ld::Atom& target, bool weakImport)
{
- //fprintf(stderr, "makeStub(target=%p %s in sect %s)\n", &target, target.name(), target.section().sectionName());
- bool stubToGlobalWeakDef = ( (target.scope() == ld::Atom::scopeGlobal)
- && (target.definition() == ld::Atom::definitionRegular)
- && (target.combine() == ld::Atom::combineByName) );
+ //fprintf(stderr, "makeStub(target=%p %s in sect %s, def=%d)\n", &target, target.name(), target.section().sectionName(), target.definition());
// (patch) stubToGlobalWeakDef widened: a by-name-coalesced proxy now also counts,
// not just a regular global definition.
+ bool stubToGlobalWeakDef = ( (target.combine() == ld::Atom::combineByName) &&
+ (((target.definition() == ld::Atom::definitionRegular) && (target.scope() == ld::Atom::scopeGlobal))
+ || (target.definition() == ld::Atom::definitionProxy)) );
bool forLazyDylib = false;
const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target.file());
if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
forLazyDylib = true;
bool stubToResolver = (target.contentType() == ld::Atom::typeResolver);
-
// (patch) __DATA_CONST handling only matters for the arm/arm64 stub flavors below.
+#if SUPPORT_ARCH_arm_any || SUPPORT_ARCH_arm64
+ bool usingDataConst = _options.useDataConstSegment();
+#endif
+
// (patch) fail early, with a clear diagnostic, if compressed-LINKEDIT lazy binding
// is in use but dyld_stub_binder could not be resolved.
+ if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
+ if ( _internal->compressedFastBinderProxy == NULL )
+ throwf("symbol dyld_stub_binder not found (normally in libSystem.dylib). Needed to perform lazy binding to function %s", target.name());
+ }
+
switch ( _architecture ) {
// (patch) PowerPC stub generation removed entirely.
- case CPU_TYPE_POWERPC:
- if ( _pic )
- return new ld::passes::stubs::ppc::classic::StubPICAtom(*this, target, forLazyDylib, false, weakImport);
- else
- return new ld::passes::stubs::ppc::classic::StubNoPICAtom(*this, target, forLazyDylib, false, weakImport);
- break;
- case CPU_TYPE_POWERPC64:
- return new ld::passes::stubs::ppc::classic::StubPICAtom(*this, target, forLazyDylib, true, weakImport);
- break;
+#if SUPPORT_ARCH_i386
case CPU_TYPE_I386:
if ( usingCompressedLINKEDIT() && !forLazyDylib )
return new ld::passes::stubs::x86::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
else
return new ld::passes::stubs::x86::classic::StubAtom(*this, target, forLazyDylib, weakImport);
break;
+#endif
+#if SUPPORT_ARCH_x86_64
case CPU_TYPE_X86_64:
// (patch) kext bundles built with stubs enabled get a dedicated kext stub flavor.
- if ( usingCompressedLINKEDIT() && !forLazyDylib )
+ if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
+ return new ld::passes::stubs::x86_64::KextStubAtom(*this, target);
+ else if ( usingCompressedLINKEDIT() && !forLazyDylib )
return new ld::passes::stubs::x86_64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
else
return new ld::passes::stubs::x86_64::classic::StubAtom(*this, target, forLazyDylib, weakImport);
break;
+#endif
+#if SUPPORT_ARCH_arm_any
case CPU_TYPE_ARM:
- if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
- if ( (_stubCount < 900) && !_mightBeInSharedRegion && !_largeText )
+ if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() ) {
+ // if text relocs are not allowed in kext bundles, then the linker must create a stub
+ return new ld::passes::stubs::arm::StubPICKextAtom(*this, target);
+ }
+ else if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
// (patch) close stubs additionally disabled for encryptable output (makeEncryptable()).
+ if ( (_stubCount < 900) && !_mightBeInSharedRegion && !_largeText && !_options.makeEncryptable() )
return new ld::passes::stubs::arm::StubCloseAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
else if ( _pic )
- return new ld::passes::stubs::arm::StubPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
+ return new ld::passes::stubs::arm::StubPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
else
return new ld::passes::stubs::arm::StubNoPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
}
return new ld::passes::stubs::arm::classic::StubNoPICAtom(*this, target, forLazyDylib, weakImport);
// NOTE(review): the next '}' appears to pair with an 'else {' context line elided from
// this hunk — confirm brace balance against the full file before applying.
}
break;
+#endif
+#if SUPPORT_ARCH_arm64
+ case CPU_TYPE_ARM64:
+ if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
+ return new ld::passes::stubs::arm64::KextStubAtom(*this, target);
+ else
+ return new ld::passes::stubs::arm64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
+ break;
+#endif
}
// reached only for an _architecture not handled (or compiled out) above
throw "unsupported arch for stub";
}
case Options::kObjectFile:
// these kinds don't use stubs and can have resolver functions
return;
- case Options::kKextBundle:
case Options::kStaticExecutable:
case Options::kPreload:
case Options::kDyld:
case Options::kDynamicLibrary:
// uses stubs and can have resolver functions
break;
+ case Options::kKextBundle:
+ verifyNoResolverFunctions(state);
+ // if kext don't use stubs, don't do this pass
+ if ( !_options.kextsUseStubs() )
+ return;
+ break;
case Options::kDynamicExecutable:
case Options::kDynamicBundle:
// these kinds do use stubs and cannot have resolver functions
if ( pos == weakImportMap.end() ) {
// target not in weakImportMap, so add
weakImportMap[stubableTargetOfFixup] = fit->weakImport;
- // <rdar://problem/5529626> If only weak_import symbols are used, linker should use LD_LOAD_WEAK_DYLIB
- const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(stubableTargetOfFixup->file());
- if ( dylib != NULL ) {
- if ( fit->weakImport )
- (const_cast<ld::dylib::File*>(dylib))->setUsingWeakImportedSymbols();
- else
- (const_cast<ld::dylib::File*>(dylib))->setUsingNonWeakImportedSymbols();
- }
}
else {
// target in weakImportMap, check for weakness mismatch
if ( _options.outputKind() != Options::kDynamicLibrary )
throwf("resolver functions (%s) can only be used in dylibs", atom->name());
if ( !_options.makeCompressedDyldInfo() ) {
- if ( _options.architecture() == CPU_TYPE_POWERPC )
- throwf("resolver functions (%s) not supported for PowerPC", atom->name());
- else if ( _options.architecture() == CPU_TYPE_ARM )
+ if ( _options.architecture() == CPU_TYPE_ARM )
throwf("resolver functions (%s) can only be used when targeting iOS 4.2 or later", atom->name());
else
throwf("resolver functions (%s) can only be used when targeting Mac OS X 10.6 or later", atom->name());
}
}
}
+
+ const bool needStubForMain = _options.needsEntryPointLoadCommand()
+ && (state.entryPoint != NULL)
+ && (state.entryPoint->definition() == ld::Atom::definitionProxy);
+ if ( needStubForMain ) {
+ // _main not found in any .o files. Currently have proxy to dylib
+ // Add to map, so that a stub will be made
+ stubFor[state.entryPoint] = NULL;
+ }
// short circuit if no stubs needed
_internal = &state;
return;
// <rdar://problem/8553283> lazily check for helper
- if ( !_options.makeCompressedDyldInfo() && (state.classicBindingHelper == NULL) )
+ if ( !_options.makeCompressedDyldInfo() && (state.classicBindingHelper == NULL) && (_options.outputKind() != Options::kKextBundle) )
throw "symbol dyld_stub_binding_helper not found, normally in crt1.o/dylib1.o/bundle1.o";
- // disable close stubs when branch islands might be needed
- if ( (_architecture == CPU_TYPE_ARM) && (codeSize > 4*1024*1024) )
- _largeText = true;
+ // disable arm close stubs in some cases
+ if ( _architecture == CPU_TYPE_ARM ) {
+ if ( codeSize > 4*1024*1024 )
+ _largeText = true;
+ else {
+ for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
+ ld::Internal::FinalSection* sect = *sit;
+ if ( sect->type() == ld::Section::typeMachHeader )
+ continue;
+ if ( strcmp(sect->segmentName(), "__TEXT") == 0) {
+ for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
+ const ld::Atom* atom = *ait;
+ if ( atom->alignment().powerOf2 > 10 ) {
+ // overaligned section means might not be able to keep closestubs sect pushed to end of __TEXT
+ //warning("alignment 1<<%d in atom %s in section %s disables close stubs optimization",
+ // atom->alignment().powerOf2, atom->name(), sect->segmentName());
+ _largeText = true;
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
// make stub atoms
for (std::map<const ld::Atom*,ld::Atom*>::iterator it = stubFor.begin(); it != stubFor.end(); ++it) {
}
}
+ // switch entry point from proxy to stub
+ if ( needStubForMain ) {
+ const ld::Atom* mainStub = stubFor[state.entryPoint];
+ assert(mainStub != NULL);
+ state.entryPoint = mainStub;
+ }
+
// sort new atoms so links are consistent
for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
ld::Internal::FinalSection* sect = *sit;