X-Git-Url: https://git.saurik.com/apple/ld64.git/blobdiff_plain/ebf6f43431fe84b7b17822014a6d1f0169516e93..7f09b9353af9897bf18933788d6a59c152c29edd:/src/ld/LinkEditClassic.hpp diff --git a/src/ld/LinkEditClassic.hpp b/src/ld/LinkEditClassic.hpp index 172bb4c..ce7c820 100644 --- a/src/ld/LinkEditClassic.hpp +++ b/src/ld/LinkEditClassic.hpp @@ -32,6 +32,7 @@ #include #include +#include #include "Options.h" #include "ld.hpp" @@ -49,8 +50,6 @@ public: // overrides of ld::Atom virtual ld::File* file() const { return NULL; } - virtual bool translationUnitSource(const char** dir, const char** nm) const - { return false; } virtual uint64_t objectAddress() const { return 0; } virtual void encode() = 0; @@ -92,13 +91,8 @@ public: uint32_t currentOffset(); private: - class CStringEquals - { - public: - bool operator()(const char* left, const char* right) const { return (strcmp(left, right) == 0); } - }; enum { kBufferSize = 0x01000000 }; - typedef __gnu_cxx::hash_map, CStringEquals> StringToOffset; + typedef std::unordered_map StringToOffset; const uint32_t _pointerSize; std::vector _fullBuffers; @@ -230,8 +224,7 @@ private: uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool); uint64_t valueForStab(const ld::relocatable::File::Stab& stab); uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab); - void addDataInCodeLabels(const ld::Atom* atom, uint32_t& symbolIndex); - + bool isAltEntry(const ld::Atom* atom); mutable std::vector > _globals; mutable std::vector > _locals; @@ -254,6 +247,29 @@ template int SymbolTableAtom::_s_anonNameIndex = 1; +template +bool SymbolTableAtom::isAltEntry(const ld::Atom* atom) +{ + // alt entries have a group subordinate reference to the previous atom + for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) { + if ( fit->kind == ld::Fixup::kindNoneGroupSubordinate ) { + if ( fit->binding == Fixup::bindingDirectlyBound ) { + const Atom* prevAtom = fit->u.target; + assert(prevAtom != NULL); + for (ld::Fixup::iterator fit2 = prevAtom->fixupsBegin(); fit2 != prevAtom->fixupsEnd(); ++fit2) { + if ( fit2->kind == ld::Fixup::kindNoneFollowOn ) { + if ( fit2->binding == Fixup::bindingDirectlyBound ) { + if ( fit2->u.target == atom ) + return true; + } + } + } + } + } + } + return false; +} + template bool SymbolTableAtom::addLocal(const ld::Atom* atom, StringPoolAtom* pool) { @@ -319,6 +335,8 @@ bool SymbolTableAtom::addLocal(const ld::Atom* atom, StringPoolAtom* pool) desc |= N_WEAK_DEF; if ( atom->isThumb() ) desc |= N_ARM_THUMB_DEF; + if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) ) + desc |= N_ALT_ENTRY; entry.set_n_desc(desc); // set n_value ( address this symbol will be at if this executable is loaded at it preferred address ) @@ -394,6 +412,8 @@ void SymbolTableAtom::addGlobal(const ld::Atom* atom, StringPoolAtom* pool) desc |= N_SYMBOL_RESOLVER; if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) ) desc |= N_NO_DEAD_STRIP; + if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) ) + desc |= N_ALT_ENTRY; if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) { desc |= N_WEAK_DEF; // support auto hidden weak symbols: .weak_def_can_be_hidden @@ -458,7 +478,13 @@ void SymbolTableAtom::addImport(const ld::Atom* atom, StringPoolAtom* pool) // set n_type if ( 
this->_options.outputKind() == Options::kObjectFile ) { - if ( (atom->scope() == ld::Atom::scopeLinkageUnit) + if ( atom->section().type() == ld::Section::typeTempAlias ) { + if ( atom->scope() == ld::Atom::scopeLinkageUnit ) + entry.set_n_type(N_INDR | N_EXT | N_PEXT); + else + entry.set_n_type(N_INDR | N_EXT); + } + else if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (atom->definition() == ld::Atom::definitionTentative) ) entry.set_n_type(N_UNDF | N_EXT | N_PEXT); else @@ -507,8 +533,24 @@ void SymbolTableAtom::addImport(const ld::Atom* atom, StringPoolAtom* pool) // set n_value, zero for import proxy and size for tentative definition if ( atom->definition() == ld::Atom::definitionTentative ) entry.set_n_value(atom->size()); - else + else if ( atom->section().type() != ld::Section::typeTempAlias ) entry.set_n_value(0); + else { + assert(atom->fixupsBegin() != atom->fixupsEnd()); + for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) { + assert(fit->kind == ld::Fixup::kindNoneFollowOn); + switch ( fit->binding ) { + case ld::Fixup::bindingByNameUnbound: + entry.set_n_value(pool->add(fit->u.name)); + break; + case ld::Fixup::bindingsIndirectlyBound: + entry.set_n_value(pool->add((_state.indirectBindingTable[fit->u.bindingIndex])->name())); + break; + default: + assert(0 && "internal error: unexpected alias binding"); + } + } + } // add to array _imports.push_back(entry); @@ -624,93 +666,21 @@ bool SymbolTableAtom::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, } -template -void SymbolTableAtom::addDataInCodeLabels(const ld::Atom* atom, uint32_t& symbolIndex) -{ - char label[64]; - for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) { - label[0] = '\0'; - switch ( fit->kind ) { - case ld::Fixup::kindDataInCodeStartData: - sprintf(label, "L$start$data$%03u", symbolIndex); - break; - case ld::Fixup::kindDataInCodeStartJT8: - sprintf(label, "L$start$jt8$%03u", symbolIndex); - break; - case ld::Fixup::kindDataInCodeStartJT16: - sprintf(label, "L$start$jt16$%03u", symbolIndex); - break; - case ld::Fixup::kindDataInCodeStartJT32: - sprintf(label, "L$start$jt32$%03u", symbolIndex); - break; - case ld::Fixup::kindDataInCodeStartJTA32: - sprintf(label, "L$start$jta32$%03u", symbolIndex); - break; - case ld::Fixup::kindDataInCodeEnd: - sprintf(label, "L$start$code$%03u", symbolIndex); - break; - default: - break; - } - if ( label[0] != '\0' ) { - macho_nlist
<P>
entry; - entry.set_n_type(N_SECT); - entry.set_n_sect(atom->machoSection()); - entry.set_n_desc(0); - entry.set_n_value(atom->finalAddress() + fit->offsetInAtom); - entry.set_n_strx(this->_writer._stringPoolAtom->add(label)); - _locals.push_back(entry); - ++symbolIndex; - } - } -} - - template void SymbolTableAtom::encode() { - uint32_t symbolIndex = 0; + // Note: We lay out the symbol table so that the strings for the stabs (local) symbols are at the + // end of the string pool. The stabs strings are not used when calculated the UUID for the image. + // If the stabs strings were not last, the string offsets for all other symbols may very which would alter the UUID. - // make nlist entries for all local symbols - std::vector& localAtoms = this->_writer._localAtoms; - std::vector& globalAtoms = this->_writer._exportedAtoms; - _locals.reserve(localAtoms.size()+this->_state.stabs.size()); - this->_writer._localSymbolsStartIndex = 0; - // make nlist entries for all debug notes - _stabsIndexStart = symbolIndex; - _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset(); - for (std::vector::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) { - macho_nlist
<P>
entry; - entry.set_n_type(sit->type); - entry.set_n_sect(sectionIndexForStab(*sit)); - entry.set_n_desc(sit->desc); - entry.set_n_value(valueForStab(*sit)); - entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom)); - _locals.push_back(entry); - ++symbolIndex; - } - _stabsIndexEnd = symbolIndex; - _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset(); - for (std::vector::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) { - const ld::Atom* atom = *it; - if ( this->addLocal(atom, this->_writer._stringPoolAtom) ) - this->_writer._atomToSymbolIndex[atom] = symbolIndex++; - } - // recreate L$start$ labels in -r mode - if ( (_options.outputKind() == Options::kObjectFile) && this->_writer.hasDataInCode ) { - for (std::vector::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) { - this->addDataInCodeLabels(*it, symbolIndex); - } - for (std::vector::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) { - this->addDataInCodeLabels(*it, symbolIndex); - } - } - this->_writer._localSymbolsCount = symbolIndex; - + // reserve space for local symbols + uint32_t localsCount = _state.stabs.size() + this->_writer._localAtoms.size(); // make nlist entries for all global symbols + std::vector& globalAtoms = this->_writer._exportedAtoms; _globals.reserve(globalAtoms.size()); - this->_writer._globalSymbolsStartIndex = symbolIndex; + uint32_t symbolIndex = localsCount; + this->_writer._globalSymbolsStartIndex = localsCount; for (std::vector::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) { const ld::Atom* atom = *it; this->addGlobal(atom, this->_writer._stringPoolAtom); @@ -727,6 +697,31 @@ void SymbolTableAtom::encode() this->_writer._atomToSymbolIndex[*it] = symbolIndex++; } this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex; + + // go back to start and make nlist entries for all local symbols + std::vector& localAtoms = this->_writer._localAtoms; + _locals.reserve(localsCount); + symbolIndex = 0; + this->_writer._localSymbolsStartIndex = 0; + _stabsIndexStart = 0; + _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset(); + for (const ld::relocatable::File::Stab& stab : _state.stabs) { + macho_nlist
<P>
entry; + entry.set_n_type(stab.type); + entry.set_n_sect(sectionIndexForStab(stab)); + entry.set_n_desc(stab.desc); + entry.set_n_value(valueForStab(stab)); + entry.set_n_strx(stringOffsetForStab(stab, this->_writer._stringPoolAtom)); + _locals.push_back(entry); + ++symbolIndex; + } + _stabsIndexEnd = symbolIndex; + _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset(); + for (const ld::Atom* atom : localAtoms) { + if ( this->addLocal(atom, this->_writer._stringPoolAtom) ) + this->_writer._atomToSymbolIndex[atom] = symbolIndex++; + } + this->_writer._localSymbolsCount = symbolIndex; } template @@ -833,13 +828,13 @@ uint64_t LocalRelocationsAtom::relocBaseAddress(ld::Internal& state) // for kext bundles the reloc base address starts at __TEXT segment return _options.baseAddress(); } - // for all other kinds, the x86_64 reloc base address starts at __DATA segment + // for all other kinds, the x86_64 reloc base address starts at first writable segment (usually __DATA) for (std::vector::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) { ld::Internal::FinalSection* sect = *sit; - if ( strcmp(sect->segmentName(), "__DATA") == 0 ) + if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE ) return sect->address; } - throw "__DATA segment not found"; + throw "writable (__DATA) segment not found"; } template @@ -950,10 +945,10 @@ uint64_t ExternalRelocationsAtom::relocBaseAddress(ld::Internal& state) // for x86_64 the reloc base address starts at __DATA segment for (std::vector::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) { ld::Internal::FinalSection* sect = *sit; - if ( strcmp(sect->segmentName(), "__DATA") == 0 ) + if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE ) return sect->address; } - throw "__DATA segment not found"; + throw "writable (__DATA) segment not found"; } template @@ -985,6 +980,9 @@ uint64_t ExternalRelocationsAtom::size() const return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info
<P>
); } +#if SUPPORT_ARCH_arm64 +template <> uint32_t ExternalRelocationsAtom::pointerReloc() { return ARM64_RELOC_UNSIGNED; } +#endif #if SUPPORT_ARCH_arm_any template <> uint32_t ExternalRelocationsAtom::pointerReloc() { return ARM_RELOC_VANILLA; } #endif @@ -994,6 +992,10 @@ template <> uint32_t ExternalRelocationsAtom::pointerReloc() { return X8 template <> uint32_t ExternalRelocationsAtom::callReloc() { return X86_64_RELOC_BRANCH; } template <> uint32_t ExternalRelocationsAtom::callReloc() { return GENERIC_RELOC_VANILLA; } +#if SUPPORT_ARCH_arm64 +template <> uint32_t ExternalRelocationsAtom::callReloc() { return ARM64_RELOC_BRANCH26; } +#endif + template uint32_t ExternalRelocationsAtom::callReloc() { @@ -1286,6 +1288,15 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSecti relocs.push_back(reloc1); } break; + case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(true); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(X86_64_RELOC_TLV); + relocs.push_back(reloc1); + break; default: assert(0 && "need to handle -r reloc"); @@ -1307,7 +1318,7 @@ uint32_t SectionRelocationsAtom::sectSymNum(bool external, const ld::Atom* ta } template <> -void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* sect, +void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* sect, const Entry& entry, std::vector >& relocs) { macho_relocation_info
<P>
reloc1; @@ -1323,8 +1334,7 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* fromExternal = entry.fromTargetUsesExternalReloc; fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget); } - - + switch ( entry.kind ) { case ld::Fixup::kindStoreX86PCRel32: case ld::Fixup::kindStoreX86BranchPCRel32: @@ -1451,6 +1461,17 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* relocs.push_back(reloc1); } break; + case ld::Fixup::kindStoreX86PCRel32TLVLoad: + case ld::Fixup::kindStoreX86Abs32TLVLoad: + case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(GENERIC_RLEOC_TLV); + relocs.push_back(reloc1); + break; default: assert(0 && "need to handle -r reloc"); @@ -1458,6 +1479,7 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* } + #if SUPPORT_ARCH_arm_any template <> void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* sect, @@ -1596,9 +1618,17 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* { int len = 0; uint32_t otherHalf = 0; - uint32_t value = entry.toTarget->finalAddress()+entry.toAddend; - if ( entry.fromTarget != NULL ) - value -= (entry.fromTarget->finalAddress()+entry.fromAddend); + uint32_t value; + if ( entry.fromTarget != NULL ) { + // this is a sect-diff + value = (entry.toTarget->finalAddress()+entry.toAddend) - (entry.fromTarget->finalAddress()+entry.fromAddend); + } + else { + // this is an absolute address + value = entry.toAddend; + if ( !external ) + value += entry.toTarget->finalAddress(); + } switch ( entry.kind ) { case ld::Fixup::kindStoreARMLow16: len = 0; @@ -1685,6 +1715,205 @@ void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* } #endif +#if SUPPORT_ARCH_arm64 +template <> +void SectionRelocationsAtom::encodeSectionReloc(ld::Internal::FinalSection* sect, + const Entry& entry, std::vector >& relocs) +{ + macho_relocation_info
<P>
reloc1;
+	macho_relocation_info
<P>
reloc2; + uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address; + bool external = entry.toTargetUsesExternalReloc; + uint32_t symbolNum = sectSymNum(external, entry.toTarget); + bool fromExternal = false; + uint32_t fromSymbolNum = 0; + if ( entry.fromTarget != NULL ) { + fromExternal = entry.fromTargetUsesExternalReloc; + fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget); + } + + + switch ( entry.kind ) { + case ld::Fixup::kindStoreARM64Branch26: + if ( entry.toAddend != 0 ) { + assert(entry.toAddend < 0x400000); + reloc2.set_r_address(address); + reloc2.set_r_symbolnum(entry.toAddend); + reloc2.set_r_pcrel(false); + reloc2.set_r_length(2); + reloc2.set_r_extern(false); + reloc2.set_r_type(ARM64_RELOC_ADDEND); + relocs.push_back(reloc2); + } + // fall into next case + case ld::Fixup::kindStoreTargetAddressARM64Branch26: + case ld::Fixup::kindStoreARM64DtraceCallSiteNop: + case ld::Fixup::kindStoreARM64DtraceIsEnableSiteClear: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(true); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_BRANCH26); + relocs.push_back(reloc1); + break; + + case ld::Fixup::kindStoreARM64Page21: + if ( entry.toAddend != 0 ) { + assert(entry.toAddend < 0x400000); + reloc2.set_r_address(address); + reloc2.set_r_symbolnum(entry.toAddend); + reloc2.set_r_pcrel(false); + reloc2.set_r_length(2); + reloc2.set_r_extern(false); + reloc2.set_r_type(ARM64_RELOC_ADDEND); + relocs.push_back(reloc2); + } + // fall into next case + case ld::Fixup::kindStoreTargetAddressARM64Page21: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(true); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_PAGE21); + relocs.push_back(reloc1); + break; + + case ld::Fixup::kindStoreARM64PageOff12: + if ( entry.toAddend != 0 ) { + assert(entry.toAddend < 0x400000); + reloc2.set_r_address(address); + reloc2.set_r_symbolnum(entry.toAddend); + reloc2.set_r_pcrel(false); + reloc2.set_r_length(2); + reloc2.set_r_extern(false); + reloc2.set_r_type(ARM64_RELOC_ADDEND); + relocs.push_back(reloc2); + } + // fall into next case + case ld::Fixup::kindStoreTargetAddressARM64PageOff12: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_PAGEOFF12); + relocs.push_back(reloc1); + break; + + case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21: + case ld::Fixup::kindStoreARM64GOTLoadPage21: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(true); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGE21); + relocs.push_back(reloc1); + break; + + case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12: + case ld::Fixup::kindStoreARM64GOTLoadPageOff12: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGEOFF12); + relocs.push_back(reloc1); + break; + + + case ld::Fixup::kindStoreLittleEndian64: + case ld::Fixup::kindStoreTargetAddressLittleEndian64: + if ( entry.fromTarget != NULL ) { + // this is a pointer-diff + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(3); + 
reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_UNSIGNED); + reloc2.set_r_address(address); + reloc2.set_r_symbolnum(fromSymbolNum); + reloc2.set_r_pcrel(false); + reloc2.set_r_length(3); + reloc2.set_r_extern(fromExternal); + reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR); + relocs.push_back(reloc2); + relocs.push_back(reloc1); + } + else { + // regular pointer + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(3); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_UNSIGNED); + relocs.push_back(reloc1); + } + break; + + case ld::Fixup::kindStoreLittleEndian32: + case ld::Fixup::kindStoreTargetAddressLittleEndian32: + if ( entry.fromTarget != NULL ) { + // this is a pointer-diff + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_UNSIGNED); + reloc2.set_r_address(address); + reloc2.set_r_symbolnum(fromSymbolNum); + reloc2.set_r_pcrel(false); + reloc2.set_r_length(2); + reloc2.set_r_extern(fromExternal); + reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR); + relocs.push_back(reloc2); + relocs.push_back(reloc1); + } + else { + // regular pointer + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_UNSIGNED); + relocs.push_back(reloc1); + } + break; + + case ld::Fixup::kindStoreARM64PointerToGOT: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(false); + reloc1.set_r_length(3); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT); + relocs.push_back(reloc1); + break; + + case ld::Fixup::kindStoreARM64PCRelToGOT: + reloc1.set_r_address(address); + reloc1.set_r_symbolnum(symbolNum); + reloc1.set_r_pcrel(true); + reloc1.set_r_length(2); + reloc1.set_r_extern(external); + reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT); + relocs.push_back(reloc1); + break; + + default: + assert(0 && "need to handle arm64 -r reloc"); + + } + +} +#endif // SUPPORT_ARCH_arm64 template @@ -1777,7 +2006,6 @@ private: uint32_t symIndexOfLazyPointerAtom(const ld::Atom*); uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*); uint32_t symbolIndex(const ld::Atom*); - bool kextBundlesDontHaveIndirectSymbolTable(); std::vector _entries; @@ -1809,9 +2037,11 @@ uint32_t IndirectSymbolTableAtom::symIndexOfStubAtom(const ld::Atom* stubAtom { for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) { if ( fit->binding == ld::Fixup::bindingDirectlyBound ) { - assert((fit->u.target->contentType() == ld::Atom::typeLazyPointer) - || (fit->u.target->contentType() == ld::Atom::typeLazyDylibPointer)); - return symIndexOfLazyPointerAtom(fit->u.target); + ld::Atom::ContentType type = fit->u.target->contentType(); + if (( type == ld::Atom::typeLazyPointer) || (type == ld::Atom::typeLazyDylibPointer) ) + return symIndexOfLazyPointerAtom(fit->u.target); + if ( type == ld::Atom::typeNonLazyPointer ) + return symIndexOfNonLazyPointerAtom(fit->u.target); } } throw "internal error: stub missing fixup to lazy pointer"; @@ -1927,12 +2157,6 @@ void IndirectSymbolTableAtom::encodeNonLazyPointerSection(ld::Internal::Final } } -template -bool IndirectSymbolTableAtom::kextBundlesDontHaveIndirectSymbolTable() -{ - return true; -} - template void IndirectSymbolTableAtom::encode() { @@ -1940,8 
+2164,8 @@ void IndirectSymbolTableAtom<A>::encode()
 {
 	if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
 		return;
-	// x86_64 kext bundles should not have an indirect symbol table
-	if ( (this->_options.outputKind() == Options::kKextBundle) && kextBundlesDontHaveIndirectSymbolTable() )
+	// x86_64 kext bundles should not have an indirect symbol table unless using stubs
+	if ( (this->_options.outputKind() == Options::kKextBundle) && !this->_options.kextsUseStubs() )
 		return;
 	// slidable static executables (-static -pie) should not have an indirect symbol table