X-Git-Url: https://git.saurik.com/apple/ld64.git/blobdiff_plain/f80fe69f3f29962e8aa43a99f8ed9201548f3d78..7f09b9353af9897bf18933788d6a59c152c29edd:/src/ld/LinkEditClassic.hpp

diff --git a/src/ld/LinkEditClassic.hpp b/src/ld/LinkEditClassic.hpp
index b9b1215..ce7c820 100644
--- a/src/ld/LinkEditClassic.hpp
+++ b/src/ld/LinkEditClassic.hpp
@@ -224,7 +224,7 @@ private:
     uint32_t                stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
     uint64_t                valueForStab(const ld::relocatable::File::Stab& stab);
     uint8_t                 sectionIndexForStab(const ld::relocatable::File::Stab& stab);
-
+    bool                    isAltEntry(const ld::Atom* atom);
 
     mutable std::vector<macho_nlist<P> >    _globals;
     mutable std::vector<macho_nlist<P> >    _locals;
@@ -247,6 +247,29 @@ template <typename A>
 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
 
 
+template <typename A>
+bool SymbolTableAtom<A>::isAltEntry(const ld::Atom* atom)
+{
+    // alt entries have a group subordinate reference to the previous atom
+    for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
+        if ( fit->kind == ld::Fixup::kindNoneGroupSubordinate ) {
+            if ( fit->binding == Fixup::bindingDirectlyBound ) {
+                const Atom* prevAtom = fit->u.target;
+                assert(prevAtom != NULL);
+                for (ld::Fixup::iterator fit2 = prevAtom->fixupsBegin(); fit2 != prevAtom->fixupsEnd(); ++fit2) {
+                    if ( fit2->kind == ld::Fixup::kindNoneFollowOn ) {
+                        if ( fit2->binding == Fixup::bindingDirectlyBound ) {
+                            if ( fit2->u.target == atom )
+                                return true;
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return false;
+}
+
 template <typename A>
 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
 {
@@ -312,6 +335,8 @@ bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
         desc |= N_WEAK_DEF;
     if ( atom->isThumb() )
         desc |= N_ARM_THUMB_DEF;
+    if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
+        desc |= N_ALT_ENTRY;
     entry.set_n_desc(desc);
 
     // set n_value ( address this symbol will be at if this executable is loaded at it preferred address )
@@ -387,6 +412,8 @@ void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
         desc |= N_SYMBOL_RESOLVER;
     if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
         desc |= N_NO_DEAD_STRIP;
+    if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
+        desc |= N_ALT_ENTRY;
     if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
         desc |= N_WEAK_DEF;
         // support auto hidden weak symbols: .weak_def_can_be_hidden
@@ -451,7 +478,13 @@ void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
 
     // set n_type
     if ( this->_options.outputKind() == Options::kObjectFile ) {
-        if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
+        if ( atom->section().type() == ld::Section::typeTempAlias ) {
+            if ( atom->scope() == ld::Atom::scopeLinkageUnit )
+                entry.set_n_type(N_INDR | N_EXT | N_PEXT);
+            else
+                entry.set_n_type(N_INDR | N_EXT);
+        }
+        else if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
             && (atom->definition() == ld::Atom::definitionTentative) )
             entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
         else
@@ -500,8 +533,24 @@ void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
     // set n_value, zero for import proxy and size for tentative definition
     if ( atom->definition() == ld::Atom::definitionTentative )
         entry.set_n_value(atom->size());
-    else
+    else if ( atom->section().type() != ld::Section::typeTempAlias )
         entry.set_n_value(0);
+    else {
+        assert(atom->fixupsBegin() != atom->fixupsEnd());
+        for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
+            assert(fit->kind == ld::Fixup::kindNoneFollowOn);
+            switch ( fit->binding ) {
+                case ld::Fixup::bindingByNameUnbound:
+                    entry.set_n_value(pool->add(fit->u.name));
+                    break;
+                case ld::Fixup::bindingsIndirectlyBound:
+                    entry.set_n_value(pool->add((_state.indirectBindingTable[fit->u.bindingIndex])->name()));
+                    break;
+                default:
+                    assert(0 && "internal error: unexpected alias binding");
+            }
+        }
+    }
 
     // add to array
     _imports.push_back(entry);
@@ -620,39 +669,18 @@ bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
 template <typename A>
 void SymbolTableAtom<A>::encode()
 {
-    uint32_t symbolIndex = 0;
+    // Note: We lay out the symbol table so that the strings for the stabs (local) symbols are at the
+    // end of the string pool.  The stabs strings are not used when calculating the UUID for the image.
+    // If the stabs strings were not last, the string offsets for all other symbols may vary, which would alter the UUID.
 
-    // make nlist entries for all local symbols
-    std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
-    std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
-    _locals.reserve(localAtoms.size()+this->_state.stabs.size());
-    this->_writer._localSymbolsStartIndex = 0;
-    // make nlist entries for all debug notes
-    _stabsIndexStart = symbolIndex;
-    _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
-    for (std::vector<ld::relocatable::File::Stab>::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) {
-        macho_nlist<P> entry;
-        entry.set_n_type(sit->type);
-        entry.set_n_sect(sectionIndexForStab(*sit));
-        entry.set_n_desc(sit->desc);
-        entry.set_n_value(valueForStab(*sit));
-        entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom));
-        _locals.push_back(entry);
-        ++symbolIndex;
-    }
-    _stabsIndexEnd = symbolIndex;
-    _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
-    for (std::vector<const ld::Atom*>::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) {
-        const ld::Atom* atom = *it;
-        if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
-            this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
-    }
-    this->_writer._localSymbolsCount = symbolIndex;
-
+    // reserve space for local symbols
+    uint32_t localsCount = _state.stabs.size() + this->_writer._localAtoms.size();
 
     // make nlist entries for all global symbols
+    std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
     _globals.reserve(globalAtoms.size());
-    this->_writer._globalSymbolsStartIndex = symbolIndex;
+    uint32_t symbolIndex = localsCount;
+    this->_writer._globalSymbolsStartIndex = localsCount;
     for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
         const ld::Atom* atom = *it;
         this->addGlobal(atom, this->_writer._stringPoolAtom);
@@ -669,6 +697,31 @@ void SymbolTableAtom<A>::encode()
             this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
     }
     this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
+
+    // go back to start and make nlist entries for all local symbols
+    std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
+    _locals.reserve(localsCount);
+    symbolIndex = 0;
+    this->_writer._localSymbolsStartIndex = 0;
+    _stabsIndexStart = 0;
+    _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
+    for (const ld::relocatable::File::Stab& stab : _state.stabs) {
+        macho_nlist<P> entry;
+        entry.set_n_type(stab.type);
+        entry.set_n_sect(sectionIndexForStab(stab));
+        entry.set_n_desc(stab.desc);
+        entry.set_n_value(valueForStab(stab));
+        entry.set_n_strx(stringOffsetForStab(stab, this->_writer._stringPoolAtom));
+        _locals.push_back(entry);
+        ++symbolIndex;
+    }
+    _stabsIndexEnd = symbolIndex;
+    _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
+    for (const ld::Atom* atom : localAtoms) {
+        if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
+            this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
+    }
+    this->_writer._localSymbolsCount = symbolIndex;
 }
 
 template <typename A>
@@ -775,13 +828,13 @@ uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
         // for kext bundles the reloc base address starts at __TEXT segment
         return _options.baseAddress();
     }
-    // for all other kinds, the x86_64 reloc base address starts at __DATA segment
+    // for all other kinds, the x86_64 reloc base address starts at the first writable segment (usually __DATA)
    for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
         ld::Internal::FinalSection* sect = *sit;
-        if ( strcmp(sect->segmentName(), "__DATA") == 0 )
+        if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
             return sect->address;
     }
-    throw "__DATA segment not found";
+    throw "writable (__DATA) segment not found";
 }
 
 template <typename A>
@@ -892,10 +945,10 @@ uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
     // for x86_64 the reloc base address starts at __DATA segment
     for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
         ld::Internal::FinalSection* sect = *sit;
-        if ( strcmp(sect->segmentName(), "__DATA") == 0 )
+        if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
             return sect->address;
     }
-    throw "__DATA segment not found";
+    throw "writable (__DATA) segment not found";
 }
 
 template <typename A>
@@ -1565,9 +1618,17 @@ void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
 {
     int len = 0;
     uint32_t otherHalf = 0;
-    uint32_t value = entry.toTarget->finalAddress()+entry.toAddend;
-    if ( entry.fromTarget != NULL )
-        value -= (entry.fromTarget->finalAddress()+entry.fromAddend);
+    uint32_t value;
+    if ( entry.fromTarget != NULL ) {
+        // this is a sect-diff
+        value = (entry.toTarget->finalAddress()+entry.toAddend) - (entry.fromTarget->finalAddress()+entry.fromAddend);
+    }
+    else {
+        // this is an absolute address
+        value = entry.toAddend;
+        if ( !external )
+            value += entry.toTarget->finalAddress();
+    }
     switch ( entry.kind ) {
         case ld::Fixup::kindStoreARMLow16:
             len = 0;
@@ -1945,7 +2006,6 @@ private:
     uint32_t                symIndexOfLazyPointerAtom(const ld::Atom*);
     uint32_t                symIndexOfNonLazyPointerAtom(const ld::Atom*);
     uint32_t                symbolIndex(const ld::Atom*);
-    bool                    kextBundlesDontHaveIndirectSymbolTable();
 
     std::vector<uint32_t>       _entries;
 
@@ -1977,9 +2037,11 @@ uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
 {
     for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
         if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
-            assert((fit->u.target->contentType() == ld::Atom::typeLazyPointer)
-                || (fit->u.target->contentType() == ld::Atom::typeLazyDylibPointer));
-            return symIndexOfLazyPointerAtom(fit->u.target);
+            ld::Atom::ContentType type = fit->u.target->contentType();
+            if ( (type == ld::Atom::typeLazyPointer) || (type == ld::Atom::typeLazyDylibPointer) )
+                return symIndexOfLazyPointerAtom(fit->u.target);
+            if ( type == ld::Atom::typeNonLazyPointer )
+                return symIndexOfNonLazyPointerAtom(fit->u.target);
         }
     }
     throw "internal error: stub missing fixup to lazy pointer";
@@ -2095,12 +2157,6 @@ void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
     }
 }
 
-template <typename A>
-bool IndirectSymbolTableAtom<A>::kextBundlesDontHaveIndirectSymbolTable()
-{
-    return true;
-}
-
 template <typename A>
 void IndirectSymbolTableAtom<A>::encode()
 {
@@ -2108,8 +2164,8 @@ void IndirectSymbolTableAtom<A>::encode()
     if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
         return;
 
-    // x86_64 kext bundles should not have an indirect symbol table
-    if ( (this->_options.outputKind() == Options::kKextBundle) && kextBundlesDontHaveIndirectSymbolTable() )
+    // x86_64 kext bundles should not have an indirect symbol table unless using stubs
+    if ( (this->_options.outputKind() == Options::kKextBundle) && !this->_options.kextsUseStubs() )
        return;
 
     // slidable static executables (-static -pie) should not have an indirect symbol table
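
For illustration only, and not part of the diff above: a minimal sketch of how the N_ALT_ENTRY bit that addLocal()/addGlobal() now set for -r output can be inspected in a relocatable object. It assumes a thin 64-bit Mach-O object, an SDK whose <mach-o/nlist.h> defines N_ALT_ENTRY, host-endian data, and it omits error handling and fat-file support; the file name is whatever is passed on the command line.

// list-alt-entries.cpp -- print symbols whose n_desc carries N_ALT_ENTRY (sketch only)
#include <cstdio>
#include <cstdint>
#include <vector>
#include <mach-o/loader.h>
#include <mach-o/nlist.h>

int main(int argc, const char* argv[])
{
    if ( argc < 2 ) {
        fprintf(stderr, "usage: %s file.o\n", argv[0]);
        return 1;
    }
    FILE* f = fopen(argv[1], "rb");
    if ( f == NULL ) {
        perror("fopen");
        return 1;
    }
    fseek(f, 0, SEEK_END);
    long fileSize = ftell(f);
    fseek(f, 0, SEEK_SET);
    std::vector<uint8_t> buffer(fileSize);
    fread(buffer.data(), 1, fileSize, f);
    fclose(f);

    const mach_header_64* mh = (const mach_header_64*)buffer.data();
    if ( mh->magic != MH_MAGIC_64 ) {
        fprintf(stderr, "not a thin 64-bit Mach-O file\n");
        return 1;
    }
    // walk the load commands looking for LC_SYMTAB
    const load_command* cmd = (const load_command*)(mh + 1);
    for (uint32_t i = 0; i < mh->ncmds; ++i) {
        if ( cmd->cmd == LC_SYMTAB ) {
            const symtab_command* st = (const symtab_command*)cmd;
            const nlist_64* syms = (const nlist_64*)(buffer.data() + st->symoff);
            const char*     strs = (const char*)(buffer.data() + st->stroff);
            for (uint32_t s = 0; s < st->nsyms; ++s) {
                // N_ALT_ENTRY marks a symbol that begins inside another symbol's
                // atom (the group-subordinate/follow-on pattern isAltEntry() walks)
                if ( ((syms[s].n_type & N_STAB) == 0) && ((syms[s].n_desc & N_ALT_ENTRY) != 0) )
                    printf("alt entry: %s\n", &strs[syms[s].n_un.n_strx]);
            }
        }
        cmd = (const load_command*)((const uint8_t*)cmd + cmd->cmdsize);
    }
    return 0;
}

The desc |= N_ALT_ENTRY additions above preserve this marker when ld -r writes a new object file, so a subsequent link can still recognize such a symbol as an alternate entry point into the preceding atom rather than the start of an independent atom.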
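
Also for illustration only: the addImport() change above writes temporary alias atoms (typeTempAlias) as N_INDR symbols whose n_value is a string-pool offset naming the aliased symbol, which is what the switch over the kindNoneFollowOn fixups stores via pool->add(). The helper below is a sketch that assumes the same nlist_64 array and string table located by the LC_SYMTAB walk in the previous example, and simply decodes that encoding; nm reports such symbols with type 'I'.

#include <cstdio>
#include <cstdint>
#include <mach-o/nlist.h>

// Print "alias -> target" for each indirect (N_INDR) symbol.  'syms', 'nsyms'
// and 'strs' are the symbol table, its length, and the string table, located
// exactly as in the previous sketch.
static void dumpIndirectAliases(const nlist_64* syms, uint32_t nsyms, const char* strs)
{
    for (uint32_t i = 0; i < nsyms; ++i) {
        if ( ((syms[i].n_type & N_STAB) == 0) && ((syms[i].n_type & N_TYPE) == N_INDR) ) {
            const char* aliasName  = &strs[syms[i].n_un.n_strx];
            // for N_INDR the n_value field holds a string table offset, not an address
            const char* targetName = &strs[syms[i].n_value];
            printf("%s -> %s%s\n", aliasName, targetName,
                   (syms[i].n_type & N_PEXT) ? "  (private extern)" : "");
        }
    }
}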