/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2009 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>
#include <assert.h>

#include <vector>
#include <map>
#include <algorithm>

#include "MachOFileAbstraction.hpp"
#include "ld.hpp"
#include "got.h"
#include "configure.h"

namespace ld {
namespace passes {
namespace got {

class File; // forward reference
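
// A GOTEntryAtom is one synthesized non-lazy pointer: a pointer-sized slot
// (4 or 8 bytes depending on is64) placed in __DATA,__got, or in
// __DATA,__got_weak when the target is a weak definition. A single fixup
// stores the target's address into the slot.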
class GOTEntryAtom : public ld::Atom {
public:
    GOTEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport, bool weakDef, bool is64)
        : ld::Atom(weakDef ? _s_sectionWeak : _s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
                   ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
                   symbolTableNotIn, false, false, false, (is64 ? ld::Atom::Alignment(3) : ld::Atom::Alignment(2))),
          _fixup(0, ld::Fixup::k1of1, (is64 ? ld::Fixup::kindStoreTargetAddressLittleEndian64 : ld::Fixup::kindStoreTargetAddressLittleEndian32), target),
          _target(target), _is64(is64)
            { _fixup.weakImport = weakImport; internal.addAtom(*this); }

    virtual const ld::File*     file() const                    { return NULL; }
    virtual const char*         name() const                    { return _target->name(); }
    virtual uint64_t            size() const                    { return (_is64 ? 8 : 4); }
    virtual uint64_t            objectAddress() const           { return 0; }
    virtual void                copyRawContent(uint8_t buffer[]) const { }
    virtual void                setScope(Scope)                 { }
    virtual ld::Fixup::iterator fixupsBegin() const             { return &_fixup; }
    virtual ld::Fixup::iterator fixupsEnd() const               { return &((ld::Fixup*)&_fixup)[1]; }

private:
    mutable ld::Fixup           _fixup;
    const ld::Atom*             _target;
    bool                        _is64;

    static ld::Section          _s_section;
    static ld::Section          _s_sectionWeak;
};

ld::Section GOTEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);
ld::Section GOTEntryAtom::_s_sectionWeak("__DATA", "__got_weak", ld::Section::typeNonLazyPointer);
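
// On arm64e, GOT slots for pointers that must be signed (e.g. personality
// function pointers) carry two fixups: one attaching the pointer
// authentication data and one storing the authenticated target address.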
#if SUPPORT_ARCH_arm64e

class GOTAuthEntryAtom : public ld::Atom {
public:
    GOTAuthEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport, bool weakDef)
        : ld::Atom(weakDef ? _s_sectionWeak : _s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
                   ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
                   symbolTableNotIn, false, false, false, ld::Atom::Alignment(3)),
          _fixup1(0, ld::Fixup::k1of2, ld::Fixup::kindSetAuthData, (ld::Fixup::AuthData){ 0, true, ld::Fixup::AuthData::ptrauth_key_asia }),
          _fixup2(0, ld::Fixup::k2of2, ld::Fixup::kindStoreTargetAddressLittleEndianAuth64, target),
          _target(target)
            { _fixup2.weakImport = weakImport; internal.addAtom(*this); }

    virtual const ld::File*     file() const                    { return NULL; }
    virtual const char*         name() const                    { return _target->name(); }
    virtual uint64_t            size() const                    { return 8; }
    virtual uint64_t            objectAddress() const           { return 0; }
    virtual void                copyRawContent(uint8_t buffer[]) const { }
    virtual void                setScope(Scope)                 { }
    virtual ld::Fixup::iterator fixupsBegin() const             { return (ld::Fixup*)&_fixup1; }
    virtual ld::Fixup::iterator fixupsEnd() const               { return &((ld::Fixup*)&_fixup2)[1]; }

private:
    mutable ld::Fixup           _fixup1;
    mutable ld::Fixup           _fixup2;
    const ld::Atom*             _target;

    static ld::Section          _s_section;
    static ld::Section          _s_sectionWeak;
};

ld::Section GOTAuthEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);
ld::Section GOTAuthEntryAtom::_s_sectionWeak("__DATA", "__got_weak", ld::Section::typeNonLazyPointer);

#endif // SUPPORT_ARCH_arm64e
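
// gotFixup() returns true if the fixup is a GOT-style reference. It also
// reports whether the reference can be optimized into a direct LEA (only
// when the target is in this linkage unit and not weak-exported,
// interposable, a resolver, or linked flat-namespace), whether the target is
// an exported weak definition, and whether it is a personality function
// (arm64e only).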
static bool gotFixup(const Options& opts, ld::Internal& internal, const ld::Atom* targetOfGOT, const ld::Atom* fixupAtom,
                     const ld::Fixup* fixup, bool* optimizable, bool* targetIsExternalWeakDef, bool* targetIsPersonalityFn)
{
    *targetIsExternalWeakDef = false;
    *targetIsPersonalityFn = false;
    switch (fixup->kind) {
        case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
#if SUPPORT_ARCH_arm64
        case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
        case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
#endif
            // start by assuming this can be optimized
            *optimizable = true;
            // cannot do LEA optimization if target is in another dylib
            if ( targetOfGOT->definition() == ld::Atom::definitionProxy )
                *optimizable = false;
            // cannot do LEA optimization if target in __huge section
            if ( internal.usingHugeSections && (targetOfGOT->size() > 1024*1024)
                                            && (   (targetOfGOT->section().type() == ld::Section::typeZeroFill)
                                                || (targetOfGOT->section().type() == ld::Section::typeTentativeDefs)) ) {
                *optimizable = false;
            }
            if ( targetOfGOT->scope() == ld::Atom::scopeGlobal ) {
                // cannot do LEA optimization if target is weak exported symbol
                if ( ((targetOfGOT->definition() == ld::Atom::definitionRegular) || (targetOfGOT->definition() == ld::Atom::definitionProxy)) && (targetOfGOT->combine() == ld::Atom::combineByName) ) {
                    switch ( opts.outputKind() ) {
                        case Options::kDynamicExecutable:
                        case Options::kDynamicLibrary:
                        case Options::kDynamicBundle:
                        case Options::kKextBundle:
                            *targetIsExternalWeakDef = true;
                            *optimizable = false;
                            break;
                        case Options::kStaticExecutable:
                        case Options::kDyld:
                        case Options::kPreload:
                        case Options::kObjectFile:
                            break;
                    }
                }
                // cannot do LEA optimization if target is interposable
                if ( opts.interposable(targetOfGOT->name()) )
                    *optimizable = false;
                // cannot do LEA optimization if target is resolver function
                if ( targetOfGOT->contentType() == ld::Atom::typeResolver )
                    *optimizable = false;
                // cannot do LEA optimization for flat-namespace
                if ( opts.nameSpace() != Options::kTwoLevelNameSpace )
                    *optimizable = false;
            }
            else if ( targetOfGOT->scope() == ld::Atom::scopeLinkageUnit ) {
                // <rdar://problem/12379969> don't do optimization if target is in custom segment
                if ( opts.sharedRegionEligible() ) {
                    const char* segName = targetOfGOT->section().segmentName();
                    if ( (strcmp(segName, "__TEXT") != 0) && (strcmp(segName, "__DATA") != 0) ) {
                        *optimizable = false;
                    }
                }
            }
            return true;
        case ld::Fixup::kindStoreX86PCRel32GOT:
#if SUPPORT_ARCH_arm64
        case ld::Fixup::kindStoreARM64PCRelToGOT:
#endif
#if SUPPORT_ARCH_arm64e
            // Note, this handles identifying DWARF unwind info personality functions
            if (opts.supportsAuthenticatedPointers()) {
                if (fixupAtom->section().type() == ld::Section::typeCFI)
                    *targetIsPersonalityFn = true;
            }
#endif
            *optimizable = false;
            return true;
        case ld::Fixup::kindNoneGroupSubordinatePersonality:
            *optimizable = false;
#if SUPPORT_ARCH_arm64e
            // Note, this is a compact unwind info personality function
            if (opts.supportsAuthenticatedPointers())
                *targetIsPersonalityFn = true;
#endif
            return true;
        default:
            break;
    }

    return false;
}
struct AtomByNameSorter
{
    bool operator()(const ld::Atom* left, const ld::Atom* right)
    {
        return (strcmp(left->name(), right->name()) < 0);
    }
};

struct GotMapEntry {
    const ld::Atom* atom;
    bool isPersonalityFn;

    bool operator<(const GotMapEntry& other) const {
        if (atom != other.atom)
            return atom < other.atom;
        return (int)isPersonalityFn < (int)other.isPersonalityFn;
    }
};
void doPass(const Options& opts, ld::Internal& internal)
{
    const bool log = false;

    // only make got section in final linked images
    if ( opts.outputKind() == Options::kObjectFile )
        return;

    // pre-fill gotMap with existing non-lazy pointers
    std::map<GotMapEntry, const ld::Atom*> gotMap;
    for (ld::Internal::FinalSection* sect : internal.sections) {
        if ( sect->type() != ld::Section::typeNonLazyPointer )
            continue;
        for (const ld::Atom* atom : sect->atoms) {
            const ld::Atom* target = NULL;
            for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
                switch ( fit->kind ) {
                    case ld::Fixup::kindStoreTargetAddressLittleEndian64:
                    case ld::Fixup::kindStoreTargetAddressLittleEndian32:
                        switch ( fit->binding ) {
                            case ld::Fixup::bindingsIndirectlyBound:
                                target = internal.indirectBindingTable[fit->u.bindingIndex];
                                break;
                            case ld::Fixup::bindingDirectlyBound:
                                target = fit->u.target;
                                break;
                            default:
                                fprintf(stderr, "non-pointer is got entry\n");
                                break;
                        }
                        break;
                    default:
                        break;
                }
            }
            if ( target != NULL ) {
                if (log) fprintf(stderr, "found existing got entry to %s\n", target->name());
                gotMap[{ target, false }] = atom;
            }
        }
    }
    // walk all atoms and fixups looking for GOT-able references
    // don't create GOT atoms during this loop because that could invalidate the sections iterator
    std::vector<const ld::Atom*> atomsReferencingGOT;
    std::map<const ld::Atom*,bool> weakImportMap;
    std::map<const ld::Atom*,bool> weakDefMap;
    atomsReferencingGOT.reserve(128);
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
            const ld::Atom* atom = *ait;
            bool atomUsesGOT = false;
            const ld::Atom* targetOfGOT = NULL;
            bool targetIsWeakImport = false;
            for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
                if ( fit->firstInCluster() )
                    targetOfGOT = NULL;
                switch ( fit->binding ) {
                    case ld::Fixup::bindingsIndirectlyBound:
                        targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
                        targetIsWeakImport = fit->weakImport;
                        break;
                    case ld::Fixup::bindingDirectlyBound:
                        targetOfGOT = fit->u.target;
                        targetIsWeakImport = fit->weakImport;
                        break;
                    default:
                        break;
                }
                bool optimizable;
                bool targetIsExternalWeakDef;
                bool targetIsPersonalityFn;
                if ( !gotFixup(opts, internal, targetOfGOT, atom, fit, &optimizable, &targetIsExternalWeakDef, &targetIsPersonalityFn) )
                    continue;
                if ( optimizable ) {
                    // change from load of GOT entry to lea of target
                    if ( log ) fprintf(stderr, "optimized GOT usage in %s to %s\n", atom->name(), targetOfGOT->name());
                    switch ( fit->binding ) {
                        case ld::Fixup::bindingsIndirectlyBound:
                        case ld::Fixup::bindingDirectlyBound:
                            fit->binding = ld::Fixup::bindingDirectlyBound;
                            fit->u.target = targetOfGOT;
                            switch ( fit->kind ) {
                                case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
                                    fit->kind = ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoadNowLEA;
                                    break;
#if SUPPORT_ARCH_arm64
                                case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
                                    fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPage21;
                                    break;
                                case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
                                    fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPageOff12;
                                    break;
#endif
                                default:
                                    assert(0 && "unsupported GOT reference kind");
                                    break;
                            }
                            break;
                        default:
                            assert(0 && "unsupported GOT reference");
                            break;
                    }
                }
                else {
                    // remember that we need to use GOT in this function
                    if ( log ) fprintf(stderr, "found GOT use in %s\n", atom->name());
                    if ( !atomUsesGOT ) {
                        atomsReferencingGOT.push_back(atom);
                        atomUsesGOT = true;
                    }
                    if ( gotMap.count({ targetOfGOT, targetIsPersonalityFn }) == 0 )
                        gotMap[{ targetOfGOT, targetIsPersonalityFn }] = NULL;
                    // record if target is weak def
                    weakDefMap[targetOfGOT] = targetIsExternalWeakDef;
                    // record weak_import attribute
                    std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(targetOfGOT);
                    if ( pos == weakImportMap.end() ) {
                        // target not in weakImportMap, so add
                        if ( log ) fprintf(stderr, "weakImportMap[%s] = %d\n", targetOfGOT->name(), targetIsWeakImport);
                        weakImportMap[targetOfGOT] = targetIsWeakImport;
                    }
                    else {
                        // target in weakImportMap, check for weakness mismatch
                        if ( pos->second != targetIsWeakImport ) {
                            // found mismatch
                            switch ( opts.weakReferenceMismatchTreatment() ) {
                                case Options::kWeakReferenceMismatchError:
                                    throwf("mismatching weak references for symbol: %s", targetOfGOT->name());
                                case Options::kWeakReferenceMismatchWeak:
                                    pos->second = true;
                                    break;
                                case Options::kWeakReferenceMismatchNonWeak:
                                    pos->second = false;
                                    break;
                            }
                        }
                    }
                }
            }
        }
    }
    // determine whether GOT slots on this architecture are 4 or 8 bytes
    bool is64 = false;
    switch ( opts.architecture() ) {
#if SUPPORT_ARCH_i386
        case CPU_TYPE_I386:
            is64 = false;
            break;
#endif
#if SUPPORT_ARCH_x86_64
        case CPU_TYPE_X86_64:
            is64 = true;
            break;
#endif
#if SUPPORT_ARCH_arm_any
        case CPU_TYPE_ARM:
            is64 = false;
            break;
#endif
#if SUPPORT_ARCH_arm64
        case CPU_TYPE_ARM64:
            is64 = true;
            break;
#endif
    }
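
    // make a GOT entry atom for every target that does not already have one,
    // using an authenticated entry for arm64e personality functions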
    for (auto& entry : gotMap) {
        if ( entry.second == NULL ) {
#if SUPPORT_ARCH_arm64e
            if ( entry.first.isPersonalityFn && (opts.supportsAuthenticatedPointers()) ) {
                entry.second = new GOTAuthEntryAtom(internal, entry.first.atom, weakImportMap[entry.first.atom], opts.useDataConstSegment() && weakDefMap[entry.first.atom]);
                if (log) fprintf(stderr, "making new GOT slot for %s, gotMap[%p] = %p\n", entry.first.atom->name(), entry.first.atom, entry.second);
                continue;
            }
#endif
            entry.second = new GOTEntryAtom(internal, entry.first.atom, weakImportMap[entry.first.atom], opts.useDataConstSegment() && weakDefMap[entry.first.atom], is64);
            if (log) fprintf(stderr, "making new GOT slot for %s, gotMap[%p] = %p\n", entry.first.atom->name(), entry.first.atom, entry.second);
        }
    }
    // update atoms to use GOT entries
    for (std::vector<const ld::Atom*>::iterator it=atomsReferencingGOT.begin(); it != atomsReferencingGOT.end(); ++it) {
        const ld::Atom* atom = *it;
        const ld::Atom* targetOfGOT = NULL;
        ld::Fixup::iterator fitThatSetTarget = NULL;
        for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
            if ( fit->firstInCluster() ) {
                targetOfGOT = NULL;
                fitThatSetTarget = NULL;
            }
            switch ( fit->binding ) {
                case ld::Fixup::bindingsIndirectlyBound:
                    targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
                    fitThatSetTarget = fit;
                    break;
                case ld::Fixup::bindingDirectlyBound:
                    targetOfGOT = fit->u.target;
                    fitThatSetTarget = fit;
                    break;
                default:
                    break;
            }
            bool optimizable;
            bool targetIsExternalWeakDef;
            bool targetIsPersonalityFn;
            if ( (targetOfGOT == NULL) || !gotFixup(opts, internal, targetOfGOT, atom, fit,
                                                    &optimizable, &targetIsExternalWeakDef, &targetIsPersonalityFn) )
                continue;
            if ( !optimizable ) {
                // GOT use not optimized away, update to bind to GOT entry
                assert(fitThatSetTarget != NULL);
                switch ( fitThatSetTarget->binding ) {
                    case ld::Fixup::bindingsIndirectlyBound:
                    case ld::Fixup::bindingDirectlyBound:
                        if ( log ) fprintf(stderr, "updating GOT use in %s to %s\n", atom->name(), targetOfGOT->name());
                        fitThatSetTarget->binding = ld::Fixup::bindingDirectlyBound;
                        fitThatSetTarget->u.target = gotMap[{ targetOfGOT, targetIsPersonalityFn }];
                        break;
                    default:
                        assert(0 && "unsupported GOT reference");
                        break;
                }
            }
        }
    }
    // sort new atoms so links are consistent
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        if ( sect->type() == ld::Section::typeNonLazyPointer ) {
            std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
        }
    }
}


} // namespace got
} // namespace passes
} // namespace ld