/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2009 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
34 #include "MachOFileAbstraction.hpp"
37 #include "configure.h"
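//
// The GOT (Global Offset Table) pass runs only on final linked images.  It
// scans every atom's fixups for GOT-style references, rewrites those that can
// be resolved directly (see gotFixup() below), and synthesizes a pointer-sized
// GOTEntryAtom for each target that still needs real indirection, then
// re-points the remaining fixups at those new atoms.
//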
class File; // forward reference
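
//
// A GOTEntryAtom is one pointer-sized GOT slot (8 bytes for 64-bit targets,
// 4 bytes otherwise) holding the address of its target atom.  The slot's
// content is produced entirely by a single store-target-address fixup, so
// copyRawContent() has nothing to do.  Slots for weak-definition targets are
// placed in a separate __got_weak section.
//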
class GOTEntryAtom : public ld::Atom {
public:
								GOTEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport, bool weakDef, bool is64)
			: ld::Atom(weakDef ? _s_sectionWeak : _s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
						ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
						symbolTableNotIn, false, false, false, (is64 ? ld::Atom::Alignment(3) : ld::Atom::Alignment(2))),
			  _fixup(0, ld::Fixup::k1of1, (is64 ? ld::Fixup::kindStoreTargetAddressLittleEndian64 : ld::Fixup::kindStoreTargetAddressLittleEndian32), target),
			  _target(target), _is64(is64)
				{ _fixup.weakImport = weakImport; internal.addAtom(*this); }
	virtual const ld::File*			file() const					{ return NULL; }
	virtual const char*				name() const					{ return _target->name(); }
	virtual uint64_t				size() const					{ return (_is64 ? 8 : 4); }
	virtual uint64_t				objectAddress() const			{ return 0; }
	virtual void					copyRawContent(uint8_t buffer[]) const { }
	virtual void					setScope(Scope)					{ }
	virtual ld::Fixup::iterator		fixupsBegin() const				{ return &_fixup; }
	virtual ld::Fixup::iterator		fixupsEnd() const				{ return &((ld::Fixup*)&_fixup)[1]; }
private:
	mutable ld::Fixup				_fixup;
	const ld::Atom*					_target;
	bool							_is64;

	static ld::Section				_s_section;
	static ld::Section				_s_sectionWeak;
};
ld::Section GOTEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);
ld::Section GOTEntryAtom::_s_sectionWeak("__DATA", "__got_weak", ld::Section::typeNonLazyPointer);
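
//
// gotFixup() classifies one fixup: it returns false if the fixup is not a GOT
// reference at all; otherwise it sets *optimizable to tell the caller whether
// the GOT load may be rewritten as a direct address computation.  As an
// illustrative sketch of the eventual rewrite (mnemonics shown for
// orientation only; this pass changes fixup kinds, not instruction bytes):
//
//   x86_64:  movq _foo@GOTPCREL(%rip), %rax   =>  leaq _foo(%rip), %rax
//   arm64:   adrp x8, _foo@GOTPAGE            =>  adrp x8, _foo@PAGE
//            ldr  x8, [x8, _foo@GOTPAGEOFF]   =>  add  x8, x8, _foo@PAGEOFF
//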
static bool gotFixup(const Options& opts, ld::Internal& internal, const ld::Atom* targetOfGOT, const ld::Fixup* fixup, bool* optimizable, bool* targetIsExternalWeakDef)
{
	*targetIsExternalWeakDef = false;
	switch (fixup->kind) {
		case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
#if SUPPORT_ARCH_arm64
		case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
		case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
#endif
			// start by assuming this can be optimized
			*optimizable = true;
			// cannot do LEA optimization if target is in another dylib
			if ( targetOfGOT->definition() == ld::Atom::definitionProxy )
				*optimizable = false;
			// cannot do LEA optimization if target in __huge section
			if ( internal.usingHugeSections && (targetOfGOT->size() > 1024*1024)
					&& (   (targetOfGOT->section().type() == ld::Section::typeZeroFill)
						|| (targetOfGOT->section().type() == ld::Section::typeTentativeDefs)) ) {
				*optimizable = false;
			}
			if ( targetOfGOT->scope() == ld::Atom::scopeGlobal ) {
				// cannot do LEA optimization if target is weak exported symbol
				if ( ((targetOfGOT->definition() == ld::Atom::definitionRegular) || (targetOfGOT->definition() == ld::Atom::definitionProxy)) && (targetOfGOT->combine() == ld::Atom::combineByName) ) {
					switch ( opts.outputKind() ) {
						case Options::kDynamicExecutable:
						case Options::kDynamicLibrary:
						case Options::kDynamicBundle:
						case Options::kKextBundle:
							*targetIsExternalWeakDef = true;
							*optimizable = false;
							break;
						case Options::kStaticExecutable:
						case Options::kDyld:
						case Options::kPreload:
						case Options::kObjectFile:
							break;
					}
				}
				// cannot do LEA optimization if target is interposable
				if ( opts.interposable(targetOfGOT->name()) )
					*optimizable = false;
				// cannot do LEA optimization if target is resolver function
				if ( targetOfGOT->contentType() == ld::Atom::typeResolver )
					*optimizable = false;
				// cannot do LEA optimization for flat-namespace
				if ( opts.nameSpace() != Options::kTwoLevelNameSpace )
					*optimizable = false;
			}
			else if ( targetOfGOT->scope() == ld::Atom::scopeLinkageUnit ) {
				// <rdar://problem/12379969> don't do optimization if target is in custom segment
				if ( opts.sharedRegionEligible() ) {
					const char* segName = targetOfGOT->section().segmentName();
					if ( (strcmp(segName, "__TEXT") != 0) && (strcmp(segName, "__DATA") != 0) ) {
						*optimizable = false;
					}
				}
			}
			return true;
		case ld::Fixup::kindStoreX86PCRel32GOT:
#if SUPPORT_ARCH_arm64
		case ld::Fixup::kindStoreARM64PCRelToGOT:
#endif
			*optimizable = false;
			return true;
		case ld::Fixup::kindNoneGroupSubordinatePersonality:
			*optimizable = false;
			return true;
		default:
			break;
	}

	return false;
}
struct AtomByNameSorter
{
	bool operator()(const ld::Atom* left, const ld::Atom* right)
	{
		return (strcmp(left->name(), right->name()) < 0);
	}
};
void doPass(const Options& opts, ld::Internal& internal)
{
	const bool log = false;

	// only make got section in final linked images
	if ( opts.outputKind() == Options::kObjectFile )
		return;
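
	// Note: input objects (e.g. the output of "ld -r") may already contain
	// non-lazy pointer atoms; reusing them avoids creating duplicate slots.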
	// pre-fill gotMap with existing non-lazy pointers
	std::map<const ld::Atom*, const ld::Atom*> gotMap;
	for (ld::Internal::FinalSection* sect : internal.sections) {
		if ( sect->type() != ld::Section::typeNonLazyPointer )
			continue;
		for (const ld::Atom* atom : sect->atoms) {
			const ld::Atom* target = NULL;
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				switch ( fit->kind ) {
					case ld::Fixup::kindStoreTargetAddressLittleEndian64:
					case ld::Fixup::kindStoreTargetAddressLittleEndian32:
						switch ( fit->binding ) {
							case ld::Fixup::bindingsIndirectlyBound:
								target = internal.indirectBindingTable[fit->u.bindingIndex];
								break;
							case ld::Fixup::bindingDirectlyBound:
								target = fit->u.target;
								break;
							default:
								fprintf(stderr, "non-pointer is got entry\n");
								break;
						}
						break;
					default:
						break;
				}
			}
			if ( target != NULL ) {
				if (log) fprintf(stderr, "found existing got entry to %s\n", target->name());
				gotMap[target] = atom;
			}
		}
	}
	// walk all atoms and fixups looking for GOT-able references
	// don't create GOT atoms during this loop because that could invalidate the sections iterator
	std::vector<const ld::Atom*> atomsReferencingGOT;
	std::map<const ld::Atom*,bool> weakImportMap;
	std::map<const ld::Atom*,bool> weakDefMap;
	atomsReferencingGOT.reserve(128);
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
			const ld::Atom* atom = *ait;
			bool atomUsesGOT = false;
			const ld::Atom* targetOfGOT = NULL;
			bool targetIsWeakImport = false;
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				if ( fit->firstInCluster() )
					targetOfGOT = NULL;
				switch ( fit->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
						targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
						targetIsWeakImport = fit->weakImport;
						break;
					case ld::Fixup::bindingDirectlyBound:
						targetOfGOT = fit->u.target;
						targetIsWeakImport = fit->weakImport;
						break;
					default:
						break;
				}
				bool optimizable;
				bool targetIsExternalWeakDef;
				if ( !gotFixup(opts, internal, targetOfGOT, fit, &optimizable, &targetIsExternalWeakDef) )
					continue;
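				// The load and LEA forms have the same instruction length
				// (7 bytes for the x86_64 movq/leaq pair; all arm64
				// instructions are 4 bytes), so only the fixup kind changes
				// here; the instruction itself is rewritten in place later,
				// when fixups are applied.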
				if ( optimizable ) {
					// change from load of GOT entry to lea of target
					if ( log ) fprintf(stderr, "optimized GOT usage in %s to %s\n", atom->name(), targetOfGOT->name());
					switch ( fit->binding ) {
						case ld::Fixup::bindingsIndirectlyBound:
						case ld::Fixup::bindingDirectlyBound:
							fit->binding = ld::Fixup::bindingDirectlyBound;
							fit->u.target = targetOfGOT;
							switch ( fit->kind ) {
								case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
									fit->kind = ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoadNowLEA;
									break;
#if SUPPORT_ARCH_arm64
								case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
									fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPage21;
									break;
								case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
									fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPageOff12;
									break;
#endif
								default:
									assert(0 && "unsupported GOT reference kind");
									break;
							}
							break;
						default:
							assert(0 && "unsupported GOT reference");
							break;
					}
				}
				else {
					// remember that we need to use GOT in this function
					if ( log ) fprintf(stderr, "found GOT use in %s\n", atom->name());
					if ( !atomUsesGOT ) {
						atomsReferencingGOT.push_back(atom);
						atomUsesGOT = true;
					}
					if ( gotMap.count(targetOfGOT) == 0 )
						gotMap[targetOfGOT] = NULL;
					// record if target is weak def
					weakDefMap[targetOfGOT] = targetIsExternalWeakDef;
					// record weak_import attribute
					std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(targetOfGOT);
					if ( pos == weakImportMap.end() ) {
						// target not in weakImportMap, so add
						if ( log ) fprintf(stderr, "weakImportMap[%s] = %d\n", targetOfGOT->name(), targetIsWeakImport);
						weakImportMap[targetOfGOT] = targetIsWeakImport;
					}
					else {
						// target in weakImportMap, check for weakness mismatch
						if ( pos->second != targetIsWeakImport ) {
							// found mismatch
							switch ( opts.weakReferenceMismatchTreatment() ) {
								case Options::kWeakReferenceMismatchError:
									throwf("mismatching weak references for symbol: %s", targetOfGOT->name());
								case Options::kWeakReferenceMismatchWeak:
									pos->second = true;
									break;
								case Options::kWeakReferenceMismatchNonWeak:
									pos->second = false;
									break;
							}
						}
					}
				}
			}
		}
	}
	bool is64 = false;
	switch ( opts.architecture() ) {
#if SUPPORT_ARCH_i386
		case CPU_TYPE_I386:
			is64 = false;
			break;
#endif
#if SUPPORT_ARCH_x86_64
		case CPU_TYPE_X86_64:
			is64 = true;
			break;
#endif
#if SUPPORT_ARCH_arm_any
		case CPU_TYPE_ARM:
			is64 = false;
			break;
#endif
#if SUPPORT_ARCH_arm64
		case CPU_TYPE_ARM64:
			is64 = true;
			break;
#endif
	}
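
	// Weak definitions get their GOT slot in __got_weak (see GOTEntryAtom's
	// constructor) when __DATA_CONST is in use, presumably so that entries
	// which dyld may rebind at runtime stay apart from read-only pointer data.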
	// make GOT entries
	for (auto& entry : gotMap) {
		if ( entry.second == NULL ) {
			entry.second = new GOTEntryAtom(internal, entry.first, weakImportMap[entry.first], opts.useDataConstSegment() && weakDefMap[entry.first], is64);
			if (log) fprintf(stderr, "making new GOT slot for %s, gotMap[%p] = %p\n", entry.first->name(), entry.first, entry.second);
		}
	}
	// update atoms to use GOT entries
	for (std::vector<const ld::Atom*>::iterator it=atomsReferencingGOT.begin(); it != atomsReferencingGOT.end(); ++it) {
		const ld::Atom* atom = *it;
		const ld::Atom* targetOfGOT = NULL;
		ld::Fixup::iterator fitThatSetTarget = NULL;
		for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
			if ( fit->firstInCluster() ) {
				targetOfGOT = NULL;
				fitThatSetTarget = NULL;
			}
			switch ( fit->binding ) {
				case ld::Fixup::bindingsIndirectlyBound:
					targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
					fitThatSetTarget = fit;
					break;
				case ld::Fixup::bindingDirectlyBound:
					targetOfGOT = fit->u.target;
					fitThatSetTarget = fit;
					break;
				default:
					break;
			}
			bool optimizable;
			bool targetIsExternalWeakDef;
			if ( (targetOfGOT == NULL) || !gotFixup(opts, internal, targetOfGOT, fit, &optimizable, &targetIsExternalWeakDef) )
				continue;
			if ( !optimizable ) {
				// GOT use not optimized away, update to bind to GOT entry
				assert(fitThatSetTarget != NULL);
				switch ( fitThatSetTarget->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
					case ld::Fixup::bindingDirectlyBound:
						if ( log ) fprintf(stderr, "updating GOT use in %s to %s\n", atom->name(), targetOfGOT->name());
						fitThatSetTarget->binding = ld::Fixup::bindingDirectlyBound;
						fitThatSetTarget->u.target = gotMap[targetOfGOT];
						break;
					default:
						assert(0 && "unsupported GOT reference");
						break;
				}
			}
		}
	}
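
	// gotMap is keyed by atom pointer, so creation order can vary from run to
	// run; sorting the section contents by name below keeps the output layout
	// deterministic across identical links.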
	// sort new atoms so links are consistent
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		if ( sect->type() == ld::Section::typeNonLazyPointer ) {
			std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
		}
	}
}
} // namespace got
} // namespace passes
} // namespace ld