]> git.saurik.com Git - apple/ld64.git/blame_incremental - src/ld/passes/got.cpp
ld64-409.12.tar.gz
[apple/ld64.git] / src / ld / passes / got.cpp
... / ...
CommitLineData
1/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
2 *
3 * Copyright (c) 2009 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25
26#include <stdint.h>
27#include <math.h>
28#include <unistd.h>
29#include <dlfcn.h>
30
31#include <vector>
32#include <map>
33
34#include "MachOFileAbstraction.hpp"
35#include "ld.hpp"
36#include "got.h"
37#include "configure.h"
38
39namespace ld {
40namespace passes {
41namespace got {
42
43class File; // forward reference
44
// A synthesized GOT slot: a pointer-sized non-lazy pointer atom.  The atom
// carries no static content (copyRawContent writes nothing); the pointer value
// is produced at layout time by the single store-target-address fixup.
class GOTEntryAtom : public ld::Atom {
public:
	// Builds a 4- or 8-byte slot for 'target' and registers it with the link
	// state.  weakDef slots are placed in the separate __got_weak section;
	// weakImport is copied onto the fixup so the binding is emitted weak.
	GOTEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport, bool weakDef, bool is64)
		: ld::Atom(weakDef ? _s_sectionWeak : _s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
					ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
					symbolTableNotIn, false, false, false, (is64 ? ld::Atom::Alignment(3) : ld::Atom::Alignment(2))),
			_fixup(0, ld::Fixup::k1of1, (is64 ? ld::Fixup::kindStoreTargetAddressLittleEndian64 : ld::Fixup::kindStoreTargetAddressLittleEndian32), target),
			_target(target),
			_is64(is64)
		{ _fixup.weakImport = weakImport; internal.addAtom(*this); }

	virtual const ld::File*			file() const					{ return NULL; }
	// named after the target so that by-name sorting of the section is stable
	virtual const char*				name() const					{ return _target->name(); }
	virtual uint64_t				size() const					{ return (_is64 ? 8 : 4); }
	virtual uint64_t				objectAddress() const			{ return 0; }
	// content is zero-fill; the fixup supplies the pointer
	virtual void					copyRawContent(uint8_t buffer[]) const { }
	virtual void					setScope(Scope)					{ }
	// exposes the single embedded fixup as a one-element range
	virtual ld::Fixup::iterator		fixupsBegin() const				{ return &_fixup; }
	virtual ld::Fixup::iterator		fixupsEnd() const				{ return &((ld::Fixup*)&_fixup)[1]; }

private:
	mutable ld::Fixup				_fixup;		// store-target-address fixup (mutable: handed out from const methods)
	const ld::Atom*					_target;	// atom this GOT slot will point to
	bool							_is64;		// true => 8-byte pointer, 8-byte alignment

	static ld::Section				_s_section;
	static ld::Section				_s_sectionWeak;
};

ld::Section GOTEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);
ld::Section GOTEntryAtom::_s_sectionWeak("__DATA", "__got_weak", ld::Section::typeNonLazyPointer);
76
#if SUPPORT_ARCH_arm64e

// arm64e variant of a GOT slot: an authenticated (signed) non-lazy pointer.
// Always 8 bytes.  Uses a two-part fixup cluster: kindSetAuthData first
// supplies the ptrauth parameters (extra discriminator 0, address diversity
// on, key IA), then kindStoreTargetAddressLittleEndianAuth64 stores the
// signed target address.
class GOTAuthEntryAtom : public ld::Atom {
public:
	// Same registration pattern as GOTEntryAtom: weakDef slots go to
	// __got_weak, and weakImport is recorded on the address-storing fixup.
	GOTAuthEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport, bool weakDef)
		: ld::Atom(weakDef ? _s_sectionWeak : _s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
					ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
					symbolTableNotIn, false, false, false, ld::Atom::Alignment(3)),
			_fixup1(0, ld::Fixup::k1of2, ld::Fixup::kindSetAuthData, (ld::Fixup::AuthData){ 0, true, ld::Fixup::AuthData::ptrauth_key_asia }),
			_fixup2(0, ld::Fixup::k2of2, ld::Fixup::kindStoreTargetAddressLittleEndianAuth64, target),
			_target(target)
		{ _fixup2.weakImport = weakImport; internal.addAtom(*this); }

	virtual const ld::File*			file() const					{ return NULL; }
	virtual const char*				name() const					{ return _target->name(); }
	virtual uint64_t				size() const					{ return 8; }
	virtual uint64_t				objectAddress() const			{ return 0; }
	// content is zero-fill; the fixup cluster supplies the signed pointer
	virtual void					copyRawContent(uint8_t buffer[]) const { }
	virtual void					setScope(Scope)					{ }
	// the two fixups are adjacent members, exposed as a two-element range
	virtual ld::Fixup::iterator		fixupsBegin() const				{ return (ld::Fixup*)&_fixup1; }
	virtual ld::Fixup::iterator		fixupsEnd() const				{ return &((ld::Fixup*)&_fixup2)[1]; }

private:
	mutable ld::Fixup				_fixup1;	// k1of2: ptrauth signing parameters
	mutable ld::Fixup				_fixup2;	// k2of2: signed target address store
	const ld::Atom*					_target;	// atom this GOT slot will point to

	static ld::Section				_s_section;
	static ld::Section				_s_sectionWeak;
};

ld::Section GOTAuthEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);
ld::Section GOTAuthEntryAtom::_s_sectionWeak("__DATA", "__got_weak", ld::Section::typeNonLazyPointer);

#endif
112
113
114static bool gotFixup(const Options& opts, ld::Internal& internal, const ld::Atom* targetOfGOT, const ld::Atom* fixupAtom,
115 const ld::Fixup* fixup, bool* optimizable, bool* targetIsExternalWeakDef, bool* targetIsPersonalityFn)
116{
117 *targetIsExternalWeakDef = false;
118 *targetIsPersonalityFn = false;
119 switch (fixup->kind) {
120 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
121#if SUPPORT_ARCH_arm64
122 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
123 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
124#endif
125 // start by assuming this can be optimized
126 *optimizable = true;
127 // cannot do LEA optimization if target is in another dylib
128 if ( targetOfGOT->definition() == ld::Atom::definitionProxy )
129 *optimizable = false;
130 // cannot do LEA optimization if target in __huge section
131 if ( internal.usingHugeSections && (targetOfGOT->size() > 1024*1024)
132 && ( (targetOfGOT->section().type() == ld::Section::typeZeroFill)
133 || (targetOfGOT->section().type() == ld::Section::typeTentativeDefs)) ) {
134 *optimizable = false;
135 }
136 if ( targetOfGOT->scope() == ld::Atom::scopeGlobal ) {
137 // cannot do LEA optimization if target is weak exported symbol
138 if ( ((targetOfGOT->definition() == ld::Atom::definitionRegular) || (targetOfGOT->definition() == ld::Atom::definitionProxy)) && (targetOfGOT->combine() == ld::Atom::combineByName) ) {
139 switch ( opts.outputKind() ) {
140 case Options::kDynamicExecutable:
141 case Options::kDynamicLibrary:
142 case Options::kDynamicBundle:
143 case Options::kKextBundle:
144 *targetIsExternalWeakDef = true;
145 *optimizable = false;
146 break;
147 case Options::kStaticExecutable:
148 case Options::kDyld:
149 case Options::kPreload:
150 case Options::kObjectFile:
151 break;
152 }
153 }
154 // cannot do LEA optimization if target is interposable
155 if ( opts.interposable(targetOfGOT->name()) )
156 *optimizable = false;
157 // cannot do LEA optimization if target is resolver function
158 if ( targetOfGOT->contentType() == ld::Atom::typeResolver )
159 *optimizable = false;
160 // cannot do LEA optimization for flat-namespace
161 if ( opts.nameSpace() != Options::kTwoLevelNameSpace )
162 *optimizable = false;
163 }
164 else if ( targetOfGOT->scope() == ld::Atom::scopeLinkageUnit) {
165 // <rdar://problem/12379969> don't do optimization if target is in custom segment
166 if ( opts.sharedRegionEligible() ) {
167 const char* segName = targetOfGOT->section().segmentName();
168 if ( (strcmp(segName, "__TEXT") != 0) && (strcmp(segName, "__DATA") != 0) ) {
169 *optimizable = false;
170 }
171 }
172 }
173 return true;
174 case ld::Fixup::kindStoreX86PCRel32GOT:
175#if SUPPORT_ARCH_arm64
176 case ld::Fixup::kindStoreARM64PCRelToGOT:
177#endif
178#if SUPPORT_ARCH_arm64e
179 // Note, this handles identifying DWARF unwind info personality functions
180 if (opts.supportsAuthenticatedPointers()) {
181 if (fixupAtom->section().type() == ld::Section::typeCFI)
182 *targetIsPersonalityFn = true;
183 }
184#endif
185 *optimizable = false;
186 return true;
187 case ld::Fixup::kindNoneGroupSubordinatePersonality:
188 *optimizable = false;
189#if SUPPORT_ARCH_arm64e
190 // Note, this is a compact unwind info personality function
191 if (opts.supportsAuthenticatedPointers())
192 *targetIsPersonalityFn = true;
193#endif
194 return true;
195 default:
196 break;
197 }
198
199 return false;
200}
201
202struct AtomByNameSorter
203{
204 bool operator()(const ld::Atom* left, const ld::Atom* right)
205 {
206 return (strcmp(left->name(), right->name()) < 0);
207 }
208};
209
210struct GotMapEntry {
211 const ld::Atom* atom;
212 bool isPersonalityFn;
213
214 bool operator<(const GotMapEntry& other) const {
215 if (atom != other.atom)
216 return atom < other.atom;
217 return (int)isPersonalityFn < (int)other.isPersonalityFn;
218 }
219};
220
// Pass entry point.  Four phases over the link state:
//   1) record pre-existing non-lazy-pointer atoms in gotMap
//   2) scan every atom's fixups: GOT references that can be optimized are
//      rewritten in place to LEA-style fixups; the rest are recorded
//   3) synthesize one GOT slot atom per remaining gotMap entry
//   4) re-point the recorded referencing fixups at their slots, then sort
//      the non-lazy-pointer sections by name for deterministic output
void doPass(const Options& opts, ld::Internal& internal)
{
	const bool log = false;

	// only make got section in final linked images
	if ( opts.outputKind() == Options::kObjectFile )
		return;

	// pre-fill gotMap with existing non-lazy pointers
	// (key: target atom + personality flag; value: the slot atom, or NULL
	// if a slot still has to be created in phase 3)
	std::map<GotMapEntry, const ld::Atom*> gotMap;
	for (ld::Internal::FinalSection* sect : internal.sections) {
		if ( sect->type() != ld::Section::typeNonLazyPointer )
			continue;
		for (const ld::Atom* atom : sect->atoms) {
			const ld::Atom* target = NULL;
			// find the pointer-store fixup on this slot to learn its target
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				switch (fit->kind) {
					case ld::Fixup::kindStoreTargetAddressLittleEndian64:
					case ld::Fixup::kindStoreTargetAddressLittleEndian32:
						switch ( fit->binding ) {
							case ld::Fixup::bindingsIndirectlyBound:
								target = internal.indirectBindingTable[fit->u.bindingIndex];
								break;
							case ld::Fixup::bindingDirectlyBound:
								target = fit->u.target;
								break;
							default:
								fprintf(stderr, "non-pointer is got entry\n");
								break;
						}
						break;
					default:
						break;
				}
			}
			if ( target != NULL ) {
				if (log) fprintf(stderr, "found existing got entry to %s\n", target->name());
				gotMap[{ target, false }] = atom;
			}
		}
	}

	// walk all atoms and fixups looking for GOT-able references
	// don't create GOT atoms during this loop because that could invalidate the sections iterator
	std::vector<const ld::Atom*> atomsReferencingGOT;
	std::map<const ld::Atom*,bool> weakImportMap;	// per-target weak_import attribute (mismatches reconciled below)
	std::map<const ld::Atom*,bool> weakDefMap;		// per-target "is external weak def" flag
	atomsReferencingGOT.reserve(128);
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
			const ld::Atom* atom = *ait;
			bool atomUsesGOT = false;
			const ld::Atom* targetOfGOT = NULL;
			bool targetIsWeakImport = false;
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				// a new fixup cluster starts a fresh target
				if ( fit->firstInCluster() )
					targetOfGOT = NULL;
				switch ( fit->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
						targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
						targetIsWeakImport = fit->weakImport;
						break;
					case ld::Fixup::bindingDirectlyBound:
						targetOfGOT = fit->u.target;
						targetIsWeakImport = fit->weakImport;
						break;
					default:
						break;
				}
				bool optimizable;
				bool targetIsExternalWeakDef;
				bool targetIsPersonalityFn;
				if ( !gotFixup(opts, internal, targetOfGOT, atom, fit, &optimizable, &targetIsExternalWeakDef, &targetIsPersonalityFn) )
					continue;
				if ( optimizable ) {
					// change from load of GOT entry to lea of target
					if ( log ) fprintf(stderr, "optimized GOT usage in %s to %s\n", atom->name(), targetOfGOT->name());
					switch ( fit->binding ) {
						case ld::Fixup::bindingsIndirectlyBound:
						case ld::Fixup::bindingDirectlyBound:
							// rebind directly to the target and switch the
							// fixup kind to its LEA counterpart
							fit->binding = ld::Fixup::bindingDirectlyBound;
							fit->u.target = targetOfGOT;
							switch ( fit->kind ) {
								case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
									fit->kind = ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoadNowLEA;
									break;
#if SUPPORT_ARCH_arm64
								case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
									fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPage21;
									break;
								case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
									fit->kind = ld::Fixup::kindStoreTargetAddressARM64GOTLeaPageOff12;
									break;
#endif
								default:
									assert(0 && "unsupported GOT reference kind");
									break;
							}
							break;
						default:
							assert(0 && "unsupported GOT reference");
							break;
					}
				}
				else {
					// remember that we need to use GOT in this function
					if ( log ) fprintf(stderr, "found GOT use in %s\n", atom->name());
					if ( !atomUsesGOT ) {
						atomsReferencingGOT.push_back(atom);
						atomUsesGOT = true;
					}
					// reserve a slot (NULL => created in phase 3) without
					// clobbering a pre-existing slot found in phase 1
					if ( gotMap.count({ targetOfGOT, targetIsPersonalityFn }) == 0 )
						gotMap[{ targetOfGOT, targetIsPersonalityFn }] = NULL;
					// record if target is weak def
					weakDefMap[targetOfGOT] = targetIsExternalWeakDef;
					// record weak_import attribute
					std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(targetOfGOT);
					if ( pos == weakImportMap.end() ) {
						// target not in weakImportMap, so add
						if ( log ) fprintf(stderr, "weakImportMap[%s] = %d\n", targetOfGOT->name(), targetIsWeakImport);
						weakImportMap[targetOfGOT] = targetIsWeakImport;
					}
					else {
						// target in weakImportMap, check for weakness mismatch
						if ( pos->second != targetIsWeakImport ) {
							// found mismatch
							switch ( opts.weakReferenceMismatchTreatment() ) {
								case Options::kWeakReferenceMismatchError:
									throwf("mismatching weak references for symbol: %s", targetOfGOT->name());
								case Options::kWeakReferenceMismatchWeak:
									pos->second = true;
									break;
								case Options::kWeakReferenceMismatchNonWeak:
									pos->second = false;
									break;
							}
						}
					}
				}
			}
		}
	}

	// pointer size for the synthesized slots, derived from the target arch
	bool is64 = false;
	switch ( opts.architecture() ) {
#if SUPPORT_ARCH_i386
		case CPU_TYPE_I386:
			is64 = false;
			break;
#endif
#if SUPPORT_ARCH_x86_64
		case CPU_TYPE_X86_64:
			is64 = true;
			break;
#endif
#if SUPPORT_ARCH_arm_any
		case CPU_TYPE_ARM:
			is64 = false;
			break;
#endif
#if SUPPORT_ARCH_arm64
		case CPU_TYPE_ARM64:
			is64 = true;
			break;
#endif
	}

	// make GOT entries
	// (the atom constructors register themselves with 'internal'; entries
	// that already had a slot from phase 1 are non-NULL and skipped)
	for (auto& entry : gotMap) {
		if ( entry.second == NULL ) {
#if SUPPORT_ARCH_arm64e
			// personality functions get an authenticated slot on arm64e
			if ( entry.first.isPersonalityFn && (opts.supportsAuthenticatedPointers()) ) {
				entry.second = new GOTAuthEntryAtom(internal, entry.first.atom, weakImportMap[entry.first.atom], opts.useDataConstSegment() && weakDefMap[entry.first.atom]);
				if (log) fprintf(stderr, "making new GOT slot for %s, gotMap[%p] = %p\n", entry.first.atom->name(), entry.first.atom, entry.second);
				continue;
			}
#endif
			entry.second = new GOTEntryAtom(internal, entry.first.atom, weakImportMap[entry.first.atom], opts.useDataConstSegment() && weakDefMap[entry.first.atom], is64);
			if (log) fprintf(stderr, "making new GOT slot for %s, gotMap[%p] = %p\n", entry.first.atom->name(), entry.first.atom, entry.second);
		}
	}


	// update atoms to use GOT entries
	for (std::vector<const ld::Atom*>::iterator it=atomsReferencingGOT.begin(); it != atomsReferencingGOT.end(); ++it) {
		const ld::Atom* atom = *it;
		const ld::Atom* targetOfGOT = NULL;
		// the fixup in the cluster that carries the binding; that is the one
		// re-pointed at the GOT slot below
		ld::Fixup::iterator fitThatSetTarget = NULL;
		for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
			if ( fit->firstInCluster() ) {
				targetOfGOT = NULL;
				fitThatSetTarget = NULL;
			}
			switch ( fit->binding ) {
				case ld::Fixup::bindingsIndirectlyBound:
					targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
					fitThatSetTarget = fit;
					break;
				case ld::Fixup::bindingDirectlyBound:
					targetOfGOT = fit->u.target;
					fitThatSetTarget = fit;
					break;
				default:
					break;
			}
			bool optimizable;
			bool targetIsExternalWeakDef;
			bool targetIsPersonalityFn;
			if ( (targetOfGOT == NULL) || !gotFixup(opts, internal, targetOfGOT, atom, fit,
													&optimizable, &targetIsExternalWeakDef, &targetIsPersonalityFn) )
				continue;
			if ( !optimizable ) {
				// GOT use not optimized away, update to bind to GOT entry
				assert(fitThatSetTarget != NULL);
				switch ( fitThatSetTarget->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
					case ld::Fixup::bindingDirectlyBound:
						if ( log ) fprintf(stderr, "updating GOT use in %s to %s\n", atom->name(), targetOfGOT->name());
						fitThatSetTarget->binding = ld::Fixup::bindingDirectlyBound;
						fitThatSetTarget->u.target = gotMap[{ targetOfGOT, targetIsPersonalityFn }];
						break;
					default:
						assert(0 && "unsupported GOT reference");
						break;
				}
			}
		}
	}

	// sort new atoms so links are consistent
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		if ( sect->type() == ld::Section::typeNonLazyPointer ) {
			std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
		}
	}
}
459
460
461} // namespace got
462} // namespace passes
463} // namespace ld