src/ld/passes/got.cpp  (apple/ld64, ld64-123.2.1)
/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2009 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */


#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <dlfcn.h>
#include <string.h>		// strcmp()
#include <stdio.h>		// fprintf()
#include <assert.h>		// assert()

#include <vector>
#include <map>
#include <algorithm>	// std::sort()
#include <ext/hash_map>

#include "ld.hpp"
#include "got.h"

namespace ld {
namespace passes {
namespace got {

class File; // forward reference

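//
// A GOTEntryAtom is a synthesized 8-byte non-lazy pointer placed in the
// __DATA,__got section.  Its single fixup stores the target atom's address
// into the slot, so code can reach the target indirectly through the GOT.
//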
class GOTEntryAtom : public ld::Atom {
public:
	GOTEntryAtom(ld::Internal& internal, const ld::Atom* target, bool weakImport)
		: ld::Atom(_s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
					ld::Atom::scopeLinkageUnit, ld::Atom::typeNonLazyPointer,
					symbolTableNotIn, false, false, false, ld::Atom::Alignment(3)),
		  _fixup(0, ld::Fixup::k1of1, ld::Fixup::kindStoreTargetAddressLittleEndian64, target),
		  _target(target)
		{ _fixup.weakImport = weakImport; internal.addAtom(*this); }

	virtual const ld::File*			file() const			{ return NULL; }
	virtual bool					translationUnitSource(const char** dir, const char**) const
										{ return false; }
	virtual const char*				name() const			{ return _target->name(); }
	virtual uint64_t				size() const			{ return 8; }
	virtual uint64_t				objectAddress() const	{ return 0; }
	virtual void					copyRawContent(uint8_t buffer[]) const { }
	virtual void					setScope(Scope)			{ }
	virtual ld::Fixup::iterator		fixupsBegin() const		{ return &_fixup; }
	virtual ld::Fixup::iterator		fixupsEnd() const		{ return &((ld::Fixup*)&_fixup)[1]; }

private:
	mutable ld::Fixup				_fixup;
	const ld::Atom*					_target;

	static ld::Section				_s_section;
};

ld::Section GOTEntryAtom::_s_section("__DATA", "__got", ld::Section::typeNonLazyPointer);

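//
// gotFixup() returns true if the fixup is a reference through the GOT.  When it
// is, *optimizable tells the caller whether the indirect load of the GOT slot
// can instead be rewritten as a direct LEA of the target.  That is only safe
// when the target's address is fixed within this linkage unit: not a proxy from
// another dylib, not a weak, interposable, or resolver symbol, not flat
// namespace, and not in a huge zero-fill or tentative-definition section.
//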
static bool gotFixup(const Options& opts, ld::Internal& internal, const ld::Atom* targetOfGOT, const ld::Fixup* fixup, bool* optimizable)
{
	switch (fixup->kind) {
		case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
			// start by assuming this can be optimized
			*optimizable = true;
			// cannot do LEA optimization if target is in another dylib
			if ( targetOfGOT->definition() == ld::Atom::definitionProxy )
				*optimizable = false;
			// cannot do LEA optimization if target is in a __huge section
			if ( internal.usingHugeSections && (targetOfGOT->size() > 1024*1024)
					&& ( (targetOfGOT->section().type() == ld::Section::typeZeroFill)
						|| (targetOfGOT->section().type() == ld::Section::typeTentativeDefs)) ) {
				*optimizable = false;
			}
			if ( targetOfGOT->scope() == ld::Atom::scopeGlobal ) {
				// cannot do LEA optimization if target is a weak exported symbol
				if ( (targetOfGOT->definition() == ld::Atom::definitionRegular) && (targetOfGOT->combine() == ld::Atom::combineByName) )
					*optimizable = false;
				// cannot do LEA optimization if target is interposable
				if ( opts.interposable(targetOfGOT->name()) )
					*optimizable = false;
				// cannot do LEA optimization if target is a resolver function
				if ( targetOfGOT->contentType() == ld::Atom::typeResolver )
					*optimizable = false;
				// cannot do LEA optimization for flat-namespace
				if ( opts.nameSpace() != Options::kTwoLevelNameSpace )
					*optimizable = false;
			}
			return true;
		case ld::Fixup::kindStoreX86PCRel32GOT:
			*optimizable = false;
			return true;
		default:
			break;
	}

	return false;
}

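//
// GOT entry atoms are sorted by the name of the symbol they point to, so the
// layout of the __got section is deterministic from link to link.
//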
struct AtomByNameSorter
{
	bool operator()(const ld::Atom* left, const ld::Atom* right)
	{
		return (strcmp(left->name(), right->name()) < 0);
	}
};

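//
// doPass() runs in two phases.  A first walk over all atoms finds every fixup
// that references the GOT: references that can be optimized are rewritten in
// place from a GOT load to an LEA of the target, and the rest are recorded.
// A GOTEntryAtom is then created for each remaining target, the recorded
// fixups are re-bound to those entries, and the __got section is sorted by
// name so output is reproducible.
//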
void doPass(const Options& opts, ld::Internal& internal)
{
	const bool log = false;

	// only make got section in final linked images
	if ( opts.outputKind() == Options::kObjectFile )
		return;

	// walk all atoms and fixups looking for GOT references
	// don't create GOT entries inline because that could invalidate the sections walk
	std::vector<const ld::Atom*> atomsReferencingGOT;
	std::map<const ld::Atom*,ld::Atom*> gotMap;
	std::map<const ld::Atom*,bool> weakImportMap;
	atomsReferencingGOT.reserve(128);
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
			const ld::Atom* atom = *ait;
			bool atomUsesGOT = false;
			const ld::Atom* targetOfGOT = NULL;
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				if ( fit->firstInCluster() )
					targetOfGOT = NULL;
				switch ( fit->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
						targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
						break;
					case ld::Fixup::bindingDirectlyBound:
						targetOfGOT = fit->u.target;
						break;
					default:
						break;
				}
				bool optimizable;
				if ( !gotFixup(opts, internal, targetOfGOT, fit, &optimizable) )
					continue;
				if ( optimizable ) {
					// change from load of GOT entry to lea of target
					if ( log ) fprintf(stderr, "optimized GOT usage in %s to %s\n", atom->name(), targetOfGOT->name());
					switch ( fit->binding ) {
						case ld::Fixup::bindingsIndirectlyBound:
						case ld::Fixup::bindingDirectlyBound:
							fit->binding = ld::Fixup::bindingDirectlyBound;
							fit->u.target = targetOfGOT;
							fit->kind = ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoadNowLEA;
							break;
						default:
							assert(0 && "unsupported GOT reference");
							break;
					}
				}
				else {
					// remember that we need to use GOT in this function
					if ( log ) fprintf(stderr, "found GOT use in %s to %s\n", atom->name(), targetOfGOT->name());
					if ( !atomUsesGOT ) {
						atomsReferencingGOT.push_back(atom);
						atomUsesGOT = true;
					}
					gotMap[targetOfGOT] = NULL;
					// record weak_import attribute
					std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(targetOfGOT);
					if ( pos == weakImportMap.end() ) {
						// target not in weakImportMap, so add
						weakImportMap[targetOfGOT] = fit->weakImport;
						// <rdar://problem/5529626> If only weak_import symbols are used, linker should use LC_LOAD_WEAK_DYLIB
						const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(targetOfGOT->file());
						if ( dylib != NULL ) {
							if ( fit->weakImport )
								(const_cast<ld::dylib::File*>(dylib))->setUsingWeakImportedSymbols();
							else
								(const_cast<ld::dylib::File*>(dylib))->setUsingNonWeakImportedSymbols();
						}
					}
					else {
						// target in weakImportMap, check for weakness mismatch
						if ( pos->second != fit->weakImport ) {
							// found mismatch
							switch ( opts.weakReferenceMismatchTreatment() ) {
								case Options::kWeakReferenceMismatchError:
									throwf("mismatching weak references for symbol: %s", targetOfGOT->name());
								case Options::kWeakReferenceMismatchWeak:
									pos->second = true;
									break;
								case Options::kWeakReferenceMismatchNonWeak:
									pos->second = false;
									break;
							}
						}
					}
				}
			}
		}
	}

	// make GOT entries
	for (std::map<const ld::Atom*,ld::Atom*>::iterator it = gotMap.begin(); it != gotMap.end(); ++it) {
		it->second = new GOTEntryAtom(internal, it->first, weakImportMap[it->first]);
	}

	// update atoms to use GOT entries
	for (std::vector<const ld::Atom*>::iterator it=atomsReferencingGOT.begin(); it != atomsReferencingGOT.end(); ++it) {
		const ld::Atom* atom = *it;
		const ld::Atom* targetOfGOT = NULL;
		ld::Fixup::iterator fitThatSetTarget = NULL;
		for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
			if ( fit->firstInCluster() ) {
				targetOfGOT = NULL;
				fitThatSetTarget = NULL;
			}
			switch ( fit->binding ) {
				case ld::Fixup::bindingsIndirectlyBound:
					targetOfGOT = internal.indirectBindingTable[fit->u.bindingIndex];
					fitThatSetTarget = fit;
					break;
				case ld::Fixup::bindingDirectlyBound:
					targetOfGOT = fit->u.target;
					fitThatSetTarget = fit;
					break;
				default:
					break;
			}
			bool optimizable;
			if ( (targetOfGOT == NULL) || !gotFixup(opts, internal, targetOfGOT, fit, &optimizable) )
				continue;
			if ( !optimizable ) {
				// GOT use not optimized away, update to bind to GOT entry
				assert(fitThatSetTarget != NULL);
				switch ( fitThatSetTarget->binding ) {
					case ld::Fixup::bindingsIndirectlyBound:
					case ld::Fixup::bindingDirectlyBound:
						fitThatSetTarget->binding = ld::Fixup::bindingDirectlyBound;
						fitThatSetTarget->u.target = gotMap[targetOfGOT];
						break;
					default:
						assert(0 && "unsupported GOT reference");
						break;
				}
			}
		}
	}

	// sort new atoms so links are consistent
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=internal.sections.begin(); sit != internal.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		if ( sect->type() == ld::Section::typeNonLazyPointer ) {
			std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
		}
	}
}


} // namespace got
} // namespace passes
} // namespace ld