]> git.saurik.com Git - apple/ld64.git/blob - src/ld/passes/stubs/stubs.cpp
abed7e3b62f64bb49076115f5f4ff77df9becd27
[apple/ld64.git] / src / ld / passes / stubs / stubs.cpp
1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25
26
#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <assert.h>
#include <string.h>		// strcmp() used by AtomByNameSorter; previously reached only transitively
#include <libkern/OSByteOrder.h>

#include <algorithm>	// std::sort() used in Pass::process(); previously reached only transitively
#include <vector>
#include <set>
#include <map>

#include "Options.h"
#include "MachOFileAbstraction.hpp"
#include "ld.hpp"

#include "make_stubs.h"
42
43
44 namespace ld {
45 namespace passes {
46 namespace stubs {
47
// Linker pass that finds every call/pointer which must be indirected through a
// stub (dyld lazy-binding trampoline), then synthesizes per-architecture stub
// atoms and retargets the fixups at them.  Driven once per link by doPass().
class Pass {
public:
								Pass(const Options& opts);
	// Entry point: scans 'internal', adds stub/lazy-pointer atoms and rewrites
	// stubable fixups to point at the new stubs.
	void						process(ld::Internal& internal);
	// Called by the per-arch StubAtom constructors (stub_*.hpp, included below)
	// to register the atoms they create.
	void						addAtom(const ld::Atom& atom)	{ _internal->addAtom(atom); }
	// True when emitting compressed LINKEDIT (modern dyld info) rather than
	// classic indirect-symbol-table lazy binding.
	bool						usingCompressedLINKEDIT() const { return _compressedLINKEDIT; }
	ld::Internal*				internal()						{ return _internal; }

	// Shared helper atoms, created lazily by the stub_*.hpp implementations and
	// cached here so all stubs in the image reuse them.
	// NOTE(review): 'Atom' here is unqualified — presumably forward-declared in
	// make_stubs.h; confirm before moving this declaration.
	Atom*						compressedHelperHelper;
	Atom*						compressedImageCache;
	Atom*						compressedFastBinderPointer;

private:

	// Orders stub-related atoms by symbol name so output is deterministic
	// across runs (used on the stub/lazy-pointer sections in process()).
	struct AtomByNameSorter
	{
		 bool operator()(const ld::Atom* left, const ld::Atom* right)
		 {
			  return (strcmp(left->name(), right->name()) < 0);
		 }
	};

	// Returns the fixup's target if that target needs a stub, else NULL.
	const ld::Atom*				stubableFixup(const ld::Fixup* fixup, ld::Internal&);
	// Creates the architecture-appropriate stub atom for 'target'.
	ld::Atom*					makeStub(const ld::Atom& target, bool weakImport);
	// Throws if any resolver function exists; used for output kinds without stubs.
	void						verifyNoResolverFunctions(ld::Internal& state);

	const Options&				_options;
	const cpu_type_t			_architecture;
	// NOTE: member name carries a historical typo ("InUuse"); kept as-is since
	// renaming would touch the constructor independently.
	const bool					_lazyDylibsInUuse;			// -lazy_library / lazy dylib linking in effect
	const bool					_compressedLINKEDIT;		// modern dyld info vs. classic lazy binding
	const bool					_prebind;
	const bool					_mightBeInSharedRegion;		// dylib may go into the dyld shared cache
	const bool					_pic;						// output is slidable; stubs must be PIC
	const bool					_flatNamespace;				// -flat_namespace rather than two-level
	ld::Internal*				_internal;					// link state being processed (set in process())
	uint32_t					_stubCount;
	bool						_largeText;					// __TEXT too big/over-aligned for arm close stubs
};
86
87 #include "stub_x86_64.hpp"
88 #include "stub_x86_64_classic.hpp"
89 #include "stub_x86.hpp"
90 #include "stub_x86_classic.hpp"
91 #include "stub_arm.hpp"
92 #include "stub_arm_classic.hpp"
93 #if SUPPORT_ARCH_arm64
94 #include "stub_arm64.hpp"
95 #endif
96
// Snapshot the link options into const members; all policy decisions during
// process() read these cached flags rather than re-querying Options.
// The shared helper-atom pointers start NULL and are filled in lazily by the
// per-arch stub implementations.
Pass::Pass(const Options& opts)
	:	compressedHelperHelper(NULL),
		compressedImageCache(NULL),
		compressedFastBinderPointer(NULL),
		_options(opts),
		_architecture(opts.architecture()),
		_lazyDylibsInUuse(opts.usingLazyDylibLinking()),
		_compressedLINKEDIT(opts.makeCompressedDyldInfo()),
		_prebind(opts.prebind()),
		_mightBeInSharedRegion(opts.sharedRegionEligible()),
		_pic(opts.outputSlidable()),
		// anything other than two-level namespace is treated as flat
		_flatNamespace(opts.nameSpace() != Options::kTwoLevelNameSpace),
		_internal(NULL), _stubCount(0), _largeText(false)
{

}
113
114
// Decides whether 'fixup' references something that must be reached through a
// stub.  Returns the fixup's (indirect) target atom when a stub is required,
// or NULL when the reference can stay direct.  Only indirectly-bound fixups
// are candidates; the branch-kind cases encode linker binding policy, the
// default case catches non-branch pointers to resolver functions.
const ld::Atom* Pass::stubableFixup(const ld::Fixup* fixup, ld::Internal& state)
{
	if ( fixup->binding == ld::Fixup::bindingsIndirectlyBound ) {
		const ld::Atom* target = state.indirectBindingTable[fixup->u.bindingIndex];
		switch ( fixup->kind ) {
			// direct call sites, one kind per architecture's branch relocation
			case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
			case ld::Fixup::kindStoreTargetAddressARMBranch24:
			case ld::Fixup::kindStoreTargetAddressThumbBranch22:
#if SUPPORT_ARCH_arm64
			case ld::Fixup::kindStoreTargetAddressARM64Branch26:
#endif
				assert(target != NULL);
				// create stub if target is in a dylib
				if ( target->definition() == ld::Atom::definitionProxy )
					return target;
				// use stub if target is a resolver function in same linkage unit
				if ( target->contentType() == ld::Atom::typeResolver )
					return target;
				if ( target->scope() == ld::Atom::scopeGlobal ) {
					// create stub if target is global weak definition in symbol table
					// (so a stronger definition elsewhere can win at runtime)
					if ( (target->definition() == ld::Atom::definitionRegular)
						&& (target->combine() == ld::Atom::combineByName)
						&& ((target->symbolTableInclusion() == ld::Atom::symbolTableIn)
						 || (target->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)) ) {
						// don't make stubs for auto-hide symbols
						// (unless an export list explicitly keeps them exported)
						if ( target->autoHide() && (!_options.hasExportMaskList() || !_options.shouldExport(target->name())) )
							return NULL;
						return target;
					}
					// create stub if target is interposable
					if ( _options.interposable(target->name()) )
						return target;
					if ( _flatNamespace ) {
						// flat namespace does not indirect calls within main executables
						if ( _options.outputKind() == Options::kDynamicExecutable )
							return NULL;
						// create stub if target is global and building -flat dylib or bundle
						return target;
					}
				}
				break;
			default:
				if ( target->contentType() == ld::Atom::typeResolver ) {
					// any pointer to a resolver needs to change to pointer to stub
					return target;
				}
				break;
		}
	}
	return NULL;
}
166
167
168
// Builds and returns the architecture-appropriate stub atom for 'target'.
// Selects between modern (compressed LINKEDIT), classic, and kext stub
// flavors; on arm it additionally chooses close/PIC/non-PIC variants based
// on text size and shared-region eligibility.  Throws if no stub kind
// applies for the current architecture.
ld::Atom* Pass::makeStub(const ld::Atom& target, bool weakImport)
{
	//fprintf(stderr, "makeStub(target=%p %s in sect %s, def=%d)\n", &target, target.name(), target.section().sectionName(), target.definition());
	// weak-def targets (regular global weak or proxy) need a stub whose lazy
	// pointer dyld may rebind to a stronger definition
	bool stubToGlobalWeakDef = ( (target.combine() == ld::Atom::combineByName) &&
								 (((target.definition() == ld::Atom::definitionRegular) && (target.scope() == ld::Atom::scopeGlobal))
								  || (target.definition() == ld::Atom::definitionProxy)) );

	// lazily-loaded dylibs always use classic stubs
	bool forLazyDylib = false;
	const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target.file());
	if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
		forLazyDylib = true;
	bool stubToResolver = (target.contentType() == ld::Atom::typeResolver);
#if SUPPORT_ARCH_arm_any || SUPPORT_ARCH_arm64
	bool usingDataConst = _options.useDataConstSegment();
#endif

	// modern stubs lazy-bind through dyld_stub_binder, so it must have been found
	if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
		if ( _internal->compressedFastBinderProxy == NULL )
			throwf("symbol dyld_stub_binder not found (normally in libSystem.dylib).  Needed to perform lazy binding to function %s", target.name());
	}

	switch ( _architecture ) {
#if SUPPORT_ARCH_i386
		case CPU_TYPE_I386:
			if ( usingCompressedLINKEDIT() && !forLazyDylib )
				return new ld::passes::stubs::x86::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
			else
				return new ld::passes::stubs::x86::classic::StubAtom(*this, target, forLazyDylib, weakImport);
			break;
#endif
#if SUPPORT_ARCH_x86_64
		case CPU_TYPE_X86_64:
			if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
				return new ld::passes::stubs::x86_64::KextStubAtom(*this, target);
			else if ( usingCompressedLINKEDIT() && !forLazyDylib )
				return new ld::passes::stubs::x86_64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
			else
				return new ld::passes::stubs::x86_64::classic::StubAtom(*this, target, forLazyDylib, weakImport);
			break;
#endif
#if SUPPORT_ARCH_arm_any
		case CPU_TYPE_ARM:
			if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() ) {
				// if text relocs are not allows in kext bundles, then linker must create a stub
				return new ld::passes::stubs::arm::StubPICKextAtom(*this, target);
			}
			else if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
				// close stubs use short branches, only viable while the count is
				// small and __TEXT is neither huge nor shared-region constrained
				if ( (_stubCount < 900) && !_mightBeInSharedRegion && !_largeText )
					return new ld::passes::stubs::arm::StubCloseAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
				else if ( _pic )
					return new ld::passes::stubs::arm::StubPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
				else
					return new ld::passes::stubs::arm::StubNoPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
			}
			else {
				if ( _pic )
					return new ld::passes::stubs::arm::classic::StubPICAtom(*this, target, forLazyDylib, weakImport);
				else
					return new ld::passes::stubs::arm::classic::StubNoPICAtom(*this, target, forLazyDylib, weakImport);
			}
			break;
#endif
#if SUPPORT_ARCH_arm64
		case CPU_TYPE_ARM64:
			if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
				return new ld::passes::stubs::arm64::KextStubAtom(*this, target);
			else
				return new ld::passes::stubs::arm64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
			break;
#endif
	}
	throw "unsupported arch for stub";
}
242
243
244 void Pass::verifyNoResolverFunctions(ld::Internal& state)
245 {
246 for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
247 ld::Internal::FinalSection* sect = *sit;
248 for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
249 const ld::Atom* atom = *ait;
250 if ( atom->contentType() == ld::Atom::typeResolver )
251 throwf("resolver function '%s' not supported in type of output", atom->name());
252 }
253 }
254 }
255
// Main driver of the stubs pass:
//  1. bail out (after resolver-function validation) for output kinds without stubs
//  2. scan every fixup of every atom, collecting targets that need stubs and
//     the weak_import attribute to use for each
//  3. create one stub atom per unique target
//  4. rewrite the collected fixups to bind directly to the new stubs
//  5. sort stub-related sections by name for deterministic output
void Pass::process(ld::Internal& state)
{
	switch ( _options.outputKind() ) {
		case Options::kObjectFile:
			// these kinds don't use stubs and can have resolver functions
			return;
		case Options::kStaticExecutable:
		case Options::kPreload:
		case Options::kDyld:
			// these kinds don't use stubs and cannot have resolver functions
			verifyNoResolverFunctions(state);
			return;
		case Options::kDynamicLibrary:
			// uses stubs and can have resolver functions
			break;
		case Options::kKextBundle:
			verifyNoResolverFunctions(state);
			// if kext don't use stubs, don't do this pass
			if ( !_options.kextsUseStubs() )
				return;
			break;
		case Options::kDynamicExecutable:
		case Options::kDynamicBundle:
			// these kinds do use stubs and cannot have resolver functions
			verifyNoResolverFunctions(state);
			break;
	}

	// walk all atoms and fixups looking for stubable references
	// don't create stubs inline because that could invalidate the sections iterator
	std::vector<const ld::Atom*> atomsCallingStubs;
	std::map<const ld::Atom*,ld::Atom*> stubFor;			// target -> its stub (filled in later)
	std::map<const ld::Atom*,bool> weakImportMap;			// target -> effective weak_import attribute
	atomsCallingStubs.reserve(128);
	uint64_t codeSize = 0;									// total atom size, feeds the arm close-stub heuristic
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
			const ld::Atom* atom = *ait;
			codeSize += atom->size();
			bool atomNeedsStub = false;
			for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
				const ld::Atom* stubableTargetOfFixup = stubableFixup(fit, state);
				if ( stubableTargetOfFixup != NULL ) {
					// remember each calling atom only once, however many stubable fixups it has
					if ( !atomNeedsStub ) {
						atomsCallingStubs.push_back(atom);
						atomNeedsStub = true;
					}
					stubFor[stubableTargetOfFixup] = NULL;
					// record weak_import attribute
					std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(stubableTargetOfFixup);
					if ( pos == weakImportMap.end() ) {
						// target not in weakImportMap, so add
						weakImportMap[stubableTargetOfFixup] = fit->weakImport;
					}
					else {
						// target in weakImportMap, check for weakness mismatch
						if ( pos->second != fit->weakImport ) {
							// found mismatch; resolve per the -weak_reference_mismatches policy
							switch ( _options.weakReferenceMismatchTreatment() ) {
								case Options::kWeakReferenceMismatchError:
									throwf("mismatching weak references for symbol: %s", stubableTargetOfFixup->name());
								case Options::kWeakReferenceMismatchWeak:
									pos->second = true;
									break;
								case Options::kWeakReferenceMismatchNonWeak:
									pos->second = false;
									break;
							}
						}
					}
				}
			}
			// all resolver functions must have a corresponding stub
			if ( atom->contentType() == ld::Atom::typeResolver ) {
				if ( _options.outputKind() != Options::kDynamicLibrary )
					throwf("resolver functions (%s) can only be used in dylibs", atom->name());
				if ( !_options.makeCompressedDyldInfo() ) {
					// resolver support requires the modern (compressed) dyld info format
					if ( _options.architecture() == CPU_TYPE_ARM )
						throwf("resolver functions (%s) can only be used when targeting iOS 4.2 or later", atom->name());
					else
						throwf("resolver functions (%s) can only be used when targeting Mac OS X 10.6 or later", atom->name());
				}
				stubFor[atom] = NULL;
			}
		}
	}

	const bool needStubForMain = _options.needsEntryPointLoadCommand()
		&& (state.entryPoint != NULL)
		&& (state.entryPoint->definition() == ld::Atom::definitionProxy);
	if ( needStubForMain ) {
		// _main not found in any .o files. Currently have proxy to dylib
		// Add to map, so that a stub will be made
		stubFor[state.entryPoint] = NULL;
	}

	// short circuit if no stubs needed
	_internal = &state;
	_stubCount = stubFor.size();
	if ( _stubCount == 0 )
		return;

	// <rdar://problem/8553283> lazily check for helper
	// (classic lazy binding jumps through dyld_stub_binding_helper from the startup object)
	if ( !_options.makeCompressedDyldInfo() && (state.classicBindingHelper == NULL) && (_options.outputKind() != Options::kKextBundle) )
		throw "symbol dyld_stub_binding_helper not found, normally in crt1.o/dylib1.o/bundle1.o";

	// disable arm close stubs in some cases
	// (close stubs use short branches, so __TEXT must stay small and packable)
	if ( _architecture == CPU_TYPE_ARM ) {
		if ( codeSize > 4*1024*1024 )
			_largeText = true;
		else {
			for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
				ld::Internal::FinalSection* sect = *sit;
				if ( sect->type() == ld::Section::typeMachHeader )
					continue;
				if ( strcmp(sect->segmentName(), "__TEXT") == 0) {
					for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
						const ld::Atom* atom = *ait;
						if ( atom->alignment().powerOf2 > 10 ) {
							// overaligned section means might not be able to keep closestubs sect pushed to end of __TEXT
							//warning("alignment 1<<%d in atom %s in section %s disables close stubs optimization",
							//	atom->alignment().powerOf2, atom->name(), sect->segmentName());
							_largeText = true;
							break;
						}
					}
				}
			}
		}
	}

	// make stub atoms
	for (std::map<const ld::Atom*,ld::Atom*>::iterator it = stubFor.begin(); it != stubFor.end(); ++it) {
		it->second = makeStub(*it->first, weakImportMap[it->first]);
	}

	// updated atoms to use stubs
	// (second pass over the same fixups found above, now rebinding them directly to the stubs)
	for (std::vector<const ld::Atom*>::iterator it=atomsCallingStubs.begin(); it != atomsCallingStubs.end(); ++it) {
		const ld::Atom* atom = *it;
		for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
			const ld::Atom* stubableTargetOfFixup = stubableFixup(fit, state);
			if ( stubableTargetOfFixup != NULL ) {
				ld::Atom* stub = stubFor[stubableTargetOfFixup];
				assert(stub != NULL && "stub not created");
				fit->binding = ld::Fixup::bindingDirectlyBound;
				fit->u.target = stub;
			}
		}
	}

	// switch entry point from proxy to stub
	if ( needStubForMain ) {
		const ld::Atom* mainStub = stubFor[state.entryPoint];
		assert(mainStub != NULL);
		state.entryPoint = mainStub;
	}

	// sort new atoms so links are consistent
	for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
		ld::Internal::FinalSection* sect = *sit;
		switch ( sect->type() ) {
			case ld::Section::typeStubHelper:
			case ld::Section::typeStub:
			case ld::Section::typeStubClose:
			case ld::Section::typeLazyPointer:
			case ld::Section::typeLazyPointerClose:
				std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
				break;
			default:
				break;
		}
	}

}
431
432
433 void doPass(const Options& opts, ld::Internal& internal)
434 {
435 Pass pass(opts);
436 pass.process(internal);
437 }
438
439
440
441 } // namespace stubs
442 } // namespace passes
443 } // namespace ld
444