/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
#include <string.h>
#include <stdint.h>
#include <assert.h>
#include <libkern/OSByteOrder.h>

#include <vector>
#include <map>
#include <algorithm>

#include "Options.h"
#include "ld.hpp"
#include "MachOFileAbstraction.hpp"
#include "make_stubs.h"
namespace ld {
namespace passes {
namespace stubs {

class Pass {
public:
                            Pass(const Options& opts);
    void                    process(ld::Internal& internal);
    void                    addAtom(const ld::Atom& atom)    { _internal->addAtom(atom); }
    bool                    usingCompressedLINKEDIT() const  { return _compressedLINKEDIT; }
    ld::Internal*           internal()                       { return _internal; }

    Atom*                   compressedHelperHelper;
    Atom*                   compressedImageCache;
    Atom*                   compressedFastBinderPointer;

private:

    struct AtomByNameSorter
    {
        bool operator()(const ld::Atom* left, const ld::Atom* right)
        {
            return (strcmp(left->name(), right->name()) < 0);
        }
    };

    const ld::Atom*         stubableFixup(const ld::Fixup* fixup, ld::Internal&);
    ld::Atom*               makeStub(const ld::Atom& target, bool weakImport);
    void                    verifyNoResolverFunctions(ld::Internal& state);

    const Options&          _options;
    const cpu_type_t        _architecture;
    const bool              _lazyDylibsInUuse;
    const bool              _compressedLINKEDIT;
    const bool              _prebind;
    const bool              _mightBeInSharedRegion;
    const bool              _pic;
    const bool              _flatNamespace;
    ld::Internal*           _internal;
    uint32_t                _stubCount;
    bool                    _largeText;
};
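// Note (editorial sketch, not from the original source): the three public Atom*
// members of Pass appear to serve as per-link caches that the architecture-specific
// stub templates included below fill in lazily, so shared helper atoms (image cache,
// fast-binder pointer, helper-helper) are created at most once per link.
// AtomByNameSorter is a strict-weak-ordering comparator used later in process(), e.g.:
//
//     std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());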
87 #include "stub_x86_64.hpp"
88 #include "stub_x86_64_classic.hpp"
89 #include "stub_x86.hpp"
90 #include "stub_x86_classic.hpp"
91 #include "stub_arm.hpp"
92 #include "stub_arm_classic.hpp"
93 #if SUPPORT_ARCH_arm64
94 #include "stub_arm64.hpp"
Pass::Pass(const Options& opts)
    :   compressedHelperHelper(NULL),
        compressedImageCache(NULL),
        compressedFastBinderPointer(NULL),
        _options(opts),
        _architecture(opts.architecture()),
        _lazyDylibsInUuse(opts.usingLazyDylibLinking()),
        _compressedLINKEDIT(opts.makeCompressedDyldInfo()),
        _prebind(opts.prebind()),
        _mightBeInSharedRegion(opts.sharedRegionEligible()),
        _pic(opts.outputSlidable()),
        _flatNamespace(opts.nameSpace() != Options::kTwoLevelNameSpace),
        _internal(NULL), _stubCount(0), _largeText(false)
{
}
const ld::Atom* Pass::stubableFixup(const ld::Fixup* fixup, ld::Internal& state)
{
    if ( fixup->binding == ld::Fixup::bindingsIndirectlyBound ) {
        const ld::Atom* target = state.indirectBindingTable[fixup->u.bindingIndex];
        switch ( fixup->kind ) {
            case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
            case ld::Fixup::kindStoreTargetAddressARMBranch24:
            case ld::Fixup::kindStoreTargetAddressThumbBranch22:
#if SUPPORT_ARCH_arm64
            case ld::Fixup::kindStoreTargetAddressARM64Branch26:
#endif
                assert(target != NULL);
                // create stub if target is in a dylib
                if ( target->definition() == ld::Atom::definitionProxy )
                    return target;
                // use stub if target is a resolver function in same linkage unit
                if ( target->contentType() == ld::Atom::typeResolver )
                    return target;
                if ( target->scope() == ld::Atom::scopeGlobal ) {
                    // create stub if target is global weak definition in symbol table
                    if ( (target->definition() == ld::Atom::definitionRegular)
                      && (target->combine() == ld::Atom::combineByName)
                      && ((target->symbolTableInclusion() == ld::Atom::symbolTableIn)
                       || (target->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)) ) {
                        // don't make stubs for auto-hide symbols
                        if ( target->autoHide() && (!_options.hasExportMaskList() || !_options.shouldExport(target->name())) )
                            return NULL;
                        return target;
                    }
                    // create stub if target is interposable
                    if ( _options.interposable(target->name()) )
                        return target;
                    if ( _flatNamespace ) {
                        // flat namespace does not indirect calls within main executables
                        if ( _options.outputKind() == Options::kDynamicExecutable )
                            return NULL;
                        // create stub if target is global and building -flat dylib or bundle
                        return target;
                    }
                }
                break;
            default:
                if ( target->contentType() == ld::Atom::typeResolver ) {
                    // any pointer to a resolver needs to change to pointer to stub
                    return target;
                }
                break;
        }
    }
    return NULL;
}
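// Note (editorial, not from the original source): stubableFixup() is the single
// policy point for stub creation. It returns the fixup's current target when the
// reference must be indirected through a stub (target defined in a dylib, a
// resolver function, an interposable or weak global definition, or a flat-namespace
// global), and NULL when the branch can bind directly. process() below calls it
// twice with the same arguments: once to collect stub targets and once to rewrite
// the fixups to point at the new stubs.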
ld::Atom* Pass::makeStub(const ld::Atom& target, bool weakImport)
{
    //fprintf(stderr, "makeStub(target=%p %s in sect %s, def=%d)\n", &target, target.name(), target.section().sectionName(), target.definition());
    bool stubToGlobalWeakDef = ( (target.combine() == ld::Atom::combineByName) &&
                                 (((target.definition() == ld::Atom::definitionRegular) && (target.scope() == ld::Atom::scopeGlobal))
                                  || (target.definition() == ld::Atom::definitionProxy)) );

    bool forLazyDylib = false;
    const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target.file());
    if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
        forLazyDylib = true;
    bool stubToResolver = (target.contentType() == ld::Atom::typeResolver);
#if SUPPORT_ARCH_arm_any || SUPPORT_ARCH_arm64
    bool usingDataConst = _options.useDataConstSegment();
#endif

    if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
        if ( _internal->compressedFastBinderProxy == NULL )
            throwf("symbol dyld_stub_binder not found (normally in libSystem.dylib). Needed to perform lazy binding to function %s", target.name());
    }

    switch ( _architecture ) {
#if SUPPORT_ARCH_i386
        case CPU_TYPE_I386:
            if ( usingCompressedLINKEDIT() && !forLazyDylib )
                return new ld::passes::stubs::x86::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
            else
                return new ld::passes::stubs::x86::classic::StubAtom(*this, target, forLazyDylib, weakImport);
            break;
#endif
#if SUPPORT_ARCH_x86_64
        case CPU_TYPE_X86_64:
            if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
                return new ld::passes::stubs::x86_64::KextStubAtom(*this, target);
            else if ( usingCompressedLINKEDIT() && !forLazyDylib )
                return new ld::passes::stubs::x86_64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
            else
                return new ld::passes::stubs::x86_64::classic::StubAtom(*this, target, forLazyDylib, weakImport);
            break;
#endif
#if SUPPORT_ARCH_arm_any
        case CPU_TYPE_ARM:
            if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() ) {
                // if text relocs are not allowed in kext bundles, then linker must create a stub
                return new ld::passes::stubs::arm::StubPICKextAtom(*this, target);
            }
            else if ( usingCompressedLINKEDIT() && !forLazyDylib ) {
                if ( (_stubCount < 900) && !_mightBeInSharedRegion && !_largeText && !_options.makeEncryptable() )
                    return new ld::passes::stubs::arm::StubCloseAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
                else if ( _pic )
                    return new ld::passes::stubs::arm::StubPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
                else
                    return new ld::passes::stubs::arm::StubNoPICAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport);
            }
            else {
                if ( _pic )
                    return new ld::passes::stubs::arm::classic::StubPICAtom(*this, target, forLazyDylib, weakImport);
                else
                    return new ld::passes::stubs::arm::classic::StubNoPICAtom(*this, target, forLazyDylib, weakImport);
            }
            break;
#endif
#if SUPPORT_ARCH_arm64
        case CPU_TYPE_ARM64:
            if ( (_options.outputKind() == Options::kKextBundle) && _options.kextsUseStubs() )
                return new ld::passes::stubs::arm64::KextStubAtom(*this, target);
            else
                return new ld::passes::stubs::arm64::StubAtom(*this, target, stubToGlobalWeakDef, stubToResolver, weakImport, usingDataConst);
            break;
#endif
    }
    throw "unsupported arch for stub";
}
void Pass::verifyNoResolverFunctions(ld::Internal& state)
{
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
            const ld::Atom* atom = *ait;
            if ( atom->contentType() == ld::Atom::typeResolver )
                throwf("resolver function '%s' not supported in type of output", atom->name());
        }
    }
}
void Pass::process(ld::Internal& state)
{
    switch ( _options.outputKind() ) {
        case Options::kObjectFile:
            // these kinds don't use stubs and can have resolver functions
            return;
        case Options::kStaticExecutable:
        case Options::kPreload:
            // these kinds don't use stubs and cannot have resolver functions
            verifyNoResolverFunctions(state);
            return;
        case Options::kDynamicLibrary:
            // uses stubs and can have resolver functions
            break;
        case Options::kKextBundle:
            verifyNoResolverFunctions(state);
            // if kexts don't use stubs, don't do this pass
            if ( !_options.kextsUseStubs() )
                return;
            break;
        case Options::kDynamicExecutable:
        case Options::kDynamicBundle:
            // these kinds do use stubs and cannot have resolver functions
            verifyNoResolverFunctions(state);
            break;
    }
    // walk all atoms and fixups looking for stubable references
    // don't create stubs inline because that could invalidate the sections iterator
    std::vector<const ld::Atom*> atomsCallingStubs;
    std::map<const ld::Atom*,ld::Atom*> stubFor;
    std::map<const ld::Atom*,bool> weakImportMap;
    atomsCallingStubs.reserve(128);
    uint64_t codeSize = 0;
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
            const ld::Atom* atom = *ait;
            codeSize += atom->size();
            bool atomNeedsStub = false;
            for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
                const ld::Atom* stubableTargetOfFixup = stubableFixup(fit, state);
                if ( stubableTargetOfFixup != NULL ) {
                    if ( !atomNeedsStub ) {
                        atomsCallingStubs.push_back(atom);
                        atomNeedsStub = true;
                    }
                    stubFor[stubableTargetOfFixup] = NULL;
                    // record weak_import attribute
                    std::map<const ld::Atom*,bool>::iterator pos = weakImportMap.find(stubableTargetOfFixup);
                    if ( pos == weakImportMap.end() ) {
                        // target not in weakImportMap, so add
                        weakImportMap[stubableTargetOfFixup] = fit->weakImport;
                    }
                    else {
                        // target in weakImportMap, check for weakness mismatch
                        if ( pos->second != fit->weakImport ) {
                            switch ( _options.weakReferenceMismatchTreatment() ) {
                                case Options::kWeakReferenceMismatchError:
                                    throwf("mismatching weak references for symbol: %s", stubableTargetOfFixup->name());
                                case Options::kWeakReferenceMismatchWeak:
                                    pos->second = true;
                                    break;
                                case Options::kWeakReferenceMismatchNonWeak:
                                    pos->second = false;
                                    break;
                            }
                        }
                    }
                }
            }
            // all resolver functions must have a corresponding stub
            if ( atom->contentType() == ld::Atom::typeResolver ) {
                if ( _options.outputKind() != Options::kDynamicLibrary )
                    throwf("resolver functions (%s) can only be used in dylibs", atom->name());
                if ( !_options.makeCompressedDyldInfo() ) {
                    if ( _options.architecture() == CPU_TYPE_ARM )
                        throwf("resolver functions (%s) can only be used when targeting iOS 4.2 or later", atom->name());
                    else
                        throwf("resolver functions (%s) can only be used when targeting Mac OS X 10.6 or later", atom->name());
                }
                stubFor[atom] = NULL;
            }
        }
    }

    const bool needStubForMain = _options.needsEntryPointLoadCommand()
                              && (state.entryPoint != NULL)
                              && (state.entryPoint->definition() == ld::Atom::definitionProxy);
    if ( needStubForMain ) {
        // _main not found in any .o files. Currently have proxy to dylib
        // Add to map, so that a stub will be made
        stubFor[state.entryPoint] = NULL;
    }
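    // Note (editorial, not from the original source): at this point stubFor maps
    // every target that needs a stub to NULL (the stub atoms themselves are created
    // in a later loop), and weakImportMap records whether each target was referenced
    // weak_import so the mismatch policy above and the eventual binding can honor it.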
    // short circuit if no stubs needed
    _internal = &state;
    _stubCount = stubFor.size();
    if ( _stubCount == 0 )
        return;

    // <rdar://problem/8553283> lazily check for helper
    if ( !_options.makeCompressedDyldInfo() && (state.classicBindingHelper == NULL) && (_options.outputKind() != Options::kKextBundle) )
        throw "symbol dyld_stub_binding_helper not found, normally in crt1.o/dylib1.o/bundle1.o";
    // disable arm close stubs in some cases
    if ( _architecture == CPU_TYPE_ARM ) {
        if ( codeSize > 4*1024*1024 )
            _largeText = true;
        else {
            for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
                ld::Internal::FinalSection* sect = *sit;
                if ( sect->type() == ld::Section::typeMachHeader )
                    continue;
                if ( strcmp(sect->segmentName(), "__TEXT") == 0) {
                    for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
                        const ld::Atom* atom = *ait;
                        if ( atom->alignment().powerOf2 > 10 ) {
                            // overaligned section means might not be able to keep closestubs sect pushed to end of __TEXT
                            //warning("alignment 1<<%d in atom %s in section %s disables close stubs optimization",
                            //    atom->alignment().powerOf2, atom->name(), sect->segmentName());
                            _largeText = true;
                            break;
                        }
                    }
                }
            }
        }
    }
    for (std::map<const ld::Atom*,ld::Atom*>::iterator it = stubFor.begin(); it != stubFor.end(); ++it) {
        it->second = makeStub(*it->first, weakImportMap[it->first]);
    }
    // update atoms to use stubs
    for (std::vector<const ld::Atom*>::iterator it=atomsCallingStubs.begin(); it != atomsCallingStubs.end(); ++it) {
        const ld::Atom* atom = *it;
        for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
            const ld::Atom* stubableTargetOfFixup = stubableFixup(fit, state);
            if ( stubableTargetOfFixup != NULL ) {
                ld::Atom* stub = stubFor[stubableTargetOfFixup];
                assert(stub != NULL && "stub not created");
                fit->binding = ld::Fixup::bindingDirectlyBound;
                fit->u.target = stub;
            }
        }
    }
    // switch entry point from proxy to stub
    if ( needStubForMain ) {
        const ld::Atom* mainStub = stubFor[state.entryPoint];
        assert(mainStub != NULL);
        state.entryPoint = mainStub;
    }
    // sort new atoms so links are consistent
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        switch ( sect->type() ) {
            case ld::Section::typeStubHelper:
            case ld::Section::typeStub:
            case ld::Section::typeStubClose:
            case ld::Section::typeLazyPointer:
            case ld::Section::typeLazyPointerClose:
                std::sort(sect->atoms.begin(), sect->atoms.end(), AtomByNameSorter());
                break;
            default:
                break;
        }
    }
}
void doPass(const Options& opts, ld::Internal& internal)
{
    Pass pass(opts);
    pass.process(internal);
}
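// Illustrative sketch (not part of the original source): a driver would run this
// pass after atom resolution, once state.sections and state.indirectBindingTable
// have been populated, roughly:
//
//     ld::Internal state;
//     // ... resolve input files into state ...
//     ld::passes::stubs::doPass(options, state);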

} // namespace stubs
} // namespace passes
} // namespace ld