1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-*
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #ifndef __LINKEDIT_CLASSIC_HPP__
26 #define __LINKEDIT_CLASSIC_HPP__
27
28 #include <stdlib.h>
29 #include <sys/types.h>
30 #include <errno.h>
31 #include <limits.h>
32 #include <unistd.h>
33
34 #include <vector>
35 #include <unordered_map>
36
37 #include "Options.h"
38 #include "ld.hpp"
39 #include "Architectures.hpp"
40 #include "MachOFileAbstraction.hpp"
41
42 namespace ld {
43 namespace tool {
44
45
46
47 class ClassicLinkEditAtom : public ld::Atom
48 {
49 public:
50
51 // overrides of ld::Atom
52 virtual ld::File* file() const { return NULL; }
53 virtual uint64_t objectAddress() const { return 0; }
54
55 virtual void encode() = 0;
56 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe) { return false; }
57
58 ClassicLinkEditAtom(const Options& opts, ld::Internal& state,
59 OutputFile& writer, const ld::Section& sect,
60 unsigned int pointerSize)
61 : ld::Atom(sect, ld::Atom::definitionRegular,
62 ld::Atom::combineNever, ld::Atom::scopeTranslationUnit,
63 ld::Atom::typeUnclassified, ld::Atom::symbolTableNotIn,
64 false, false, false, ld::Atom::Alignment(log2(pointerSize))),
65 _options(opts), _state(state), _writer(writer) { }
66 protected:
67 const Options& _options;
68 ld::Internal& _state;
69 OutputFile& _writer;
70 };
71
72
73
74 class StringPoolAtom : public ClassicLinkEditAtom
75 {
76 public:
77 StringPoolAtom(const Options& opts, ld::Internal& state,
78 OutputFile& writer, int pointerSize);
79
80 // overrides of ld::Atom
81 virtual const char* name() const { return "string pool"; }
82 virtual uint64_t size() const;
83 virtual void copyRawContent(uint8_t buffer[]) const;
84 // overrides of ClassicLinkEditAtom
85 virtual void encode() { }
86
87 int32_t add(const char* name);
88 int32_t addUnique(const char* name);
89 int32_t emptyString() { return 1; }
90 const char* stringForIndex(int32_t) const;
91 uint32_t currentOffset();
92
93 private:
94 enum { kBufferSize = 0x01000000 };
95 typedef std::unordered_map<const char*, int32_t, CStringHash, CStringEquals> StringToOffset;
96
97 const uint32_t _pointerSize;
98 std::vector<char*> _fullBuffers;
99 char* _currentBuffer;
100 uint32_t _currentBufferUsed;
101 StringToOffset _uniqueStrings;
102
103 static ld::Section _s_section;
104 };
105
106 ld::Section StringPoolAtom::_s_section("__LINKEDIT", "__string_pool", ld::Section::typeLinkEdit, true);
107
108
109 StringPoolAtom::StringPoolAtom(const Options& opts, ld::Internal& state, OutputFile& writer, int pointerSize)
110 : ClassicLinkEditAtom(opts, state, writer, _s_section, pointerSize),
111 _pointerSize(pointerSize), _currentBuffer(NULL), _currentBufferUsed(0)
112 {
113 _currentBuffer = new char[kBufferSize];
114 // burn first byte of string pool (so zero is never a valid string offset)
115 _currentBuffer[_currentBufferUsed++] = ' ';
116 // make offset 1 always point to an empty string
117 _currentBuffer[_currentBufferUsed++] = '\0';
118 }
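// Annotation (not part of the original source): a minimal usage sketch of the offset
// scheme set up above, assuming 'opts', 'state' and 'writer' are already in scope.
// Offset 0 is burned and offset 1 is the shared empty string, so the first string
// added to a fresh pool lands at offset 2:
//
//     StringPoolAtom pool(opts, state, writer, 8);
//     int32_t off   = pool.add("_main");          // returns 2
//     const char* s = pool.stringForIndex(off);   // "_main"
//     int32_t empty = pool.emptyString();         // always 1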
119
120 uint64_t StringPoolAtom::size() const
121 {
122 	// size, rounded up to pointer-size alignment
123 return (kBufferSize * _fullBuffers.size() + _currentBufferUsed + _pointerSize-1) & (-_pointerSize);
124 }
125
126 void StringPoolAtom::copyRawContent(uint8_t buffer[]) const
127 {
128 uint64_t offset = 0;
129 for (unsigned int i=0; i < _fullBuffers.size(); ++i) {
130 memcpy(&buffer[offset], _fullBuffers[i], kBufferSize);
131 offset += kBufferSize;
132 }
133 memcpy(&buffer[offset], _currentBuffer, _currentBufferUsed);
134 // zero fill end to align
135 offset += _currentBufferUsed;
136 while ( (offset % _pointerSize) != 0 )
137 buffer[offset++] = 0;
138 }
139
140 int32_t StringPoolAtom::add(const char* str)
141 {
142 int32_t offset = kBufferSize * _fullBuffers.size() + _currentBufferUsed;
143 int lenNeeded = strlcpy(&_currentBuffer[_currentBufferUsed], str, kBufferSize-_currentBufferUsed)+1;
144 if ( (_currentBufferUsed+lenNeeded) < kBufferSize ) {
145 _currentBufferUsed += lenNeeded;
146 }
147 else {
148 int copied = kBufferSize-_currentBufferUsed-1;
149 		// change the trailing '\0' that strlcpy added into the real character
150 _currentBuffer[kBufferSize-1] = str[copied];
151 // alloc next buffer
152 _fullBuffers.push_back(_currentBuffer);
153 _currentBuffer = new char[kBufferSize];
154 _currentBufferUsed = 0;
155 // append rest of string
156 this->add(&str[copied+1]);
157 }
158 return offset;
159 }
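// Annotation (not part of the original source): add() relies on strlcpy()'s truncation
// to handle strings that straddle the 16MB buffer boundary.  The '\0' that strlcpy
// wrote into the buffer's last byte is overwritten with the next real character, the
// full buffer is retired into _fullBuffers, and the tail of the string is appended to
// a fresh buffer by the recursive call.  The returned offset is always the position of
// the string's first byte counted across all buffers (kBufferSize * number of full
// buffers, plus the used count of the buffer that was current when the call started).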
160
161 uint32_t StringPoolAtom::currentOffset()
162 {
163 return kBufferSize * _fullBuffers.size() + _currentBufferUsed;
164 }
165
166
167 int32_t StringPoolAtom::addUnique(const char* str)
168 {
169 StringToOffset::iterator pos = _uniqueStrings.find(str);
170 if ( pos != _uniqueStrings.end() ) {
171 return pos->second;
172 }
173 else {
174 int32_t offset = this->add(str);
175 _uniqueStrings[str] = offset;
176 return offset;
177 }
178 }
179
180
181 const char* StringPoolAtom::stringForIndex(int32_t index) const
182 {
183 int32_t currentBufferStartIndex = kBufferSize * _fullBuffers.size();
184 int32_t maxIndex = currentBufferStartIndex + _currentBufferUsed;
185 // check for out of bounds
186 if ( index > maxIndex )
187 return "";
188 // check for index in _currentBuffer
189 if ( index > currentBufferStartIndex )
190 return &_currentBuffer[index-currentBufferStartIndex];
191 // otherwise index is in a full buffer
192 uint32_t fullBufferIndex = index/kBufferSize;
193 return &_fullBuffers[fullBufferIndex][index-(kBufferSize*fullBufferIndex)];
194 }
195
196
197
198 template <typename A>
199 class SymbolTableAtom : public ClassicLinkEditAtom
200 {
201 public:
202 SymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
203 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)),
204 _stabsStringsOffsetStart(0), _stabsStringsOffsetEnd(0),
205 _stabsIndexStart(0), _stabsIndexEnd(0) { }
206
207 // overrides of ld::Atom
208 virtual const char* name() const { return "symbol table"; }
209 virtual uint64_t size() const;
210 virtual void copyRawContent(uint8_t buffer[]) const;
211 // overrides of ClassicLinkEditAtom
212 virtual void encode();
213 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe);
214
215 private:
216 typedef typename A::P P;
217 typedef typename A::P::E E;
218 typedef typename A::P::uint_t pint_t;
219
220 bool addLocal(const ld::Atom* atom, StringPoolAtom* pool);
221 void addGlobal(const ld::Atom* atom, StringPoolAtom* pool);
222 void addImport(const ld::Atom* atom, StringPoolAtom* pool);
223 uint8_t classicOrdinalForProxy(const ld::Atom* atom);
224 uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
225 uint64_t valueForStab(const ld::relocatable::File::Stab& stab);
226 uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab);
227 bool isAltEntry(const ld::Atom* atom);
228
229 mutable std::vector<macho_nlist<P> > _globals;
230 mutable std::vector<macho_nlist<P> > _locals;
231 mutable std::vector<macho_nlist<P> > _imports;
232
233 uint32_t _stabsStringsOffsetStart;
234 uint32_t _stabsStringsOffsetEnd;
235 uint32_t _stabsIndexStart;
236 uint32_t _stabsIndexEnd;
237
238 static ld::Section _s_section;
239 static int _s_anonNameIndex;
240
241 };
242
243 template <typename A>
244 ld::Section SymbolTableAtom<A>::_s_section("__LINKEDIT", "__symbol_table", ld::Section::typeLinkEdit, true);
245
246 template <typename A>
247 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
248
249
250 template <typename A>
251 bool SymbolTableAtom<A>::isAltEntry(const ld::Atom* atom)
252 {
253 // alt entries have a group subordinate reference to the previous atom
254 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
255 if ( fit->kind == ld::Fixup::kindNoneGroupSubordinate ) {
256 if ( fit->binding == Fixup::bindingDirectlyBound ) {
257 const Atom* prevAtom = fit->u.target;
258 assert(prevAtom != NULL);
259 for (ld::Fixup::iterator fit2 = prevAtom->fixupsBegin(); fit2 != prevAtom->fixupsEnd(); ++fit2) {
260 if ( fit2->kind == ld::Fixup::kindNoneFollowOn ) {
261 if ( fit2->binding == Fixup::bindingDirectlyBound ) {
262 if ( fit2->u.target == atom )
263 return true;
264 }
265 }
266 }
267 }
268 }
269 }
270 return false;
271 }
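// Annotation (not part of the original source): the fixup pattern matched above is the
// two-way link built for alt entries (typically produced by an .alt_entry directive):
// atom B carries a kindNoneGroupSubordinate fixup to its preceding atom A, and A carries
// a kindNoneFollowOn fixup back to B.  Only when both directions are present is B
// treated as an alt entry, which addLocal()/addGlobal() below turn into the N_ALT_ENTRY
// bit in n_desc for object-file (-r) output.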
272
273 template <typename A>
274 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
275 {
276 macho_nlist<P> entry;
277 assert(atom->symbolTableInclusion() != ld::Atom::symbolTableNotIn);
278
279 // set n_strx
280 const char* symbolName = atom->name();
281 char anonName[32];
282 if ( this->_options.outputKind() == Options::kObjectFile ) {
283 if ( atom->contentType() == ld::Atom::typeCString ) {
284 if ( atom->combine() == ld::Atom::combineByNameAndContent ) {
285 // don't use 'l' labels for x86_64 strings
286 // <rdar://problem/6605499> x86_64 obj-c runtime confused when static lib is stripped
287 sprintf(anonName, "LC%u", _s_anonNameIndex++);
288 symbolName = anonName;
289 }
290 }
291 else if ( atom->contentType() == ld::Atom::typeCFI ) {
292 if ( _options.removeEHLabels() )
293 return false;
294 // synthesize .eh name
295 if ( strcmp(atom->name(), "CIE") == 0 )
296 symbolName = "EH_Frame1";
297 else
298 symbolName = "func.eh";
299 }
300 else if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
301 // make auto-strip anonymous name for symbol
302 sprintf(anonName, "l%03u", _s_anonNameIndex++);
303 symbolName = anonName;
304 }
305 }
306 entry.set_n_strx(pool->add(symbolName));
307
308 // set n_type
309 uint8_t type = N_SECT;
310 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
311 type = N_ABS;
312 }
313 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
314 && (this->_options.outputKind() == Options::kObjectFile) ) {
315 // __OBJC __class has floating abs symbols for each class data structure
316 type = N_ABS;
317 }
318 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
319 type |= N_PEXT;
320 entry.set_n_type(type);
321
322 // set n_sect (section number of implementation )
323 if ( atom->definition() == ld::Atom::definitionAbsolute )
324 entry.set_n_sect(0);
325 else
326 entry.set_n_sect(atom->machoSection());
327
328 // set n_desc
329 uint16_t desc = 0;
330 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
331 desc |= REFERENCED_DYNAMICALLY;
332 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
333 desc |= N_NO_DEAD_STRIP;
334 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) )
335 desc |= N_WEAK_DEF;
336 if ( atom->isThumb() )
337 desc |= N_ARM_THUMB_DEF;
338 if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
339 desc |= N_ALT_ENTRY;
340 entry.set_n_desc(desc);
341
342 	// set n_value (address this symbol will have if this executable is loaded at its preferred address)
343 if ( atom->definition() == ld::Atom::definitionAbsolute )
344 entry.set_n_value(atom->objectAddress());
345 else
346 entry.set_n_value(atom->finalAddress());
347
348 // add to array
349 _locals.push_back(entry);
350 return true;
351 }
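// Annotation (not part of the original source): a worked example of what addLocal()
// emits.  Assuming a non-exported helper with linkage-unit scope, living in mach-o
// section 1 (__TEXT,__text) with a hypothetical final address of 0x100000F40:
//
//     n_strx  = <string pool offset of its name>
//     n_type  = N_SECT | N_PEXT        // 0x0e | 0x10
//     n_sect  = 1
//     n_desc  = 0
//     n_value = 0x100000F40            // finalAddress()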
352
353
354 template <typename A>
355 void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
356 {
357 macho_nlist<P> entry;
358
359 // set n_strx
360 const char* symbolName = atom->name();
361 char anonName[32];
362 if ( this->_options.outputKind() == Options::kObjectFile ) {
363 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
364 // make auto-strip anonymous name for symbol
365 sprintf(anonName, "l%03u", _s_anonNameIndex++);
366 symbolName = anonName;
367 }
368 }
369 entry.set_n_strx(pool->add(symbolName));
370
371 // set n_type
372 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
373 entry.set_n_type(N_EXT | N_ABS);
374 }
375 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
376 && (this->_options.outputKind() == Options::kObjectFile) ) {
377 // __OBJC __class has floating abs symbols for each class data structure
378 entry.set_n_type(N_EXT | N_ABS);
379 }
380 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
381 entry.set_n_type(N_EXT | N_INDR);
382 }
383 else {
384 entry.set_n_type(N_EXT | N_SECT);
385 if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (this->_options.outputKind() == Options::kObjectFile) ) {
386 if ( this->_options.keepPrivateExterns() )
387 entry.set_n_type(N_EXT | N_SECT | N_PEXT);
388 }
389 else if ( (atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)
390 && (atom->section().type() == ld::Section::typeMachHeader)
391 && !_options.positionIndependentExecutable() ) {
392 			// the __mh_execute_header is historical magic in non-pie executables and must be an absolute symbol
393 entry.set_n_type(N_EXT | N_ABS);
394 }
395 }
396
397 // set n_sect (section number of implementation)
398 if ( atom->definition() == ld::Atom::definitionAbsolute )
399 entry.set_n_sect(0);
400 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) )
401 entry.set_n_sect(0);
402 else
403 entry.set_n_sect(atom->machoSection());
404
405 // set n_desc
406 uint16_t desc = 0;
407 if ( atom->isThumb() )
408 desc |= N_ARM_THUMB_DEF;
409 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
410 desc |= REFERENCED_DYNAMICALLY;
411 if ( (atom->contentType() == ld::Atom::typeResolver) && (this->_options.outputKind() == Options::kObjectFile) )
412 desc |= N_SYMBOL_RESOLVER;
413 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
414 desc |= N_NO_DEAD_STRIP;
415 if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
416 desc |= N_ALT_ENTRY;
417 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
418 desc |= N_WEAK_DEF;
419 // <rdar://problem/6783167> support auto hidden weak symbols: .weak_def_can_be_hidden
420 if ( (atom->scope() == ld::Atom::scopeGlobal) && atom->autoHide() && (this->_options.outputKind() == Options::kObjectFile) )
421 desc |= N_WEAK_REF;
422 }
423 entry.set_n_desc(desc);
424
425 	// set n_value (address this symbol will have if this executable is loaded at its preferred address)
426 if ( atom->definition() == ld::Atom::definitionAbsolute )
427 entry.set_n_value(atom->objectAddress());
428 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
429 if ( atom->isAlias() ) {
430 // this re-export also renames
431 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
432 if ( fit->kind == ld::Fixup::kindNoneFollowOn ) {
433 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
434 entry.set_n_value(pool->add(fit->u.target->name()));
435 }
436 }
437 }
438 else
439 entry.set_n_value(entry.n_strx());
440 }
441 else
442 entry.set_n_value(atom->finalAddress());
443
444 // add to array
445 _globals.push_back(entry);
446 }
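// Annotation (not part of the original source): one non-obvious case above is the
// re-exported proxy (definitionProxy with global scope).  It is written as
// N_EXT | N_INDR with n_sect = 0, and n_value holds a *string pool offset* rather than
// an address: the offset of the renamed target's name when the proxy is an alias,
// otherwise the same offset already stored in n_strx.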
447
448 template <typename A>
449 uint8_t SymbolTableAtom<A>::classicOrdinalForProxy(const ld::Atom* atom)
450 {
451 assert(atom->definition() == ld::Atom::definitionProxy);
452 	// when linking for flat namespace, ordinals are always zero
453 if ( _options.nameSpace() != Options::kTwoLevelNameSpace )
454 return 0;
455 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
456 // when linking -undefined dynamic_lookup, unbound symbols use DYNAMIC_LOOKUP_ORDINAL
457 if ( dylib == NULL ) {
458 if (_options.undefinedTreatment() == Options::kUndefinedDynamicLookup )
459 return DYNAMIC_LOOKUP_ORDINAL;
460 if (_options.allowedUndefined(atom->name()) )
461 return DYNAMIC_LOOKUP_ORDINAL;
462 }
463 assert(dylib != NULL);
464 int ord = this->_writer.dylibToOrdinal(dylib);
465 if ( ord == BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE )
466 return EXECUTABLE_ORDINAL;
467 return ord;
468 }
469
470
471 template <typename A>
472 void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
473 {
474 macho_nlist<P> entry;
475
476 // set n_strx
477 entry.set_n_strx(pool->add(atom->name()));
478
479 // set n_type
480 if ( this->_options.outputKind() == Options::kObjectFile ) {
481 if ( atom->section().type() == ld::Section::typeTempAlias ) {
482 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
483 entry.set_n_type(N_INDR | N_EXT | N_PEXT);
484 else
485 entry.set_n_type(N_INDR | N_EXT);
486 }
487 else if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
488 && (atom->definition() == ld::Atom::definitionTentative) )
489 entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
490 else
491 entry.set_n_type(N_UNDF | N_EXT);
492 }
493 else {
494 if ( this->_options.prebind() )
495 entry.set_n_type(N_PBUD | N_EXT);
496 else
497 entry.set_n_type(N_UNDF | N_EXT);
498 }
499
500 // set n_sect
501 entry.set_n_sect(0);
502
503 uint16_t desc = 0;
504 if ( this->_options.outputKind() != Options::kObjectFile ) {
505 uint8_t ordinal = this->classicOrdinalForProxy(atom);
506 //fprintf(stderr, "ordinal=%u from reader=%p for symbol=%s\n", ordinal, atom->getFile(), atom->getName());
507 SET_LIBRARY_ORDINAL(desc, ordinal);
508
509 #if 0
510 // set n_desc ( high byte is library ordinal, low byte is reference type )
511 std::map<const ObjectFile::Atom*,ObjectFile::Atom*>::iterator pos = fStubsMap.find(atom);
512 if ( pos != fStubsMap.end() || ( strncmp(atom->getName(), ".objc_class_name_", 17) == 0) )
513 desc |= REFERENCE_FLAG_UNDEFINED_LAZY;
514 else
515 desc |= REFERENCE_FLAG_UNDEFINED_NON_LAZY;
516 #endif
517 }
518 else if ( atom->definition() == ld::Atom::definitionTentative ) {
519 uint8_t align = atom->alignment().powerOf2;
520 // always record custom alignment of common symbols to match what compiler does
521 SET_COMM_ALIGN(desc, align);
522 }
523 if ( (this->_options.outputKind() != Options::kObjectFile)
524 && (atom->definition() == ld::Atom::definitionProxy)
525 && (atom->combine() == ld::Atom::combineByName) ) {
526 desc |= N_REF_TO_WEAK;
527 }
528 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
529 if ( atom->weakImported() || ((dylib != NULL) && dylib->forcedWeakLinked()) )
530 desc |= N_WEAK_REF;
531 entry.set_n_desc(desc);
532
533 // set n_value, zero for import proxy and size for tentative definition
534 if ( atom->definition() == ld::Atom::definitionTentative )
535 entry.set_n_value(atom->size());
536 else if ( atom->section().type() != ld::Section::typeTempAlias )
537 entry.set_n_value(0);
538 else {
539 assert(atom->fixupsBegin() != atom->fixupsEnd());
540 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
541 assert(fit->kind == ld::Fixup::kindNoneFollowOn);
542 switch ( fit->binding ) {
543 case ld::Fixup::bindingByNameUnbound:
544 entry.set_n_value(pool->add(fit->u.name));
545 break;
546 case ld::Fixup::bindingsIndirectlyBound:
547 entry.set_n_value(pool->add((_state.indirectBindingTable[fit->u.bindingIndex])->name()));
548 break;
549 default:
550 assert(0 && "internal error: unexpected alias binding");
551 }
552 }
553 }
554
555 // add to array
556 _imports.push_back(entry);
557 }
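// Annotation (not part of the original source): a worked example of what addImport()
// emits for a hypothetical undefined symbol bound to the third dylib on the link line,
// in a two-level-namespace dynamic output:
//
//     n_strx  = <string pool offset of its name>
//     n_type  = N_UNDF | N_EXT         // N_PBUD | N_EXT when prebinding
//     n_sect  = 0
//     n_desc  = library ordinal 3 in the high byte (SET_LIBRARY_ORDINAL),
//               plus N_WEAK_REF if the symbol is weakly imported
//     n_value = 0                      // tentative definitions store their size here instead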
558
559 template <typename A>
560 uint8_t SymbolTableAtom<A>::sectionIndexForStab(const ld::relocatable::File::Stab& stab)
561 {
562 // in FUN stabs, n_sect field is 0 for start FUN and 1 for end FUN
563 if ( stab.type == N_FUN )
564 return stab.other;
565 else if ( stab.type == N_GSYM )
566 return 0;
567 else if ( stab.atom != NULL )
568 return stab.atom->machoSection();
569 else
570 return stab.other;
571 }
572
573
574 template <typename A>
575 uint64_t SymbolTableAtom<A>::valueForStab(const ld::relocatable::File::Stab& stab)
576 {
577 switch ( stab.type ) {
578 case N_FUN:
579 if ( stab.atom == NULL ) {
580 // <rdar://problem/5591394> Add support to ld64 for N_FUN stabs when used for symbolic constants
581 return stab.value;
582 }
583 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
584 // end of function N_FUN has size
585 return stab.atom->size();
586 }
587 else {
588 // start of function N_FUN has address
589 return stab.atom->finalAddress();
590 }
591 case N_LBRAC:
592 case N_RBRAC:
593 case N_SLINE:
594 if ( stab.atom == NULL )
595 // some weird assembly files have slines not associated with a function
596 return stab.value;
597 else
598 // all these stab types need their value changed from an offset in the atom to an address
599 return stab.atom->finalAddress() + stab.value;
600 case N_STSYM:
601 case N_LCSYM:
602 case N_BNSYM:
603 // all these need address of atom
604 if ( stab.atom != NULL )
605 return stab.atom->finalAddress();
606 else
607 return 0; // <rdar://problem/7811357> work around for mismatch N_BNSYM
608 case N_ENSYM:
609 return stab.atom->size();
610 case N_SO:
611 if ( stab.atom == NULL ) {
612 return 0;
613 }
614 else {
615 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
616 // end of translation unit N_SO has address of end of last atom
617 return stab.atom->finalAddress() + stab.atom->size();
618 }
619 else {
620 					// start of translation unit N_SO has address of first atom
621 return stab.atom->finalAddress();
622 }
623 }
624 break;
625 default:
626 return stab.value;
627 }
628 }
629
630 template <typename A>
631 uint32_t SymbolTableAtom<A>::stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool)
632 {
633 switch (stab.type) {
634 case N_SO:
635 if ( (stab.string == NULL) || stab.string[0] == '\0' ) {
636 return pool->emptyString();
637 break;
638 }
639 // fall into uniquing case
640 case N_SOL:
641 case N_BINCL:
642 case N_EXCL:
643 return pool->addUnique(stab.string);
644 break;
645 default:
646 if ( stab.string == NULL )
647 return 0;
648 else if ( stab.string[0] == '\0' )
649 return pool->emptyString();
650 else
651 return pool->add(stab.string);
652 }
653 return 0;
654 }
655
656
657
658 template <typename A>
659 bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
660 {
661 ssos = _stabsStringsOffsetStart;
662 ssoe = _stabsStringsOffsetEnd;
663 sos = _stabsIndexStart * sizeof(macho_nlist<P>);
664 soe = _stabsIndexEnd * sizeof(macho_nlist<P>);
665 return ( (_stabsIndexStart != _stabsIndexEnd) || (_stabsStringsOffsetStart != _stabsStringsOffsetEnd) );
666 }
667
668
669 template <typename A>
670 void SymbolTableAtom<A>::encode()
671 {
672 	// Note: We lay out the symbol table so that the strings for the stabs (local) symbols are at the
673 	// end of the string pool. The stabs strings are not used when calculating the UUID for the image.
674 	// If the stabs strings were not last, the string offsets for all other symbols might vary, which would alter the UUID.
675
676 // reserve space for local symbols
677 uint32_t localsCount = _state.stabs.size() + this->_writer._localAtoms.size();
678
679 // make nlist entries for all global symbols
680 std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
681 _globals.reserve(globalAtoms.size());
682 uint32_t symbolIndex = localsCount;
683 this->_writer._globalSymbolsStartIndex = localsCount;
684 for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
685 const ld::Atom* atom = *it;
686 this->addGlobal(atom, this->_writer._stringPoolAtom);
687 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
688 }
689 this->_writer._globalSymbolsCount = symbolIndex - this->_writer._globalSymbolsStartIndex;
690
691 // make nlist entries for all undefined (imported) symbols
692 std::vector<const ld::Atom*>& importAtoms = this->_writer._importedAtoms;
693 _imports.reserve(importAtoms.size());
694 this->_writer._importSymbolsStartIndex = symbolIndex;
695 for (std::vector<const ld::Atom*>::const_iterator it=importAtoms.begin(); it != importAtoms.end(); ++it) {
696 this->addImport(*it, this->_writer._stringPoolAtom);
697 this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
698 }
699 this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
700
701 // go back to start and make nlist entries for all local symbols
702 std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
703 _locals.reserve(localsCount);
704 symbolIndex = 0;
705 this->_writer._localSymbolsStartIndex = 0;
706 _stabsIndexStart = 0;
707 _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
708 for (const ld::relocatable::File::Stab& stab : _state.stabs) {
709 macho_nlist<P> entry;
710 entry.set_n_type(stab.type);
711 entry.set_n_sect(sectionIndexForStab(stab));
712 entry.set_n_desc(stab.desc);
713 entry.set_n_value(valueForStab(stab));
714 entry.set_n_strx(stringOffsetForStab(stab, this->_writer._stringPoolAtom));
715 _locals.push_back(entry);
716 ++symbolIndex;
717 }
718 _stabsIndexEnd = symbolIndex;
719 _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
720 for (const ld::Atom* atom : localAtoms) {
721 if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
722 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
723 }
724 this->_writer._localSymbolsCount = symbolIndex;
725 }
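// Annotation (not part of the original source): a sketch of the symbol table index
// layout that encode() produces, with S stab entries, L regular local symbols,
// G globals and I imports (assuming no locals are dropped by addLocal()):
//
//     [0 .. S)              stabs
//     [S .. S+L)            other local symbols
//     [S+L .. S+L+G)        globals (exported symbols)
//     [S+L+G .. S+L+G+I)    imports (undefined symbols)
//
// Globals and imports are deliberately encoded first so their names enter the string
// pool ahead of the stab strings, per the note at the top of encode().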
726
727 template <typename A>
728 uint64_t SymbolTableAtom<A>::size() const
729 {
730 return sizeof(macho_nlist<P>) * (_locals.size() + _globals.size() + _imports.size());
731 }
732
733 template <typename A>
734 void SymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
735 {
736 memcpy(&buffer[this->_writer._localSymbolsStartIndex*sizeof(macho_nlist<P>)], &_locals[0],
737 this->_writer._localSymbolsCount*sizeof(macho_nlist<P>));
738 memcpy(&buffer[this->_writer._globalSymbolsStartIndex*sizeof(macho_nlist<P>)], &_globals[0],
739 this->_writer._globalSymbolsCount*sizeof(macho_nlist<P>));
740 memcpy(&buffer[this->_writer._importSymbolsStartIndex *sizeof(macho_nlist<P>)], &_imports[0],
741 this->_writer._importSymbolsCount*sizeof(macho_nlist<P>));
742 }
743
744
745
746
747 class RelocationsAtomAbstract : public ClassicLinkEditAtom
748 {
749 public:
750 RelocationsAtomAbstract(const Options& opts, ld::Internal& state,
751 OutputFile& writer, const ld::Section& sect,
752 unsigned int pointerSize)
753 : ClassicLinkEditAtom(opts, state, writer, sect, pointerSize) { }
754
755 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) = 0;
756 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) = 0;
757 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) = 0;
758 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) = 0;
759 virtual uint64_t relocBaseAddress(ld::Internal& state) = 0;
760 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
761 const ld::Atom* inAtom, uint32_t offsetInAtom,
762 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
763 const ld::Atom* toTarget, uint64_t toAddend,
764 const ld::Atom* fromTarget, uint64_t fromAddend) = 0;
765 protected:
766 uint32_t symbolIndex(const ld::Atom* atom) const;
767
768 };
769
770
771
772 uint32_t RelocationsAtomAbstract::symbolIndex(const ld::Atom* atom) const
773 {
774 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
775 if ( pos != this->_writer._atomToSymbolIndex.end() )
776 return pos->second;
777 fprintf(stderr, "_atomToSymbolIndex content:\n");
778 for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
779 fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
780 }
781 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
782 }
783
784
785 template <typename A>
786 class LocalRelocationsAtom : public RelocationsAtomAbstract
787 {
788 public:
789 LocalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
790 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
791
792 // overrides of ld::Atom
793 virtual const char* name() const { return "local relocations"; }
794 virtual uint64_t size() const;
795 virtual void copyRawContent(uint8_t buffer[]) const;
796 // overrides of ClassicLinkEditAtom
797 virtual void encode() {}
798 // overrides of RelocationsAtomAbstract
799 virtual void addPointerReloc(uint64_t addr, uint32_t symNum);
800 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
801 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
802 virtual uint64_t relocBaseAddress(ld::Internal& state);
803 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum);
804 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
805 const ld::Atom* inAtom, uint32_t offsetInAtom,
806 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
807 const ld::Atom* toTarget, uint64_t toAddend,
808 const ld::Atom* fromTarget, uint64_t fromAddend) { }
809
810 private:
811 typedef typename A::P P;
812 typedef typename A::P::E E;
813 typedef typename A::P::uint_t pint_t;
814
815 std::vector<macho_relocation_info<P> > _relocs;
816
817 static ld::Section _s_section;
818 };
819
820 template <typename A>
821 ld::Section LocalRelocationsAtom<A>::_s_section("__LINKEDIT", "__local_relocs", ld::Section::typeLinkEdit, true);
822
823
824 template <>
825 uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
826 {
827 if ( _options.outputKind() == Options::kKextBundle ) {
828 // for kext bundles the reloc base address starts at __TEXT segment
829 return _options.baseAddress();
830 }
831 // for all other kinds, the x86_64 reloc base address starts at first writable segment (usually __DATA)
832 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
833 ld::Internal::FinalSection* sect = *sit;
834 if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
835 return sect->address;
836 }
837 throw "writable (__DATA) segment not found";
838 }
839
840 template <typename A>
841 uint64_t LocalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
842 {
843 return _options.baseAddress();
844 }
845
846 template <typename A>
847 void LocalRelocationsAtom<A>::addPointerReloc(uint64_t addr, uint32_t symNum)
848 {
849 macho_relocation_info<P> reloc;
850 reloc.set_r_address(addr);
851 reloc.set_r_symbolnum(symNum);
852 reloc.set_r_pcrel(false);
853 reloc.set_r_length();
854 reloc.set_r_extern(false);
855 reloc.set_r_type(GENERIC_RELOC_VANILLA);
856 _relocs.push_back(reloc);
857 }
858
859 template <typename A>
860 void LocalRelocationsAtom<A>::addTextReloc(uint64_t addr, ld::Fixup::Kind kind, uint64_t targetAddr, uint32_t symNum)
861 {
862 }
863
864
865 template <typename A>
866 uint64_t LocalRelocationsAtom<A>::size() const
867 {
868 return _relocs.size() * sizeof(macho_relocation_info<P>);
869 }
870
871 template <typename A>
872 void LocalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
873 {
874 memcpy(buffer, &_relocs[0], _relocs.size()*sizeof(macho_relocation_info<P>));
875 }
876
877
878
879
880
881
882 template <typename A>
883 class ExternalRelocationsAtom : public RelocationsAtomAbstract
884 {
885 public:
886 ExternalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
887 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
888
889 // overrides of ld::Atom
890 virtual const char* name() const { return "external relocations"; }
891 virtual uint64_t size() const;
892 virtual void copyRawContent(uint8_t buffer[]) const;
893 // overrides of ClassicLinkEditAtom
894 virtual void encode() {}
895 // overrides of RelocationsAtomAbstract
896 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
897 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
898 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*);
899 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*);
900 virtual uint64_t relocBaseAddress(ld::Internal& state);
901 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
902 const ld::Atom* inAtom, uint32_t offsetInAtom,
903 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
904 const ld::Atom* toTarget, uint64_t toAddend,
905 const ld::Atom* fromTarget, uint64_t fromAddend) { }
906
907
908 private:
909 typedef typename A::P P;
910 typedef typename A::P::E E;
911 typedef typename A::P::uint_t pint_t;
912
913 struct LocAndAtom {
914 LocAndAtom(uint64_t l, const ld::Atom* a) : loc(l), atom(a), symbolIndex(0) {}
915
916 uint64_t loc;
917 const ld::Atom* atom;
918 uint32_t symbolIndex;
919
920 bool operator<(const LocAndAtom& rhs) const {
921 // sort first by symbol number
922 if ( this->symbolIndex != rhs.symbolIndex )
923 return (this->symbolIndex < rhs.symbolIndex);
924 // then sort all uses of the same symbol by address
925 return (this->loc < rhs.loc);
926 }
927
928 };
929
930 static uint32_t pointerReloc();
931 static uint32_t callReloc();
932
933 mutable std::vector<LocAndAtom> _pointerLocations;
934 mutable std::vector<LocAndAtom> _callSiteLocations;
935
936 static ld::Section _s_section;
937 };
938
939 template <typename A>
940 ld::Section ExternalRelocationsAtom<A>::_s_section("__LINKEDIT", "__extrn_relocs", ld::Section::typeLinkEdit, true);
941
942 template <>
943 uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
944 {
945 // for x86_64 the reloc base address starts at __DATA segment
946 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
947 ld::Internal::FinalSection* sect = *sit;
948 if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
949 return sect->address;
950 }
951 throw "writable (__DATA) segment not found";
952 }
953
954 template <typename A>
955 uint64_t ExternalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
956 {
957 return 0;
958 }
959
960 template <typename A>
961 void ExternalRelocationsAtom<A>::addExternalPointerReloc(uint64_t addr, const ld::Atom* target)
962 {
963 _pointerLocations.push_back(LocAndAtom(addr, target));
964 }
965
966 template <typename A>
967 void ExternalRelocationsAtom<A>::addExternalCallSiteReloc(uint64_t addr, const ld::Atom* target)
968 {
969 _callSiteLocations.push_back(LocAndAtom(addr, target));
970 }
971
972
973 template <typename A>
974 uint64_t ExternalRelocationsAtom<A>::size() const
975 {
976 if ( _options.outputKind() == Options::kStaticExecutable ) {
977 assert(_pointerLocations.size() == 0);
978 assert(_callSiteLocations.size() == 0);
979 }
980 return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info<P>);
981 }
982
983 #if SUPPORT_ARCH_arm64
984 template <> uint32_t ExternalRelocationsAtom<arm64>::pointerReloc() { return ARM64_RELOC_UNSIGNED; }
985 #endif
986 #if SUPPORT_ARCH_arm_any
987 template <> uint32_t ExternalRelocationsAtom<arm>::pointerReloc() { return ARM_RELOC_VANILLA; }
988 #endif
989 template <> uint32_t ExternalRelocationsAtom<x86>::pointerReloc() { return GENERIC_RELOC_VANILLA; }
990 template <> uint32_t ExternalRelocationsAtom<x86_64>::pointerReloc() { return X86_64_RELOC_UNSIGNED; }
991
992
993 template <> uint32_t ExternalRelocationsAtom<x86_64>::callReloc() { return X86_64_RELOC_BRANCH; }
994 template <> uint32_t ExternalRelocationsAtom<x86>::callReloc() { return GENERIC_RELOC_VANILLA; }
995 #if SUPPORT_ARCH_arm64
996 template <> uint32_t ExternalRelocationsAtom<arm64>::callReloc() { return ARM64_RELOC_BRANCH26; }
997 #endif
998
999 template <typename A>
1000 uint32_t ExternalRelocationsAtom<A>::callReloc()
1001 {
1002 assert(0 && "external call relocs not implemented");
1003 return 0;
1004 }
1005
1006
1007 template <typename A>
1008 void ExternalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1009 {
1010 macho_relocation_info<P>* r = (macho_relocation_info<P>*)buffer;
1011
1012 // assign symbol index, now that symbol table is built
1013 for (typename std::vector<LocAndAtom>::iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it) {
1014 it->symbolIndex = symbolIndex(it->atom);
1015 }
1016 std::sort(_pointerLocations.begin(), _pointerLocations.end());
1017 for (typename std::vector<LocAndAtom>::const_iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it, ++r) {
1018 r->set_r_address(it->loc);
1019 r->set_r_symbolnum(it->symbolIndex);
1020 r->set_r_pcrel(false);
1021 r->set_r_length();
1022 r->set_r_extern(true);
1023 r->set_r_type(this->pointerReloc());
1024 }
1025
1026 for (typename std::vector<LocAndAtom>::iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it) {
1027 it->symbolIndex = symbolIndex(it->atom);
1028 }
1029 std::sort(_callSiteLocations.begin(), _callSiteLocations.end());
1030 for (typename std::vector<LocAndAtom>::const_iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it, ++r) {
1031 r->set_r_address(it->loc);
1032 r->set_r_symbolnum(it->symbolIndex);
1033 r->set_r_pcrel(true);
1034 r->set_r_length(2);
1035 r->set_r_extern(true);
1036 r->set_r_type(this->callReloc());
1037 }
1038 }
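// Annotation (not part of the original source): symbol indexes are assigned here, in
// copyRawContent(), because the symbol table has been fully laid out by that point
// (see the comment above).  The entries are then sorted with LocAndAtom::operator<,
// i.e. by symbol index and then by address, so all external relocations for one symbol
// are emitted together, in ascending address order.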
1039
1040
1041 template <typename A>
1042 class SectionRelocationsAtom : public RelocationsAtomAbstract
1043 {
1044 public:
1045 SectionRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1046 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
1047
1048 // overrides of ld::Atom
1049 virtual const char* name() const { return "section relocations"; }
1050 virtual uint64_t size() const;
1051 virtual void copyRawContent(uint8_t buffer[]) const;
1052 // overrides of ClassicLinkEditAtom
1053 virtual void encode();
1054 // overrides of RelocationsAtomAbstract
1055 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
1056 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
1057 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
1058 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
1059 virtual uint64_t relocBaseAddress(ld::Internal& state) { return 0; }
1060 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
1061 const ld::Atom* inAtom, uint32_t offsetInAtom,
1062 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1063 const ld::Atom* toTarget, uint64_t toAddend,
1064 const ld::Atom* fromTarget, uint64_t fromAddend);
1065
1066 private:
1067 typedef typename A::P P;
1068 typedef typename A::P::E E;
1069 typedef typename A::P::uint_t pint_t;
1070
1071
1072 struct Entry {
1073 ld::Fixup::Kind kind;
1074 bool toTargetUsesExternalReloc;
1075 bool fromTargetUsesExternalReloc;
1076 const ld::Atom* inAtom;
1077 uint32_t offsetInAtom;
1078 const ld::Atom* toTarget;
1079 uint64_t toAddend;
1080 const ld::Atom* fromTarget;
1081 uint64_t fromAddend;
1082 };
1083 uint32_t sectSymNum(bool external, const ld::Atom* target);
1084 void encodeSectionReloc(ld::Internal::FinalSection* sect,
1085 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs);
1086
1087 struct SectionAndEntries {
1088 ld::Internal::FinalSection* sect;
1089 std::vector<Entry> entries;
1090 std::vector<macho_relocation_info<P> > relocs;
1091 };
1092
1093 std::vector<SectionAndEntries> _entriesBySection;
1094
1095 static ld::Section _s_section;
1096 };
1097
1098 template <typename A>
1099 ld::Section SectionRelocationsAtom<A>::_s_section("__LINKEDIT", "__sect_relocs", ld::Section::typeLinkEdit, true);
1100
1101
1102
1103
1104 template <typename A>
1105 uint64_t SectionRelocationsAtom<A>::size() const
1106 {
1107 uint32_t count = 0;
1108 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1109 const SectionAndEntries& se = *it;
1110 count += se.relocs.size();
1111 }
1112 return count * sizeof(macho_relocation_info<P>);
1113 }
1114
1115 template <typename A>
1116 void SectionRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1117 {
1118 uint32_t offset = 0;
1119 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1120 const SectionAndEntries& se = *it;
1121 memcpy(&buffer[offset], &se.relocs[0], se.relocs.size()*sizeof(macho_relocation_info<P>));
1122 offset += (se.relocs.size() * sizeof(macho_relocation_info<P>));
1123 }
1124 }
1125
1126
1127 template <>
1128 void SectionRelocationsAtom<x86_64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1129 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1130 {
1131 macho_relocation_info<P> reloc1;
1132 macho_relocation_info<P> reloc2;
1133 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1134 bool external = entry.toTargetUsesExternalReloc;
1135 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1136 bool fromExternal = false;
1137 uint32_t fromSymbolNum = 0;
1138 if ( entry.fromTarget != NULL ) {
1139 fromExternal = entry.fromTargetUsesExternalReloc;
1140 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1141 }
1142
1143
1144 switch ( entry.kind ) {
1145 case ld::Fixup::kindStoreX86BranchPCRel32:
1146 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1147 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1148 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1149 reloc1.set_r_address(address);
1150 reloc1.set_r_symbolnum(symbolNum);
1151 reloc1.set_r_pcrel(true);
1152 reloc1.set_r_length(2);
1153 reloc1.set_r_extern(external);
1154 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1155 relocs.push_back(reloc1);
1156 break;
1157
1158 case ld::Fixup::kindStoreX86BranchPCRel8:
1159 reloc1.set_r_address(address);
1160 reloc1.set_r_symbolnum(symbolNum);
1161 reloc1.set_r_pcrel(true);
1162 reloc1.set_r_length(0);
1163 reloc1.set_r_extern(external);
1164 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1165 relocs.push_back(reloc1);
1166 break;
1167
1168 case ld::Fixup::kindStoreX86PCRel32:
1169 case ld::Fixup::kindStoreTargetAddressX86PCRel32:
1170 reloc1.set_r_address(address);
1171 reloc1.set_r_symbolnum(symbolNum);
1172 reloc1.set_r_pcrel(true);
1173 reloc1.set_r_length(2);
1174 reloc1.set_r_extern(external);
1175 reloc1.set_r_type(X86_64_RELOC_SIGNED);
1176 relocs.push_back(reloc1);
1177 break;
1178
1179 case ld::Fixup::kindStoreX86PCRel32_1:
1180 reloc1.set_r_address(address);
1181 reloc1.set_r_symbolnum(symbolNum);
1182 reloc1.set_r_pcrel(true);
1183 reloc1.set_r_length(2);
1184 reloc1.set_r_extern(external);
1185 reloc1.set_r_type(X86_64_RELOC_SIGNED_1);
1186 relocs.push_back(reloc1);
1187 break;
1188
1189 case ld::Fixup::kindStoreX86PCRel32_2:
1190 reloc1.set_r_address(address);
1191 reloc1.set_r_symbolnum(symbolNum);
1192 reloc1.set_r_pcrel(true);
1193 reloc1.set_r_length(2);
1194 reloc1.set_r_extern(external);
1195 reloc1.set_r_type(X86_64_RELOC_SIGNED_2);
1196 relocs.push_back(reloc1);
1197 break;
1198
1199 case ld::Fixup::kindStoreX86PCRel32_4:
1200 reloc1.set_r_address(address);
1201 reloc1.set_r_symbolnum(symbolNum);
1202 reloc1.set_r_pcrel(true);
1203 reloc1.set_r_length(2);
1204 reloc1.set_r_extern(external);
1205 reloc1.set_r_type(X86_64_RELOC_SIGNED_4);
1206 relocs.push_back(reloc1);
1207 break;
1208
1209 case ld::Fixup::kindStoreX86PCRel32GOTLoad:
1210 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
1211 reloc1.set_r_address(address);
1212 reloc1.set_r_symbolnum(symbolNum);
1213 reloc1.set_r_pcrel(true);
1214 reloc1.set_r_length(2);
1215 reloc1.set_r_extern(external);
1216 reloc1.set_r_type(X86_64_RELOC_GOT_LOAD);
1217 relocs.push_back(reloc1);
1218 break;
1219
1220 case ld::Fixup::kindStoreX86PCRel32GOT:
1221 reloc1.set_r_address(address);
1222 reloc1.set_r_symbolnum(symbolNum);
1223 reloc1.set_r_pcrel(true);
1224 reloc1.set_r_length(2);
1225 reloc1.set_r_extern(external);
1226 reloc1.set_r_type(X86_64_RELOC_GOT);
1227 relocs.push_back(reloc1);
1228 break;
1229
1230 case ld::Fixup::kindStoreLittleEndian64:
1231 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1232 if ( entry.fromTarget != NULL ) {
1233 // this is a pointer-diff
1234 reloc1.set_r_address(address);
1235 reloc1.set_r_symbolnum(symbolNum);
1236 reloc1.set_r_pcrel(false);
1237 reloc1.set_r_length(3);
1238 reloc1.set_r_extern(external);
1239 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1240 reloc2.set_r_address(address);
1241 reloc2.set_r_symbolnum(fromSymbolNum);
1242 reloc2.set_r_pcrel(false);
1243 reloc2.set_r_length(3);
1244 reloc2.set_r_extern(fromExternal);
1245 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1246 relocs.push_back(reloc2);
1247 relocs.push_back(reloc1);
1248 }
1249 else {
1250 // regular pointer
1251 reloc1.set_r_address(address);
1252 reloc1.set_r_symbolnum(symbolNum);
1253 reloc1.set_r_pcrel(false);
1254 reloc1.set_r_length(3);
1255 reloc1.set_r_extern(external);
1256 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1257 relocs.push_back(reloc1);
1258 }
1259 break;
1260
1261 case ld::Fixup::kindStoreLittleEndian32:
1262 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1263 if ( entry.fromTarget != NULL ) {
1264 // this is a pointer-diff
1265 reloc1.set_r_address(address);
1266 reloc1.set_r_symbolnum(symbolNum);
1267 reloc1.set_r_pcrel(false);
1268 reloc1.set_r_length(2);
1269 reloc1.set_r_extern(external);
1270 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1271 reloc2.set_r_address(address);
1272 reloc2.set_r_symbolnum(fromSymbolNum);
1273 reloc2.set_r_pcrel(false);
1274 reloc2.set_r_length(2);
1275 reloc2.set_r_extern(fromExternal);
1276 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1277 relocs.push_back(reloc2);
1278 relocs.push_back(reloc1);
1279 }
1280 else {
1281 // regular pointer
1282 reloc1.set_r_address(address);
1283 reloc1.set_r_symbolnum(symbolNum);
1284 reloc1.set_r_pcrel(false);
1285 reloc1.set_r_length(2);
1286 reloc1.set_r_extern(external);
1287 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1288 relocs.push_back(reloc1);
1289 }
1290 break;
1291 case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad:
1292 reloc1.set_r_address(address);
1293 reloc1.set_r_symbolnum(symbolNum);
1294 reloc1.set_r_pcrel(true);
1295 reloc1.set_r_length(2);
1296 reloc1.set_r_extern(external);
1297 reloc1.set_r_type(X86_64_RELOC_TLV);
1298 relocs.push_back(reloc1);
1299 break;
1300 default:
1301 assert(0 && "need to handle -r reloc");
1302
1303 }
1304
1305 }
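// Annotation (not part of the original source): a worked example for the x86_64
// encoder above.  In -r output, a 64-bit pointer difference such as ".quad _b - _a"
// (toTarget = _b, fromTarget = _a) takes the kindStoreLittleEndian64 branch with
// fromTarget != NULL and produces a pair at the same r_address: first an
// X86_64_RELOC_SUBTRACTOR referring to _a, then an X86_64_RELOC_UNSIGNED referring
// to _b, both with r_length = 3 (8 bytes) and r_pcrel = 0.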
1306
1307
1308
1309 template <typename A>
1310 uint32_t SectionRelocationsAtom<A>::sectSymNum(bool external, const ld::Atom* target)
1311 {
1312 if ( target->definition() == ld::Atom::definitionAbsolute )
1313 return R_ABS;
1314 if ( external )
1315 return this->symbolIndex(target); // in external relocations, r_symbolnum field is symbol index
1316 else
1317 return target->machoSection(); // in non-extern relocations, r_symbolnum is mach-o section index of target
1318 }
1319
1320 template <>
1321 void SectionRelocationsAtom<x86>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1322 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1323 {
1324 macho_relocation_info<P> reloc1;
1325 macho_relocation_info<P> reloc2;
1326 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1327 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1328 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1329 bool external = entry.toTargetUsesExternalReloc;
1330 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1331 bool fromExternal = false;
1332 uint32_t fromSymbolNum = 0;
1333 if ( entry.fromTarget != NULL ) {
1334 fromExternal = entry.fromTargetUsesExternalReloc;
1335 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1336 }
1337
1338 switch ( entry.kind ) {
1339 case ld::Fixup::kindStoreX86PCRel32:
1340 case ld::Fixup::kindStoreX86BranchPCRel32:
1341 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1342 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1343 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1344 if ( !external && (entry.toAddend != 0) ) {
1345 				// use scattered reloc if target offset is non-zero
1346 sreloc1->set_r_scattered(true);
1347 sreloc1->set_r_pcrel(true);
1348 sreloc1->set_r_length(2);
1349 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1350 sreloc1->set_r_address(address);
1351 sreloc1->set_r_value(entry.toTarget->finalAddress());
1352 }
1353 else {
1354 reloc1.set_r_address(address);
1355 reloc1.set_r_symbolnum(symbolNum);
1356 reloc1.set_r_pcrel(true);
1357 reloc1.set_r_length(2);
1358 reloc1.set_r_extern(external);
1359 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1360 }
1361 relocs.push_back(reloc1);
1362 break;
1363
1364 case ld::Fixup::kindStoreX86BranchPCRel8:
1365 if ( !external && (entry.toAddend != 0) ) {
1366 				// use scattered reloc if target offset is non-zero
1367 sreloc1->set_r_scattered(true);
1368 sreloc1->set_r_pcrel(true);
1369 sreloc1->set_r_length(0);
1370 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1371 sreloc1->set_r_address(address);
1372 sreloc1->set_r_value(entry.toTarget->finalAddress());
1373 }
1374 else {
1375 reloc1.set_r_address(address);
1376 reloc1.set_r_symbolnum(symbolNum);
1377 reloc1.set_r_pcrel(true);
1378 reloc1.set_r_length(0);
1379 reloc1.set_r_extern(external);
1380 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1381 }
1382 relocs.push_back(reloc1);
1383 break;
1384
1385 case ld::Fixup::kindStoreX86PCRel16:
1386 if ( !external && (entry.toAddend != 0) ) {
1387 				// use scattered reloc if target offset is non-zero
1388 sreloc1->set_r_scattered(true);
1389 sreloc1->set_r_pcrel(true);
1390 sreloc1->set_r_length(1);
1391 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1392 sreloc1->set_r_address(address);
1393 sreloc1->set_r_value(entry.toTarget->finalAddress());
1394 }
1395 else {
1396 reloc1.set_r_address(address);
1397 reloc1.set_r_symbolnum(symbolNum);
1398 reloc1.set_r_pcrel(true);
1399 reloc1.set_r_length(1);
1400 reloc1.set_r_extern(external);
1401 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1402 }
1403 relocs.push_back(reloc1);
1404 break;
1405
1406 case ld::Fixup::kindStoreLittleEndian32:
1407 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1408 if ( entry.fromTarget != NULL ) {
1409 // this is a pointer-diff
1410 sreloc1->set_r_scattered(true);
1411 sreloc1->set_r_pcrel(false);
1412 sreloc1->set_r_length(2);
1413 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1414 sreloc1->set_r_type(GENERIC_RELOC_LOCAL_SECTDIFF);
1415 else
1416 sreloc1->set_r_type(GENERIC_RELOC_SECTDIFF);
1417 sreloc1->set_r_address(address);
1418 if ( entry.toTarget == entry.inAtom ) {
1419 if ( entry.toAddend > entry.toTarget->size() )
1420 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1421 else
1422 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1423 }
1424 else
1425 sreloc1->set_r_value(entry.toTarget->finalAddress());
1426 sreloc2->set_r_scattered(true);
1427 sreloc2->set_r_pcrel(false);
1428 sreloc2->set_r_length(2);
1429 sreloc2->set_r_type(GENERIC_RELOC_PAIR);
1430 sreloc2->set_r_address(0);
1431 if ( entry.fromTarget == entry.inAtom ) {
1432 if ( entry.fromAddend > entry.fromTarget->size() )
1433 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.offsetInAtom);
1434 else
1435 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1436 }
1437 else
1438 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1439 relocs.push_back(reloc1);
1440 relocs.push_back(reloc2);
1441 }
1442 else {
1443 // regular pointer
1444 if ( !external && (entry.toAddend != 0) && (entry.toTarget->symbolTableInclusion() != ld::Atom::symbolTableNotIn) ) {
1445 // use scattered reloc if target offset is non-zero into named atom (5658046)
1446 sreloc1->set_r_scattered(true);
1447 sreloc1->set_r_pcrel(false);
1448 sreloc1->set_r_length(2);
1449 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1450 sreloc1->set_r_address(address);
1451 sreloc1->set_r_value(entry.toTarget->finalAddress());
1452 }
1453 else {
1454 reloc1.set_r_address(address);
1455 reloc1.set_r_symbolnum(symbolNum);
1456 reloc1.set_r_pcrel(false);
1457 reloc1.set_r_length(2);
1458 reloc1.set_r_extern(external);
1459 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1460 }
1461 relocs.push_back(reloc1);
1462 }
1463 break;
1464 case ld::Fixup::kindStoreX86PCRel32TLVLoad:
1465 case ld::Fixup::kindStoreX86Abs32TLVLoad:
1466 case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad:
1467 reloc1.set_r_address(address);
1468 reloc1.set_r_symbolnum(symbolNum);
1469 reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad);
1470 reloc1.set_r_length(2);
1471 reloc1.set_r_extern(external);
1472 reloc1.set_r_type(GENERIC_RLEOC_TLV);
1473 relocs.push_back(reloc1);
1474 break;
1475 default:
1476 assert(0 && "need to handle -r reloc");
1477
1478 }
1479 }
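// Annotation (not part of the original source): the scattered forms used above exist
// because a plain non-extern relocation records only a section number, which is not
// enough to identify the intended atom when the fixup target has a non-zero addend.
// A scattered relocation stores the target's address in r_value instead, so later
// processing can still tell which atom was being referenced.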
1480
1481
1482
1483 #if SUPPORT_ARCH_arm_any
1484 template <>
1485 void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1486 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1487 {
1488 macho_relocation_info<P> reloc1;
1489 macho_relocation_info<P> reloc2;
1490 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1491 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1492 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1493 bool external = entry.toTargetUsesExternalReloc;
1494 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1495 bool fromExternal = false;
1496 uint32_t fromSymbolNum = 0;
1497 if ( entry.fromTarget != NULL ) {
1498 fromExternal = entry.fromTargetUsesExternalReloc;
1499 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1500 }
1501
1502
1503 switch ( entry.kind ) {
1504 case ld::Fixup::kindStoreTargetAddressARMBranch24:
1505 case ld::Fixup::kindStoreARMBranch24:
1506 case ld::Fixup::kindStoreARMDtraceCallSiteNop:
1507 case ld::Fixup::kindStoreARMDtraceIsEnableSiteClear:
1508 if ( !external && (entry.toAddend != 0) ) {
1509 				// use scattered reloc if target offset is non-zero
1510 sreloc1->set_r_scattered(true);
1511 sreloc1->set_r_pcrel(true);
1512 sreloc1->set_r_length(2);
1513 sreloc1->set_r_type(ARM_RELOC_BR24);
1514 sreloc1->set_r_address(address);
1515 sreloc1->set_r_value(entry.toTarget->finalAddress());
1516 }
1517 else {
1518 reloc1.set_r_address(address);
1519 reloc1.set_r_symbolnum(symbolNum);
1520 reloc1.set_r_pcrel(true);
1521 reloc1.set_r_length(2);
1522 reloc1.set_r_extern(external);
1523 reloc1.set_r_type(ARM_RELOC_BR24);
1524 }
1525 relocs.push_back(reloc1);
1526 break;
1527
1528 case ld::Fixup::kindStoreTargetAddressThumbBranch22:
1529 case ld::Fixup::kindStoreThumbBranch22:
1530 case ld::Fixup::kindStoreThumbDtraceCallSiteNop:
1531 case ld::Fixup::kindStoreThumbDtraceIsEnableSiteClear:
1532 if ( !external && (entry.toAddend != 0) ) {
1533 				// use scattered reloc if target offset is non-zero
1534 sreloc1->set_r_scattered(true);
1535 sreloc1->set_r_pcrel(true);
1536 sreloc1->set_r_length(2);
1537 sreloc1->set_r_type(ARM_THUMB_RELOC_BR22);
1538 sreloc1->set_r_address(address);
1539 sreloc1->set_r_value(entry.toTarget->finalAddress());
1540 }
1541 else {
1542 reloc1.set_r_address(address);
1543 reloc1.set_r_symbolnum(symbolNum);
1544 reloc1.set_r_pcrel(true);
1545 reloc1.set_r_length(2);
1546 reloc1.set_r_extern(external);
1547 reloc1.set_r_type(ARM_THUMB_RELOC_BR22);
1548 }
1549 relocs.push_back(reloc1);
1550 break;
1551
1552 case ld::Fixup::kindStoreLittleEndian32:
1553 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1554 if ( entry.fromTarget != NULL ) {
1555 // this is a pointer-diff
1556 sreloc1->set_r_scattered(true);
1557 sreloc1->set_r_pcrel(false);
1558 sreloc1->set_r_length(2);
1559 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1560 sreloc1->set_r_type(ARM_RELOC_LOCAL_SECTDIFF);
1561 else
1562 sreloc1->set_r_type(ARM_RELOC_SECTDIFF);
1563 sreloc1->set_r_address(address);
1564 if ( entry.toTarget == entry.inAtom ) {
1565 if ( entry.toAddend > entry.toTarget->size() )
1566 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1567 else
1568 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1569 }
1570 else {
1571 sreloc1->set_r_value(entry.toTarget->finalAddress());
1572 }
1573 sreloc2->set_r_scattered(true);
1574 sreloc2->set_r_pcrel(false);
1575 sreloc2->set_r_length(2);
1576 sreloc2->set_r_type(ARM_RELOC_PAIR);
1577 sreloc2->set_r_address(0);
1578 if ( entry.fromTarget == entry.inAtom ) {
1579 //unsigned int pcBaseOffset = entry.inAtom->isThumb() ? 4 : 8;
1580 //if ( entry.fromAddend > pcBaseOffset )
1581 // sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend-pcBaseOffset);
1582 //else
1583 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1584 }
1585 else {
1586 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1587 }
1588 relocs.push_back(reloc1);
1589 relocs.push_back(reloc2);
1590 }
1591 else {
1592 // regular pointer
1593 if ( !external && (entry.toAddend != 0) ) {
1594 						// use scattered reloc if target offset is non-zero
1595 sreloc1->set_r_scattered(true);
1596 sreloc1->set_r_pcrel(false);
1597 sreloc1->set_r_length(2);
1598 sreloc1->set_r_type(ARM_RELOC_VANILLA);
1599 sreloc1->set_r_address(address);
1600 sreloc1->set_r_value(entry.toTarget->finalAddress());
1601 }
1602 else {
1603 reloc1.set_r_address(address);
1604 reloc1.set_r_symbolnum(symbolNum);
1605 reloc1.set_r_pcrel(false);
1606 reloc1.set_r_length(2);
1607 reloc1.set_r_extern(external);
1608 reloc1.set_r_type(ARM_RELOC_VANILLA);
1609 }
1610 relocs.push_back(reloc1);
1611 }
1612 break;
1613
1614 case ld::Fixup::kindStoreARMLow16:
1615 case ld::Fixup::kindStoreARMHigh16:
1616 case ld::Fixup::kindStoreThumbLow16:
1617 case ld::Fixup::kindStoreThumbHigh16:
1618 {
1619 int len = 0;
1620 uint32_t otherHalf = 0;
1621 uint32_t value;
1622 if ( entry.fromTarget != NULL ) {
1623 // this is a sect-diff
1624 value = (entry.toTarget->finalAddress()+entry.toAddend) - (entry.fromTarget->finalAddress()+entry.fromAddend);
1625 }
1626 else {
1627 // this is an absolute address
1628 value = entry.toAddend;
1629 if ( !external )
1630 value += entry.toTarget->finalAddress();
1631 }
1632 switch ( entry.kind ) {
1633 case ld::Fixup::kindStoreARMLow16:
1634 len = 0;
1635 otherHalf = value >> 16;
1636 break;
1637 case ld::Fixup::kindStoreARMHigh16:
1638 len = 1;
1639 otherHalf = value & 0xFFFF;
1640 break;
1641 case ld::Fixup::kindStoreThumbLow16:
1642 len = 2;
1643 otherHalf = value >> 16;
1644 break;
1645 case ld::Fixup::kindStoreThumbHigh16:
1646 len = 3;
1647 otherHalf = value & 0xFFFF;
1648 break;
1649 default:
1650 break;
1651 }
1652 if ( entry.fromTarget != NULL ) {
1653 // this is a sect-diff
1654 sreloc1->set_r_scattered(true);
1655 sreloc1->set_r_pcrel(false);
1656 sreloc1->set_r_length(len);
1657 sreloc1->set_r_type(ARM_RELOC_HALF_SECTDIFF);
1658 sreloc1->set_r_address(address);
1659 sreloc1->set_r_value(entry.toTarget->finalAddress());
1660 sreloc2->set_r_scattered(true);
1661 sreloc2->set_r_pcrel(false);
1662 sreloc2->set_r_length(len);
1663 sreloc2->set_r_type(ARM_RELOC_PAIR);
1664 sreloc2->set_r_address(otherHalf);
1665 if ( entry.fromTarget == entry.inAtom )
1666 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1667 else
1668 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1669 relocs.push_back(reloc1);
1670 relocs.push_back(reloc2);
1671 }
1672 else {
1673 					// this is an absolute address
1674 if ( !external && (entry.toAddend != 0) ) {
1675 						// use scattered reloc if target offset is non-zero
1676 sreloc1->set_r_scattered(true);
1677 sreloc1->set_r_pcrel(false);
1678 sreloc1->set_r_length(len);
1679 sreloc1->set_r_type(ARM_RELOC_HALF);
1680 sreloc1->set_r_address(address);
1681 sreloc1->set_r_value(entry.toTarget->finalAddress());
1682 reloc2.set_r_address(otherHalf);
1683 reloc2.set_r_symbolnum(0);
1684 reloc2.set_r_pcrel(false);
1685 reloc2.set_r_length(len);
1686 reloc2.set_r_extern(false);
1687 reloc2.set_r_type(ARM_RELOC_PAIR);
1688 relocs.push_back(reloc1);
1689 relocs.push_back(reloc2);
1690 }
1691 else {
1692 reloc1.set_r_address(address);
1693 reloc1.set_r_symbolnum(symbolNum);
1694 reloc1.set_r_pcrel(false);
1695 reloc1.set_r_length(len);
1696 reloc1.set_r_extern(external);
1697 reloc1.set_r_type(ARM_RELOC_HALF);
1698 reloc2.set_r_address(otherHalf); // other half
1699 reloc2.set_r_symbolnum(0);
1700 reloc2.set_r_pcrel(false);
1701 reloc2.set_r_length(len);
1702 reloc2.set_r_extern(false);
1703 reloc2.set_r_type(ARM_RELOC_PAIR);
1704 relocs.push_back(reloc1);
1705 relocs.push_back(reloc2);
1706 }
1707 }
1708 }
1709 break;
1710
1711 default:
1712 assert(0 && "need to handle -r reloc");
1713
1714 }
1715 }
1716 #endif
1717
1718 #if SUPPORT_ARCH_arm64
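// arm64 section relocations for -r output.  arm64 does not use scattered relocations;
// a non-zero addend on branch/page/pageoff fixups is instead emitted as a separate
// ARM64_RELOC_ADDEND record (addend carried in r_symbolnum) immediately before the
// relocation it modifies, and pointer differences are emitted as an
// ARM64_RELOC_SUBTRACTOR record followed by an ARM64_RELOC_UNSIGNED record.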
1719 template <>
1720 void SectionRelocationsAtom<arm64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1721 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1722 {
1723 macho_relocation_info<P> reloc1;
1724 macho_relocation_info<P> reloc2;
1725 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1726 bool external = entry.toTargetUsesExternalReloc;
1727 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1728 bool fromExternal = false;
1729 uint32_t fromSymbolNum = 0;
1730 if ( entry.fromTarget != NULL ) {
1731 fromExternal = entry.fromTargetUsesExternalReloc;
1732 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1733 }
1734
1735
1736 switch ( entry.kind ) {
1737 case ld::Fixup::kindStoreARM64Branch26:
1738 if ( entry.toAddend != 0 ) {
1739 assert(entry.toAddend < 0x400000);
1740 reloc2.set_r_address(address);
1741 reloc2.set_r_symbolnum(entry.toAddend);
1742 reloc2.set_r_pcrel(false);
1743 reloc2.set_r_length(2);
1744 reloc2.set_r_extern(false);
1745 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1746 relocs.push_back(reloc2);
1747 }
1748 // fall into next case
1749 case ld::Fixup::kindStoreTargetAddressARM64Branch26:
1750 case ld::Fixup::kindStoreARM64DtraceCallSiteNop:
1751 case ld::Fixup::kindStoreARM64DtraceIsEnableSiteClear:
1752 reloc1.set_r_address(address);
1753 reloc1.set_r_symbolnum(symbolNum);
1754 reloc1.set_r_pcrel(true);
1755 reloc1.set_r_length(2);
1756 reloc1.set_r_extern(external);
1757 reloc1.set_r_type(ARM64_RELOC_BRANCH26);
1758 relocs.push_back(reloc1);
1759 break;
1760
1761 case ld::Fixup::kindStoreARM64Page21:
1762 if ( entry.toAddend != 0 ) {
1763 assert(entry.toAddend < 0x400000);
1764 reloc2.set_r_address(address);
1765 reloc2.set_r_symbolnum(entry.toAddend);
1766 reloc2.set_r_pcrel(false);
1767 reloc2.set_r_length(2);
1768 reloc2.set_r_extern(false);
1769 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1770 relocs.push_back(reloc2);
1771 }
1772 // fall into next case
1773 case ld::Fixup::kindStoreTargetAddressARM64Page21:
1774 reloc1.set_r_address(address);
1775 reloc1.set_r_symbolnum(symbolNum);
1776 reloc1.set_r_pcrel(true);
1777 reloc1.set_r_length(2);
1778 reloc1.set_r_extern(external);
1779 reloc1.set_r_type(ARM64_RELOC_PAGE21);
1780 relocs.push_back(reloc1);
1781 break;
1782
1783 case ld::Fixup::kindStoreARM64PageOff12:
1784 if ( entry.toAddend != 0 ) {
1785 assert(entry.toAddend < 0x400000);
1786 reloc2.set_r_address(address);
1787 reloc2.set_r_symbolnum(entry.toAddend);
1788 reloc2.set_r_pcrel(false);
1789 reloc2.set_r_length(2);
1790 reloc2.set_r_extern(false);
1791 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1792 relocs.push_back(reloc2);
1793 }
1794 // fall into next case
1795 case ld::Fixup::kindStoreTargetAddressARM64PageOff12:
1796 reloc1.set_r_address(address);
1797 reloc1.set_r_symbolnum(symbolNum);
1798 reloc1.set_r_pcrel(false);
1799 reloc1.set_r_length(2);
1800 reloc1.set_r_extern(external);
1801 reloc1.set_r_type(ARM64_RELOC_PAGEOFF12);
1802 relocs.push_back(reloc1);
1803 break;
1804
1805 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
1806 case ld::Fixup::kindStoreARM64GOTLoadPage21:
1807 reloc1.set_r_address(address);
1808 reloc1.set_r_symbolnum(symbolNum);
1809 reloc1.set_r_pcrel(true);
1810 reloc1.set_r_length(2);
1811 reloc1.set_r_extern(external);
1812 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGE21);
1813 relocs.push_back(reloc1);
1814 break;
1815
1816 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
1817 case ld::Fixup::kindStoreARM64GOTLoadPageOff12:
1818 reloc1.set_r_address(address);
1819 reloc1.set_r_symbolnum(symbolNum);
1820 reloc1.set_r_pcrel(false);
1821 reloc1.set_r_length(2);
1822 reloc1.set_r_extern(external);
1823 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGEOFF12);
1824 relocs.push_back(reloc1);
1825 break;
1826
1827 case ld::Fixup::kindStoreARM64TLVPLoadPageOff12:
1828 case ld::Fixup::kindStoreTargetAddressARM64TLVPLoadPageOff12:
1829 reloc1.set_r_address(address);
1830 reloc1.set_r_symbolnum(symbolNum);
1831 reloc1.set_r_pcrel(false);
1832 reloc1.set_r_length(2);
1833 reloc1.set_r_extern(external);
1834 reloc1.set_r_type(ARM64_RELOC_TLVP_LOAD_PAGEOFF12);
1835 relocs.push_back(reloc1);
1836 break;
1837
1838 case ld::Fixup::kindStoreARM64TLVPLoadPage21:
1839 case ld::Fixup::kindStoreTargetAddressARM64TLVPLoadPage21:
1840 reloc1.set_r_address(address);
1841 reloc1.set_r_symbolnum(symbolNum);
1842 reloc1.set_r_pcrel(true);
1843 reloc1.set_r_length(2);
1844 reloc1.set_r_extern(external);
1845 reloc1.set_r_type(ARM64_RELOC_TLVP_LOAD_PAGE21);
1846 relocs.push_back(reloc1);
1847 break;
1848
1849 case ld::Fixup::kindStoreLittleEndian64:
1850 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1851 if ( entry.fromTarget != NULL ) {
1852 // this is a pointer-diff
1853 reloc1.set_r_address(address);
1854 reloc1.set_r_symbolnum(symbolNum);
1855 reloc1.set_r_pcrel(false);
1856 reloc1.set_r_length(3);
1857 reloc1.set_r_extern(external);
1858 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1859 reloc2.set_r_address(address);
1860 reloc2.set_r_symbolnum(fromSymbolNum);
1861 reloc2.set_r_pcrel(false);
1862 reloc2.set_r_length(3);
1863 reloc2.set_r_extern(fromExternal);
1864 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1865 relocs.push_back(reloc2);
1866 relocs.push_back(reloc1);
1867 }
1868 else {
1869 // regular pointer
1870 reloc1.set_r_address(address);
1871 reloc1.set_r_symbolnum(symbolNum);
1872 reloc1.set_r_pcrel(false);
1873 reloc1.set_r_length(3);
1874 reloc1.set_r_extern(external);
1875 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1876 relocs.push_back(reloc1);
1877 }
1878 break;
1879
1880 case ld::Fixup::kindStoreLittleEndian32:
1881 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1882 if ( entry.fromTarget != NULL ) {
1883 // this is a pointer-diff
1884 reloc1.set_r_address(address);
1885 reloc1.set_r_symbolnum(symbolNum);
1886 reloc1.set_r_pcrel(false);
1887 reloc1.set_r_length(2);
1888 reloc1.set_r_extern(external);
1889 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1890 reloc2.set_r_address(address);
1891 reloc2.set_r_symbolnum(fromSymbolNum);
1892 reloc2.set_r_pcrel(false);
1893 reloc2.set_r_length(2);
1894 reloc2.set_r_extern(fromExternal);
1895 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1896 relocs.push_back(reloc2);
1897 relocs.push_back(reloc1);
1898 }
1899 else {
1900 // regular pointer
1901 reloc1.set_r_address(address);
1902 reloc1.set_r_symbolnum(symbolNum);
1903 reloc1.set_r_pcrel(false);
1904 reloc1.set_r_length(2);
1905 reloc1.set_r_extern(external);
1906 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1907 relocs.push_back(reloc1);
1908 }
1909 break;
1910
1911 case ld::Fixup::kindStoreARM64PointerToGOT:
1912 reloc1.set_r_address(address);
1913 reloc1.set_r_symbolnum(symbolNum);
1914 reloc1.set_r_pcrel(false);
1915 reloc1.set_r_length(3);
1916 reloc1.set_r_extern(external);
1917 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1918 relocs.push_back(reloc1);
1919 break;
1920
1921 case ld::Fixup::kindStoreARM64PCRelToGOT:
1922 reloc1.set_r_address(address);
1923 reloc1.set_r_symbolnum(symbolNum);
1924 reloc1.set_r_pcrel(true);
1925 reloc1.set_r_length(2);
1926 reloc1.set_r_extern(external);
1927 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1928 relocs.push_back(reloc1);
1929 break;
1930
1931 default:
1932 assert(0 && "need to handle arm64 -r reloc");
1933
1934 }
1935
1936 }
1937 #endif // SUPPORT_ARCH_arm64
1938
1939
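// Records one section relocation to be emitted later by encode().  Entries are
// bucketed per final section; the function-local statics below cache the most
// recently used bucket so that consecutive calls for the same section skip the
// linear search (note this cache is not thread-safe).
//
// A minimal sketch of a call site, with illustrative (not actual) variable names:
//
//   relocsAtom->addSectionReloc(finalSect, ld::Fixup::kindStoreLittleEndian32,
//                               atom, offsetInAtom,
//                               /*toTargetUsesExternalReloc=*/true, /*fromTargetExternalReloc=*/false,
//                               targetAtom, /*toAddend=*/0,
//                               /*fromTarget=*/NULL, /*fromAddend=*/0);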
1940 template <typename A>
1941 void SectionRelocationsAtom<A>::addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind kind,
1942 const ld::Atom* inAtom, uint32_t offsetInAtom,
1943 							bool toTargetUsesExternalReloc, bool fromTargetExternalReloc,
1944 const ld::Atom* toTarget, uint64_t toAddend,
1945 const ld::Atom* fromTarget, uint64_t fromAddend)
1946 {
1947 Entry entry;
1948 entry.kind = kind;
1949 entry.toTargetUsesExternalReloc = toTargetUsesExternalReloc;
1950 entry.fromTargetUsesExternalReloc = fromTargetExternalReloc;
1951 entry.inAtom = inAtom;
1952 entry.offsetInAtom = offsetInAtom;
1953 entry.toTarget = toTarget;
1954 entry.toAddend = toAddend;
1955 entry.fromTarget = fromTarget;
1956 entry.fromAddend = fromAddend;
1957
1958 static ld::Internal::FinalSection* lastSection = NULL;
1959 static SectionAndEntries* lastSectionAndEntries = NULL;
1960
1961 if ( sect != lastSection ) {
1962 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1963 if ( sect == it->sect ) {
1964 lastSection = sect;
1965 lastSectionAndEntries = &*it;
1966 break;
1967 }
1968 }
1969 if ( sect != lastSection ) {
1970 SectionAndEntries tmp;
1971 tmp.sect = sect;
1972 _entriesBySection.push_back(tmp);
1973 lastSection = sect;
1974 lastSectionAndEntries = &_entriesBySection.back();
1975 }
1976 }
1977 lastSectionAndEntries->entries.push_back(entry);
1978 }
1979
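// Converts each recorded Entry into one or two macho_relocation_info records via the
// per-architecture encodeSectionReloc() specializations above, then stamps every
// final section with the starting index and count of its relocations in the
// combined output array.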
1980 template <typename A>
1981 void SectionRelocationsAtom<A>::encode()
1982 {
1983 // convert each Entry record to one or two reloc records
1984 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1985 SectionAndEntries& se = *it;
1986 for(typename std::vector<Entry>::iterator eit=se.entries.begin(); eit != se.entries.end(); ++eit) {
1987 encodeSectionReloc(se.sect, *eit, se.relocs);
1988 }
1989 }
1990
1991 	// update sections with start and count of relocs
1992 uint32_t index = 0;
1993 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1994 SectionAndEntries& se = *it;
1995 se.sect->relocStart = index;
1996 se.sect->relocCount = se.relocs.size();
1997 index += se.sect->relocCount;
1998 }
1999
2000 }
2001
2002
2003
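// The indirect symbol table is a flat array of 32-bit entries, one per atom in each
// stub, lazy-pointer and non-lazy-pointer section.  Each entry is either an index
// into the symbol table or one of the special values INDIRECT_SYMBOL_LOCAL /
// INDIRECT_SYMBOL_ABS.  Each section records the start (and, for stubs, the element
// size) of its slice via indirectSymTabStartIndex / indirectSymTabElementSize so the
// output writer can later fill in the corresponding section-header fields.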
2004 template <typename A>
2005 class IndirectSymbolTableAtom : public ClassicLinkEditAtom
2006 {
2007 public:
2008 IndirectSymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
2009 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)) { }
2010
2011 // overrides of ld::Atom
2012 virtual const char* name() const { return "indirect symbol table"; }
2013 virtual uint64_t size() const;
2014 virtual void copyRawContent(uint8_t buffer[]) const;
2015 // overrides of ClassicLinkEditAtom
2016 virtual void encode();
2017
2018 private:
2019 typedef typename A::P P;
2020 typedef typename A::P::E E;
2021 typedef typename A::P::uint_t pint_t;
2022
2023 void encodeStubSection(ld::Internal::FinalSection* sect);
2024 void encodeLazyPointerSection(ld::Internal::FinalSection* sect);
2025 void encodeNonLazyPointerSection(ld::Internal::FinalSection* sect);
2026 uint32_t symIndexOfStubAtom(const ld::Atom*);
2027 uint32_t symIndexOfLazyPointerAtom(const ld::Atom*);
2028 uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*);
2029 uint32_t symbolIndex(const ld::Atom*);
2030
2031
2032 std::vector<uint32_t> _entries;
2033
2034 static ld::Section _s_section;
2035 };
2036
2037 template <typename A>
2038 ld::Section IndirectSymbolTableAtom<A>::_s_section("__LINKEDIT", "__ind_sym_tab", ld::Section::typeLinkEdit, true);
2039
2040
2041
2042
2043 template <typename A>
2044 uint32_t IndirectSymbolTableAtom<A>::symbolIndex(const ld::Atom* atom)
2045 {
2046 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
2047 if ( pos != this->_writer._atomToSymbolIndex.end() )
2048 return pos->second;
2049 //fprintf(stderr, "_atomToSymbolIndex content:\n");
2050 //for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
2051 // fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
2052 //}
2053 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
2054 }
2055
2056 template <typename A>
2057 uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
2058 {
2059 for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
2060 if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
2061 ld::Atom::ContentType type = fit->u.target->contentType();
2062 if (( type == ld::Atom::typeLazyPointer) || (type == ld::Atom::typeLazyDylibPointer) )
2063 return symIndexOfLazyPointerAtom(fit->u.target);
2064 if ( type == ld::Atom::typeNonLazyPointer )
2065 return symIndexOfNonLazyPointerAtom(fit->u.target);
2066 }
2067 }
2068 throw "internal error: stub missing fixup to lazy pointer";
2069 }
2070
2071
2072 template <typename A>
2073 uint32_t IndirectSymbolTableAtom<A>::symIndexOfLazyPointerAtom(const ld::Atom* lpAtom)
2074 {
2075 for (ld::Fixup::iterator fit = lpAtom->fixupsBegin(); fit != lpAtom->fixupsEnd(); ++fit) {
2076 if ( fit->kind == ld::Fixup::kindLazyTarget ) {
2077 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
2078 return symbolIndex(fit->u.target);
2079 }
2080 }
2081 throw "internal error: lazy pointer missing fixupLazyTarget fixup";
2082 }
2083
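// Decides what goes in the indirect symbol table slot for a non-lazy pointer: a real
// symbol index when dyld (or a later link of the .o) still has to bind it, the
// special INDIRECT_SYMBOL_LOCAL value when the pointer is fully resolved at link
// time, or INDIRECT_SYMBOL_ABS for the fixup-less ImageLoader cache slot.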
2084 template <typename A>
2085 uint32_t IndirectSymbolTableAtom<A>::symIndexOfNonLazyPointerAtom(const ld::Atom* nlpAtom)
2086 {
2087 //fprintf(stderr, "symIndexOfNonLazyPointerAtom(%p) %s\n", nlpAtom, nlpAtom->name());
2088 for (ld::Fixup::iterator fit = nlpAtom->fixupsBegin(); fit != nlpAtom->fixupsEnd(); ++fit) {
2089 // non-lazy-pointer to a stripped symbol => no symbol index
2090 if ( fit->clusterSize != ld::Fixup::k1of1 )
2091 return INDIRECT_SYMBOL_LOCAL;
2092 const ld::Atom* target;
2093 switch ( fit->binding ) {
2094 case ld::Fixup::bindingDirectlyBound:
2095 target = fit->u.target;
2096 break;
2097 case ld::Fixup::bindingsIndirectlyBound:
2098 target = _state.indirectBindingTable[fit->u.bindingIndex];
2099 break;
2100 default:
2101 throw "internal error: unexpected non-lazy pointer binding";
2102 }
2103 bool targetIsGlobal = (target->scope() == ld::Atom::scopeGlobal);
2104 switch ( target->definition() ) {
2105 case ld::Atom::definitionRegular:
2106 if ( targetIsGlobal ) {
2107 if ( _options.outputKind() == Options::kObjectFile ) {
2108 // nlpointer to global symbol uses indirect symbol table in .o files
2109 return symbolIndex(target);
2110 }
2111 else if ( target->combine() == ld::Atom::combineByName ) {
2112 // dyld needs to bind nlpointer to global weak def
2113 return symbolIndex(target);
2114 }
2115 else if ( _options.nameSpace() != Options::kTwoLevelNameSpace ) {
2116 // dyld needs to bind nlpointer to global def linked for flat namespace
2117 return symbolIndex(target);
2118 }
2119 }
2120 break;
2121 case ld::Atom::definitionTentative:
2122 case ld::Atom::definitionAbsolute:
2123 if ( _options.outputKind() == Options::kObjectFile ) {
2124 // tentative def in .o file always uses symbol index
2125 return symbolIndex(target);
2126 }
2127 // dyld needs to bind nlpointer to global def linked for flat namespace
2128 if ( targetIsGlobal && _options.nameSpace() != Options::kTwoLevelNameSpace )
2129 return symbolIndex(target);
2130 break;
2131 case ld::Atom::definitionProxy:
2132 // dyld needs to bind nlpointer to something in another dylib
2133 {
2134 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target->file());
2135 if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
2136 throwf("illegal data reference to %s in lazy loaded dylib %s", target->name(), dylib->path());
2137 }
2138 return symbolIndex(target);
2139 }
2140 }
2141 if ( nlpAtom->fixupsBegin() == nlpAtom->fixupsEnd() ) {
2142 // no fixups means this is the ImageLoader cache slot
2143 return INDIRECT_SYMBOL_ABS;
2144 }
2145
2146 	// The magic index INDIRECT_SYMBOL_LOCAL tells dyld that it does not need to bind
2147 	// this non-lazy pointer.
2148 return INDIRECT_SYMBOL_LOCAL;
2149 }
2150
2151
2152
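// The encode*Section() helpers below append one indirect-symbol entry per atom, in
// section order, recording where each section's run begins.  For stub sections the
// stub size (taken from the section's first atom) is also recorded as
// indirectSymTabElementSize.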
2153 template <typename A>
2154 void IndirectSymbolTableAtom<A>::encodeStubSection(ld::Internal::FinalSection* sect)
2155 {
2156 sect->indirectSymTabStartIndex = _entries.size();
2157 sect->indirectSymTabElementSize = sect->atoms[0]->size();
2158 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2159 _entries.push_back(symIndexOfStubAtom(*ait));
2160 }
2161 }
2162
2163 template <typename A>
2164 void IndirectSymbolTableAtom<A>::encodeLazyPointerSection(ld::Internal::FinalSection* sect)
2165 {
2166 sect->indirectSymTabStartIndex = _entries.size();
2167 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2168 _entries.push_back(symIndexOfLazyPointerAtom(*ait));
2169 }
2170 }
2171
2172 template <typename A>
2173 void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
2174 {
2175 sect->indirectSymTabStartIndex = _entries.size();
2176 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2177 _entries.push_back(symIndexOfNonLazyPointerAtom(*ait));
2178 }
2179 }
2180
2181 template <typename A>
2182 void IndirectSymbolTableAtom<A>::encode()
2183 {
2184 // static executables should not have an indirect symbol table, unless PIE
2185 if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
2186 return;
2187
2188 // x86_64 kext bundles should not have an indirect symbol table unless using stubs
2189 if ( (this->_options.outputKind() == Options::kKextBundle) && !this->_options.kextsUseStubs() )
2190 return;
2191
2192 // slidable static executables (-static -pie) should not have an indirect symbol table
2193 if ( (this->_options.outputKind() == Options::kStaticExecutable) && this->_options.positionIndependentExecutable() )
2194 return;
2195
2196 // find all special sections that need a range of the indirect symbol table section
2197 for (std::vector<ld::Internal::FinalSection*>::iterator sit = this->_state.sections.begin(); sit != this->_state.sections.end(); ++sit) {
2198 ld::Internal::FinalSection* sect = *sit;
2199 switch ( sect->type() ) {
2200 case ld::Section::typeStub:
2201 case ld::Section::typeStubClose:
2202 this->encodeStubSection(sect);
2203 break;
2204 case ld::Section::typeLazyPointerClose:
2205 case ld::Section::typeLazyPointer:
2206 case ld::Section::typeLazyDylibPointer:
2207 this->encodeLazyPointerSection(sect);
2208 break;
2209 case ld::Section::typeNonLazyPointer:
2210 this->encodeNonLazyPointerSection(sect);
2211 break;
2212 default:
2213 break;
2214 }
2215 }
2216 }
2217
2218 template <typename A>
2219 uint64_t IndirectSymbolTableAtom<A>::size() const
2220 {
2221 return _entries.size() * sizeof(uint32_t);
2222 }
2223
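// The table entries are written with E::set32 so the emitted array uses the target
// architecture's byte order.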
2224 template <typename A>
2225 void IndirectSymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
2226 {
2227 uint32_t* array = (uint32_t*)buffer;
2228 for(unsigned long i=0; i < _entries.size(); ++i) {
2229 E::set32(array[i], _entries[i]);
2230 }
2231 }
2232
2233
2234
2235
2236
2237
2238
2239
2240 } // namespace tool
2241 } // namespace ld
2242
2243 #endif // __LINKEDIT_CLASSIC_HPP__