1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-*
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #ifndef __LINKEDIT_CLASSIC_HPP__
26 #define __LINKEDIT_CLASSIC_HPP__
27
28 #include <stdlib.h>
29 #include <sys/types.h>
30 #include <errno.h>
31 #include <limits.h>
32 #include <unistd.h>
33
34 #include <vector>
35 #include <unordered_map>
36
37 #include "Options.h"
38 #include "ld.hpp"
39 #include "Architectures.hpp"
40 #include "MachOFileAbstraction.hpp"
41
42 namespace ld {
43 namespace tool {
44
45
46
47 class ClassicLinkEditAtom : public ld::Atom
48 {
49 public:
50
51 // overrides of ld::Atom
52 virtual ld::File* file() const { return NULL; }
53 virtual uint64_t objectAddress() const { return 0; }
54
55 virtual void encode() = 0;
56 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe) { return false; }
57
58 ClassicLinkEditAtom(const Options& opts, ld::Internal& state,
59 OutputFile& writer, const ld::Section& sect,
60 unsigned int pointerSize)
61 : ld::Atom(sect, ld::Atom::definitionRegular,
62 ld::Atom::combineNever, ld::Atom::scopeTranslationUnit,
63 ld::Atom::typeUnclassified, ld::Atom::symbolTableNotIn,
64 false, false, false, ld::Atom::Alignment(log2(pointerSize))),
65 _options(opts), _state(state), _writer(writer) { }
66 protected:
67 const Options& _options;
68 ld::Internal& _state;
69 OutputFile& _writer;
70 };
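// Note: each concrete subclass below places itself in a "__LINKEDIT" section
// (see the static _s_section members) and builds its content in encode();
// encode() is presumably called only after atom addresses are final, since
// several implementations read finalAddress() while encoding.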
71
72
73
74 class StringPoolAtom : public ClassicLinkEditAtom
75 {
76 public:
77 StringPoolAtom(const Options& opts, ld::Internal& state,
78 OutputFile& writer, int pointerSize);
79
80 // overrides of ld::Atom
81 virtual const char* name() const { return "string pool"; }
82 virtual uint64_t size() const;
83 virtual void copyRawContent(uint8_t buffer[]) const;
84 // overrides of ClassicLinkEditAtom
85 virtual void encode() { }
86
87 int32_t add(const char* name);
88 int32_t addUnique(const char* name);
89 int32_t emptyString() { return 1; }
90 const char* stringForIndex(int32_t) const;
91 uint32_t currentOffset();
92
93 private:
94 enum { kBufferSize = 0x01000000 };
95 typedef std::unordered_map<const char*, int32_t, CStringHash, CStringEquals> StringToOffset;
96
97 const uint32_t _pointerSize;
98 std::vector<char*> _fullBuffers;
99 char* _currentBuffer;
100 uint32_t _currentBufferUsed;
101 StringToOffset _uniqueStrings;
102
103 static ld::Section _s_section;
104 };
105
106 ld::Section StringPoolAtom::_s_section("__LINKEDIT", "__string_pool", ld::Section::typeLinkEdit, true);
107
108
109 StringPoolAtom::StringPoolAtom(const Options& opts, ld::Internal& state, OutputFile& writer, int pointerSize)
110 : ClassicLinkEditAtom(opts, state, writer, _s_section, pointerSize),
111 _pointerSize(pointerSize), _currentBuffer(NULL), _currentBufferUsed(0)
112 {
113 _currentBuffer = new char[kBufferSize];
114 // burn first byte of string pool (so zero is never a valid string offset)
115 _currentBuffer[_currentBufferUsed++] = ' ';
116 // make offset 1 always point to an empty string
117 _currentBuffer[_currentBufferUsed++] = '\0';
118 }
119
120 uint64_t StringPoolAtom::size() const
121 {
122 // round size up to a multiple of the pointer size
123 return (kBufferSize * _fullBuffers.size() + _currentBufferUsed + _pointerSize-1) & (-_pointerSize);
124 }
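// Illustration of the rounding above: with _pointerSize == 8 and 13 bytes of
// string data, (13 + 8 - 1) & (-8) == 16, so the pool size is padded up to a
// multiple of the pointer size.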
125
126 void StringPoolAtom::copyRawContent(uint8_t buffer[]) const
127 {
128 uint64_t offset = 0;
129 for (unsigned int i=0; i < _fullBuffers.size(); ++i) {
130 memcpy(&buffer[offset], _fullBuffers[i], kBufferSize);
131 offset += kBufferSize;
132 }
133 memcpy(&buffer[offset], _currentBuffer, _currentBufferUsed);
134 // zero fill end to align
135 offset += _currentBufferUsed;
136 while ( (offset % _pointerSize) != 0 )
137 buffer[offset++] = 0;
138 }
139
140 int32_t StringPoolAtom::add(const char* str)
141 {
142 int32_t offset = kBufferSize * _fullBuffers.size() + _currentBufferUsed;
143 int lenNeeded = strlcpy(&_currentBuffer[_currentBufferUsed], str, kBufferSize-_currentBufferUsed)+1;
144 if ( (_currentBufferUsed+lenNeeded) < kBufferSize ) {
145 _currentBufferUsed += lenNeeded;
146 }
147 else {
148 int copied = kBufferSize-_currentBufferUsed-1;
149 // replace the trailing '\0' that strlcpy added with the real character
150 _currentBuffer[kBufferSize-1] = str[copied];
151 // alloc next buffer
152 _fullBuffers.push_back(_currentBuffer);
153 _currentBuffer = new char[kBufferSize];
154 _currentBufferUsed = 0;
155 // append rest of string
156 this->add(&str[copied+1]);
157 }
158 return offset;
159 }
160
161 uint32_t StringPoolAtom::currentOffset()
162 {
163 return kBufferSize * _fullBuffers.size() + _currentBufferUsed;
164 }
165
166
167 int32_t StringPoolAtom::addUnique(const char* str)
168 {
169 StringToOffset::iterator pos = _uniqueStrings.find(str);
170 if ( pos != _uniqueStrings.end() ) {
171 return pos->second;
172 }
173 else {
174 int32_t offset = this->add(str);
175 _uniqueStrings[str] = offset;
176 return offset;
177 }
178 }
179
180
181 const char* StringPoolAtom::stringForIndex(int32_t index) const
182 {
183 int32_t currentBufferStartIndex = kBufferSize * _fullBuffers.size();
184 int32_t maxIndex = currentBufferStartIndex + _currentBufferUsed;
185 // check for out of bounds
186 if ( index > maxIndex )
187 return "";
188 // check for index in _currentBuffer
189 if ( index > currentBufferStartIndex )
190 return &_currentBuffer[index-currentBufferStartIndex];
191 // otherwise index is in a full buffer
192 uint32_t fullBufferIndex = index/kBufferSize;
193 return &_fullBuffers[fullBufferIndex][index-(kBufferSize*fullBufferIndex)];
194 }
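// Illustrative example of the offset scheme: offsets are cumulative across
// buffers, so with kBufferSize == 0x01000000 and two full buffers, a string
// placed when _currentBufferUsed == 0x20 gets offset 0x02000020, and
// stringForIndex(0x02000020) maps back to &_currentBuffer[0x20].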
195
196
197
198 template <typename A>
199 class SymbolTableAtom : public ClassicLinkEditAtom
200 {
201 public:
202 SymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
203 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)),
204 _stabsStringsOffsetStart(0), _stabsStringsOffsetEnd(0),
205 _stabsIndexStart(0), _stabsIndexEnd(0) { }
206
207 // overrides of ld::Atom
208 virtual const char* name() const { return "symbol table"; }
209 virtual uint64_t size() const;
210 virtual void copyRawContent(uint8_t buffer[]) const;
211 // overrides of ClassicLinkEditAtom
212 virtual void encode();
213 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe);
214
215 private:
216 typedef typename A::P P;
217 typedef typename A::P::E E;
218 typedef typename A::P::uint_t pint_t;
219
220 bool addLocal(const ld::Atom* atom, StringPoolAtom* pool);
221 void addGlobal(const ld::Atom* atom, StringPoolAtom* pool);
222 void addImport(const ld::Atom* atom, StringPoolAtom* pool);
223 uint8_t classicOrdinalForProxy(const ld::Atom* atom);
224 uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
225 uint64_t valueForStab(const ld::relocatable::File::Stab& stab);
226 uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab);
227 bool isAltEntry(const ld::Atom* atom);
228
229 mutable std::vector<macho_nlist<P> > _globals;
230 mutable std::vector<macho_nlist<P> > _locals;
231 mutable std::vector<macho_nlist<P> > _imports;
232
233 uint32_t _stabsStringsOffsetStart;
234 uint32_t _stabsStringsOffsetEnd;
235 uint32_t _stabsIndexStart;
236 uint32_t _stabsIndexEnd;
237
238 static ld::Section _s_section;
239 static int _s_anonNameIndex;
240
241 };
242
243 template <typename A>
244 ld::Section SymbolTableAtom<A>::_s_section("__LINKEDIT", "__symbol_table", ld::Section::typeLinkEdit, true);
245
246 template <typename A>
247 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
248
249
250 template <typename A>
251 bool SymbolTableAtom<A>::isAltEntry(const ld::Atom* atom)
252 {
253 // alt entries have a group subordinate reference to the previous atom
254 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
255 if ( fit->kind == ld::Fixup::kindNoneGroupSubordinate ) {
256 if ( fit->binding == Fixup::bindingDirectlyBound ) {
257 const Atom* prevAtom = fit->u.target;
258 assert(prevAtom != NULL);
259 for (ld::Fixup::iterator fit2 = prevAtom->fixupsBegin(); fit2 != prevAtom->fixupsEnd(); ++fit2) {
260 if ( fit2->kind == ld::Fixup::kindNoneFollowOn ) {
261 if ( fit2->binding == Fixup::bindingDirectlyBound ) {
262 if ( fit2->u.target == atom )
263 return true;
264 }
265 }
266 }
267 }
268 }
269 }
270 return false;
271 }
272
273 template <typename A>
274 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
275 {
276 macho_nlist<P> entry;
277 assert(atom->symbolTableInclusion() != ld::Atom::symbolTableNotIn);
278
279 // set n_strx
280 const char* symbolName = atom->name();
281 char anonName[32];
282 if ( this->_options.outputKind() == Options::kObjectFile ) {
283 if ( atom->contentType() == ld::Atom::typeCString ) {
284 if ( atom->combine() == ld::Atom::combineByNameAndContent ) {
285 // don't use 'l' labels for x86_64 strings
286 // <rdar://problem/6605499> x86_64 obj-c runtime confused when static lib is stripped
287 sprintf(anonName, "LC%u", _s_anonNameIndex++);
288 symbolName = anonName;
289 }
290 }
291 else if ( atom->contentType() == ld::Atom::typeCFI ) {
292 if ( _options.removeEHLabels() )
293 return false;
294 // synthesize .eh name
295 if ( strcmp(atom->name(), "CIE") == 0 )
296 symbolName = "EH_Frame1";
297 else
298 symbolName = "func.eh";
299 }
300 else if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
301 // make auto-strip anonymous name for symbol
302 sprintf(anonName, "l%03u", _s_anonNameIndex++);
303 symbolName = anonName;
304 }
305 }
306 entry.set_n_strx(pool->add(symbolName));
307
308 // set n_type
309 uint8_t type = N_SECT;
310 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
311 type = N_ABS;
312 }
313 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
314 && (this->_options.outputKind() == Options::kObjectFile) ) {
315 // __OBJC __class has floating abs symbols for each class data structure
316 type = N_ABS;
317 }
318 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
319 type |= N_PEXT;
320 entry.set_n_type(type);
321
322 // set n_sect (section number of implementation )
323 if ( atom->definition() == ld::Atom::definitionAbsolute )
324 entry.set_n_sect(0);
325 else
326 entry.set_n_sect(atom->machoSection());
327
328 // set n_desc
329 uint16_t desc = 0;
330 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
331 desc |= REFERENCED_DYNAMICALLY;
332 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
333 desc |= N_NO_DEAD_STRIP;
334 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) )
335 desc |= N_WEAK_DEF;
336 if ( atom->isThumb() )
337 desc |= N_ARM_THUMB_DEF;
338 if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
339 desc |= N_ALT_ENTRY;
340 entry.set_n_desc(desc);
341
342 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
343 if ( atom->definition() == ld::Atom::definitionAbsolute )
344 entry.set_n_value(atom->objectAddress());
345 else
346 entry.set_n_value(atom->finalAddress());
347
348 // add to array
349 _locals.push_back(entry);
350 return true;
351 }
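// Note: the synthesized "l%03u" names rely on the convention (outside this
// file) that symbols whose names begin with a lowercase 'l' are treated as
// auto-strip labels, matching symbolTableInWithRandomAutoStripLabel above.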
352
353
354 template <typename A>
355 void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
356 {
357 macho_nlist<P> entry;
358
359 // set n_strx
360 const char* symbolName = atom->name();
361 char anonName[32];
362 if ( this->_options.outputKind() == Options::kObjectFile ) {
363 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
364 // make auto-strip anonymous name for symbol
365 sprintf(anonName, "l%03u", _s_anonNameIndex++);
366 symbolName = anonName;
367 }
368 }
369 entry.set_n_strx(pool->add(symbolName));
370
371 // set n_type
372 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
373 entry.set_n_type(N_EXT | N_ABS);
374 }
375 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
376 && (this->_options.outputKind() == Options::kObjectFile) ) {
377 // __OBJC __class has floating abs symbols for each class data structure
378 entry.set_n_type(N_EXT | N_ABS);
379 }
380 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
381 entry.set_n_type(N_EXT | N_INDR);
382 }
383 else {
384 entry.set_n_type(N_EXT | N_SECT);
385 if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (this->_options.outputKind() == Options::kObjectFile) ) {
386 if ( this->_options.keepPrivateExterns() )
387 entry.set_n_type(N_EXT | N_SECT | N_PEXT);
388 }
389 else if ( (atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)
390 && (atom->section().type() == ld::Section::typeMachHeader)
391 && !_options.positionIndependentExecutable() ) {
392 // the __mh_execute_header is historical magic in non-PIE executables and must be an absolute symbol
393 entry.set_n_type(N_EXT | N_ABS);
394 }
395 }
396
397 // set n_sect (section number of implementation)
398 if ( atom->definition() == ld::Atom::definitionAbsolute )
399 entry.set_n_sect(0);
400 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) )
401 entry.set_n_sect(0);
402 else
403 entry.set_n_sect(atom->machoSection());
404
405 // set n_desc
406 uint16_t desc = 0;
407 if ( atom->isThumb() )
408 desc |= N_ARM_THUMB_DEF;
409 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
410 desc |= REFERENCED_DYNAMICALLY;
411 if ( (atom->contentType() == ld::Atom::typeResolver) && (this->_options.outputKind() == Options::kObjectFile) )
412 desc |= N_SYMBOL_RESOLVER;
413 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
414 desc |= N_NO_DEAD_STRIP;
415 if ( (this->_options.outputKind() == Options::kObjectFile) && this->_state.allObjectFilesScatterable && isAltEntry(atom) )
416 desc |= N_ALT_ENTRY;
417 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
418 desc |= N_WEAK_DEF;
419 // <rdar://problem/6783167> support auto hidden weak symbols: .weak_def_can_be_hidden
420 if ( (atom->scope() == ld::Atom::scopeGlobal) && atom->autoHide() && (this->_options.outputKind() == Options::kObjectFile) )
421 desc |= N_WEAK_REF;
422 }
423 entry.set_n_desc(desc);
424
425 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
426 if ( atom->definition() == ld::Atom::definitionAbsolute )
427 entry.set_n_value(atom->objectAddress());
428 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
429 if ( atom->isAlias() ) {
430 // this re-export also renames
431 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
432 if ( fit->kind == ld::Fixup::kindNoneFollowOn ) {
433 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
434 entry.set_n_value(pool->add(fit->u.target->name()));
435 }
436 }
437 }
438 else
439 entry.set_n_value(entry.n_strx());
440 }
441 else
442 entry.set_n_value(atom->finalAddress());
443
444 // add to array
445 _globals.push_back(entry);
446 }
447
448 template <typename A>
449 uint8_t SymbolTableAtom<A>::classicOrdinalForProxy(const ld::Atom* atom)
450 {
451 assert(atom->definition() == ld::Atom::definitionProxy);
452 // when linking for flat-namespace ordinals are always zero
453 if ( _options.nameSpace() != Options::kTwoLevelNameSpace )
454 return 0;
455 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
456 // when linking -undefined dynamic_lookup, unbound symbols use DYNAMIC_LOOKUP_ORDINAL
457 if ( dylib == NULL ) {
458 if (_options.undefinedTreatment() == Options::kUndefinedDynamicLookup )
459 return DYNAMIC_LOOKUP_ORDINAL;
460 if (_options.allowedUndefined(atom->name()) )
461 return DYNAMIC_LOOKUP_ORDINAL;
462 }
463 assert(dylib != NULL);
464 int ord = this->_writer.dylibToOrdinal(dylib);
465 if ( ord == BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE )
466 return EXECUTABLE_ORDINAL;
467 return ord;
468 }
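// Illustration: the ordinal returned here is packed into the high byte of
// n_desc by SET_LIBRARY_ORDINAL() in addImport() below, so in a two-level
// namespace image dyld can tell which loaded dylib each undefined symbol
// comes from.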
469
470
471 template <typename A>
472 void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
473 {
474 macho_nlist<P> entry;
475
476 // set n_strx
477 entry.set_n_strx(pool->add(atom->name()));
478
479 // set n_type
480 if ( this->_options.outputKind() == Options::kObjectFile ) {
481 if ( atom->section().type() == ld::Section::typeTempAlias ) {
482 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
483 entry.set_n_type(N_INDR | N_EXT | N_PEXT);
484 else
485 entry.set_n_type(N_INDR | N_EXT);
486 }
487 else if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
488 && (atom->definition() == ld::Atom::definitionTentative) )
489 entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
490 else
491 entry.set_n_type(N_UNDF | N_EXT);
492 }
493 else {
494 if ( this->_options.prebind() )
495 entry.set_n_type(N_PBUD | N_EXT);
496 else
497 entry.set_n_type(N_UNDF | N_EXT);
498 }
499
500 // set n_sect
501 entry.set_n_sect(0);
502
503 uint16_t desc = 0;
504 if ( this->_options.outputKind() != Options::kObjectFile ) {
505 uint8_t ordinal = this->classicOrdinalForProxy(atom);
506 //fprintf(stderr, "ordinal=%u from reader=%p for symbol=%s\n", ordinal, atom->getFile(), atom->getName());
507 SET_LIBRARY_ORDINAL(desc, ordinal);
508
509 #if 0
510 // set n_desc ( high byte is library ordinal, low byte is reference type )
511 std::map<const ObjectFile::Atom*,ObjectFile::Atom*>::iterator pos = fStubsMap.find(atom);
512 if ( pos != fStubsMap.end() || ( strncmp(atom->getName(), ".objc_class_name_", 17) == 0) )
513 desc |= REFERENCE_FLAG_UNDEFINED_LAZY;
514 else
515 desc |= REFERENCE_FLAG_UNDEFINED_NON_LAZY;
516 #endif
517 }
518 else if ( atom->definition() == ld::Atom::definitionTentative ) {
519 uint8_t align = atom->alignment().powerOf2;
520 // always record custom alignment of common symbols to match what compiler does
521 SET_COMM_ALIGN(desc, align);
522 }
523 if ( (this->_options.outputKind() != Options::kObjectFile)
524 && (atom->definition() == ld::Atom::definitionProxy)
525 && (atom->combine() == ld::Atom::combineByName) ) {
526 desc |= N_REF_TO_WEAK;
527 }
528 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
529 if ( atom->weakImported() || ((dylib != NULL) && dylib->forcedWeakLinked()) )
530 desc |= N_WEAK_REF;
531 entry.set_n_desc(desc);
532
533 // set n_value, zero for import proxy and size for tentative definition
534 if ( atom->definition() == ld::Atom::definitionTentative )
535 entry.set_n_value(atom->size());
536 else if ( atom->section().type() != ld::Section::typeTempAlias )
537 entry.set_n_value(0);
538 else {
539 assert(atom->fixupsBegin() != atom->fixupsEnd());
540 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
541 assert(fit->kind == ld::Fixup::kindNoneFollowOn);
542 switch ( fit->binding ) {
543 case ld::Fixup::bindingByNameUnbound:
544 entry.set_n_value(pool->add(fit->u.name));
545 break;
546 case ld::Fixup::bindingsIndirectlyBound:
547 entry.set_n_value(pool->add((_state.indirectBindingTable[fit->u.bindingIndex])->name()));
548 break;
549 default:
550 assert(0 && "internal error: unexpected alias binding");
551 }
552 }
553 }
554
555 // add to array
556 _imports.push_back(entry);
557 }
558
559 template <typename A>
560 uint8_t SymbolTableAtom<A>::sectionIndexForStab(const ld::relocatable::File::Stab& stab)
561 {
562 // in FUN stabs, n_sect field is 0 for start FUN and 1 for end FUN
563 if ( stab.type == N_FUN )
564 return stab.other;
565 else if ( stab.type == N_GSYM )
566 return 0;
567 else if ( stab.atom != NULL )
568 return stab.atom->machoSection();
569 else
570 return stab.other;
571 }
572
573
574 template <typename A>
575 uint64_t SymbolTableAtom<A>::valueForStab(const ld::relocatable::File::Stab& stab)
576 {
577 switch ( stab.type ) {
578 case N_FUN:
579 if ( stab.atom == NULL ) {
580 // <rdar://problem/5591394> Add support to ld64 for N_FUN stabs when used for symbolic constants
581 return stab.value;
582 }
583 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
584 // end of function N_FUN has size
585 return stab.atom->size();
586 }
587 else {
588 // start of function N_FUN has address
589 return stab.atom->finalAddress();
590 }
591 case N_LBRAC:
592 case N_RBRAC:
593 case N_SLINE:
594 if ( stab.atom == NULL )
595 // some weird assembly files have N_SLINE stabs not associated with a function
596 return stab.value;
597 else
598 // all these stab types need their value changed from an offset in the atom to an address
599 return stab.atom->finalAddress() + stab.value;
600 case N_STSYM:
601 case N_LCSYM:
602 case N_BNSYM:
603 // all these need address of atom
604 if ( stab.atom != NULL )
605 return stab.atom->finalAddress();
606 else
607 return 0; // <rdar://problem/7811357> work around for mismatch N_BNSYM
608 case N_ENSYM:
609 return stab.atom->size();
610 case N_SO:
611 if ( stab.atom == NULL ) {
612 return 0;
613 }
614 else {
615 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
616 // end of translation unit N_SO has address of end of last atom
617 return stab.atom->finalAddress() + stab.atom->size();
618 }
619 else {
620 // start of translation unit N_SO has address of first atom
621 return stab.atom->finalAddress();
622 }
623 }
624 break;
625 default:
626 return stab.value;
627 }
628 }
629
630 template <typename A>
631 uint32_t SymbolTableAtom<A>::stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool)
632 {
633 switch (stab.type) {
634 case N_SO:
635 if ( (stab.string == NULL) || stab.string[0] == '\0' ) {
636 return pool->emptyString();
637 break;
638 }
639 // fall into uniquing case
640 case N_SOL:
641 case N_BINCL:
642 case N_EXCL:
643 return pool->addUnique(stab.string);
644 break;
645 default:
646 if ( stab.string == NULL )
647 return 0;
648 else if ( stab.string[0] == '\0' )
649 return pool->emptyString();
650 else
651 return pool->add(stab.string);
652 }
653 return 0;
654 }
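// Note: N_SO/N_SOL/N_BINCL/N_EXCL strings go through addUnique() above,
// presumably because these stabs repeat the same source-file and include
// paths many times and uniquing keeps the debug string area small; other
// stab strings are added without uniquing.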
655
656
657
658 template <typename A>
659 bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
660 {
661 ssos = _stabsStringsOffsetStart;
662 ssoe = _stabsStringsOffsetEnd;
663 sos = _stabsIndexStart * sizeof(macho_nlist<P>);
664 soe = _stabsIndexEnd * sizeof(macho_nlist<P>);
665 return ( (_stabsIndexStart != _stabsIndexEnd) || (_stabsStringsOffsetStart != _stabsStringsOffsetEnd) );
666 }
667
668
669 template <typename A>
670 void SymbolTableAtom<A>::encode()
671 {
672 uint32_t symbolIndex = 0;
673
674 // make nlist entries for all local symbols
675 std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
676 std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
677 _locals.reserve(localAtoms.size()+this->_state.stabs.size());
678 this->_writer._localSymbolsStartIndex = 0;
679 // make nlist entries for all debug notes
680 _stabsIndexStart = symbolIndex;
681 _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
682 for (std::vector<ld::relocatable::File::Stab>::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) {
683 macho_nlist<P> entry;
684 entry.set_n_type(sit->type);
685 entry.set_n_sect(sectionIndexForStab(*sit));
686 entry.set_n_desc(sit->desc);
687 entry.set_n_value(valueForStab(*sit));
688 entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom));
689 _locals.push_back(entry);
690 ++symbolIndex;
691 }
692 _stabsIndexEnd = symbolIndex;
693 _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
694 for (std::vector<const ld::Atom*>::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) {
695 const ld::Atom* atom = *it;
696 if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
697 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
698 }
699 this->_writer._localSymbolsCount = symbolIndex;
700
701
702 // make nlist entries for all global symbols
703 _globals.reserve(globalAtoms.size());
704 this->_writer._globalSymbolsStartIndex = symbolIndex;
705 for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
706 const ld::Atom* atom = *it;
707 this->addGlobal(atom, this->_writer._stringPoolAtom);
708 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
709 }
710 this->_writer._globalSymbolsCount = symbolIndex - this->_writer._globalSymbolsStartIndex;
711
712 // make nlist entries for all undefined (imported) symbols
713 std::vector<const ld::Atom*>& importAtoms = this->_writer._importedAtoms;
714 _imports.reserve(importAtoms.size());
715 this->_writer._importSymbolsStartIndex = symbolIndex;
716 for (std::vector<const ld::Atom*>::const_iterator it=importAtoms.begin(); it != importAtoms.end(); ++it) {
717 this->addImport(*it, this->_writer._stringPoolAtom);
718 this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
719 }
720 this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
721 }
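// Sketch of the nlist ordering built by encode(): debug-note (stab) entries
// and local symbols first, then exported globals, then undefined imports.
// The start/count values recorded in the writer (_localSymbolsStartIndex,
// _globalSymbolsStartIndex, _importSymbolsStartIndex, ...) presumably feed
// the LC_DYSYMTAB ranges emitted elsewhere.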
722
723 template <typename A>
724 uint64_t SymbolTableAtom<A>::size() const
725 {
726 return sizeof(macho_nlist<P>) * (_locals.size() + _globals.size() + _imports.size());
727 }
728
729 template <typename A>
730 void SymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
731 {
732 memcpy(&buffer[this->_writer._localSymbolsStartIndex*sizeof(macho_nlist<P>)], &_locals[0],
733 this->_writer._localSymbolsCount*sizeof(macho_nlist<P>));
734 memcpy(&buffer[this->_writer._globalSymbolsStartIndex*sizeof(macho_nlist<P>)], &_globals[0],
735 this->_writer._globalSymbolsCount*sizeof(macho_nlist<P>));
736 memcpy(&buffer[this->_writer._importSymbolsStartIndex *sizeof(macho_nlist<P>)], &_imports[0],
737 this->_writer._importSymbolsCount*sizeof(macho_nlist<P>));
738 }
739
740
741
742
743 class RelocationsAtomAbstract : public ClassicLinkEditAtom
744 {
745 public:
746 RelocationsAtomAbstract(const Options& opts, ld::Internal& state,
747 OutputFile& writer, const ld::Section& sect,
748 unsigned int pointerSize)
749 : ClassicLinkEditAtom(opts, state, writer, sect, pointerSize) { }
750
751 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) = 0;
752 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) = 0;
753 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) = 0;
754 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) = 0;
755 virtual uint64_t relocBaseAddress(ld::Internal& state) = 0;
756 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
757 const ld::Atom* inAtom, uint32_t offsetInAtom,
758 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
759 const ld::Atom* toTarget, uint64_t toAddend,
760 const ld::Atom* fromTarget, uint64_t fromAddend) = 0;
761 protected:
762 uint32_t symbolIndex(const ld::Atom* atom) const;
763
764 };
765
766
767
768 uint32_t RelocationsAtomAbstract::symbolIndex(const ld::Atom* atom) const
769 {
770 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
771 if ( pos != this->_writer._atomToSymbolIndex.end() )
772 return pos->second;
773 fprintf(stderr, "_atomToSymbolIndex content:\n");
774 for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
775 fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
776 }
777 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
778 }
779
780
781 template <typename A>
782 class LocalRelocationsAtom : public RelocationsAtomAbstract
783 {
784 public:
785 LocalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
786 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
787
788 // overrides of ld::Atom
789 virtual const char* name() const { return "local relocations"; }
790 virtual uint64_t size() const;
791 virtual void copyRawContent(uint8_t buffer[]) const;
792 // overrides of ClassicLinkEditAtom
793 virtual void encode() {}
794 // overrides of RelocationsAtomAbstract
795 virtual void addPointerReloc(uint64_t addr, uint32_t symNum);
796 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
797 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
798 virtual uint64_t relocBaseAddress(ld::Internal& state);
799 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum);
800 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
801 const ld::Atom* inAtom, uint32_t offsetInAtom,
802 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
803 const ld::Atom* toTarget, uint64_t toAddend,
804 const ld::Atom* fromTarget, uint64_t fromAddend) { }
805
806 private:
807 typedef typename A::P P;
808 typedef typename A::P::E E;
809 typedef typename A::P::uint_t pint_t;
810
811 std::vector<macho_relocation_info<P> > _relocs;
812
813 static ld::Section _s_section;
814 };
815
816 template <typename A>
817 ld::Section LocalRelocationsAtom<A>::_s_section("__LINKEDIT", "__local_relocs", ld::Section::typeLinkEdit, true);
818
819
820 template <>
821 uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
822 {
823 if ( _options.outputKind() == Options::kKextBundle ) {
824 // for kext bundles the reloc base address starts at __TEXT segment
825 return _options.baseAddress();
826 }
827 // for all other kinds, the x86_64 reloc base address starts at first writable segment (usually __DATA)
828 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
829 ld::Internal::FinalSection* sect = *sit;
830 if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
831 return sect->address;
832 }
833 throw "writable (__DATA) segment not found";
834 }
835
836 template <typename A>
837 uint64_t LocalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
838 {
839 return _options.baseAddress();
840 }
841
842 template <typename A>
843 void LocalRelocationsAtom<A>::addPointerReloc(uint64_t addr, uint32_t symNum)
844 {
845 macho_relocation_info<P> reloc;
846 reloc.set_r_address(addr);
847 reloc.set_r_symbolnum(symNum);
848 reloc.set_r_pcrel(false);
849 reloc.set_r_length();
850 reloc.set_r_extern(false);
851 reloc.set_r_type(GENERIC_RELOC_VANILLA);
852 _relocs.push_back(reloc);
853 }
854
855 template <typename A>
856 void LocalRelocationsAtom<A>::addTextReloc(uint64_t addr, ld::Fixup::Kind kind, uint64_t targetAddr, uint32_t symNum)
857 {
858 }
859
860
861 template <typename A>
862 uint64_t LocalRelocationsAtom<A>::size() const
863 {
864 return _relocs.size() * sizeof(macho_relocation_info<P>);
865 }
866
867 template <typename A>
868 void LocalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
869 {
870 memcpy(buffer, &_relocs[0], _relocs.size()*sizeof(macho_relocation_info<P>));
871 }
872
873
874
875
876
877
878 template <typename A>
879 class ExternalRelocationsAtom : public RelocationsAtomAbstract
880 {
881 public:
882 ExternalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
883 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
884
885 // overrides of ld::Atom
886 virtual const char* name() const { return "external relocations"; }
887 virtual uint64_t size() const;
888 virtual void copyRawContent(uint8_t buffer[]) const;
889 // overrides of ClassicLinkEditAtom
890 virtual void encode() {}
891 // overrides of RelocationsAtomAbstract
892 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
893 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
894 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*);
895 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*);
896 virtual uint64_t relocBaseAddress(ld::Internal& state);
897 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
898 const ld::Atom* inAtom, uint32_t offsetInAtom,
899 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
900 const ld::Atom* toTarget, uint64_t toAddend,
901 const ld::Atom* fromTarget, uint64_t fromAddend) { }
902
903
904 private:
905 typedef typename A::P P;
906 typedef typename A::P::E E;
907 typedef typename A::P::uint_t pint_t;
908
909 struct LocAndAtom {
910 LocAndAtom(uint64_t l, const ld::Atom* a) : loc(l), atom(a), symbolIndex(0) {}
911
912 uint64_t loc;
913 const ld::Atom* atom;
914 uint32_t symbolIndex;
915
916 bool operator<(const LocAndAtom& rhs) const {
917 // sort first by symbol number
918 if ( this->symbolIndex != rhs.symbolIndex )
919 return (this->symbolIndex < rhs.symbolIndex);
920 // then sort all uses of the same symbol by address
921 return (this->loc < rhs.loc);
922 }
923
924 };
925
926 static uint32_t pointerReloc();
927 static uint32_t callReloc();
928
929 mutable std::vector<LocAndAtom> _pointerLocations;
930 mutable std::vector<LocAndAtom> _callSiteLocations;
931
932 static ld::Section _s_section;
933 };
934
935 template <typename A>
936 ld::Section ExternalRelocationsAtom<A>::_s_section("__LINKEDIT", "__extrn_relocs", ld::Section::typeLinkEdit, true);
937
938 template <>
939 uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
940 {
941 // for x86_64, the reloc base address starts at the first writable segment (usually __DATA)
942 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
943 ld::Internal::FinalSection* sect = *sit;
944 if ( !sect->isSectionHidden() && _options.initialSegProtection(sect->segmentName()) & VM_PROT_WRITE )
945 return sect->address;
946 }
947 throw "writable (__DATA) segment not found";
948 }
949
950 template <typename A>
951 uint64_t ExternalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
952 {
953 return 0;
954 }
955
956 template <typename A>
957 void ExternalRelocationsAtom<A>::addExternalPointerReloc(uint64_t addr, const ld::Atom* target)
958 {
959 _pointerLocations.push_back(LocAndAtom(addr, target));
960 }
961
962 template <typename A>
963 void ExternalRelocationsAtom<A>::addExternalCallSiteReloc(uint64_t addr, const ld::Atom* target)
964 {
965 _callSiteLocations.push_back(LocAndAtom(addr, target));
966 }
967
968
969 template <typename A>
970 uint64_t ExternalRelocationsAtom<A>::size() const
971 {
972 if ( _options.outputKind() == Options::kStaticExecutable ) {
973 assert(_pointerLocations.size() == 0);
974 assert(_callSiteLocations.size() == 0);
975 }
976 return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info<P>);
977 }
978
979 #if SUPPORT_ARCH_arm64
980 template <> uint32_t ExternalRelocationsAtom<arm64>::pointerReloc() { return ARM64_RELOC_UNSIGNED; }
981 #endif
982 #if SUPPORT_ARCH_arm_any
983 template <> uint32_t ExternalRelocationsAtom<arm>::pointerReloc() { return ARM_RELOC_VANILLA; }
984 #endif
985 template <> uint32_t ExternalRelocationsAtom<x86>::pointerReloc() { return GENERIC_RELOC_VANILLA; }
986 template <> uint32_t ExternalRelocationsAtom<x86_64>::pointerReloc() { return X86_64_RELOC_UNSIGNED; }
987
988
989 template <> uint32_t ExternalRelocationsAtom<x86_64>::callReloc() { return X86_64_RELOC_BRANCH; }
990 template <> uint32_t ExternalRelocationsAtom<x86>::callReloc() { return GENERIC_RELOC_VANILLA; }
991 #if SUPPORT_ARCH_arm64
992 template <> uint32_t ExternalRelocationsAtom<arm64>::callReloc() { return ARM64_RELOC_BRANCH26; }
993 #endif
994
995 template <typename A>
996 uint32_t ExternalRelocationsAtom<A>::callReloc()
997 {
998 assert(0 && "external call relocs not implemented");
999 return 0;
1000 }
1001
1002
1003 template <typename A>
1004 void ExternalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1005 {
1006 macho_relocation_info<P>* r = (macho_relocation_info<P>*)buffer;
1007
1008 // assign symbol index, now that symbol table is built
1009 for (typename std::vector<LocAndAtom>::iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it) {
1010 it->symbolIndex = symbolIndex(it->atom);
1011 }
1012 std::sort(_pointerLocations.begin(), _pointerLocations.end());
1013 for (typename std::vector<LocAndAtom>::const_iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it, ++r) {
1014 r->set_r_address(it->loc);
1015 r->set_r_symbolnum(it->symbolIndex);
1016 r->set_r_pcrel(false);
1017 r->set_r_length();
1018 r->set_r_extern(true);
1019 r->set_r_type(this->pointerReloc());
1020 }
1021
1022 for (typename std::vector<LocAndAtom>::iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it) {
1023 it->symbolIndex = symbolIndex(it->atom);
1024 }
1025 std::sort(_callSiteLocations.begin(), _callSiteLocations.end());
1026 for (typename std::vector<LocAndAtom>::const_iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it, ++r) {
1027 r->set_r_address(it->loc);
1028 r->set_r_symbolnum(it->symbolIndex);
1029 r->set_r_pcrel(true);
1030 r->set_r_length(2);
1031 r->set_r_extern(true);
1032 r->set_r_type(this->callReloc());
1033 }
1034 }
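// Note on the layout written above: all external pointer relocs come first,
// followed by all external call-site relocs, and each group is sorted by
// (symbol index, address) via LocAndAtom::operator<, so every use of a given
// undefined symbol ends up contiguous in the external relocation table.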
1035
1036
1037 template <typename A>
1038 class SectionRelocationsAtom : public RelocationsAtomAbstract
1039 {
1040 public:
1041 SectionRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1042 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
1043
1044 // overrides of ld::Atom
1045 virtual const char* name() const { return "section relocations"; }
1046 virtual uint64_t size() const;
1047 virtual void copyRawContent(uint8_t buffer[]) const;
1048 // overrides of ClassicLinkEditAtom
1049 virtual void encode();
1050 // overrides of RelocationsAtomAbstract
1051 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
1052 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
1053 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
1054 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
1055 virtual uint64_t relocBaseAddress(ld::Internal& state) { return 0; }
1056 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
1057 const ld::Atom* inAtom, uint32_t offsetInAtom,
1058 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1059 const ld::Atom* toTarget, uint64_t toAddend,
1060 const ld::Atom* fromTarget, uint64_t fromAddend);
1061
1062 private:
1063 typedef typename A::P P;
1064 typedef typename A::P::E E;
1065 typedef typename A::P::uint_t pint_t;
1066
1067
1068 struct Entry {
1069 ld::Fixup::Kind kind;
1070 bool toTargetUsesExternalReloc;
1071 bool fromTargetUsesExternalReloc;
1072 const ld::Atom* inAtom;
1073 uint32_t offsetInAtom;
1074 const ld::Atom* toTarget;
1075 uint64_t toAddend;
1076 const ld::Atom* fromTarget;
1077 uint64_t fromAddend;
1078 };
1079 uint32_t sectSymNum(bool external, const ld::Atom* target);
1080 void encodeSectionReloc(ld::Internal::FinalSection* sect,
1081 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs);
1082
1083 struct SectionAndEntries {
1084 ld::Internal::FinalSection* sect;
1085 std::vector<Entry> entries;
1086 std::vector<macho_relocation_info<P> > relocs;
1087 };
1088
1089 std::vector<SectionAndEntries> _entriesBySection;
1090
1091 static ld::Section _s_section;
1092 };
1093
1094 template <typename A>
1095 ld::Section SectionRelocationsAtom<A>::_s_section("__LINKEDIT", "__sect_relocs", ld::Section::typeLinkEdit, true);
1096
1097
1098
1099
1100 template <typename A>
1101 uint64_t SectionRelocationsAtom<A>::size() const
1102 {
1103 uint32_t count = 0;
1104 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1105 const SectionAndEntries& se = *it;
1106 count += se.relocs.size();
1107 }
1108 return count * sizeof(macho_relocation_info<P>);
1109 }
1110
1111 template <typename A>
1112 void SectionRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1113 {
1114 uint32_t offset = 0;
1115 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1116 const SectionAndEntries& se = *it;
1117 memcpy(&buffer[offset], &se.relocs[0], se.relocs.size()*sizeof(macho_relocation_info<P>));
1118 offset += (se.relocs.size() * sizeof(macho_relocation_info<P>));
1119 }
1120 }
1121
1122
1123 template <>
1124 void SectionRelocationsAtom<x86_64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1125 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1126 {
1127 macho_relocation_info<P> reloc1;
1128 macho_relocation_info<P> reloc2;
1129 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1130 bool external = entry.toTargetUsesExternalReloc;
1131 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1132 bool fromExternal = false;
1133 uint32_t fromSymbolNum = 0;
1134 if ( entry.fromTarget != NULL ) {
1135 fromExternal = entry.fromTargetUsesExternalReloc;
1136 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1137 }
1138
1139
1140 switch ( entry.kind ) {
1141 case ld::Fixup::kindStoreX86BranchPCRel32:
1142 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1143 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1144 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1145 reloc1.set_r_address(address);
1146 reloc1.set_r_symbolnum(symbolNum);
1147 reloc1.set_r_pcrel(true);
1148 reloc1.set_r_length(2);
1149 reloc1.set_r_extern(external);
1150 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1151 relocs.push_back(reloc1);
1152 break;
1153
1154 case ld::Fixup::kindStoreX86BranchPCRel8:
1155 reloc1.set_r_address(address);
1156 reloc1.set_r_symbolnum(symbolNum);
1157 reloc1.set_r_pcrel(true);
1158 reloc1.set_r_length(0);
1159 reloc1.set_r_extern(external);
1160 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1161 relocs.push_back(reloc1);
1162 break;
1163
1164 case ld::Fixup::kindStoreX86PCRel32:
1165 case ld::Fixup::kindStoreTargetAddressX86PCRel32:
1166 reloc1.set_r_address(address);
1167 reloc1.set_r_symbolnum(symbolNum);
1168 reloc1.set_r_pcrel(true);
1169 reloc1.set_r_length(2);
1170 reloc1.set_r_extern(external);
1171 reloc1.set_r_type(X86_64_RELOC_SIGNED);
1172 relocs.push_back(reloc1);
1173 break;
1174
1175 case ld::Fixup::kindStoreX86PCRel32_1:
1176 reloc1.set_r_address(address);
1177 reloc1.set_r_symbolnum(symbolNum);
1178 reloc1.set_r_pcrel(true);
1179 reloc1.set_r_length(2);
1180 reloc1.set_r_extern(external);
1181 reloc1.set_r_type(X86_64_RELOC_SIGNED_1);
1182 relocs.push_back(reloc1);
1183 break;
1184
1185 case ld::Fixup::kindStoreX86PCRel32_2:
1186 reloc1.set_r_address(address);
1187 reloc1.set_r_symbolnum(symbolNum);
1188 reloc1.set_r_pcrel(true);
1189 reloc1.set_r_length(2);
1190 reloc1.set_r_extern(external);
1191 reloc1.set_r_type(X86_64_RELOC_SIGNED_2);
1192 relocs.push_back(reloc1);
1193 break;
1194
1195 case ld::Fixup::kindStoreX86PCRel32_4:
1196 reloc1.set_r_address(address);
1197 reloc1.set_r_symbolnum(symbolNum);
1198 reloc1.set_r_pcrel(true);
1199 reloc1.set_r_length(2);
1200 reloc1.set_r_extern(external);
1201 reloc1.set_r_type(X86_64_RELOC_SIGNED_4);
1202 relocs.push_back(reloc1);
1203 break;
1204
1205 case ld::Fixup::kindStoreX86PCRel32GOTLoad:
1206 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
1207 reloc1.set_r_address(address);
1208 reloc1.set_r_symbolnum(symbolNum);
1209 reloc1.set_r_pcrel(true);
1210 reloc1.set_r_length(2);
1211 reloc1.set_r_extern(external);
1212 reloc1.set_r_type(X86_64_RELOC_GOT_LOAD);
1213 relocs.push_back(reloc1);
1214 break;
1215
1216 case ld::Fixup::kindStoreX86PCRel32GOT:
1217 reloc1.set_r_address(address);
1218 reloc1.set_r_symbolnum(symbolNum);
1219 reloc1.set_r_pcrel(true);
1220 reloc1.set_r_length(2);
1221 reloc1.set_r_extern(external);
1222 reloc1.set_r_type(X86_64_RELOC_GOT);
1223 relocs.push_back(reloc1);
1224 break;
1225
1226 case ld::Fixup::kindStoreLittleEndian64:
1227 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1228 if ( entry.fromTarget != NULL ) {
1229 // this is a pointer-diff
1230 reloc1.set_r_address(address);
1231 reloc1.set_r_symbolnum(symbolNum);
1232 reloc1.set_r_pcrel(false);
1233 reloc1.set_r_length(3);
1234 reloc1.set_r_extern(external);
1235 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1236 reloc2.set_r_address(address);
1237 reloc2.set_r_symbolnum(fromSymbolNum);
1238 reloc2.set_r_pcrel(false);
1239 reloc2.set_r_length(3);
1240 reloc2.set_r_extern(fromExternal);
1241 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1242 relocs.push_back(reloc2);
1243 relocs.push_back(reloc1);
1244 }
1245 else {
1246 // regular pointer
1247 reloc1.set_r_address(address);
1248 reloc1.set_r_symbolnum(symbolNum);
1249 reloc1.set_r_pcrel(false);
1250 reloc1.set_r_length(3);
1251 reloc1.set_r_extern(external);
1252 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1253 relocs.push_back(reloc1);
1254 }
1255 break;
1256
1257 case ld::Fixup::kindStoreLittleEndian32:
1258 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1259 if ( entry.fromTarget != NULL ) {
1260 // this is a pointer-diff
1261 reloc1.set_r_address(address);
1262 reloc1.set_r_symbolnum(symbolNum);
1263 reloc1.set_r_pcrel(false);
1264 reloc1.set_r_length(2);
1265 reloc1.set_r_extern(external);
1266 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1267 reloc2.set_r_address(address);
1268 reloc2.set_r_symbolnum(fromSymbolNum);
1269 reloc2.set_r_pcrel(false);
1270 reloc2.set_r_length(2);
1271 reloc2.set_r_extern(fromExternal);
1272 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1273 relocs.push_back(reloc2);
1274 relocs.push_back(reloc1);
1275 }
1276 else {
1277 // regular pointer
1278 reloc1.set_r_address(address);
1279 reloc1.set_r_symbolnum(symbolNum);
1280 reloc1.set_r_pcrel(false);
1281 reloc1.set_r_length(2);
1282 reloc1.set_r_extern(external);
1283 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1284 relocs.push_back(reloc1);
1285 }
1286 break;
1287 case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad:
1288 reloc1.set_r_address(address);
1289 reloc1.set_r_symbolnum(symbolNum);
1290 reloc1.set_r_pcrel(true);
1291 reloc1.set_r_length(2);
1292 reloc1.set_r_extern(external);
1293 reloc1.set_r_type(X86_64_RELOC_TLV);
1294 relocs.push_back(reloc1);
1295 break;
1296 default:
1297 assert(0 && "need to handle -r reloc");
1298
1299 }
1300
1301 }
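// Illustration for the pointer-diff cases above: a 64-bit difference such as
// "_a - _b" in relocatable (-r) output is encoded as a pair of entries at the
// same r_address, an X86_64_RELOC_SUBTRACTOR for the from-target pushed
// first, immediately followed by an X86_64_RELOC_UNSIGNED for the to-target.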
1302
1303
1304
1305 template <typename A>
1306 uint32_t SectionRelocationsAtom<A>::sectSymNum(bool external, const ld::Atom* target)
1307 {
1308 if ( target->definition() == ld::Atom::definitionAbsolute )
1309 return R_ABS;
1310 if ( external )
1311 return this->symbolIndex(target); // in external relocations, r_symbolnum field is symbol index
1312 else
1313 return target->machoSection(); // in non-extern relocations, r_symbolnum is mach-o section index of target
1314 }
1315
1316 template <>
1317 void SectionRelocationsAtom<x86>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1318 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1319 {
1320 macho_relocation_info<P> reloc1;
1321 macho_relocation_info<P> reloc2;
1322 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1323 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1324 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1325 bool external = entry.toTargetUsesExternalReloc;
1326 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1327 bool fromExternal = false;
1328 uint32_t fromSymbolNum = 0;
1329 if ( entry.fromTarget != NULL ) {
1330 fromExternal = entry.fromTargetUsesExternalReloc;
1331 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1332 }
1333
1334 switch ( entry.kind ) {
1335 case ld::Fixup::kindStoreX86PCRel32:
1336 case ld::Fixup::kindStoreX86BranchPCRel32:
1337 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1338 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1339 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1340 if ( !external && (entry.toAddend != 0) ) {
1341 // use scattered reloc if target offset is non-zero
1342 sreloc1->set_r_scattered(true);
1343 sreloc1->set_r_pcrel(true);
1344 sreloc1->set_r_length(2);
1345 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1346 sreloc1->set_r_address(address);
1347 sreloc1->set_r_value(entry.toTarget->finalAddress());
1348 }
1349 else {
1350 reloc1.set_r_address(address);
1351 reloc1.set_r_symbolnum(symbolNum);
1352 reloc1.set_r_pcrel(true);
1353 reloc1.set_r_length(2);
1354 reloc1.set_r_extern(external);
1355 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1356 }
1357 relocs.push_back(reloc1);
1358 break;
1359
1360 case ld::Fixup::kindStoreX86BranchPCRel8:
1361 if ( !external && (entry.toAddend != 0) ) {
1362 // use scattered reloc if target offset is non-zero
1363 sreloc1->set_r_scattered(true);
1364 sreloc1->set_r_pcrel(true);
1365 sreloc1->set_r_length(0);
1366 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1367 sreloc1->set_r_address(address);
1368 sreloc1->set_r_value(entry.toTarget->finalAddress());
1369 }
1370 else {
1371 reloc1.set_r_address(address);
1372 reloc1.set_r_symbolnum(symbolNum);
1373 reloc1.set_r_pcrel(true);
1374 reloc1.set_r_length(0);
1375 reloc1.set_r_extern(external);
1376 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1377 }
1378 relocs.push_back(reloc1);
1379 break;
1380
1381 case ld::Fixup::kindStoreX86PCRel16:
1382 if ( !external && (entry.toAddend != 0) ) {
1383 // use scattered reloc if target offset is non-zero
1384 sreloc1->set_r_scattered(true);
1385 sreloc1->set_r_pcrel(true);
1386 sreloc1->set_r_length(1);
1387 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1388 sreloc1->set_r_address(address);
1389 sreloc1->set_r_value(entry.toTarget->finalAddress());
1390 }
1391 else {
1392 reloc1.set_r_address(address);
1393 reloc1.set_r_symbolnum(symbolNum);
1394 reloc1.set_r_pcrel(true);
1395 reloc1.set_r_length(1);
1396 reloc1.set_r_extern(external);
1397 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1398 }
1399 relocs.push_back(reloc1);
1400 break;
1401
1402 case ld::Fixup::kindStoreLittleEndian32:
1403 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1404 if ( entry.fromTarget != NULL ) {
1405 // this is a pointer-diff
1406 sreloc1->set_r_scattered(true);
1407 sreloc1->set_r_pcrel(false);
1408 sreloc1->set_r_length(2);
1409 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1410 sreloc1->set_r_type(GENERIC_RELOC_LOCAL_SECTDIFF);
1411 else
1412 sreloc1->set_r_type(GENERIC_RELOC_SECTDIFF);
1413 sreloc1->set_r_address(address);
1414 if ( entry.toTarget == entry.inAtom ) {
1415 if ( entry.toAddend > entry.toTarget->size() )
1416 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1417 else
1418 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1419 }
1420 else
1421 sreloc1->set_r_value(entry.toTarget->finalAddress());
1422 sreloc2->set_r_scattered(true);
1423 sreloc2->set_r_pcrel(false);
1424 sreloc2->set_r_length(2);
1425 sreloc2->set_r_type(GENERIC_RELOC_PAIR);
1426 sreloc2->set_r_address(0);
1427 if ( entry.fromTarget == entry.inAtom ) {
1428 if ( entry.fromAddend > entry.fromTarget->size() )
1429 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.offsetInAtom);
1430 else
1431 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1432 }
1433 else
1434 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1435 relocs.push_back(reloc1);
1436 relocs.push_back(reloc2);
1437 }
1438 else {
1439 // regular pointer
1440 if ( !external && (entry.toAddend != 0) && (entry.toTarget->symbolTableInclusion() != ld::Atom::symbolTableNotIn) ) {
1441 // use scattered reloc if target offset into named atom is non-zero (5658046)
1442 sreloc1->set_r_scattered(true);
1443 sreloc1->set_r_pcrel(false);
1444 sreloc1->set_r_length(2);
1445 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1446 sreloc1->set_r_address(address);
1447 sreloc1->set_r_value(entry.toTarget->finalAddress());
1448 }
1449 else {
1450 reloc1.set_r_address(address);
1451 reloc1.set_r_symbolnum(symbolNum);
1452 reloc1.set_r_pcrel(false);
1453 reloc1.set_r_length(2);
1454 reloc1.set_r_extern(external);
1455 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1456 }
1457 relocs.push_back(reloc1);
1458 }
1459 break;
1460 case ld::Fixup::kindStoreX86PCRel32TLVLoad:
1461 case ld::Fixup::kindStoreX86Abs32TLVLoad:
1462 case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad:
1463 reloc1.set_r_address(address);
1464 reloc1.set_r_symbolnum(symbolNum);
1465 reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad);
1466 reloc1.set_r_length(2);
1467 reloc1.set_r_extern(external);
1468 reloc1.set_r_type(GENERIC_RLEOC_TLV);
1469 relocs.push_back(reloc1);
1470 break;
1471 default:
1472 assert(0 && "need to handle -r reloc");
1473
1474 }
1475 }
1476
1477
1478
1479 #if SUPPORT_ARCH_arm_any
1480 template <>
1481 void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1482 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1483 {
1484 macho_relocation_info<P> reloc1;
1485 macho_relocation_info<P> reloc2;
1486 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1487 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1488 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1489 bool external = entry.toTargetUsesExternalReloc;
1490 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1491 bool fromExternal = false;
1492 uint32_t fromSymbolNum = 0;
1493 if ( entry.fromTarget != NULL ) {
1494 fromExternal = entry.fromTargetUsesExternalReloc;
1495 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1496 }
1497
1498
1499 switch ( entry.kind ) {
1500 case ld::Fixup::kindStoreTargetAddressARMBranch24:
1501 case ld::Fixup::kindStoreARMBranch24:
1502 case ld::Fixup::kindStoreARMDtraceCallSiteNop:
1503 case ld::Fixup::kindStoreARMDtraceIsEnableSiteClear:
1504 if ( !external && (entry.toAddend != 0) ) {
1505 // use scattered reloc if target offset is non-zero
1506 sreloc1->set_r_scattered(true);
1507 sreloc1->set_r_pcrel(true);
1508 sreloc1->set_r_length(2);
1509 sreloc1->set_r_type(ARM_RELOC_BR24);
1510 sreloc1->set_r_address(address);
1511 sreloc1->set_r_value(entry.toTarget->finalAddress());
1512 }
1513 else {
1514 reloc1.set_r_address(address);
1515 reloc1.set_r_symbolnum(symbolNum);
1516 reloc1.set_r_pcrel(true);
1517 reloc1.set_r_length(2);
1518 reloc1.set_r_extern(external);
1519 reloc1.set_r_type(ARM_RELOC_BR24);
1520 }
1521 relocs.push_back(reloc1);
1522 break;
1523
1524 case ld::Fixup::kindStoreTargetAddressThumbBranch22:
1525 case ld::Fixup::kindStoreThumbBranch22:
1526 case ld::Fixup::kindStoreThumbDtraceCallSiteNop:
1527 case ld::Fixup::kindStoreThumbDtraceIsEnableSiteClear:
1528 if ( !external && (entry.toAddend != 0) ) {
1529 // use scattered reloc if target offset is non-zero
1530 sreloc1->set_r_scattered(true);
1531 sreloc1->set_r_pcrel(true);
1532 sreloc1->set_r_length(2);
1533 sreloc1->set_r_type(ARM_THUMB_RELOC_BR22);
1534 sreloc1->set_r_address(address);
1535 sreloc1->set_r_value(entry.toTarget->finalAddress());
1536 }
1537 else {
1538 reloc1.set_r_address(address);
1539 reloc1.set_r_symbolnum(symbolNum);
1540 reloc1.set_r_pcrel(true);
1541 reloc1.set_r_length(2);
1542 reloc1.set_r_extern(external);
1543 reloc1.set_r_type(ARM_THUMB_RELOC_BR22);
1544 }
1545 relocs.push_back(reloc1);
1546 break;
1547
1548 case ld::Fixup::kindStoreLittleEndian32:
1549 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1550 if ( entry.fromTarget != NULL ) {
1551 // this is a pointer-diff
1552 sreloc1->set_r_scattered(true);
1553 sreloc1->set_r_pcrel(false);
1554 sreloc1->set_r_length(2);
1555 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1556 sreloc1->set_r_type(ARM_RELOC_LOCAL_SECTDIFF);
1557 else
1558 sreloc1->set_r_type(ARM_RELOC_SECTDIFF);
1559 sreloc1->set_r_address(address);
1560 if ( entry.toTarget == entry.inAtom ) {
1561 if ( entry.toAddend > entry.toTarget->size() )
1562 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1563 else
1564 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1565 }
1566 else {
1567 sreloc1->set_r_value(entry.toTarget->finalAddress());
1568 }
1569 sreloc2->set_r_scattered(true);
1570 sreloc2->set_r_pcrel(false);
1571 sreloc2->set_r_length(2);
1572 sreloc2->set_r_type(ARM_RELOC_PAIR);
1573 sreloc2->set_r_address(0);
1574 if ( entry.fromTarget == entry.inAtom ) {
1575 //unsigned int pcBaseOffset = entry.inAtom->isThumb() ? 4 : 8;
1576 //if ( entry.fromAddend > pcBaseOffset )
1577 // sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend-pcBaseOffset);
1578 //else
1579 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1580 }
1581 else {
1582 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1583 }
1584 relocs.push_back(reloc1);
1585 relocs.push_back(reloc2);
1586 }
1587 else {
1588 // regular pointer
1589 if ( !external && (entry.toAddend != 0) ) {
1590 // use scattered reloc if target offset is non-zero
1591 sreloc1->set_r_scattered(true);
1592 sreloc1->set_r_pcrel(false);
1593 sreloc1->set_r_length(2);
1594 sreloc1->set_r_type(ARM_RELOC_VANILLA);
1595 sreloc1->set_r_address(address);
1596 sreloc1->set_r_value(entry.toTarget->finalAddress());
1597 }
1598 else {
1599 reloc1.set_r_address(address);
1600 reloc1.set_r_symbolnum(symbolNum);
1601 reloc1.set_r_pcrel(false);
1602 reloc1.set_r_length(2);
1603 reloc1.set_r_extern(external);
1604 reloc1.set_r_type(ARM_RELOC_VANILLA);
1605 }
1606 relocs.push_back(reloc1);
1607 }
1608 break;
1609
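// ARM movw/movt fixups are encoded as an ARM_RELOC_HALF (or ARM_RELOC_HALF_SECTDIFF)
// followed by an ARM_RELOC_PAIR whose r_address field carries the other half of the
// 32-bit value; r_length selects the half and instruction set:
// 0 = arm low16, 1 = arm high16, 2 = thumb low16, 3 = thumb high16.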
1610 case ld::Fixup::kindStoreARMLow16:
1611 case ld::Fixup::kindStoreARMHigh16:
1612 case ld::Fixup::kindStoreThumbLow16:
1613 case ld::Fixup::kindStoreThumbHigh16:
1614 {
1615 int len = 0;
1616 uint32_t otherHalf = 0;
1617 uint32_t value;
1618 if ( entry.fromTarget != NULL ) {
1619 // this is a sect-diff
1620 value = (entry.toTarget->finalAddress()+entry.toAddend) - (entry.fromTarget->finalAddress()+entry.fromAddend);
1621 }
1622 else {
1623 // this is an absolute address
1624 value = entry.toAddend;
1625 if ( !external )
1626 value += entry.toTarget->finalAddress();
1627 }
1628 switch ( entry.kind ) {
1629 case ld::Fixup::kindStoreARMLow16:
1630 len = 0;
1631 otherHalf = value >> 16;
1632 break;
1633 case ld::Fixup::kindStoreARMHigh16:
1634 len = 1;
1635 otherHalf = value & 0xFFFF;
1636 break;
1637 case ld::Fixup::kindStoreThumbLow16:
1638 len = 2;
1639 otherHalf = value >> 16;
1640 break;
1641 case ld::Fixup::kindStoreThumbHigh16:
1642 len = 3;
1643 otherHalf = value & 0xFFFF;
1644 break;
1645 default:
1646 break;
1647 }
1648 if ( entry.fromTarget != NULL ) {
1649 // this is a sect-diff
1650 sreloc1->set_r_scattered(true);
1651 sreloc1->set_r_pcrel(false);
1652 sreloc1->set_r_length(len);
1653 sreloc1->set_r_type(ARM_RELOC_HALF_SECTDIFF);
1654 sreloc1->set_r_address(address);
1655 sreloc1->set_r_value(entry.toTarget->finalAddress());
1656 sreloc2->set_r_scattered(true);
1657 sreloc2->set_r_pcrel(false);
1658 sreloc2->set_r_length(len);
1659 sreloc2->set_r_type(ARM_RELOC_PAIR);
1660 sreloc2->set_r_address(otherHalf);
1661 if ( entry.fromTarget == entry.inAtom )
1662 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1663 else
1664 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1665 relocs.push_back(reloc1);
1666 relocs.push_back(reloc2);
1667 }
1668 else {
1669 // this is an absolute address
1670 if ( !external && (entry.toAddend != 0) ) {
1671 // use scattered reloc if target offset is non-zero
1672 sreloc1->set_r_scattered(true);
1673 sreloc1->set_r_pcrel(false);
1674 sreloc1->set_r_length(len);
1675 sreloc1->set_r_type(ARM_RELOC_HALF);
1676 sreloc1->set_r_address(address);
1677 sreloc1->set_r_value(entry.toTarget->finalAddress());
1678 reloc2.set_r_address(otherHalf);
1679 reloc2.set_r_symbolnum(0);
1680 reloc2.set_r_pcrel(false);
1681 reloc2.set_r_length(len);
1682 reloc2.set_r_extern(false);
1683 reloc2.set_r_type(ARM_RELOC_PAIR);
1684 relocs.push_back(reloc1);
1685 relocs.push_back(reloc2);
1686 }
1687 else {
1688 reloc1.set_r_address(address);
1689 reloc1.set_r_symbolnum(symbolNum);
1690 reloc1.set_r_pcrel(false);
1691 reloc1.set_r_length(len);
1692 reloc1.set_r_extern(external);
1693 reloc1.set_r_type(ARM_RELOC_HALF);
1694 reloc2.set_r_address(otherHalf); // other half
1695 reloc2.set_r_symbolnum(0);
1696 reloc2.set_r_pcrel(false);
1697 reloc2.set_r_length(len);
1698 reloc2.set_r_extern(false);
1699 reloc2.set_r_type(ARM_RELOC_PAIR);
1700 relocs.push_back(reloc1);
1701 relocs.push_back(reloc2);
1702 }
1703 }
1704 }
1705 break;
1706
1707 default:
1708 assert(0 && "need to handle -r reloc");
1709
1710 }
1711 }
1712 #endif
1713
1714 #if SUPPORT_ARCH_arm64
1715 template <>
1716 void SectionRelocationsAtom<arm64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1717 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1718 {
1719 macho_relocation_info<P> reloc1;
1720 macho_relocation_info<P> reloc2;
1721 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1722 bool external = entry.toTargetUsesExternalReloc;
1723 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1724 bool fromExternal = false;
1725 uint32_t fromSymbolNum = 0;
1726 if ( entry.fromTarget != NULL ) {
1727 fromExternal = entry.fromTargetUsesExternalReloc;
1728 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1729 }
1730
1731
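// arm64 uses no scattered relocations: a non-zero addend on a branch/page/pageoff
// fixup is emitted as a separate ARM64_RELOC_ADDEND record that immediately precedes
// the relocation it modifies, with the addend carried in r_symbolnum (limited here
// to values below 0x400000, per the asserts).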
1732 switch ( entry.kind ) {
1733 case ld::Fixup::kindStoreARM64Branch26:
1734 if ( entry.toAddend != 0 ) {
1735 assert(entry.toAddend < 0x400000);
1736 reloc2.set_r_address(address);
1737 reloc2.set_r_symbolnum(entry.toAddend);
1738 reloc2.set_r_pcrel(false);
1739 reloc2.set_r_length(2);
1740 reloc2.set_r_extern(false);
1741 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1742 relocs.push_back(reloc2);
1743 }
1744 // falls through to the next case
1745 case ld::Fixup::kindStoreTargetAddressARM64Branch26:
1746 case ld::Fixup::kindStoreARM64DtraceCallSiteNop:
1747 case ld::Fixup::kindStoreARM64DtraceIsEnableSiteClear:
1748 reloc1.set_r_address(address);
1749 reloc1.set_r_symbolnum(symbolNum);
1750 reloc1.set_r_pcrel(true);
1751 reloc1.set_r_length(2);
1752 reloc1.set_r_extern(external);
1753 reloc1.set_r_type(ARM64_RELOC_BRANCH26);
1754 relocs.push_back(reloc1);
1755 break;
1756
1757 case ld::Fixup::kindStoreARM64Page21:
1758 if ( entry.toAddend != 0 ) {
1759 assert(entry.toAddend < 0x400000);
1760 reloc2.set_r_address(address);
1761 reloc2.set_r_symbolnum(entry.toAddend);
1762 reloc2.set_r_pcrel(false);
1763 reloc2.set_r_length(2);
1764 reloc2.set_r_extern(false);
1765 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1766 relocs.push_back(reloc2);
1767 }
1768 // falls through to the next case
1769 case ld::Fixup::kindStoreTargetAddressARM64Page21:
1770 reloc1.set_r_address(address);
1771 reloc1.set_r_symbolnum(symbolNum);
1772 reloc1.set_r_pcrel(true);
1773 reloc1.set_r_length(2);
1774 reloc1.set_r_extern(external);
1775 reloc1.set_r_type(ARM64_RELOC_PAGE21);
1776 relocs.push_back(reloc1);
1777 break;
1778
1779 case ld::Fixup::kindStoreARM64PageOff12:
1780 if ( entry.toAddend != 0 ) {
1781 assert(entry.toAddend < 0x400000);
1782 reloc2.set_r_address(address);
1783 reloc2.set_r_symbolnum(entry.toAddend);
1784 reloc2.set_r_pcrel(false);
1785 reloc2.set_r_length(2);
1786 reloc2.set_r_extern(false);
1787 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1788 relocs.push_back(reloc2);
1789 }
1790 // falls through to the next case
1791 case ld::Fixup::kindStoreTargetAddressARM64PageOff12:
1792 reloc1.set_r_address(address);
1793 reloc1.set_r_symbolnum(symbolNum);
1794 reloc1.set_r_pcrel(false);
1795 reloc1.set_r_length(2);
1796 reloc1.set_r_extern(external);
1797 reloc1.set_r_type(ARM64_RELOC_PAGEOFF12);
1798 relocs.push_back(reloc1);
1799 break;
1800
1801 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
1802 case ld::Fixup::kindStoreARM64GOTLoadPage21:
1803 reloc1.set_r_address(address);
1804 reloc1.set_r_symbolnum(symbolNum);
1805 reloc1.set_r_pcrel(true);
1806 reloc1.set_r_length(2);
1807 reloc1.set_r_extern(external);
1808 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGE21);
1809 relocs.push_back(reloc1);
1810 break;
1811
1812 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
1813 case ld::Fixup::kindStoreARM64GOTLoadPageOff12:
1814 reloc1.set_r_address(address);
1815 reloc1.set_r_symbolnum(symbolNum);
1816 reloc1.set_r_pcrel(false);
1817 reloc1.set_r_length(2);
1818 reloc1.set_r_extern(external);
1819 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGEOFF12);
1820 relocs.push_back(reloc1);
1821 break;
1822
1823
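// Pointer differences are emitted as a two-record pair: an ARM64_RELOC_SUBTRACTOR
// for the value being subtracted (pushed first), immediately followed by an
// ARM64_RELOC_UNSIGNED for the target, so the value stored in the section resolves
// to (toTarget + toAddend) - (fromTarget + fromAddend).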
1824 case ld::Fixup::kindStoreLittleEndian64:
1825 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1826 if ( entry.fromTarget != NULL ) {
1827 // this is a pointer-diff
1828 reloc1.set_r_address(address);
1829 reloc1.set_r_symbolnum(symbolNum);
1830 reloc1.set_r_pcrel(false);
1831 reloc1.set_r_length(3);
1832 reloc1.set_r_extern(external);
1833 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1834 reloc2.set_r_address(address);
1835 reloc2.set_r_symbolnum(fromSymbolNum);
1836 reloc2.set_r_pcrel(false);
1837 reloc2.set_r_length(3);
1838 reloc2.set_r_extern(fromExternal);
1839 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1840 relocs.push_back(reloc2);
1841 relocs.push_back(reloc1);
1842 }
1843 else {
1844 // regular pointer
1845 reloc1.set_r_address(address);
1846 reloc1.set_r_symbolnum(symbolNum);
1847 reloc1.set_r_pcrel(false);
1848 reloc1.set_r_length(3);
1849 reloc1.set_r_extern(external);
1850 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1851 relocs.push_back(reloc1);
1852 }
1853 break;
1854
1855 case ld::Fixup::kindStoreLittleEndian32:
1856 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1857 if ( entry.fromTarget != NULL ) {
1858 // this is a pointer-diff
1859 reloc1.set_r_address(address);
1860 reloc1.set_r_symbolnum(symbolNum);
1861 reloc1.set_r_pcrel(false);
1862 reloc1.set_r_length(2);
1863 reloc1.set_r_extern(external);
1864 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1865 reloc2.set_r_address(address);
1866 reloc2.set_r_symbolnum(fromSymbolNum);
1867 reloc2.set_r_pcrel(false);
1868 reloc2.set_r_length(2);
1869 reloc2.set_r_extern(fromExternal);
1870 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1871 relocs.push_back(reloc2);
1872 relocs.push_back(reloc1);
1873 }
1874 else {
1875 // regular pointer
1876 reloc1.set_r_address(address);
1877 reloc1.set_r_symbolnum(symbolNum);
1878 reloc1.set_r_pcrel(false);
1879 reloc1.set_r_length(2);
1880 reloc1.set_r_extern(external);
1881 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1882 relocs.push_back(reloc1);
1883 }
1884 break;
1885
1886 case ld::Fixup::kindStoreARM64PointerToGOT:
1887 reloc1.set_r_address(address);
1888 reloc1.set_r_symbolnum(symbolNum);
1889 reloc1.set_r_pcrel(false);
1890 reloc1.set_r_length(3);
1891 reloc1.set_r_extern(external);
1892 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1893 relocs.push_back(reloc1);
1894 break;
1895
1896 case ld::Fixup::kindStoreARM64PCRelToGOT:
1897 reloc1.set_r_address(address);
1898 reloc1.set_r_symbolnum(symbolNum);
1899 reloc1.set_r_pcrel(true);
1900 reloc1.set_r_length(2);
1901 reloc1.set_r_extern(external);
1902 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1903 relocs.push_back(reloc1);
1904 break;
1905
1906 default:
1907 assert(0 && "need to handle arm64 -r reloc");
1908
1909 }
1910
1911 }
1912 #endif // SUPPORT_ARCH_arm64
1913
1914
1915 template <typename A>
1916 void SectionRelocationsAtom<A>::addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind kind,
1917 const ld::Atom* inAtom, uint32_t offsetInAtom,
1918 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1919 const ld::Atom* toTarget, uint64_t toAddend,
1920 const ld::Atom* fromTarget, uint64_t fromAddend)
1921 {
1922 Entry entry;
1923 entry.kind = kind;
1924 entry.toTargetUsesExternalReloc = toTargetUsesExternalReloc;
1925 entry.fromTargetUsesExternalReloc = fromTargetExternalReloc;
1926 entry.inAtom = inAtom;
1927 entry.offsetInAtom = offsetInAtom;
1928 entry.toTarget = toTarget;
1929 entry.toAddend = toAddend;
1930 entry.fromTarget = fromTarget;
1931 entry.fromAddend = fromAddend;
1932
1933 static ld::Internal::FinalSection* lastSection = NULL;
1934 static SectionAndEntries* lastSectionAndEntries = NULL;
1935
1936 if ( sect != lastSection ) {
1937 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1938 if ( sect == it->sect ) {
1939 lastSection = sect;
1940 lastSectionAndEntries = &*it;
1941 break;
1942 }
1943 }
1944 if ( sect != lastSection ) {
1945 SectionAndEntries tmp;
1946 tmp.sect = sect;
1947 _entriesBySection.push_back(tmp);
1948 lastSection = sect;
1949 lastSectionAndEntries = &_entriesBySection.back();
1950 }
1951 }
1952 lastSectionAndEntries->entries.push_back(entry);
1953 }
1954
1955 template <typename A>
1956 void SectionRelocationsAtom<A>::encode()
1957 {
1958 // convert each Entry record to one or two reloc records
1959 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1960 SectionAndEntries& se = *it;
1961 for(typename std::vector<Entry>::iterator eit=se.entries.begin(); eit != se.entries.end(); ++eit) {
1962 encodeSectionReloc(se.sect, *eit, se.relocs);
1963 }
1964 }
1965
1966 // update sections with start and count of relocs
1967 uint32_t index = 0;
1968 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1969 SectionAndEntries& se = *it;
1970 se.sect->relocStart = index;
1971 se.sect->relocCount = se.relocs.size();
1972 index += se.sect->relocCount;
1973 }
1974
1975 }
1976
1977
1978
1979 template <typename A>
1980 class IndirectSymbolTableAtom : public ClassicLinkEditAtom
1981 {
1982 public:
1983 IndirectSymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1984 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)) { }
1985
1986 // overrides of ld::Atom
1987 virtual const char* name() const { return "indirect symbol table"; }
1988 virtual uint64_t size() const;
1989 virtual void copyRawContent(uint8_t buffer[]) const;
1990 // overrides of ClassicLinkEditAtom
1991 virtual void encode();
1992
1993 private:
1994 typedef typename A::P P;
1995 typedef typename A::P::E E;
1996 typedef typename A::P::uint_t pint_t;
1997
1998 void encodeStubSection(ld::Internal::FinalSection* sect);
1999 void encodeLazyPointerSection(ld::Internal::FinalSection* sect);
2000 void encodeNonLazyPointerSection(ld::Internal::FinalSection* sect);
2001 uint32_t symIndexOfStubAtom(const ld::Atom*);
2002 uint32_t symIndexOfLazyPointerAtom(const ld::Atom*);
2003 uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*);
2004 uint32_t symbolIndex(const ld::Atom*);
2005
2006
2007 std::vector<uint32_t> _entries;
2008
2009 static ld::Section _s_section;
2010 };
2011
2012 template <typename A>
2013 ld::Section IndirectSymbolTableAtom<A>::_s_section("__LINKEDIT", "__ind_sym_tab", ld::Section::typeLinkEdit, true);
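// The indirect symbol table is a flat array of 32-bit symbol-table indices, with one
// entry per atom in each stub, lazy-pointer, and non-lazy-pointer section (in section
// order).  The special values INDIRECT_SYMBOL_LOCAL and INDIRECT_SYMBOL_ABS mark
// pointers that dyld does not need to bind to an external symbol.
//
// A rough sketch (assumed variable names, not part of ld64) of how a consumer walks a
// stubs section's slice of this table, using the section's reserved1 (first indirect
// index) and reserved2 (stub size) fields and LC_DYSYMTAB's indirectsymoff:
//
//     const uint32_t* indirect = ...;             // mapped from dysymtab_command.indirectsymoff
//     uint32_t first = stubSect->reserved1;       // first index for this section
//     uint32_t count = stubSect->size / stubSect->reserved2;  // number of stubs
//     for (uint32_t i = 0; i < count; ++i) {
//         uint32_t symIndex = indirect[first + i];
//     }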
2014
2015
2016
2017
2018 template <typename A>
2019 uint32_t IndirectSymbolTableAtom<A>::symbolIndex(const ld::Atom* atom)
2020 {
2021 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
2022 if ( pos != this->_writer._atomToSymbolIndex.end() )
2023 return pos->second;
2024 //fprintf(stderr, "_atomToSymbolIndex content:\n");
2025 //for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
2026 // fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
2027 //}
2028 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
2029 }
2030
2031 template <typename A>
2032 uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
2033 {
2034 for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
2035 if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
2036 ld::Atom::ContentType type = fit->u.target->contentType();
2037 if (( type == ld::Atom::typeLazyPointer) || (type == ld::Atom::typeLazyDylibPointer) )
2038 return symIndexOfLazyPointerAtom(fit->u.target);
2039 if ( type == ld::Atom::typeNonLazyPointer )
2040 return symIndexOfNonLazyPointerAtom(fit->u.target);
2041 }
2042 }
2043 throw "internal error: stub missing fixup to lazy pointer";
2044 }
2045
2046
2047 template <typename A>
2048 uint32_t IndirectSymbolTableAtom<A>::symIndexOfLazyPointerAtom(const ld::Atom* lpAtom)
2049 {
2050 for (ld::Fixup::iterator fit = lpAtom->fixupsBegin(); fit != lpAtom->fixupsEnd(); ++fit) {
2051 if ( fit->kind == ld::Fixup::kindLazyTarget ) {
2052 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
2053 return symbolIndex(fit->u.target);
2054 }
2055 }
2056 throw "internal error: lazy pointer missing fixupLazyTarget fixup";
2057 }
2058
2059 template <typename A>
2060 uint32_t IndirectSymbolTableAtom<A>::symIndexOfNonLazyPointerAtom(const ld::Atom* nlpAtom)
2061 {
2062 //fprintf(stderr, "symIndexOfNonLazyPointerAtom(%p) %s\n", nlpAtom, nlpAtom->name());
2063 for (ld::Fixup::iterator fit = nlpAtom->fixupsBegin(); fit != nlpAtom->fixupsEnd(); ++fit) {
2064 // non-lazy-pointer to a stripped symbol => no symbol index
2065 if ( fit->clusterSize != ld::Fixup::k1of1 )
2066 return INDIRECT_SYMBOL_LOCAL;
2067 const ld::Atom* target;
2068 switch ( fit->binding ) {
2069 case ld::Fixup::bindingDirectlyBound:
2070 target = fit->u.target;
2071 break;
2072 case ld::Fixup::bindingsIndirectlyBound:
2073 target = _state.indirectBindingTable[fit->u.bindingIndex];
2074 break;
2075 default:
2076 throw "internal error: unexpected non-lazy pointer binding";
2077 }
2078 bool targetIsGlobal = (target->scope() == ld::Atom::scopeGlobal);
2079 switch ( target->definition() ) {
2080 case ld::Atom::definitionRegular:
2081 if ( targetIsGlobal ) {
2082 if ( _options.outputKind() == Options::kObjectFile ) {
2083 // nlpointer to global symbol uses indirect symbol table in .o files
2084 return symbolIndex(target);
2085 }
2086 else if ( target->combine() == ld::Atom::combineByName ) {
2087 // dyld needs to bind nlpointer to global weak def
2088 return symbolIndex(target);
2089 }
2090 else if ( _options.nameSpace() != Options::kTwoLevelNameSpace ) {
2091 // dyld needs to bind nlpointer to global def linked for flat namespace
2092 return symbolIndex(target);
2093 }
2094 }
2095 break;
2096 case ld::Atom::definitionTentative:
2097 case ld::Atom::definitionAbsolute:
2098 if ( _options.outputKind() == Options::kObjectFile ) {
2099 // tentative def in .o file always uses symbol index
2100 return symbolIndex(target);
2101 }
2102 // dyld needs to bind nlpointer to global def linked for flat namespace
2103 if ( targetIsGlobal && _options.nameSpace() != Options::kTwoLevelNameSpace )
2104 return symbolIndex(target);
2105 break;
2106 case ld::Atom::definitionProxy:
2107 // dyld needs to bind nlpointer to something in another dylib
2108 {
2109 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target->file());
2110 if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
2111 throwf("illegal data reference to %s in lazy loaded dylib %s", target->name(), dylib->path());
2112 }
2113 return symbolIndex(target);
2114 }
2115 }
2116 if ( nlpAtom->fixupsBegin() == nlpAtom->fixupsEnd() ) {
2117 // no fixups means this is the ImageLoader cache slot
2118 return INDIRECT_SYMBOL_ABS;
2119 }
2120
2121 // The magic index INDIRECT_SYMBOL_LOCAL tells dyld that it does not need to bind
2122 // this non-lazy pointer.
2123 return INDIRECT_SYMBOL_LOCAL;
2124 }
2125
2126
2127
2128 template <typename A>
2129 void IndirectSymbolTableAtom<A>::encodeStubSection(ld::Internal::FinalSection* sect)
2130 {
2131 sect->indirectSymTabStartIndex = _entries.size();
2132 sect->indirectSymTabElementSize = sect->atoms[0]->size();
2133 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2134 _entries.push_back(symIndexOfStubAtom(*ait));
2135 }
2136 }
2137
2138 template <typename A>
2139 void IndirectSymbolTableAtom<A>::encodeLazyPointerSection(ld::Internal::FinalSection* sect)
2140 {
2141 sect->indirectSymTabStartIndex = _entries.size();
2142 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2143 _entries.push_back(symIndexOfLazyPointerAtom(*ait));
2144 }
2145 }
2146
2147 template <typename A>
2148 void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
2149 {
2150 sect->indirectSymTabStartIndex = _entries.size();
2151 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2152 _entries.push_back(symIndexOfNonLazyPointerAtom(*ait));
2153 }
2154 }
2155
2156 template <typename A>
2157 void IndirectSymbolTableAtom<A>::encode()
2158 {
2159 // static executables should not have an indirect symbol table, unless PIE
2160 if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
2161 return;
2162
2163 // x86_64 kext bundles should not have an indirect symbol table unless using stubs
2164 if ( (this->_options.outputKind() == Options::kKextBundle) && !this->_options.kextsUseStubs() )
2165 return;
2166
2167 // slidable static executables (-static -pie) should not have an indirect symbol table
2168 if ( (this->_options.outputKind() == Options::kStaticExecutable) && this->_options.positionIndependentExecutable() )
2169 return;
2170
2171 // find all special sections that need a range of the indirect symbol table section
2172 for (std::vector<ld::Internal::FinalSection*>::iterator sit = this->_state.sections.begin(); sit != this->_state.sections.end(); ++sit) {
2173 ld::Internal::FinalSection* sect = *sit;
2174 switch ( sect->type() ) {
2175 case ld::Section::typeStub:
2176 case ld::Section::typeStubClose:
2177 this->encodeStubSection(sect);
2178 break;
2179 case ld::Section::typeLazyPointerClose:
2180 case ld::Section::typeLazyPointer:
2181 case ld::Section::typeLazyDylibPointer:
2182 this->encodeLazyPointerSection(sect);
2183 break;
2184 case ld::Section::typeNonLazyPointer:
2185 this->encodeNonLazyPointerSection(sect);
2186 break;
2187 default:
2188 break;
2189 }
2190 }
2191 }
2192
2193 template <typename A>
2194 uint64_t IndirectSymbolTableAtom<A>::size() const
2195 {
2196 return _entries.size() * sizeof(uint32_t);
2197 }
2198
2199 template <typename A>
2200 void IndirectSymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
2201 {
2202 uint32_t* array = (uint32_t*)buffer;
2203 for(unsigned long i=0; i < _entries.size(); ++i) {
2204 E::set32(array[i], _entries[i]);
2205 }
2206 }
2207
2208
2209
2210
2211
2212
2213
2214
2215 } // namespace tool
2216 } // namespace ld
2217
2218 #endif // __LINKEDIT_CLASSIC_HPP__