1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-*
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #ifndef __LINKEDIT_CLASSIC_HPP__
26 #define __LINKEDIT_CLASSIC_HPP__
27
28 #include <stdlib.h>
29 #include <sys/types.h>
30 #include <errno.h>
31 #include <limits.h>
32 #include <unistd.h>
33
34 #include <vector>
35
36 #include "Options.h"
37 #include "ld.hpp"
38 #include "Architectures.hpp"
39 #include "MachOFileAbstraction.hpp"
40
41 namespace ld {
42 namespace tool {
43
44
45
46 class ClassicLinkEditAtom : public ld::Atom
47 {
48 public:
49
50 // overrides of ld::Atom
51 virtual ld::File* file() const { return NULL; }
52 virtual uint64_t objectAddress() const { return 0; }
53
54 virtual void encode() = 0;
55 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe) { return false; }
56
57 ClassicLinkEditAtom(const Options& opts, ld::Internal& state,
58 OutputFile& writer, const ld::Section& sect,
59 unsigned int pointerSize)
60 : ld::Atom(sect, ld::Atom::definitionRegular,
61 ld::Atom::combineNever, ld::Atom::scopeTranslationUnit,
62 ld::Atom::typeUnclassified, ld::Atom::symbolTableNotIn,
63 false, false, false, ld::Atom::Alignment(log2(pointerSize))),
64 _options(opts), _state(state), _writer(writer) { }
65 protected:
66 const Options& _options;
67 ld::Internal& _state;
68 OutputFile& _writer;
69 };
70
71
72
73 class StringPoolAtom : public ClassicLinkEditAtom
74 {
75 public:
76 StringPoolAtom(const Options& opts, ld::Internal& state,
77 OutputFile& writer, int pointerSize);
78
79 // overrides of ld::Atom
80 virtual const char* name() const { return "string pool"; }
81 virtual uint64_t size() const;
82 virtual void copyRawContent(uint8_t buffer[]) const;
83 // overrides of ClassicLinkEditAtom
84 virtual void encode() { }
85
86 int32_t add(const char* name);
87 int32_t addUnique(const char* name);
88 int32_t emptyString() { return 1; }
89 const char* stringForIndex(int32_t) const;
90 uint32_t currentOffset();
91
92 private:
93 class CStringEquals
94 {
95 public:
96 bool operator()(const char* left, const char* right) const { return (strcmp(left, right) == 0); }
97 };
98 enum { kBufferSize = 0x01000000 };
99 typedef __gnu_cxx::hash_map<const char*, int32_t, __gnu_cxx::hash<const char*>, CStringEquals> StringToOffset;
100
101 const uint32_t _pointerSize;
102 std::vector<char*> _fullBuffers;
103 char* _currentBuffer;
104 uint32_t _currentBufferUsed;
105 StringToOffset _uniqueStrings;
106
107 static ld::Section _s_section;
108 };
109
110 ld::Section StringPoolAtom::_s_section("__LINKEDIT", "__string_pool", ld::Section::typeLinkEdit, true);
111
112
113 StringPoolAtom::StringPoolAtom(const Options& opts, ld::Internal& state, OutputFile& writer, int pointerSize)
114 : ClassicLinkEditAtom(opts, state, writer, _s_section, pointerSize),
115 _pointerSize(pointerSize), _currentBuffer(NULL), _currentBufferUsed(0)
116 {
117 _currentBuffer = new char[kBufferSize];
118 // burn first byte of string pool (so zero is never a valid string offset)
119 _currentBuffer[_currentBufferUsed++] = ' ';
120 // make offset 1 always point to an empty string
121 _currentBuffer[_currentBufferUsed++] = '\0';
122 }
123
124 uint64_t StringPoolAtom::size() const
125 {
126 // size rounded up to pointer-size alignment
127 return (kBufferSize * _fullBuffers.size() + _currentBufferUsed + _pointerSize-1) & (-_pointerSize);
128 }
129
130 void StringPoolAtom::copyRawContent(uint8_t buffer[]) const
131 {
132 uint64_t offset = 0;
133 for (unsigned int i=0; i < _fullBuffers.size(); ++i) {
134 memcpy(&buffer[offset], _fullBuffers[i], kBufferSize);
135 offset += kBufferSize;
136 }
137 memcpy(&buffer[offset], _currentBuffer, _currentBufferUsed);
138 // zero fill end to align
139 offset += _currentBufferUsed;
140 while ( (offset % _pointerSize) != 0 )
141 buffer[offset++] = 0;
142 }
143
144 int32_t StringPoolAtom::add(const char* str)
145 {
146 int32_t offset = kBufferSize * _fullBuffers.size() + _currentBufferUsed;
147 int lenNeeded = strlcpy(&_currentBuffer[_currentBufferUsed], str, kBufferSize-_currentBufferUsed)+1;
148 if ( (_currentBufferUsed+lenNeeded) < kBufferSize ) {
149 _currentBufferUsed += lenNeeded;
150 }
151 else {
152 int copied = kBufferSize-_currentBufferUsed-1;
153 // change trailing '\0' that strlcpy added to real char
154 _currentBuffer[kBufferSize-1] = str[copied];
155 // alloc next buffer
156 _fullBuffers.push_back(_currentBuffer);
157 _currentBuffer = new char[kBufferSize];
158 _currentBufferUsed = 0;
159 // append rest of string
160 this->add(&str[copied+1]);
161 }
162 return offset;
163 }
164
165 uint32_t StringPoolAtom::currentOffset()
166 {
167 return kBufferSize * _fullBuffers.size() + _currentBufferUsed;
168 }
169
170
171 int32_t StringPoolAtom::addUnique(const char* str)
172 {
173 StringToOffset::iterator pos = _uniqueStrings.find(str);
174 if ( pos != _uniqueStrings.end() ) {
175 return pos->second;
176 }
177 else {
178 int32_t offset = this->add(str);
179 _uniqueStrings[str] = offset;
180 return offset;
181 }
182 }
183
184
185 const char* StringPoolAtom::stringForIndex(int32_t index) const
186 {
187 int32_t currentBufferStartIndex = kBufferSize * _fullBuffers.size();
188 int32_t maxIndex = currentBufferStartIndex + _currentBufferUsed;
189 // check for out of bounds
190 if ( index > maxIndex )
191 return "";
192 // check for index in _currentBuffer
193 if ( index > currentBufferStartIndex )
194 return &_currentBuffer[index-currentBufferStartIndex];
195 // otherwise index is in a full buffer
196 uint32_t fullBufferIndex = index/kBufferSize;
197 return &_fullBuffers[fullBufferIndex][index-(kBufferSize*fullBufferIndex)];
198 }
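// Worked example of the offset scheme above. String-pool offsets are global
// across all 16MB buffers, so an offset maps back to storage with the same
// arithmetic stringForIndex() performs. The helper below is illustrative only
// (not part of ld64); its name and values are hypothetical:
#if 0
static const char* examplePoolLookup(const std::vector<char*>& fullBuffers,
									 const char* currentBuffer, int32_t offset)
{
	const int32_t exampleBufferSize = 0x01000000;			// same value as kBufferSize
	int32_t currentStart = exampleBufferSize * fullBuffers.size();
	if ( offset >= currentStart )
		return &currentBuffer[offset-currentStart];			// lands in the open buffer
	return &fullBuffers[offset/exampleBufferSize][offset % exampleBufferSize];	// lands in a sealed buffer
}
#endif
// e.g. with one sealed buffer, offset 0x01000010 is byte 0x10 of the open buffer;
// offset 0 is never handed out (its byte is burned) and offset 1 is always "".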
199
200
201
202 template <typename A>
203 class SymbolTableAtom : public ClassicLinkEditAtom
204 {
205 public:
206 SymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
207 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)),
208 _stabsStringsOffsetStart(0), _stabsStringsOffsetEnd(0),
209 _stabsIndexStart(0), _stabsIndexEnd(0) { }
210
211 // overrides of ld::Atom
212 virtual const char* name() const { return "symbol table"; }
213 virtual uint64_t size() const;
214 virtual void copyRawContent(uint8_t buffer[]) const;
215 // overrides of ClassicLinkEditAtom
216 virtual void encode();
217 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe);
218
219 private:
220 typedef typename A::P P;
221 typedef typename A::P::E E;
222 typedef typename A::P::uint_t pint_t;
223
224 bool addLocal(const ld::Atom* atom, StringPoolAtom* pool);
225 void addGlobal(const ld::Atom* atom, StringPoolAtom* pool);
226 void addImport(const ld::Atom* atom, StringPoolAtom* pool);
227 uint8_t classicOrdinalForProxy(const ld::Atom* atom);
228 uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
229 uint64_t valueForStab(const ld::relocatable::File::Stab& stab);
230 uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab);
231
232
233 mutable std::vector<macho_nlist<P> > _globals;
234 mutable std::vector<macho_nlist<P> > _locals;
235 mutable std::vector<macho_nlist<P> > _imports;
236
237 uint32_t _stabsStringsOffsetStart;
238 uint32_t _stabsStringsOffsetEnd;
239 uint32_t _stabsIndexStart;
240 uint32_t _stabsIndexEnd;
241
242 static ld::Section _s_section;
243 static int _s_anonNameIndex;
244
245 };
246
247 template <typename A>
248 ld::Section SymbolTableAtom<A>::_s_section("__LINKEDIT", "__symbol_table", ld::Section::typeLinkEdit, true);
249
250 template <typename A>
251 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
252
253
254 template <typename A>
255 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
256 {
257 macho_nlist<P> entry;
258 assert(atom->symbolTableInclusion() != ld::Atom::symbolTableNotIn);
259
260 // set n_strx
261 const char* symbolName = atom->name();
262 char anonName[32];
263 if ( this->_options.outputKind() == Options::kObjectFile ) {
264 if ( atom->contentType() == ld::Atom::typeCString ) {
265 if ( atom->combine() == ld::Atom::combineByNameAndContent ) {
266 // don't use 'l' labels for x86_64 strings
267 // <rdar://problem/6605499> x86_64 obj-c runtime confused when static lib is stripped
268 sprintf(anonName, "LC%u", _s_anonNameIndex++);
269 symbolName = anonName;
270 }
271 }
272 else if ( atom->contentType() == ld::Atom::typeCFI ) {
273 if ( _options.removeEHLabels() )
274 return false;
275 // synthesize .eh name
276 if ( strcmp(atom->name(), "CIE") == 0 )
277 symbolName = "EH_Frame1";
278 else
279 symbolName = "func.eh";
280 }
281 else if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
282 // make auto-strip anonymous name for symbol
283 sprintf(anonName, "l%03u", _s_anonNameIndex++);
284 symbolName = anonName;
285 }
286 }
287 entry.set_n_strx(pool->add(symbolName));
288
289 // set n_type
290 uint8_t type = N_SECT;
291 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
292 type = N_ABS;
293 }
294 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
295 && (this->_options.outputKind() == Options::kObjectFile) ) {
296 // __OBJC __class has floating abs symbols for each class data structure
297 type = N_ABS;
298 }
299 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
300 type |= N_PEXT;
301 entry.set_n_type(type);
302
303 // set n_sect (section number of implementation )
304 if ( atom->definition() == ld::Atom::definitionAbsolute )
305 entry.set_n_sect(0);
306 else
307 entry.set_n_sect(atom->machoSection());
308
309 // set n_desc
310 uint16_t desc = 0;
311 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
312 desc |= REFERENCED_DYNAMICALLY;
313 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
314 desc |= N_NO_DEAD_STRIP;
315 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) )
316 desc |= N_WEAK_DEF;
317 if ( atom->isThumb() )
318 desc |= N_ARM_THUMB_DEF;
319 entry.set_n_desc(desc);
320
321 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
322 if ( atom->definition() == ld::Atom::definitionAbsolute )
323 entry.set_n_value(atom->objectAddress());
324 else
325 entry.set_n_value(atom->finalAddress());
326
327 // add to array
328 _locals.push_back(entry);
329 return true;
330 }
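// Illustrative nlist entry produced by addLocal() above for a hypothetical
// linkage-unit-scoped function "_helper" placed in mach-o section 1 at
// final address 0x100001F30:
//
//     n_strx  = <string pool offset of "_helper">
//     n_type  = N_SECT | N_PEXT        // defined in a section, private extern
//     n_sect  = 1
//     n_desc  = 0
//     n_value = 0x100001F30            // finalAddress()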
331
332
333 template <typename A>
334 void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
335 {
336 macho_nlist<P> entry;
337
338 // set n_strx
339 const char* symbolName = atom->name();
340 char anonName[32];
341 if ( this->_options.outputKind() == Options::kObjectFile ) {
342 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
343 // make auto-strip anonymous name for symbol
344 sprintf(anonName, "l%03u", _s_anonNameIndex++);
345 symbolName = anonName;
346 }
347 }
348 entry.set_n_strx(pool->add(symbolName));
349
350 // set n_type
351 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
352 entry.set_n_type(N_EXT | N_ABS);
353 }
354 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
355 && (this->_options.outputKind() == Options::kObjectFile) ) {
356 // __OBJC __class has floating abs symbols for each class data structure
357 entry.set_n_type(N_EXT | N_ABS);
358 }
359 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
360 entry.set_n_type(N_EXT | N_INDR);
361 }
362 else {
363 entry.set_n_type(N_EXT | N_SECT);
364 if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (this->_options.outputKind() == Options::kObjectFile) ) {
365 if ( this->_options.keepPrivateExterns() )
366 entry.set_n_type(N_EXT | N_SECT | N_PEXT);
367 }
368 else if ( (atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)
369 && (atom->section().type() == ld::Section::typeMachHeader)
370 && !_options.positionIndependentExecutable() ) {
371 // the __mh_execute_header is historical magic in non-pie executables and must be an absolute symbol
372 entry.set_n_type(N_EXT | N_ABS);
373 }
374 }
375
376 // set n_sect (section number of implementation)
377 if ( atom->definition() == ld::Atom::definitionAbsolute )
378 entry.set_n_sect(0);
379 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) )
380 entry.set_n_sect(0);
381 else
382 entry.set_n_sect(atom->machoSection());
383
384 // set n_desc
385 uint16_t desc = 0;
386 if ( atom->isThumb() )
387 desc |= N_ARM_THUMB_DEF;
388 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
389 desc |= REFERENCED_DYNAMICALLY;
390 if ( (atom->contentType() == ld::Atom::typeResolver) && (this->_options.outputKind() == Options::kObjectFile) )
391 desc |= N_SYMBOL_RESOLVER;
392 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
393 desc |= N_NO_DEAD_STRIP;
394 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
395 desc |= N_WEAK_DEF;
396 // <rdar://problem/6783167> support auto hidden weak symbols: .weak_def_can_be_hidden
397 if ( (atom->scope() == ld::Atom::scopeGlobal) && atom->autoHide() && (this->_options.outputKind() == Options::kObjectFile) )
398 desc |= N_WEAK_REF;
399 }
400 entry.set_n_desc(desc);
401
402 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
403 if ( atom->definition() == ld::Atom::definitionAbsolute )
404 entry.set_n_value(atom->objectAddress());
405 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
406 if ( atom->isAlias() ) {
407 // this re-export also renames
408 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
409 if ( fit->kind == ld::Fixup::kindNoneFollowOn ) {
410 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
411 entry.set_n_value(pool->add(fit->u.target->name()));
412 }
413 }
414 }
415 else
416 entry.set_n_value(entry.n_strx());
417 }
418 else
419 entry.set_n_value(atom->finalAddress());
420
421 // add to array
422 _globals.push_back(entry);
423 }
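// Illustrative example of the N_INDR (re-export) case above, with hypothetical
// names: when "_new_name" is an alias that re-exports "_old_name" from a dylib,
// the entry gets n_type = N_EXT | N_INDR and n_sect = 0, n_strx holds the
// string-pool offset of "_new_name", and n_value holds the string-pool offset
// of the name being re-exported ("_old_name"); for a plain re-export without
// renaming, n_value simply repeats n_strx.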
424
425 template <typename A>
426 uint8_t SymbolTableAtom<A>::classicOrdinalForProxy(const ld::Atom* atom)
427 {
428 assert(atom->definition() == ld::Atom::definitionProxy);
429 // when linking for flat namespace, ordinals are always zero
430 if ( _options.nameSpace() != Options::kTwoLevelNameSpace )
431 return 0;
432 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
433 // when linking -undefined dynamic_lookup, unbound symbols use DYNAMIC_LOOKUP_ORDINAL
434 if ( dylib == NULL ) {
435 if (_options.undefinedTreatment() == Options::kUndefinedDynamicLookup )
436 return DYNAMIC_LOOKUP_ORDINAL;
437 if (_options.allowedUndefined(atom->name()) )
438 return DYNAMIC_LOOKUP_ORDINAL;
439 }
440 assert(dylib != NULL);
441 int ord = this->_writer.dylibToOrdinal(dylib);
442 if ( ord == BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE )
443 return EXECUTABLE_ORDINAL;
444 return ord;
445 }
446
447
448 template <typename A>
449 void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
450 {
451 macho_nlist<P> entry;
452
453 // set n_strx
454 entry.set_n_strx(pool->add(atom->name()));
455
456 // set n_type
457 if ( this->_options.outputKind() == Options::kObjectFile ) {
458 if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
459 && (atom->definition() == ld::Atom::definitionTentative) )
460 entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
461 else
462 entry.set_n_type(N_UNDF | N_EXT);
463 }
464 else {
465 if ( this->_options.prebind() )
466 entry.set_n_type(N_PBUD | N_EXT);
467 else
468 entry.set_n_type(N_UNDF | N_EXT);
469 }
470
471 // set n_sect
472 entry.set_n_sect(0);
473
474 uint16_t desc = 0;
475 if ( this->_options.outputKind() != Options::kObjectFile ) {
476 uint8_t ordinal = this->classicOrdinalForProxy(atom);
477 //fprintf(stderr, "ordinal=%u from reader=%p for symbol=%s\n", ordinal, atom->getFile(), atom->getName());
478 SET_LIBRARY_ORDINAL(desc, ordinal);
479
480 #if 0
481 // set n_desc ( high byte is library ordinal, low byte is reference type )
482 std::map<const ObjectFile::Atom*,ObjectFile::Atom*>::iterator pos = fStubsMap.find(atom);
483 if ( pos != fStubsMap.end() || ( strncmp(atom->getName(), ".objc_class_name_", 17) == 0) )
484 desc |= REFERENCE_FLAG_UNDEFINED_LAZY;
485 else
486 desc |= REFERENCE_FLAG_UNDEFINED_NON_LAZY;
487 #endif
488 }
489 else if ( atom->definition() == ld::Atom::definitionTentative ) {
490 uint8_t align = atom->alignment().powerOf2;
491 // always record custom alignment of common symbols to match what compiler does
492 SET_COMM_ALIGN(desc, align);
493 }
494 if ( (this->_options.outputKind() != Options::kObjectFile)
495 && (atom->definition() == ld::Atom::definitionProxy)
496 && (atom->combine() == ld::Atom::combineByName) ) {
497 desc |= N_REF_TO_WEAK;
498 }
499 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
500 if ( atom->weakImported() || ((dylib != NULL) && dylib->forcedWeakLinked()) )
501 desc |= N_WEAK_REF;
502 entry.set_n_desc(desc);
503
504 // set n_value, zero for import proxy and size for tentative definition
505 if ( atom->definition() == ld::Atom::definitionTentative )
506 entry.set_n_value(atom->size());
507 else
508 entry.set_n_value(0);
509
510 // add to array
511 _imports.push_back(entry);
512 }
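// Illustrative n_desc layout for the undefined symbols built above (ordinal
// value hypothetical): SET_LIBRARY_ORDINAL() stores the two-level-namespace
// dylib ordinal in the high byte of n_desc, so a symbol bound to the second
// dylib on the link line and marked weakly imported ends up with
//
//     n_desc = (2 << 8) | N_WEAK_REF        // == 0x0240
//
// The special ordinals flow through the same field, e.g. DYNAMIC_LOOKUP_ORDINAL
// (0xfe) for -undefined dynamic_lookup and EXECUTABLE_ORDINAL (0xff) for
// symbols expected to come from the main executable.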
513
514 template <typename A>
515 uint8_t SymbolTableAtom<A>::sectionIndexForStab(const ld::relocatable::File::Stab& stab)
516 {
517 // in FUN stabs, n_sect field is 0 for start FUN and 1 for end FUN
518 if ( stab.type == N_FUN )
519 return stab.other;
520 else if ( stab.type == N_GSYM )
521 return 0;
522 else if ( stab.atom != NULL )
523 return stab.atom->machoSection();
524 else
525 return stab.other;
526 }
527
528
529 template <typename A>
530 uint64_t SymbolTableAtom<A>::valueForStab(const ld::relocatable::File::Stab& stab)
531 {
532 switch ( stab.type ) {
533 case N_FUN:
534 if ( stab.atom == NULL ) {
535 // <rdar://problem/5591394> Add support to ld64 for N_FUN stabs when used for symbolic constants
536 return stab.value;
537 }
538 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
539 // end of function N_FUN has size
540 return stab.atom->size();
541 }
542 else {
543 // start of function N_FUN has address
544 return stab.atom->finalAddress();
545 }
546 case N_LBRAC:
547 case N_RBRAC:
548 case N_SLINE:
549 if ( stab.atom == NULL )
550 // some weird assembly files have slines not associated with a function
551 return stab.value;
552 else
553 // all these stab types need their value changed from an offset in the atom to an address
554 return stab.atom->finalAddress() + stab.value;
555 case N_STSYM:
556 case N_LCSYM:
557 case N_BNSYM:
558 // all these need address of atom
559 if ( stab.atom != NULL )
560 return stab.atom->finalAddress();
561 else
562 return 0; // <rdar://problem/7811357> workaround for mismatched N_BNSYM
563 case N_ENSYM:
564 return stab.atom->size();
565 case N_SO:
566 if ( stab.atom == NULL ) {
567 return 0;
568 }
569 else {
570 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
571 // end of translation unit N_SO has address of end of last atom
572 return stab.atom->finalAddress() + stab.atom->size();
573 }
574 else {
575 // start of translation unit N_SO has address of first atom
576 return stab.atom->finalAddress();
577 }
578 }
579 break;
580 default:
581 return stab.value;
582 }
583 }
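// Illustrative example of the N_FUN convention handled above (hypothetical
// function): a function atom of size 0x24 at final address 0x100000F00 is
// described by a pair of stabs, the opening N_FUN (non-empty string) whose
// n_value is the address 0x100000F00, and the closing N_FUN (empty string)
// whose n_value is the size 0x24.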
584
585 template <typename A>
586 uint32_t SymbolTableAtom<A>::stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool)
587 {
588 switch (stab.type) {
589 case N_SO:
590 if ( (stab.string == NULL) || stab.string[0] == '\0' ) {
591 return pool->emptyString();
592 break;
593 }
594 // fall into uniquing case
595 case N_SOL:
596 case N_BINCL:
597 case N_EXCL:
598 return pool->addUnique(stab.string);
599 break;
600 default:
601 if ( stab.string == NULL )
602 return 0;
603 else if ( stab.string[0] == '\0' )
604 return pool->emptyString();
605 else
606 return pool->add(stab.string);
607 }
608 return 0;
609 }
610
611
612
613 template <typename A>
614 bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
615 {
616 ssos = _stabsStringsOffsetStart;
617 ssoe = _stabsStringsOffsetEnd;
618 sos = _stabsIndexStart * sizeof(macho_nlist<P>);
619 soe = _stabsIndexEnd * sizeof(macho_nlist<P>);
620 return ( (_stabsIndexStart != _stabsIndexEnd) || (_stabsStringsOffsetStart != _stabsStringsOffsetEnd) );
621 }
622
623
624 template <typename A>
625 void SymbolTableAtom<A>::encode()
626 {
627 uint32_t symbolIndex = 0;
628
629 // make nlist entries for all local symbols
630 std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
631 std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
632 _locals.reserve(localAtoms.size()+this->_state.stabs.size());
633 this->_writer._localSymbolsStartIndex = 0;
634 // make nlist entries for all debug notes
635 _stabsIndexStart = symbolIndex;
636 _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
637 for (std::vector<ld::relocatable::File::Stab>::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) {
638 macho_nlist<P> entry;
639 entry.set_n_type(sit->type);
640 entry.set_n_sect(sectionIndexForStab(*sit));
641 entry.set_n_desc(sit->desc);
642 entry.set_n_value(valueForStab(*sit));
643 entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom));
644 _locals.push_back(entry);
645 ++symbolIndex;
646 }
647 _stabsIndexEnd = symbolIndex;
648 _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
649 for (std::vector<const ld::Atom*>::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) {
650 const ld::Atom* atom = *it;
651 if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
652 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
653 }
654 this->_writer._localSymbolsCount = symbolIndex;
655
656
657 // make nlist entries for all global symbols
658 _globals.reserve(globalAtoms.size());
659 this->_writer._globalSymbolsStartIndex = symbolIndex;
660 for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
661 const ld::Atom* atom = *it;
662 this->addGlobal(atom, this->_writer._stringPoolAtom);
663 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
664 }
665 this->_writer._globalSymbolsCount = symbolIndex - this->_writer._globalSymbolsStartIndex;
666
667 // make nlist entries for all undefined (imported) symbols
668 std::vector<const ld::Atom*>& importAtoms = this->_writer._importedAtoms;
669 _imports.reserve(importAtoms.size());
670 this->_writer._importSymbolsStartIndex = symbolIndex;
671 for (std::vector<const ld::Atom*>::const_iterator it=importAtoms.begin(); it != importAtoms.end(); ++it) {
672 this->addImport(*it, this->_writer._stringPoolAtom);
673 this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
674 }
675 this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
676 }
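// Resulting symbol-table layout after encode() above (stabs are counted as
// locals); the three ranges recorded in the writer are what the dysymtab
// load command later describes:
//
//     [ stabs | other local symbols | exported globals | undefined imports ]
//
//     _localSymbolsStartIndex  = 0 (covers stabs and other locals)
//     _globalSymbolsStartIndex = first exported symbol
//     _importSymbolsStartIndex = first undefined symbol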
677
678 template <typename A>
679 uint64_t SymbolTableAtom<A>::size() const
680 {
681 return sizeof(macho_nlist<P>) * (_locals.size() + _globals.size() + _imports.size());
682 }
683
684 template <typename A>
685 void SymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
686 {
687 memcpy(&buffer[this->_writer._localSymbolsStartIndex*sizeof(macho_nlist<P>)], &_locals[0],
688 this->_writer._localSymbolsCount*sizeof(macho_nlist<P>));
689 memcpy(&buffer[this->_writer._globalSymbolsStartIndex*sizeof(macho_nlist<P>)], &_globals[0],
690 this->_writer._globalSymbolsCount*sizeof(macho_nlist<P>));
691 memcpy(&buffer[this->_writer._importSymbolsStartIndex *sizeof(macho_nlist<P>)], &_imports[0],
692 this->_writer._importSymbolsCount*sizeof(macho_nlist<P>));
693 }
694
695
696
697
698 class RelocationsAtomAbstract : public ClassicLinkEditAtom
699 {
700 public:
701 RelocationsAtomAbstract(const Options& opts, ld::Internal& state,
702 OutputFile& writer, const ld::Section& sect,
703 unsigned int pointerSize)
704 : ClassicLinkEditAtom(opts, state, writer, sect, pointerSize) { }
705
706 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) = 0;
707 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) = 0;
708 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) = 0;
709 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) = 0;
710 virtual uint64_t relocBaseAddress(ld::Internal& state) = 0;
711 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
712 const ld::Atom* inAtom, uint32_t offsetInAtom,
713 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
714 const ld::Atom* toTarget, uint64_t toAddend,
715 const ld::Atom* fromTarget, uint64_t fromAddend) = 0;
716 protected:
717 uint32_t symbolIndex(const ld::Atom* atom) const;
718
719 };
720
721
722
723 uint32_t RelocationsAtomAbstract::symbolIndex(const ld::Atom* atom) const
724 {
725 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
726 if ( pos != this->_writer._atomToSymbolIndex.end() )
727 return pos->second;
728 fprintf(stderr, "_atomToSymbolIndex content:\n");
729 for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
730 fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
731 }
732 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
733 }
734
735
736 template <typename A>
737 class LocalRelocationsAtom : public RelocationsAtomAbstract
738 {
739 public:
740 LocalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
741 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
742
743 // overrides of ld::Atom
744 virtual const char* name() const { return "local relocations"; }
745 virtual uint64_t size() const;
746 virtual void copyRawContent(uint8_t buffer[]) const;
747 // overrides of ClassicLinkEditAtom
748 virtual void encode() {}
749 // overrides of RelocationsAtomAbstract
750 virtual void addPointerReloc(uint64_t addr, uint32_t symNum);
751 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
752 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
753 virtual uint64_t relocBaseAddress(ld::Internal& state);
754 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum);
755 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
756 const ld::Atom* inAtom, uint32_t offsetInAtom,
757 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
758 const ld::Atom* toTarget, uint64_t toAddend,
759 const ld::Atom* fromTarget, uint64_t fromAddend) { }
760
761 private:
762 typedef typename A::P P;
763 typedef typename A::P::E E;
764 typedef typename A::P::uint_t pint_t;
765
766 std::vector<macho_relocation_info<P> > _relocs;
767
768 static ld::Section _s_section;
769 };
770
771 template <typename A>
772 ld::Section LocalRelocationsAtom<A>::_s_section("__LINKEDIT", "__local_relocs", ld::Section::typeLinkEdit, true);
773
774
775 template <>
776 uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
777 {
778 if ( _options.outputKind() == Options::kKextBundle ) {
779 // for kext bundles the reloc base address starts at __TEXT segment
780 return _options.baseAddress();
781 }
782 // for all other kinds, the x86_64 reloc base address starts at __DATA segment
783 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
784 ld::Internal::FinalSection* sect = *sit;
785 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
786 return sect->address;
787 }
788 throw "__DATA segment not found";
789 }
790
791 template <typename A>
792 uint64_t LocalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
793 {
794 return _options.baseAddress();
795 }
796
797 template <typename A>
798 void LocalRelocationsAtom<A>::addPointerReloc(uint64_t addr, uint32_t symNum)
799 {
800 macho_relocation_info<P> reloc;
801 reloc.set_r_address(addr);
802 reloc.set_r_symbolnum(symNum);
803 reloc.set_r_pcrel(false);
804 reloc.set_r_length();
805 reloc.set_r_extern(false);
806 reloc.set_r_type(GENERIC_RELOC_VANILLA);
807 _relocs.push_back(reloc);
808 }
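// Illustrative shape of the record built above (values hypothetical): a
// rebased data pointer produces
//
//     r_address   = address passed in (treated as an offset from relocBaseAddress())
//     r_symbolnum = symNum passed by the writer (not a symbol index, since r_extern=0)
//     r_pcrel     = 0, r_extern = 0
//     r_length    = pointer-size encoding (2 = 4 bytes, 3 = 8 bytes)
//     r_type      = GENERIC_RELOC_VANILLA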
809
810 template <typename A>
811 void LocalRelocationsAtom<A>::addTextReloc(uint64_t addr, ld::Fixup::Kind kind, uint64_t targetAddr, uint32_t symNum)
812 {
813 }
814
815
816 template <typename A>
817 uint64_t LocalRelocationsAtom<A>::size() const
818 {
819 return _relocs.size() * sizeof(macho_relocation_info<P>);
820 }
821
822 template <typename A>
823 void LocalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
824 {
825 memcpy(buffer, &_relocs[0], _relocs.size()*sizeof(macho_relocation_info<P>));
826 }
827
828
829
830
831
832
833 template <typename A>
834 class ExternalRelocationsAtom : public RelocationsAtomAbstract
835 {
836 public:
837 ExternalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
838 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
839
840 // overrides of ld::Atom
841 virtual const char* name() const { return "external relocations"; }
842 virtual uint64_t size() const;
843 virtual void copyRawContent(uint8_t buffer[]) const;
844 // overrides of ClassicLinkEditAtom
845 virtual void encode() {}
846 // overrides of RelocationsAtomAbstract
847 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
848 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
849 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*);
850 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*);
851 virtual uint64_t relocBaseAddress(ld::Internal& state);
852 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
853 const ld::Atom* inAtom, uint32_t offsetInAtom,
854 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
855 const ld::Atom* toTarget, uint64_t toAddend,
856 const ld::Atom* fromTarget, uint64_t fromAddend) { }
857
858
859 private:
860 typedef typename A::P P;
861 typedef typename A::P::E E;
862 typedef typename A::P::uint_t pint_t;
863
864 struct LocAndAtom {
865 LocAndAtom(uint64_t l, const ld::Atom* a) : loc(l), atom(a), symbolIndex(0) {}
866
867 uint64_t loc;
868 const ld::Atom* atom;
869 uint32_t symbolIndex;
870
871 bool operator<(const LocAndAtom& rhs) const {
872 // sort first by symbol number
873 if ( this->symbolIndex != rhs.symbolIndex )
874 return (this->symbolIndex < rhs.symbolIndex);
875 // then sort all uses of the same symbol by address
876 return (this->loc < rhs.loc);
877 }
878
879 };
880
881 static uint32_t pointerReloc();
882 static uint32_t callReloc();
883
884 mutable std::vector<LocAndAtom> _pointerLocations;
885 mutable std::vector<LocAndAtom> _callSiteLocations;
886
887 static ld::Section _s_section;
888 };
889
890 template <typename A>
891 ld::Section ExternalRelocationsAtom<A>::_s_section("__LINKEDIT", "__extrn_relocs", ld::Section::typeLinkEdit, true);
892
893 template <>
894 uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
895 {
896 // for x86_64 the reloc base address starts at __DATA segment
897 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
898 ld::Internal::FinalSection* sect = *sit;
899 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
900 return sect->address;
901 }
902 throw "__DATA segment not found";
903 }
904
905 template <typename A>
906 uint64_t ExternalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
907 {
908 return 0;
909 }
910
911 template <typename A>
912 void ExternalRelocationsAtom<A>::addExternalPointerReloc(uint64_t addr, const ld::Atom* target)
913 {
914 _pointerLocations.push_back(LocAndAtom(addr, target));
915 }
916
917 template <typename A>
918 void ExternalRelocationsAtom<A>::addExternalCallSiteReloc(uint64_t addr, const ld::Atom* target)
919 {
920 _callSiteLocations.push_back(LocAndAtom(addr, target));
921 }
922
923
924 template <typename A>
925 uint64_t ExternalRelocationsAtom<A>::size() const
926 {
927 if ( _options.outputKind() == Options::kStaticExecutable ) {
928 assert(_pointerLocations.size() == 0);
929 assert(_callSiteLocations.size() == 0);
930 }
931 return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info<P>);
932 }
933
934 #if SUPPORT_ARCH_arm_any
935 template <> uint32_t ExternalRelocationsAtom<arm>::pointerReloc() { return ARM_RELOC_VANILLA; }
936 #endif
937 template <> uint32_t ExternalRelocationsAtom<x86>::pointerReloc() { return GENERIC_RELOC_VANILLA; }
938 template <> uint32_t ExternalRelocationsAtom<x86_64>::pointerReloc() { return X86_64_RELOC_UNSIGNED; }
939
940
941 template <> uint32_t ExternalRelocationsAtom<x86_64>::callReloc() { return X86_64_RELOC_BRANCH; }
942 template <> uint32_t ExternalRelocationsAtom<x86>::callReloc() { return GENERIC_RELOC_VANILLA; }
943 template <typename A>
944 uint32_t ExternalRelocationsAtom<A>::callReloc()
945 {
946 assert(0 && "external call relocs not implemented");
947 return 0;
948 }
949
950
951 template <typename A>
952 void ExternalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
953 {
954 macho_relocation_info<P>* r = (macho_relocation_info<P>*)buffer;
955
956 // assign symbol index, now that symbol table is built
957 for (typename std::vector<LocAndAtom>::iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it) {
958 it->symbolIndex = symbolIndex(it->atom);
959 }
960 std::sort(_pointerLocations.begin(), _pointerLocations.end());
961 for (typename std::vector<LocAndAtom>::const_iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it, ++r) {
962 r->set_r_address(it->loc);
963 r->set_r_symbolnum(it->symbolIndex);
964 r->set_r_pcrel(false);
965 r->set_r_length();
966 r->set_r_extern(true);
967 r->set_r_type(this->pointerReloc());
968 }
969
970 for (typename std::vector<LocAndAtom>::iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it) {
971 it->symbolIndex = symbolIndex(it->atom);
972 }
973 std::sort(_callSiteLocations.begin(), _callSiteLocations.end());
974 for (typename std::vector<LocAndAtom>::const_iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it, ++r) {
975 r->set_r_address(it->loc);
976 r->set_r_symbolnum(it->symbolIndex);
977 r->set_r_pcrel(true);
978 r->set_r_length(2);
979 r->set_r_extern(true);
980 r->set_r_type(this->callReloc());
981 }
982 }
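// Note on ordering: copyRawContent() above sorts via LocAndAtom::operator<, so
// external relocations are grouped by symbol index and, within one symbol, by
// address. A sketch of the resulting table (symbols and addresses hypothetical):
//
//     symbol #5 (_foo):  r_address 0x1000, 0x1828, 0x2040
//     symbol #9 (_bar):  r_address 0x1010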
983
984
985 template <typename A>
986 class SectionRelocationsAtom : public RelocationsAtomAbstract
987 {
988 public:
989 SectionRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
990 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
991
992 // overrides of ld::Atom
993 virtual const char* name() const { return "section relocations"; }
994 virtual uint64_t size() const;
995 virtual void copyRawContent(uint8_t buffer[]) const;
996 // overrides of ClassicLinkEditAtom
997 virtual void encode();
998 // overrides of RelocationsAtomAbstract
999 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
1000 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
1001 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
1002 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
1003 virtual uint64_t relocBaseAddress(ld::Internal& state) { return 0; }
1004 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
1005 const ld::Atom* inAtom, uint32_t offsetInAtom,
1006 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1007 const ld::Atom* toTarget, uint64_t toAddend,
1008 const ld::Atom* fromTarget, uint64_t fromAddend);
1009
1010 private:
1011 typedef typename A::P P;
1012 typedef typename A::P::E E;
1013 typedef typename A::P::uint_t pint_t;
1014
1015
1016 struct Entry {
1017 ld::Fixup::Kind kind;
1018 bool toTargetUsesExternalReloc;
1019 bool fromTargetUsesExternalReloc;
1020 const ld::Atom* inAtom;
1021 uint32_t offsetInAtom;
1022 const ld::Atom* toTarget;
1023 uint64_t toAddend;
1024 const ld::Atom* fromTarget;
1025 uint64_t fromAddend;
1026 };
1027 uint32_t sectSymNum(bool external, const ld::Atom* target);
1028 void encodeSectionReloc(ld::Internal::FinalSection* sect,
1029 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs);
1030
1031 struct SectionAndEntries {
1032 ld::Internal::FinalSection* sect;
1033 std::vector<Entry> entries;
1034 std::vector<macho_relocation_info<P> > relocs;
1035 };
1036
1037 std::vector<SectionAndEntries> _entriesBySection;
1038
1039 static ld::Section _s_section;
1040 };
1041
1042 template <typename A>
1043 ld::Section SectionRelocationsAtom<A>::_s_section("__LINKEDIT", "__sect_relocs", ld::Section::typeLinkEdit, true);
1044
1045
1046
1047
1048 template <typename A>
1049 uint64_t SectionRelocationsAtom<A>::size() const
1050 {
1051 uint32_t count = 0;
1052 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1053 const SectionAndEntries& se = *it;
1054 count += se.relocs.size();
1055 }
1056 return count * sizeof(macho_relocation_info<P>);
1057 }
1058
1059 template <typename A>
1060 void SectionRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1061 {
1062 uint32_t offset = 0;
1063 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1064 const SectionAndEntries& se = *it;
1065 memcpy(&buffer[offset], &se.relocs[0], se.relocs.size()*sizeof(macho_relocation_info<P>));
1066 offset += (se.relocs.size() * sizeof(macho_relocation_info<P>));
1067 }
1068 }
1069
1070
1071 template <>
1072 void SectionRelocationsAtom<x86_64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1073 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1074 {
1075 macho_relocation_info<P> reloc1;
1076 macho_relocation_info<P> reloc2;
1077 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1078 bool external = entry.toTargetUsesExternalReloc;
1079 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1080 bool fromExternal = false;
1081 uint32_t fromSymbolNum = 0;
1082 if ( entry.fromTarget != NULL ) {
1083 fromExternal = entry.fromTargetUsesExternalReloc;
1084 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1085 }
1086
1087
1088 switch ( entry.kind ) {
1089 case ld::Fixup::kindStoreX86BranchPCRel32:
1090 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1091 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1092 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1093 reloc1.set_r_address(address);
1094 reloc1.set_r_symbolnum(symbolNum);
1095 reloc1.set_r_pcrel(true);
1096 reloc1.set_r_length(2);
1097 reloc1.set_r_extern(external);
1098 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1099 relocs.push_back(reloc1);
1100 break;
1101
1102 case ld::Fixup::kindStoreX86BranchPCRel8:
1103 reloc1.set_r_address(address);
1104 reloc1.set_r_symbolnum(symbolNum);
1105 reloc1.set_r_pcrel(true);
1106 reloc1.set_r_length(0);
1107 reloc1.set_r_extern(external);
1108 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1109 relocs.push_back(reloc1);
1110 break;
1111
1112 case ld::Fixup::kindStoreX86PCRel32:
1113 case ld::Fixup::kindStoreTargetAddressX86PCRel32:
1114 reloc1.set_r_address(address);
1115 reloc1.set_r_symbolnum(symbolNum);
1116 reloc1.set_r_pcrel(true);
1117 reloc1.set_r_length(2);
1118 reloc1.set_r_extern(external);
1119 reloc1.set_r_type(X86_64_RELOC_SIGNED);
1120 relocs.push_back(reloc1);
1121 break;
1122
1123 case ld::Fixup::kindStoreX86PCRel32_1:
1124 reloc1.set_r_address(address);
1125 reloc1.set_r_symbolnum(symbolNum);
1126 reloc1.set_r_pcrel(true);
1127 reloc1.set_r_length(2);
1128 reloc1.set_r_extern(external);
1129 reloc1.set_r_type(X86_64_RELOC_SIGNED_1);
1130 relocs.push_back(reloc1);
1131 break;
1132
1133 case ld::Fixup::kindStoreX86PCRel32_2:
1134 reloc1.set_r_address(address);
1135 reloc1.set_r_symbolnum(symbolNum);
1136 reloc1.set_r_pcrel(true);
1137 reloc1.set_r_length(2);
1138 reloc1.set_r_extern(external);
1139 reloc1.set_r_type(X86_64_RELOC_SIGNED_2);
1140 relocs.push_back(reloc1);
1141 break;
1142
1143 case ld::Fixup::kindStoreX86PCRel32_4:
1144 reloc1.set_r_address(address);
1145 reloc1.set_r_symbolnum(symbolNum);
1146 reloc1.set_r_pcrel(true);
1147 reloc1.set_r_length(2);
1148 reloc1.set_r_extern(external);
1149 reloc1.set_r_type(X86_64_RELOC_SIGNED_4);
1150 relocs.push_back(reloc1);
1151 break;
1152
1153 case ld::Fixup::kindStoreX86PCRel32GOTLoad:
1154 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
1155 reloc1.set_r_address(address);
1156 reloc1.set_r_symbolnum(symbolNum);
1157 reloc1.set_r_pcrel(true);
1158 reloc1.set_r_length(2);
1159 reloc1.set_r_extern(external);
1160 reloc1.set_r_type(X86_64_RELOC_GOT_LOAD);
1161 relocs.push_back(reloc1);
1162 break;
1163
1164 case ld::Fixup::kindStoreX86PCRel32GOT:
1165 reloc1.set_r_address(address);
1166 reloc1.set_r_symbolnum(symbolNum);
1167 reloc1.set_r_pcrel(true);
1168 reloc1.set_r_length(2);
1169 reloc1.set_r_extern(external);
1170 reloc1.set_r_type(X86_64_RELOC_GOT);
1171 relocs.push_back(reloc1);
1172 break;
1173
1174 case ld::Fixup::kindStoreLittleEndian64:
1175 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1176 if ( entry.fromTarget != NULL ) {
1177 // this is a pointer-diff
1178 reloc1.set_r_address(address);
1179 reloc1.set_r_symbolnum(symbolNum);
1180 reloc1.set_r_pcrel(false);
1181 reloc1.set_r_length(3);
1182 reloc1.set_r_extern(external);
1183 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1184 reloc2.set_r_address(address);
1185 reloc2.set_r_symbolnum(fromSymbolNum);
1186 reloc2.set_r_pcrel(false);
1187 reloc2.set_r_length(3);
1188 reloc2.set_r_extern(fromExternal);
1189 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1190 relocs.push_back(reloc2);
1191 relocs.push_back(reloc1);
1192 }
1193 else {
1194 // regular pointer
1195 reloc1.set_r_address(address);
1196 reloc1.set_r_symbolnum(symbolNum);
1197 reloc1.set_r_pcrel(false);
1198 reloc1.set_r_length(3);
1199 reloc1.set_r_extern(external);
1200 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1201 relocs.push_back(reloc1);
1202 }
1203 break;
1204
1205 case ld::Fixup::kindStoreLittleEndian32:
1206 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1207 if ( entry.fromTarget != NULL ) {
1208 // this is a pointer-diff
1209 reloc1.set_r_address(address);
1210 reloc1.set_r_symbolnum(symbolNum);
1211 reloc1.set_r_pcrel(false);
1212 reloc1.set_r_length(2);
1213 reloc1.set_r_extern(external);
1214 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1215 reloc2.set_r_address(address);
1216 reloc2.set_r_symbolnum(fromSymbolNum);
1217 reloc2.set_r_pcrel(false);
1218 reloc2.set_r_length(2);
1219 reloc2.set_r_extern(fromExternal);
1220 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1221 relocs.push_back(reloc2);
1222 relocs.push_back(reloc1);
1223 }
1224 else {
1225 // regular pointer
1226 reloc1.set_r_address(address);
1227 reloc1.set_r_symbolnum(symbolNum);
1228 reloc1.set_r_pcrel(false);
1229 reloc1.set_r_length(2);
1230 reloc1.set_r_extern(external);
1231 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1232 relocs.push_back(reloc1);
1233 }
1234 break;
1235 case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad:
1236 reloc1.set_r_address(address);
1237 reloc1.set_r_symbolnum(symbolNum);
1238 reloc1.set_r_pcrel(true);
1239 reloc1.set_r_length(2);
1240 reloc1.set_r_extern(external);
1241 reloc1.set_r_type(X86_64_RELOC_TLV);
1242 relocs.push_back(reloc1);
1243 break;
1244 default:
1245 assert(0 && "need to handle -r reloc");
1246
1247 }
1248
1249 }
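// Illustrative pointer-diff encoding from the 64-bit case above (labels
// hypothetical, both targets assumed to use external relocations): a value
// stored at section offset 0x18 and computed as (_to - _from) is emitted as
// two records at the same r_address, SUBTRACTOR first:
//
//     { r_address=0x18, r_symbolnum=<index of _from>, r_extern=1, r_pcrel=0,
//       r_length=3, r_type=X86_64_RELOC_SUBTRACTOR }
//     { r_address=0x18, r_symbolnum=<index of _to>,   r_extern=1, r_pcrel=0,
//       r_length=3, r_type=X86_64_RELOC_UNSIGNED }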
1250
1251
1252
1253 template <typename A>
1254 uint32_t SectionRelocationsAtom<A>::sectSymNum(bool external, const ld::Atom* target)
1255 {
1256 if ( target->definition() == ld::Atom::definitionAbsolute )
1257 return R_ABS;
1258 if ( external )
1259 return this->symbolIndex(target); // in external relocations, r_symbolnum field is symbol index
1260 else
1261 return target->machoSection(); // in non-extern relocations, r_symbolnum is mach-o section index of target
1262 }
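// Illustrative consequence of sectSymNum() above for -r output (names
// hypothetical): a call to an undefined "_printf" is encoded with r_extern=1
// and r_symbolnum equal to _printf's symbol-table index, while a reference to
// a static helper kept in __TEXT,__text is encoded with r_extern=0 and
// r_symbolnum equal to that section's 1-based mach-o section index; absolute
// symbols use R_ABS.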
1263
1264 template <>
1265 void SectionRelocationsAtom<x86>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1266 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1267 {
1268 macho_relocation_info<P> reloc1;
1269 macho_relocation_info<P> reloc2;
1270 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1271 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1272 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1273 bool external = entry.toTargetUsesExternalReloc;
1274 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1275 bool fromExternal = false;
1276 uint32_t fromSymbolNum = 0;
1277 if ( entry.fromTarget != NULL ) {
1278 fromExternal = entry.fromTargetUsesExternalReloc;
1279 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1280 }
1281
1282 switch ( entry.kind ) {
1283 case ld::Fixup::kindStoreX86PCRel32:
1284 case ld::Fixup::kindStoreX86BranchPCRel32:
1285 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1286 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1287 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1288 if ( !external && (entry.toAddend != 0) ) {
1289 // use scattered reloc if target offset is non-zero
1290 sreloc1->set_r_scattered(true);
1291 sreloc1->set_r_pcrel(true);
1292 sreloc1->set_r_length(2);
1293 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1294 sreloc1->set_r_address(address);
1295 sreloc1->set_r_value(entry.toTarget->finalAddress());
1296 }
1297 else {
1298 reloc1.set_r_address(address);
1299 reloc1.set_r_symbolnum(symbolNum);
1300 reloc1.set_r_pcrel(true);
1301 reloc1.set_r_length(2);
1302 reloc1.set_r_extern(external);
1303 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1304 }
1305 relocs.push_back(reloc1);
1306 break;
1307
1308 case ld::Fixup::kindStoreX86BranchPCRel8:
1309 if ( !external && (entry.toAddend != 0) ) {
1310 // use scattered reloc if target offset is non-zero
1311 sreloc1->set_r_scattered(true);
1312 sreloc1->set_r_pcrel(true);
1313 sreloc1->set_r_length(0);
1314 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1315 sreloc1->set_r_address(address);
1316 sreloc1->set_r_value(entry.toTarget->finalAddress());
1317 }
1318 else {
1319 reloc1.set_r_address(address);
1320 reloc1.set_r_symbolnum(symbolNum);
1321 reloc1.set_r_pcrel(true);
1322 reloc1.set_r_length(0);
1323 reloc1.set_r_extern(external);
1324 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1325 }
1326 relocs.push_back(reloc1);
1327 break;
1328
1329 case ld::Fixup::kindStoreX86PCRel16:
1330 if ( !external && (entry.toAddend != 0) ) {
1331 // use scattered reloc if target offset is non-zero
1332 sreloc1->set_r_scattered(true);
1333 sreloc1->set_r_pcrel(true);
1334 sreloc1->set_r_length(1);
1335 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1336 sreloc1->set_r_address(address);
1337 sreloc1->set_r_value(entry.toTarget->finalAddress());
1338 }
1339 else {
1340 reloc1.set_r_address(address);
1341 reloc1.set_r_symbolnum(symbolNum);
1342 reloc1.set_r_pcrel(true);
1343 reloc1.set_r_length(1);
1344 reloc1.set_r_extern(external);
1345 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1346 }
1347 relocs.push_back(reloc1);
1348 break;
1349
1350 case ld::Fixup::kindStoreLittleEndian32:
1351 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1352 if ( entry.fromTarget != NULL ) {
1353 // this is a pointer-diff
1354 sreloc1->set_r_scattered(true);
1355 sreloc1->set_r_pcrel(false);
1356 sreloc1->set_r_length(2);
1357 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1358 sreloc1->set_r_type(GENERIC_RELOC_LOCAL_SECTDIFF);
1359 else
1360 sreloc1->set_r_type(GENERIC_RELOC_SECTDIFF);
1361 sreloc1->set_r_address(address);
1362 if ( entry.toTarget == entry.inAtom ) {
1363 if ( entry.toAddend > entry.toTarget->size() )
1364 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1365 else
1366 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1367 }
1368 else
1369 sreloc1->set_r_value(entry.toTarget->finalAddress());
1370 sreloc2->set_r_scattered(true);
1371 sreloc2->set_r_pcrel(false);
1372 sreloc2->set_r_length(2);
1373 sreloc2->set_r_type(GENERIC_RELOC_PAIR);
1374 sreloc2->set_r_address(0);
1375 if ( entry.fromTarget == entry.inAtom ) {
1376 if ( entry.fromAddend > entry.fromTarget->size() )
1377 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.offsetInAtom);
1378 else
1379 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1380 }
1381 else
1382 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1383 relocs.push_back(reloc1);
1384 relocs.push_back(reloc2);
1385 }
1386 else {
1387 // regular pointer
1388 if ( !external && (entry.toAddend != 0) && (entry.toTarget->symbolTableInclusion() != ld::Atom::symbolTableNotIn) ) {
1389 // use scattered reloc if target offset is non-zero into named atom (5658046)
1390 sreloc1->set_r_scattered(true);
1391 sreloc1->set_r_pcrel(false);
1392 sreloc1->set_r_length(2);
1393 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1394 sreloc1->set_r_address(address);
1395 sreloc1->set_r_value(entry.toTarget->finalAddress());
1396 }
1397 else {
1398 reloc1.set_r_address(address);
1399 reloc1.set_r_symbolnum(symbolNum);
1400 reloc1.set_r_pcrel(false);
1401 reloc1.set_r_length(2);
1402 reloc1.set_r_extern(external);
1403 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1404 }
1405 relocs.push_back(reloc1);
1406 }
1407 break;
1408 case ld::Fixup::kindStoreX86PCRel32TLVLoad:
1409 case ld::Fixup::kindStoreX86Abs32TLVLoad:
1410 case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad:
1411 reloc1.set_r_address(address);
1412 reloc1.set_r_symbolnum(symbolNum);
1413 reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad);
1414 reloc1.set_r_length(2);
1415 reloc1.set_r_extern(external);
1416 reloc1.set_r_type(GENERIC_RLEOC_TLV);
1417 relocs.push_back(reloc1);
1418 break;
1419 default:
1420 assert(0 && "need to handle -r reloc");
1421
1422 }
1423 }
1424
1425
1426
1427 #if SUPPORT_ARCH_arm_any
1428 template <>
1429 void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1430 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1431 {
1432 macho_relocation_info<P> reloc1;
1433 macho_relocation_info<P> reloc2;
1434 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1435 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1436 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1437 bool external = entry.toTargetUsesExternalReloc;
1438 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1439 bool fromExternal = false;
1440 uint32_t fromSymbolNum = 0;
1441 if ( entry.fromTarget != NULL ) {
1442 fromExternal = entry.fromTargetUsesExternalReloc;
1443 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1444 }
1445
1446
1447 switch ( entry.kind ) {
1448 case ld::Fixup::kindStoreTargetAddressARMBranch24:
1449 case ld::Fixup::kindStoreARMBranch24:
1450 case ld::Fixup::kindStoreARMDtraceCallSiteNop:
1451 case ld::Fixup::kindStoreARMDtraceIsEnableSiteClear:
1452 if ( !external && (entry.toAddend != 0) ) {
1453 // use scattered reloc if target offset is non-zero
1454 sreloc1->set_r_scattered(true);
1455 sreloc1->set_r_pcrel(true);
1456 sreloc1->set_r_length(2);
1457 sreloc1->set_r_type(ARM_RELOC_BR24);
1458 sreloc1->set_r_address(address);
1459 sreloc1->set_r_value(entry.toTarget->finalAddress());
1460 }
1461 else {
1462 reloc1.set_r_address(address);
1463 reloc1.set_r_symbolnum(symbolNum);
1464 reloc1.set_r_pcrel(true);
1465 reloc1.set_r_length(2);
1466 reloc1.set_r_extern(external);
1467 reloc1.set_r_type(ARM_RELOC_BR24);
1468 }
1469 relocs.push_back(reloc1);
1470 break;
1471
1472 case ld::Fixup::kindStoreTargetAddressThumbBranch22:
1473 case ld::Fixup::kindStoreThumbBranch22:
1474 case ld::Fixup::kindStoreThumbDtraceCallSiteNop:
1475 case ld::Fixup::kindStoreThumbDtraceIsEnableSiteClear:
1476 if ( !external && (entry.toAddend != 0) ) {
1477 // use scattered reloc if target offset is non-zero
1478 sreloc1->set_r_scattered(true);
1479 sreloc1->set_r_pcrel(true);
1480 sreloc1->set_r_length(2);
1481 sreloc1->set_r_type(ARM_THUMB_RELOC_BR22);
1482 sreloc1->set_r_address(address);
1483 sreloc1->set_r_value(entry.toTarget->finalAddress());
1484 }
1485 else {
1486 reloc1.set_r_address(address);
1487 reloc1.set_r_symbolnum(symbolNum);
1488 reloc1.set_r_pcrel(true);
1489 reloc1.set_r_length(2);
1490 reloc1.set_r_extern(external);
1491 reloc1.set_r_type(ARM_THUMB_RELOC_BR22);
1492 }
1493 relocs.push_back(reloc1);
1494 break;
1495
1496 case ld::Fixup::kindStoreLittleEndian32:
1497 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1498 if ( entry.fromTarget != NULL ) {
1499 // this is a pointer-diff
1500 sreloc1->set_r_scattered(true);
1501 sreloc1->set_r_pcrel(false);
1502 sreloc1->set_r_length(2);
1503 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1504 sreloc1->set_r_type(ARM_RELOC_LOCAL_SECTDIFF);
1505 else
1506 sreloc1->set_r_type(ARM_RELOC_SECTDIFF);
1507 sreloc1->set_r_address(address);
1508 if ( entry.toTarget == entry.inAtom ) {
1509 if ( entry.toAddend > entry.toTarget->size() )
1510 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1511 else
1512 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1513 }
1514 else {
1515 sreloc1->set_r_value(entry.toTarget->finalAddress());
1516 }
1517 sreloc2->set_r_scattered(true);
1518 sreloc2->set_r_pcrel(false);
1519 sreloc2->set_r_length(2);
1520 sreloc2->set_r_type(ARM_RELOC_PAIR);
1521 sreloc2->set_r_address(0);
1522 if ( entry.fromTarget == entry.inAtom ) {
1523 //unsigned int pcBaseOffset = entry.inAtom->isThumb() ? 4 : 8;
1524 //if ( entry.fromAddend > pcBaseOffset )
1525 // sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend-pcBaseOffset);
1526 //else
1527 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1528 }
1529 else {
1530 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1531 }
1532 relocs.push_back(reloc1);
1533 relocs.push_back(reloc2);
1534 }
1535 else {
1536 // regular pointer
1537 if ( !external && (entry.toAddend != 0) ) {
1538 // use scattered reloc if target offset is non-zero
1539 sreloc1->set_r_scattered(true);
1540 sreloc1->set_r_pcrel(false);
1541 sreloc1->set_r_length(2);
1542 sreloc1->set_r_type(ARM_RELOC_VANILLA);
1543 sreloc1->set_r_address(address);
1544 sreloc1->set_r_value(entry.toTarget->finalAddress());
1545 }
1546 else {
1547 reloc1.set_r_address(address);
1548 reloc1.set_r_symbolnum(symbolNum);
1549 reloc1.set_r_pcrel(false);
1550 reloc1.set_r_length(2);
1551 reloc1.set_r_extern(external);
1552 reloc1.set_r_type(ARM_RELOC_VANILLA);
1553 }
1554 relocs.push_back(reloc1);
1555 }
1556 break;
1557
1558 case ld::Fixup::kindStoreARMLow16:
1559 case ld::Fixup::kindStoreARMHigh16:
1560 case ld::Fixup::kindStoreThumbLow16:
1561 case ld::Fixup::kindStoreThumbHigh16:
1562 {
1563 int len = 0;
1564 uint32_t otherHalf = 0;
1565 uint32_t value = entry.toTarget->finalAddress()+entry.toAddend;
1566 if ( entry.fromTarget != NULL )
1567 value -= (entry.fromTarget->finalAddress()+entry.fromAddend);
1568 switch ( entry.kind ) {
1569 case ld::Fixup::kindStoreARMLow16:
1570 len = 0;
1571 otherHalf = value >> 16;
1572 break;
1573 case ld::Fixup::kindStoreARMHigh16:
1574 len = 1;
1575 otherHalf = value & 0xFFFF;
1576 break;
1577 case ld::Fixup::kindStoreThumbLow16:
1578 len = 2;
1579 otherHalf = value >> 16;
1580 break;
1581 case ld::Fixup::kindStoreThumbHigh16:
1582 len = 3;
1583 otherHalf = value & 0xFFFF;
1584 break;
1585 default:
1586 break;
1587 }
1588 if ( entry.fromTarget != NULL ) {
1589 // this is a sect-diff
1590 sreloc1->set_r_scattered(true);
1591 sreloc1->set_r_pcrel(false);
1592 sreloc1->set_r_length(len);
1593 sreloc1->set_r_type(ARM_RELOC_HALF_SECTDIFF);
1594 sreloc1->set_r_address(address);
1595 sreloc1->set_r_value(entry.toTarget->finalAddress());
1596 sreloc2->set_r_scattered(true);
1597 sreloc2->set_r_pcrel(false);
1598 sreloc2->set_r_length(len);
1599 sreloc2->set_r_type(ARM_RELOC_PAIR);
1600 sreloc2->set_r_address(otherHalf);
1601 if ( entry.fromTarget == entry.inAtom )
1602 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1603 else
1604 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1605 relocs.push_back(reloc1);
1606 relocs.push_back(reloc2);
1607 }
1608 else {
1609 // this is an absolute address
1610 if ( !external && (entry.toAddend != 0) ) {
1611 // use scattered reloc if target offset is non-zero
1612 sreloc1->set_r_scattered(true);
1613 sreloc1->set_r_pcrel(false);
1614 sreloc1->set_r_length(len);
1615 sreloc1->set_r_type(ARM_RELOC_HALF);
1616 sreloc1->set_r_address(address);
1617 sreloc1->set_r_value(entry.toTarget->finalAddress());
1618 reloc2.set_r_address(otherHalf);
1619 reloc2.set_r_symbolnum(0);
1620 reloc2.set_r_pcrel(false);
1621 reloc2.set_r_length(len);
1622 reloc2.set_r_extern(false);
1623 reloc2.set_r_type(ARM_RELOC_PAIR);
1624 relocs.push_back(reloc1);
1625 relocs.push_back(reloc2);
1626 }
1627 else {
1628 reloc1.set_r_address(address);
1629 reloc1.set_r_symbolnum(symbolNum);
1630 reloc1.set_r_pcrel(false);
1631 reloc1.set_r_length(len);
1632 reloc1.set_r_extern(external);
1633 reloc1.set_r_type(ARM_RELOC_HALF);
1634 reloc2.set_r_address(otherHalf); // other half
1635 reloc2.set_r_symbolnum(0);
1636 reloc2.set_r_pcrel(false);
1637 reloc2.set_r_length(len);
1638 reloc2.set_r_extern(false);
1639 reloc2.set_r_type(ARM_RELOC_PAIR);
1640 relocs.push_back(reloc1);
1641 relocs.push_back(reloc2);
1642 }
1643 }
1644 }
1645 break;
1646
1647 default:
1648 assert(0 && "need to handle -r reloc");
1649
1650 }
1651 }
1652 #endif
1653
1654
1655
1656 template <typename A>
1657 void SectionRelocationsAtom<A>::addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind kind,
1658 const ld::Atom* inAtom, uint32_t offsetInAtom,
1659 bool toTargetUsesExternalReloc, bool fromTargetExternalReloc,
1660 const ld::Atom* toTarget, uint64_t toAddend,
1661 const ld::Atom* fromTarget, uint64_t fromAddend)
1662 {
1663 Entry entry;
1664 entry.kind = kind;
1665 entry.toTargetUsesExternalReloc = toTargetUsesExternalReloc;
1666 entry.fromTargetUsesExternalReloc = fromTargetExternalReloc;
1667 entry.inAtom = inAtom;
1668 entry.offsetInAtom = offsetInAtom;
1669 entry.toTarget = toTarget;
1670 entry.toAddend = toAddend;
1671 entry.fromTarget = fromTarget;
1672 entry.fromAddend = fromAddend;
1673
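// One-entry cache of the most recently used section, so that consecutive relocs added for the
// same section avoid the linear search below. (These are function-local statics, so the cache
// persists across calls and is not thread-safe.)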
1674 static ld::Internal::FinalSection* lastSection = NULL;
1675 static SectionAndEntries* lastSectionAndEntries = NULL;
1676
1677 if ( sect != lastSection ) {
1678 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1679 if ( sect == it->sect ) {
1680 lastSection = sect;
1681 lastSectionAndEntries = &*it;
1682 break;
1683 }
1684 }
1685 if ( sect != lastSection ) {
1686 SectionAndEntries tmp;
1687 tmp.sect = sect;
1688 _entriesBySection.push_back(tmp);
1689 lastSection = sect;
1690 lastSectionAndEntries = &_entriesBySection.back();
1691 }
1692 }
1693 lastSectionAndEntries->entries.push_back(entry);
1694 }
1695
1696 template <typename A>
1697 void SectionRelocationsAtom<A>::encode()
1698 {
1699 // convert each Entry record to one or two reloc records
1700 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1701 SectionAndEntries& se = *it;
1702 for(typename std::vector<Entry>::iterator eit=se.entries.begin(); eit != se.entries.end(); ++eit) {
1703 encodeSectionReloc(se.sect, *eit, se.relocs);
1704 }
1705 }
1706
1707 // update sections with start index and count of relocs
1708 uint32_t index = 0;
1709 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1710 SectionAndEntries& se = *it;
1711 se.sect->relocStart = index;
1712 se.sect->relocCount = se.relocs.size();
1713 index += se.sect->relocCount;
1714 }
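// relocStart/relocCount recorded here are presumably what the output writer uses to fill in
// each section header's reloff/nreloc fields when the relocation entries are laid out.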
1715
1716 }
1717
1718
1719
1720 template <typename A>
1721 class IndirectSymbolTableAtom : public ClassicLinkEditAtom
1722 {
1723 public:
1724 IndirectSymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1725 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)) { }
1726
1727 // overrides of ld::Atom
1728 virtual const char* name() const { return "indirect symbol table"; }
1729 virtual uint64_t size() const;
1730 virtual void copyRawContent(uint8_t buffer[]) const;
1731 // overrides of ClassicLinkEditAtom
1732 virtual void encode();
1733
1734 private:
1735 typedef typename A::P P;
1736 typedef typename A::P::E E;
1737 typedef typename A::P::uint_t pint_t;
1738
1739 void encodeStubSection(ld::Internal::FinalSection* sect);
1740 void encodeLazyPointerSection(ld::Internal::FinalSection* sect);
1741 void encodeNonLazyPointerSection(ld::Internal::FinalSection* sect);
1742 uint32_t symIndexOfStubAtom(const ld::Atom*);
1743 uint32_t symIndexOfLazyPointerAtom(const ld::Atom*);
1744 uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*);
1745 uint32_t symbolIndex(const ld::Atom*);
1746 bool kextBundlesDontHaveIndirectSymbolTable();
1747
1748
1749 std::vector<uint32_t> _entries;
1750
1751 static ld::Section _s_section;
1752 };
1753
1754 template <typename A>
1755 ld::Section IndirectSymbolTableAtom<A>::_s_section("__LINKEDIT", "__ind_sym_tab", ld::Section::typeLinkEdit, true);
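// The indirect symbol table is a flat array of 32-bit symbol-table indexes; each stub,
// lazy-pointer, and non-lazy-pointer section gets a contiguous range of it, recorded below via
// indirectSymTabStartIndex.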
1756
1757
1758
1759
1760 template <typename A>
1761 uint32_t IndirectSymbolTableAtom<A>::symbolIndex(const ld::Atom* atom)
1762 {
1763 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
1764 if ( pos != this->_writer._atomToSymbolIndex.end() )
1765 return pos->second;
1766 //fprintf(stderr, "_atomToSymbolIndex content:\n");
1767 //for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
1768 // fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
1769 //}
1770 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
1771 }
1772
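// A stub does not reference its symbol directly: it branches through a lazy pointer, so the
// indirect-table entry for a stub is found by following the stub's fixup to its lazy pointer
// and then the lazy pointer's kindLazyTarget fixup to the real symbol.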
1773 template <typename A>
1774 uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
1775 {
1776 for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
1777 if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
1778 assert((fit->u.target->contentType() == ld::Atom::typeLazyPointer)
1779 || (fit->u.target->contentType() == ld::Atom::typeLazyDylibPointer));
1780 return symIndexOfLazyPointerAtom(fit->u.target);
1781 }
1782 }
1783 throw "internal error: stub missing fixup to lazy pointer";
1784 }
1785
1786
1787 template <typename A>
1788 uint32_t IndirectSymbolTableAtom<A>::symIndexOfLazyPointerAtom(const ld::Atom* lpAtom)
1789 {
1790 for (ld::Fixup::iterator fit = lpAtom->fixupsBegin(); fit != lpAtom->fixupsEnd(); ++fit) {
1791 if ( fit->kind == ld::Fixup::kindLazyTarget ) {
1792 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
1793 return symbolIndex(fit->u.target);
1794 }
1795 }
1796 throw "internal error: lazy pointer missing fixupLazyTarget fixup";
1797 }
1798
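// For non-lazy pointers the entry is either a real symbol index (when dyld or a later link
// step must bind the pointer) or one of the magic values INDIRECT_SYMBOL_LOCAL (pointer to a
// local definition, nothing to bind) / INDIRECT_SYMBOL_ABS (pointer whose content is absolute,
// e.g. the ImageLoader cache slot handled below).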
1799 template <typename A>
1800 uint32_t IndirectSymbolTableAtom<A>::symIndexOfNonLazyPointerAtom(const ld::Atom* nlpAtom)
1801 {
1802 //fprintf(stderr, "symIndexOfNonLazyPointerAtom(%p) %s\n", nlpAtom, nlpAtom->name());
1803 for (ld::Fixup::iterator fit = nlpAtom->fixupsBegin(); fit != nlpAtom->fixupsEnd(); ++fit) {
1804 // non-lazy-pointer to a stripped symbol => no symbol index
1805 if ( fit->clusterSize != ld::Fixup::k1of1 )
1806 return INDIRECT_SYMBOL_LOCAL;
1807 const ld::Atom* target;
1808 switch ( fit->binding ) {
1809 case ld::Fixup::bindingDirectlyBound:
1810 target = fit->u.target;
1811 break;
1812 case ld::Fixup::bindingsIndirectlyBound:
1813 target = _state.indirectBindingTable[fit->u.bindingIndex];
1814 break;
1815 default:
1816 throw "internal error: unexpected non-lazy pointer binding";
1817 }
1818 bool targetIsGlobal = (target->scope() == ld::Atom::scopeGlobal);
1819 switch ( target->definition() ) {
1820 case ld::Atom::definitionRegular:
1821 if ( targetIsGlobal ) {
1822 if ( _options.outputKind() == Options::kObjectFile ) {
1823 // nlpointer to global symbol uses indirect symbol table in .o files
1824 return symbolIndex(target);
1825 }
1826 else if ( target->combine() == ld::Atom::combineByName ) {
1827 // dyld needs to bind nlpointer to global weak def
1828 return symbolIndex(target);
1829 }
1830 else if ( _options.nameSpace() != Options::kTwoLevelNameSpace ) {
1831 // dyld needs to bind nlpointer to global def linked for flat namespace
1832 return symbolIndex(target);
1833 }
1834 }
1835 break;
1836 case ld::Atom::definitionTentative:
1837 case ld::Atom::definitionAbsolute:
1838 if ( _options.outputKind() == Options::kObjectFile ) {
1839 // tentative def in .o file always uses symbol index
1840 return symbolIndex(target);
1841 }
1842 // dyld needs to bind nlpointer to global def linked for flat namespace
1843 if ( targetIsGlobal && _options.nameSpace() != Options::kTwoLevelNameSpace )
1844 return symbolIndex(target);
1845 break;
1846 case ld::Atom::definitionProxy:
1847 // dyld needs to bind nlpointer to something in another dylib
1848 {
1849 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target->file());
1850 if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
1851 throwf("illegal data reference to %s in lazy loaded dylib %s", target->name(), dylib->path());
1852 }
1853 return symbolIndex(target);
1854 }
1855 }
1856 if ( nlpAtom->fixupsBegin() == nlpAtom->fixupsEnd() ) {
1857 // no fixups means this is the ImageLoader cache slot
1858 return INDIRECT_SYMBOL_ABS;
1859 }
1860
1861 // The magic index INDIRECT_SYMBOL_LOCAL tells dyld that it does not need to bind
1862 // this non-lazy pointer.
1863 return INDIRECT_SYMBOL_LOCAL;
1864 }
1865
1866
1867
1868 template <typename A>
1869 void IndirectSymbolTableAtom<A>::encodeStubSection(ld::Internal::FinalSection* sect)
1870 {
1871 sect->indirectSymTabStartIndex = _entries.size();
1872 sect->indirectSymTabElementSize = sect->atoms[0]->size();
1873 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1874 _entries.push_back(symIndexOfStubAtom(*ait));
1875 }
1876 }
1877
1878 template <typename A>
1879 void IndirectSymbolTableAtom<A>::encodeLazyPointerSection(ld::Internal::FinalSection* sect)
1880 {
1881 sect->indirectSymTabStartIndex = _entries.size();
1882 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1883 _entries.push_back(symIndexOfLazyPointerAtom(*ait));
1884 }
1885 }
1886
1887 template <typename A>
1888 void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
1889 {
1890 sect->indirectSymTabStartIndex = _entries.size();
1891 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1892 _entries.push_back(symIndexOfNonLazyPointerAtom(*ait));
1893 }
1894 }
1895
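// Generic implementation: kext bundles get no indirect symbol table. The comment in encode()
// mentions x86_64 specifically, but no architecture-specific specialization of this member
// appears in this header.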
1896 template <typename A>
1897 bool IndirectSymbolTableAtom<A>::kextBundlesDontHaveIndirectSymbolTable()
1898 {
1899 return true;
1900 }
1901
1902 template <typename A>
1903 void IndirectSymbolTableAtom<A>::encode()
1904 {
1905 // static executables should not have an indirect symbol table, unless PIE
1906 if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
1907 return;
1908
1909 // x86_64 kext bundles should not have an indirect symbol table
1910 if ( (this->_options.outputKind() == Options::kKextBundle) && kextBundlesDontHaveIndirectSymbolTable() )
1911 return;
1912
1913 // slidable static executables (-static -pie) should not have an indirect symbol table
1914 if ( (this->_options.outputKind() == Options::kStaticExecutable) && this->_options.positionIndependentExecutable() )
1915 return;
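// Note: together with the first check above, this means no static executable (PIE or not)
// gets an indirect symbol table; the "unless PIE" exception is cancelled here.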
1916
1917 // find all special sections that need a range of the indirect symbol table section
1918 for (std::vector<ld::Internal::FinalSection*>::iterator sit = this->_state.sections.begin(); sit != this->_state.sections.end(); ++sit) {
1919 ld::Internal::FinalSection* sect = *sit;
1920 switch ( sect->type() ) {
1921 case ld::Section::typeStub:
1922 case ld::Section::typeStubClose:
1923 this->encodeStubSection(sect);
1924 break;
1925 case ld::Section::typeLazyPointerClose:
1926 case ld::Section::typeLazyPointer:
1927 case ld::Section::typeLazyDylibPointer:
1928 this->encodeLazyPointerSection(sect);
1929 break;
1930 case ld::Section::typeNonLazyPointer:
1931 this->encodeNonLazyPointerSection(sect);
1932 break;
1933 default:
1934 break;
1935 }
1936 }
1937 }
1938
1939 template <typename A>
1940 uint64_t IndirectSymbolTableAtom<A>::size() const
1941 {
1942 return _entries.size() * sizeof(uint32_t);
1943 }
1944
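// Each entry is written with E::set32, so the table is stored in the target architecture's
// byte order.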
1945 template <typename A>
1946 void IndirectSymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
1947 {
1948 uint32_t* array = (uint32_t*)buffer;
1949 for(unsigned long i=0; i < _entries.size(); ++i) {
1950 E::set32(array[i], _entries[i]);
1951 }
1952 }
1953
1954
1955
1956
1957
1958
1959
1960
1961 } // namespace tool
1962 } // namespace ld
1963
1964 #endif // __LINKEDIT_CLASSIC_HPP__