[apple/ld64.git] ld64-224.1 / src / ld / LinkEditClassic.hpp
1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-*
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #ifndef __LINKEDIT_CLASSIC_HPP__
26 #define __LINKEDIT_CLASSIC_HPP__
27
28 #include <stdlib.h>
29 #include <sys/types.h>
30 #include <errno.h>
31 #include <limits.h>
32 #include <unistd.h>
33
34 #include <vector>
35 #include <unordered_map>
36
37 #include "Options.h"
38 #include "ld.hpp"
39 #include "Architectures.hpp"
40 #include "MachOFileAbstraction.hpp"
41
42 namespace ld {
43 namespace tool {
44
45
46
47 class ClassicLinkEditAtom : public ld::Atom
48 {
49 public:
50
51 // overrides of ld::Atom
52 virtual ld::File* file() const { return NULL; }
53 virtual uint64_t objectAddress() const { return 0; }
54
55 virtual void encode() = 0;
56 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe) { return false; }
57
58 ClassicLinkEditAtom(const Options& opts, ld::Internal& state,
59 OutputFile& writer, const ld::Section& sect,
60 unsigned int pointerSize)
61 : ld::Atom(sect, ld::Atom::definitionRegular,
62 ld::Atom::combineNever, ld::Atom::scopeTranslationUnit,
63 ld::Atom::typeUnclassified, ld::Atom::symbolTableNotIn,
64 false, false, false, ld::Atom::Alignment(log2(pointerSize))),
65 _options(opts), _state(state), _writer(writer) { }
66 protected:
67 const Options& _options;
68 ld::Internal& _state;
69 OutputFile& _writer;
70 };
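// Each chunk of classic LINKEDIT data (string pool, symbol table, relocation
// tables) is modeled as one of these atoms: the writer calls encode() once the
// other atoms have been laid out, then size() and copyRawContent() when the
// __LINKEDIT bytes are actually emitted.  A minimal subclass only has to fill
// in those three methods, roughly like this (illustrative sketch, not part of ld64):
#if 0
class ZeroPadAtom : public ClassicLinkEditAtom
{
public:
	ZeroPadAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
		: ClassicLinkEditAtom(opts, state, writer, _s_section, 8), _size(0) { }
	// overrides of ld::Atom
	virtual const char*	name() const		{ return "zero pad"; }
	virtual uint64_t	size() const		{ return _size; }
	virtual void		copyRawContent(uint8_t buffer[]) const { memset(buffer, 0, _size); }
	// overrides of ClassicLinkEditAtom
	virtual void		encode()			{ _size = 16; }	// decide the final size here
private:
	uint64_t			_size;
	static ld::Section	_s_section;		// defined like the __LINKEDIT sections below
};
#endif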
71
72
73
74 class StringPoolAtom : public ClassicLinkEditAtom
75 {
76 public:
77 StringPoolAtom(const Options& opts, ld::Internal& state,
78 OutputFile& writer, int pointerSize);
79
80 // overrides of ld::Atom
81 virtual const char* name() const { return "string pool"; }
82 virtual uint64_t size() const;
83 virtual void copyRawContent(uint8_t buffer[]) const;
84 // overrides of ClassicLinkEditAtom
85 virtual void encode() { }
86
87 int32_t add(const char* name);
88 int32_t addUnique(const char* name);
89 int32_t emptyString() { return 1; }
90 const char* stringForIndex(int32_t) const;
91 uint32_t currentOffset();
92
93 private:
94 enum { kBufferSize = 0x01000000 };
95 typedef std::unordered_map<const char*, int32_t, CStringHash, CStringEquals> StringToOffset;
96
97 const uint32_t _pointerSize;
98 std::vector<char*> _fullBuffers;
99 char* _currentBuffer;
100 uint32_t _currentBufferUsed;
101 StringToOffset _uniqueStrings;
102
103 static ld::Section _s_section;
104 };
105
106 ld::Section StringPoolAtom::_s_section("__LINKEDIT", "__string_pool", ld::Section::typeLinkEdit, true);
107
108
109 StringPoolAtom::StringPoolAtom(const Options& opts, ld::Internal& state, OutputFile& writer, int pointerSize)
110 : ClassicLinkEditAtom(opts, state, writer, _s_section, pointerSize),
111 _pointerSize(pointerSize), _currentBuffer(NULL), _currentBufferUsed(0)
112 {
113 _currentBuffer = new char[kBufferSize];
114 // burn first byte of string pool (so zero is never a valid string offset)
115 _currentBuffer[_currentBufferUsed++] = ' ';
116 // make offset 1 always point to an empty string
117 _currentBuffer[_currentBufferUsed++] = '\0';
118 }
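// The two bytes written above establish the classic Mach-O string-table
// conventions: n_strx == 0 means "no name", so offset 0 is burned, and offset 1
// is a shared empty string handed out by emptyString().  A hypothetical caller
// therefore sees real offsets starting at 2 (sketch; opts/state/writer come
// from the surrounding link context):
#if 0
	StringPoolAtom pool(opts, state, writer, 8);
	int32_t off = pool.add("_main");	// == 2, the first real string in the pool
	assert(strcmp(pool.stringForIndex(off), "_main") == 0);
#endif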
119
120 uint64_t StringPoolAtom::size() const
121 {
122 	// size, rounded up to pointer-size alignment
123 return (kBufferSize * _fullBuffers.size() + _currentBufferUsed + _pointerSize-1) & (-_pointerSize);
124 }
125
126 void StringPoolAtom::copyRawContent(uint8_t buffer[]) const
127 {
128 uint64_t offset = 0;
129 for (unsigned int i=0; i < _fullBuffers.size(); ++i) {
130 memcpy(&buffer[offset], _fullBuffers[i], kBufferSize);
131 offset += kBufferSize;
132 }
133 memcpy(&buffer[offset], _currentBuffer, _currentBufferUsed);
134 // zero fill end to align
135 offset += _currentBufferUsed;
136 while ( (offset % _pointerSize) != 0 )
137 buffer[offset++] = 0;
138 }
139
140 int32_t StringPoolAtom::add(const char* str)
141 {
142 int32_t offset = kBufferSize * _fullBuffers.size() + _currentBufferUsed;
143 int lenNeeded = strlcpy(&_currentBuffer[_currentBufferUsed], str, kBufferSize-_currentBufferUsed)+1;
144 if ( (_currentBufferUsed+lenNeeded) < kBufferSize ) {
145 _currentBufferUsed += lenNeeded;
146 }
147 else {
148 int copied = kBufferSize-_currentBufferUsed-1;
149 		// overwrite the trailing '\0' that strlcpy added with the real character
150 _currentBuffer[kBufferSize-1] = str[copied];
151 // alloc next buffer
152 _fullBuffers.push_back(_currentBuffer);
153 _currentBuffer = new char[kBufferSize];
154 _currentBufferUsed = 0;
155 // append rest of string
156 this->add(&str[copied+1]);
157 }
158 return offset;
159 }
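// When a string straddles the 16MB buffer boundary, the prefix that strlcpy
// already copied stays at the end of the full buffer (with the temporary '\0'
// patched back to a real character) and the remainder is appended at the start
// of a fresh buffer.  Because copyRawContent() writes the buffers back to back,
// the string is still contiguous at the returned offset in the emitted pool.
// Worked example: adding a 10-character string when _currentBufferUsed is
// 0x00FFFFFA returns offset 0x00FFFFFA; characters 0-5 end the first buffer and
// characters 6-9 plus the terminator start the next buffer at offset 0x01000000.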
160
161 uint32_t StringPoolAtom::currentOffset()
162 {
163 return kBufferSize * _fullBuffers.size() + _currentBufferUsed;
164 }
165
166
167 int32_t StringPoolAtom::addUnique(const char* str)
168 {
169 StringToOffset::iterator pos = _uniqueStrings.find(str);
170 if ( pos != _uniqueStrings.end() ) {
171 return pos->second;
172 }
173 else {
174 int32_t offset = this->add(str);
175 _uniqueStrings[str] = offset;
176 return offset;
177 }
178 }
179
180
181 const char* StringPoolAtom::stringForIndex(int32_t index) const
182 {
183 int32_t currentBufferStartIndex = kBufferSize * _fullBuffers.size();
184 int32_t maxIndex = currentBufferStartIndex + _currentBufferUsed;
185 // check for out of bounds
186 if ( index > maxIndex )
187 return "";
188 // check for index in _currentBuffer
189 if ( index > currentBufferStartIndex )
190 return &_currentBuffer[index-currentBufferStartIndex];
191 // otherwise index is in a full buffer
192 uint32_t fullBufferIndex = index/kBufferSize;
193 return &_fullBuffers[fullBufferIndex][index-(kBufferSize*fullBufferIndex)];
194 }
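// The lookup mirrors add(): an index past the last full buffer resolves into
// _currentBuffer, anything lower is divided by kBufferSize to pick the full
// buffer, with the remainder as the offset inside it.  For example, with one
// full buffer, index 0x01000010 maps to _currentBuffer[0x10] while index
// 0x00ABCDEF maps to _fullBuffers[0][0xABCDEF].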
195
196
197
198 template <typename A>
199 class SymbolTableAtom : public ClassicLinkEditAtom
200 {
201 public:
202 SymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
203 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)),
204 _stabsStringsOffsetStart(0), _stabsStringsOffsetEnd(0),
205 _stabsIndexStart(0), _stabsIndexEnd(0) { }
206
207 // overrides of ld::Atom
208 virtual const char* name() const { return "symbol table"; }
209 virtual uint64_t size() const;
210 virtual void copyRawContent(uint8_t buffer[]) const;
211 // overrides of ClassicLinkEditAtom
212 virtual void encode();
213 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe);
214
215 private:
216 typedef typename A::P P;
217 typedef typename A::P::E E;
218 typedef typename A::P::uint_t pint_t;
219
220 bool addLocal(const ld::Atom* atom, StringPoolAtom* pool);
221 void addGlobal(const ld::Atom* atom, StringPoolAtom* pool);
222 void addImport(const ld::Atom* atom, StringPoolAtom* pool);
223 uint8_t classicOrdinalForProxy(const ld::Atom* atom);
224 uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
225 uint64_t valueForStab(const ld::relocatable::File::Stab& stab);
226 uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab);
227
228
229 mutable std::vector<macho_nlist<P> > _globals;
230 mutable std::vector<macho_nlist<P> > _locals;
231 mutable std::vector<macho_nlist<P> > _imports;
232
233 uint32_t _stabsStringsOffsetStart;
234 uint32_t _stabsStringsOffsetEnd;
235 uint32_t _stabsIndexStart;
236 uint32_t _stabsIndexEnd;
237
238 static ld::Section _s_section;
239 static int _s_anonNameIndex;
240
241 };
242
243 template <typename A>
244 ld::Section SymbolTableAtom<A>::_s_section("__LINKEDIT", "__symbol_table", ld::Section::typeLinkEdit, true);
245
246 template <typename A>
247 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
248
249
250 template <typename A>
251 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
252 {
253 macho_nlist<P> entry;
254 assert(atom->symbolTableInclusion() != ld::Atom::symbolTableNotIn);
255
256 // set n_strx
257 const char* symbolName = atom->name();
258 char anonName[32];
259 if ( this->_options.outputKind() == Options::kObjectFile ) {
260 if ( atom->contentType() == ld::Atom::typeCString ) {
261 if ( atom->combine() == ld::Atom::combineByNameAndContent ) {
262 // don't use 'l' labels for x86_64 strings
263 // <rdar://problem/6605499> x86_64 obj-c runtime confused when static lib is stripped
264 sprintf(anonName, "LC%u", _s_anonNameIndex++);
265 symbolName = anonName;
266 }
267 }
268 else if ( atom->contentType() == ld::Atom::typeCFI ) {
269 if ( _options.removeEHLabels() )
270 return false;
271 // synthesize .eh name
272 if ( strcmp(atom->name(), "CIE") == 0 )
273 symbolName = "EH_Frame1";
274 else
275 symbolName = "func.eh";
276 }
277 else if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
278 // make auto-strip anonymous name for symbol
279 sprintf(anonName, "l%03u", _s_anonNameIndex++);
280 symbolName = anonName;
281 }
282 }
283 entry.set_n_strx(pool->add(symbolName));
284
285 // set n_type
286 uint8_t type = N_SECT;
287 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
288 type = N_ABS;
289 }
290 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
291 && (this->_options.outputKind() == Options::kObjectFile) ) {
292 // __OBJC __class has floating abs symbols for each class data structure
293 type = N_ABS;
294 }
295 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
296 type |= N_PEXT;
297 entry.set_n_type(type);
298
299 	// set n_sect (section number of implementation)
300 if ( atom->definition() == ld::Atom::definitionAbsolute )
301 entry.set_n_sect(0);
302 else
303 entry.set_n_sect(atom->machoSection());
304
305 // set n_desc
306 uint16_t desc = 0;
307 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
308 desc |= REFERENCED_DYNAMICALLY;
309 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
310 desc |= N_NO_DEAD_STRIP;
311 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) )
312 desc |= N_WEAK_DEF;
313 if ( atom->isThumb() )
314 desc |= N_ARM_THUMB_DEF;
315 entry.set_n_desc(desc);
316
317 	// set n_value (address this symbol will be at if this executable is loaded at its preferred address)
318 if ( atom->definition() == ld::Atom::definitionAbsolute )
319 entry.set_n_value(atom->objectAddress());
320 else
321 entry.set_n_value(atom->finalAddress());
322
323 // add to array
324 _locals.push_back(entry);
325 return true;
326 }
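// For a typical local symbol in a 64-bit output, the entry built above is the
// equivalent of the following raw nlist (sketch using the standard
// <mach-o/nlist.h> layout; "_helper" is just a placeholder name):
#if 0
	struct nlist_64 n;
	n.n_un.n_strx = pool->add("_helper");	// offset into the string pool
	n.n_type      = N_SECT;					// defined in a section; N_PEXT added for scopeLinkageUnit
	n.n_sect      = atom->machoSection();	// 1-based section ordinal, 0 for absolute symbols
	n.n_desc      = 0;						// may gain N_NO_DEAD_STRIP, N_WEAK_DEF, N_ARM_THUMB_DEF
	n.n_value     = atom->finalAddress();	// address when loaded at the preferred base address
#endif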
327
328
329 template <typename A>
330 void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
331 {
332 macho_nlist<P> entry;
333
334 // set n_strx
335 const char* symbolName = atom->name();
336 char anonName[32];
337 if ( this->_options.outputKind() == Options::kObjectFile ) {
338 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
339 // make auto-strip anonymous name for symbol
340 sprintf(anonName, "l%03u", _s_anonNameIndex++);
341 symbolName = anonName;
342 }
343 }
344 entry.set_n_strx(pool->add(symbolName));
345
346 // set n_type
347 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
348 entry.set_n_type(N_EXT | N_ABS);
349 }
350 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
351 && (this->_options.outputKind() == Options::kObjectFile) ) {
352 // __OBJC __class has floating abs symbols for each class data structure
353 entry.set_n_type(N_EXT | N_ABS);
354 }
355 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
356 entry.set_n_type(N_EXT | N_INDR);
357 }
358 else {
359 entry.set_n_type(N_EXT | N_SECT);
360 if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (this->_options.outputKind() == Options::kObjectFile) ) {
361 if ( this->_options.keepPrivateExterns() )
362 entry.set_n_type(N_EXT | N_SECT | N_PEXT);
363 }
364 else if ( (atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)
365 && (atom->section().type() == ld::Section::typeMachHeader)
366 && !_options.positionIndependentExecutable() ) {
367 			// the __mh_execute_header is historical magic in non-pie executables and must be an absolute symbol
368 entry.set_n_type(N_EXT | N_ABS);
369 }
370 }
371
372 // set n_sect (section number of implementation)
373 if ( atom->definition() == ld::Atom::definitionAbsolute )
374 entry.set_n_sect(0);
375 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) )
376 entry.set_n_sect(0);
377 else
378 entry.set_n_sect(atom->machoSection());
379
380 // set n_desc
381 uint16_t desc = 0;
382 if ( atom->isThumb() )
383 desc |= N_ARM_THUMB_DEF;
384 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
385 desc |= REFERENCED_DYNAMICALLY;
386 if ( (atom->contentType() == ld::Atom::typeResolver) && (this->_options.outputKind() == Options::kObjectFile) )
387 desc |= N_SYMBOL_RESOLVER;
388 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
389 desc |= N_NO_DEAD_STRIP;
390 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
391 desc |= N_WEAK_DEF;
392 // <rdar://problem/6783167> support auto hidden weak symbols: .weak_def_can_be_hidden
393 if ( (atom->scope() == ld::Atom::scopeGlobal) && atom->autoHide() && (this->_options.outputKind() == Options::kObjectFile) )
394 desc |= N_WEAK_REF;
395 }
396 entry.set_n_desc(desc);
397
398 	// set n_value (address this symbol will be at if this executable is loaded at its preferred address)
399 if ( atom->definition() == ld::Atom::definitionAbsolute )
400 entry.set_n_value(atom->objectAddress());
401 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
402 if ( atom->isAlias() ) {
403 // this re-export also renames
404 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
405 if ( fit->kind == ld::Fixup::kindNoneFollowOn ) {
406 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
407 entry.set_n_value(pool->add(fit->u.target->name()));
408 }
409 }
410 }
411 else
412 entry.set_n_value(entry.n_strx());
413 }
414 else
415 entry.set_n_value(atom->finalAddress());
416
417 // add to array
418 _globals.push_back(entry);
419 }
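// Note the re-export case above: for a global proxy atom the entry is
// N_EXT | N_INDR, n_sect is 0, and n_value is not an address at all but a
// string-pool offset, either of the renamed target (the alias fixup loop) or of
// the symbol's own name.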
420
421 template <typename A>
422 uint8_t SymbolTableAtom<A>::classicOrdinalForProxy(const ld::Atom* atom)
423 {
424 assert(atom->definition() == ld::Atom::definitionProxy);
425 	// when linking for flat namespace, ordinals are always zero
426 if ( _options.nameSpace() != Options::kTwoLevelNameSpace )
427 return 0;
428 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
429 // when linking -undefined dynamic_lookup, unbound symbols use DYNAMIC_LOOKUP_ORDINAL
430 if ( dylib == NULL ) {
431 if (_options.undefinedTreatment() == Options::kUndefinedDynamicLookup )
432 return DYNAMIC_LOOKUP_ORDINAL;
433 if (_options.allowedUndefined(atom->name()) )
434 return DYNAMIC_LOOKUP_ORDINAL;
435 }
436 assert(dylib != NULL);
437 int ord = this->_writer.dylibToOrdinal(dylib);
438 if ( ord == BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE )
439 return EXECUTABLE_ORDINAL;
440 return ord;
441 }
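// In two-level namespace outputs this ordinal ends up in the high byte of the
// undefined symbol's n_desc (see SET_LIBRARY_ORDINAL in addImport below), so a
// symbol bound to the third dylib on the link line reads back as
// GET_LIBRARY_ORDINAL(n_desc) == 3.  The special values DYNAMIC_LOOKUP_ORDINAL
// and EXECUTABLE_ORDINAL cover -undefined dynamic_lookup and symbols supplied
// by the main executable.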
442
443
444 template <typename A>
445 void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
446 {
447 macho_nlist<P> entry;
448
449 // set n_strx
450 entry.set_n_strx(pool->add(atom->name()));
451
452 // set n_type
453 if ( this->_options.outputKind() == Options::kObjectFile ) {
454 if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
455 && (atom->definition() == ld::Atom::definitionTentative) )
456 entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
457 else
458 entry.set_n_type(N_UNDF | N_EXT);
459 }
460 else {
461 if ( this->_options.prebind() )
462 entry.set_n_type(N_PBUD | N_EXT);
463 else
464 entry.set_n_type(N_UNDF | N_EXT);
465 }
466
467 // set n_sect
468 entry.set_n_sect(0);
469
470 uint16_t desc = 0;
471 if ( this->_options.outputKind() != Options::kObjectFile ) {
472 uint8_t ordinal = this->classicOrdinalForProxy(atom);
473 //fprintf(stderr, "ordinal=%u from reader=%p for symbol=%s\n", ordinal, atom->getFile(), atom->getName());
474 SET_LIBRARY_ORDINAL(desc, ordinal);
475
476 #if 0
477 // set n_desc ( high byte is library ordinal, low byte is reference type )
478 std::map<const ObjectFile::Atom*,ObjectFile::Atom*>::iterator pos = fStubsMap.find(atom);
479 if ( pos != fStubsMap.end() || ( strncmp(atom->getName(), ".objc_class_name_", 17) == 0) )
480 desc |= REFERENCE_FLAG_UNDEFINED_LAZY;
481 else
482 desc |= REFERENCE_FLAG_UNDEFINED_NON_LAZY;
483 #endif
484 }
485 else if ( atom->definition() == ld::Atom::definitionTentative ) {
486 uint8_t align = atom->alignment().powerOf2;
487 // always record custom alignment of common symbols to match what compiler does
488 SET_COMM_ALIGN(desc, align);
489 }
490 if ( (this->_options.outputKind() != Options::kObjectFile)
491 && (atom->definition() == ld::Atom::definitionProxy)
492 && (atom->combine() == ld::Atom::combineByName) ) {
493 desc |= N_REF_TO_WEAK;
494 }
495 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
496 if ( atom->weakImported() || ((dylib != NULL) && dylib->forcedWeakLinked()) )
497 desc |= N_WEAK_REF;
498 entry.set_n_desc(desc);
499
500 // set n_value, zero for import proxy and size for tentative definition
501 if ( atom->definition() == ld::Atom::definitionTentative )
502 entry.set_n_value(atom->size());
503 else
504 entry.set_n_value(0);
505
506 // add to array
507 _imports.push_back(entry);
508 }
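// Two details worth noting: for tentative (common) symbols in -r output the
// size travels in n_value and the alignment in n_desc via SET_COMM_ALIGN, so a
// hypothetical 4096-byte common "_buf" aligned to 16 would come out roughly as
// the sketch below; and N_WEAK_REF is set whether the individual reference was
// weak-imported or the whole dylib was forced weak-linked.
#if 0
	struct nlist_64 n = {};
	n.n_type  = N_UNDF | N_EXT;
	n.n_sect  = NO_SECT;
	SET_COMM_ALIGN(n.n_desc, 4);	// log2(16)
	n.n_value = 4096;				// size of the common block lives in n_value
#endif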
509
510 template <typename A>
511 uint8_t SymbolTableAtom<A>::sectionIndexForStab(const ld::relocatable::File::Stab& stab)
512 {
513 // in FUN stabs, n_sect field is 0 for start FUN and 1 for end FUN
514 if ( stab.type == N_FUN )
515 return stab.other;
516 else if ( stab.type == N_GSYM )
517 return 0;
518 else if ( stab.atom != NULL )
519 return stab.atom->machoSection();
520 else
521 return stab.other;
522 }
523
524
525 template <typename A>
526 uint64_t SymbolTableAtom<A>::valueForStab(const ld::relocatable::File::Stab& stab)
527 {
528 switch ( stab.type ) {
529 case N_FUN:
530 if ( stab.atom == NULL ) {
531 // <rdar://problem/5591394> Add support to ld64 for N_FUN stabs when used for symbolic constants
532 return stab.value;
533 }
534 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
535 // end of function N_FUN has size
536 return stab.atom->size();
537 }
538 else {
539 // start of function N_FUN has address
540 return stab.atom->finalAddress();
541 }
542 case N_LBRAC:
543 case N_RBRAC:
544 case N_SLINE:
545 if ( stab.atom == NULL )
546 // some weird assembly files have slines not associated with a function
547 return stab.value;
548 else
549 // all these stab types need their value changed from an offset in the atom to an address
550 return stab.atom->finalAddress() + stab.value;
551 case N_STSYM:
552 case N_LCSYM:
553 case N_BNSYM:
554 // all these need address of atom
555 if ( stab.atom != NULL )
556 return stab.atom->finalAddress();
557 else
558 return 0; // <rdar://problem/7811357> work around for mismatch N_BNSYM
559 case N_ENSYM:
560 return stab.atom->size();
561 case N_SO:
562 if ( stab.atom == NULL ) {
563 return 0;
564 }
565 else {
566 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
567 // end of translation unit N_SO has address of end of last atom
568 return stab.atom->finalAddress() + stab.atom->size();
569 }
570 else {
571 				// start of translation unit N_SO has address of first atom
572 return stab.atom->finalAddress();
573 }
574 }
575 break;
576 default:
577 return stab.value;
578 }
579 }
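// Summary of what lands in a debug note's n_value, per the cases above:
//   N_FUN                      start = function address, end (empty name) = function size
//   N_LBRAC/N_RBRAC/N_SLINE    atom address + offset (raw value when no atom)
//   N_STSYM/N_LCSYM/N_BNSYM    atom address
//   N_ENSYM                    atom size
//   N_SO                       start = atom address, end (empty name) = address past the last atom
//   everything else            the value recorded in the original object file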
580
581 template <typename A>
582 uint32_t SymbolTableAtom<A>::stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool)
583 {
584 switch (stab.type) {
585 case N_SO:
586 if ( (stab.string == NULL) || stab.string[0] == '\0' ) {
587 return pool->emptyString();
588 break;
589 }
590 // fall into uniquing case
591 case N_SOL:
592 case N_BINCL:
593 case N_EXCL:
594 return pool->addUnique(stab.string);
595 break;
596 default:
597 if ( stab.string == NULL )
598 return 0;
599 else if ( stab.string[0] == '\0' )
600 return pool->emptyString();
601 else
602 return pool->add(stab.string);
603 }
604 return 0;
605 }
606
607
608
609 template <typename A>
610 bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
611 {
612 ssos = _stabsStringsOffsetStart;
613 ssoe = _stabsStringsOffsetEnd;
614 sos = _stabsIndexStart * sizeof(macho_nlist<P>);
615 soe = _stabsIndexEnd * sizeof(macho_nlist<P>);
616 return ( (_stabsIndexStart != _stabsIndexEnd) || (_stabsStringsOffsetStart != _stabsStringsOffsetEnd) );
617 }
618
619
620 template <typename A>
621 void SymbolTableAtom<A>::encode()
622 {
623 uint32_t symbolIndex = 0;
624
625 // make nlist entries for all local symbols
626 std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
627 std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
628 _locals.reserve(localAtoms.size()+this->_state.stabs.size());
629 this->_writer._localSymbolsStartIndex = 0;
630 // make nlist entries for all debug notes
631 _stabsIndexStart = symbolIndex;
632 _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
633 for (std::vector<ld::relocatable::File::Stab>::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) {
634 macho_nlist<P> entry;
635 entry.set_n_type(sit->type);
636 entry.set_n_sect(sectionIndexForStab(*sit));
637 entry.set_n_desc(sit->desc);
638 entry.set_n_value(valueForStab(*sit));
639 entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom));
640 _locals.push_back(entry);
641 ++symbolIndex;
642 }
643 _stabsIndexEnd = symbolIndex;
644 _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
645 for (std::vector<const ld::Atom*>::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) {
646 const ld::Atom* atom = *it;
647 if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
648 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
649 }
650 this->_writer._localSymbolsCount = symbolIndex;
651
652
653 // make nlist entries for all global symbols
654 _globals.reserve(globalAtoms.size());
655 this->_writer._globalSymbolsStartIndex = symbolIndex;
656 for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
657 const ld::Atom* atom = *it;
658 this->addGlobal(atom, this->_writer._stringPoolAtom);
659 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
660 }
661 this->_writer._globalSymbolsCount = symbolIndex - this->_writer._globalSymbolsStartIndex;
662
663 // make nlist entries for all undefined (imported) symbols
664 std::vector<const ld::Atom*>& importAtoms = this->_writer._importedAtoms;
665 _imports.reserve(importAtoms.size());
666 this->_writer._importSymbolsStartIndex = symbolIndex;
667 for (std::vector<const ld::Atom*>::const_iterator it=importAtoms.begin(); it != importAtoms.end(); ++it) {
668 this->addImport(*it, this->_writer._stringPoolAtom);
669 this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
670 }
671 this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
672 }
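// The classic symbol table is therefore emitted in the fixed order that the
// LC_DYSYMTAB local/extdef/undef ranges expect: debug notes and true locals
// first, then exported (global) symbols, then undefined imports.  The
// start/count values stashed in the writer above describe exactly those ranges:
//   [0, _localSymbolsCount)                              stabs + local symbols
//   [_globalSymbolsStartIndex, + _globalSymbolsCount)    exported symbols
//   [_importSymbolsStartIndex, + _importSymbolsCount)    undefined symbols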
673
674 template <typename A>
675 uint64_t SymbolTableAtom<A>::size() const
676 {
677 return sizeof(macho_nlist<P>) * (_locals.size() + _globals.size() + _imports.size());
678 }
679
680 template <typename A>
681 void SymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
682 {
683 memcpy(&buffer[this->_writer._localSymbolsStartIndex*sizeof(macho_nlist<P>)], &_locals[0],
684 this->_writer._localSymbolsCount*sizeof(macho_nlist<P>));
685 memcpy(&buffer[this->_writer._globalSymbolsStartIndex*sizeof(macho_nlist<P>)], &_globals[0],
686 this->_writer._globalSymbolsCount*sizeof(macho_nlist<P>));
687 memcpy(&buffer[this->_writer._importSymbolsStartIndex *sizeof(macho_nlist<P>)], &_imports[0],
688 this->_writer._importSymbolsCount*sizeof(macho_nlist<P>));
689 }
690
691
692
693
694 class RelocationsAtomAbstract : public ClassicLinkEditAtom
695 {
696 public:
697 RelocationsAtomAbstract(const Options& opts, ld::Internal& state,
698 OutputFile& writer, const ld::Section& sect,
699 unsigned int pointerSize)
700 : ClassicLinkEditAtom(opts, state, writer, sect, pointerSize) { }
701
702 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) = 0;
703 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) = 0;
704 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) = 0;
705 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) = 0;
706 virtual uint64_t relocBaseAddress(ld::Internal& state) = 0;
707 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
708 const ld::Atom* inAtom, uint32_t offsetInAtom,
709 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
710 const ld::Atom* toTarget, uint64_t toAddend,
711 const ld::Atom* fromTarget, uint64_t fromAddend) = 0;
712 protected:
713 uint32_t symbolIndex(const ld::Atom* atom) const;
714
715 };
716
717
718
719 uint32_t RelocationsAtomAbstract::symbolIndex(const ld::Atom* atom) const
720 {
721 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
722 if ( pos != this->_writer._atomToSymbolIndex.end() )
723 return pos->second;
724 fprintf(stderr, "_atomToSymbolIndex content:\n");
725 for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
726 fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
727 }
728 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
729 }
730
731
732 template <typename A>
733 class LocalRelocationsAtom : public RelocationsAtomAbstract
734 {
735 public:
736 LocalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
737 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
738
739 // overrides of ld::Atom
740 virtual const char* name() const { return "local relocations"; }
741 virtual uint64_t size() const;
742 virtual void copyRawContent(uint8_t buffer[]) const;
743 // overrides of ClassicLinkEditAtom
744 virtual void encode() {}
745 // overrides of RelocationsAtomAbstract
746 virtual void addPointerReloc(uint64_t addr, uint32_t symNum);
747 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
748 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
749 virtual uint64_t relocBaseAddress(ld::Internal& state);
750 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum);
751 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
752 const ld::Atom* inAtom, uint32_t offsetInAtom,
753 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
754 const ld::Atom* toTarget, uint64_t toAddend,
755 const ld::Atom* fromTarget, uint64_t fromAddend) { }
756
757 private:
758 typedef typename A::P P;
759 typedef typename A::P::E E;
760 typedef typename A::P::uint_t pint_t;
761
762 std::vector<macho_relocation_info<P> > _relocs;
763
764 static ld::Section _s_section;
765 };
766
767 template <typename A>
768 ld::Section LocalRelocationsAtom<A>::_s_section("__LINKEDIT", "__local_relocs", ld::Section::typeLinkEdit, true);
769
770
771 template <>
772 uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
773 {
774 if ( _options.outputKind() == Options::kKextBundle ) {
775 // for kext bundles the reloc base address starts at __TEXT segment
776 return _options.baseAddress();
777 }
778 // for all other kinds, the x86_64 reloc base address starts at __DATA segment
779 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
780 ld::Internal::FinalSection* sect = *sit;
781 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
782 return sect->address;
783 }
784 throw "__DATA segment not found";
785 }
786
787 template <typename A>
788 uint64_t LocalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
789 {
790 return _options.baseAddress();
791 }
792
793 template <typename A>
794 void LocalRelocationsAtom<A>::addPointerReloc(uint64_t addr, uint32_t symNum)
795 {
796 macho_relocation_info<P> reloc;
797 reloc.set_r_address(addr);
798 reloc.set_r_symbolnum(symNum);
799 reloc.set_r_pcrel(false);
800 reloc.set_r_length();
801 reloc.set_r_extern(false);
802 reloc.set_r_type(GENERIC_RELOC_VANILLA);
803 _relocs.push_back(reloc);
804 }
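// Each entry becomes a plain (non-scattered) relocation_info record: r_extern
// is 0, so r_symbolnum holds the mach-o section ordinal passed in as symNum,
// and r_address is interpreted relative to relocBaseAddress() above (the start
// of __DATA on x86_64, except kexts; the image base address otherwise).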
805
806 template <typename A>
807 void LocalRelocationsAtom<A>::addTextReloc(uint64_t addr, ld::Fixup::Kind kind, uint64_t targetAddr, uint32_t symNum)
808 {
809 }
810
811
812 template <typename A>
813 uint64_t LocalRelocationsAtom<A>::size() const
814 {
815 return _relocs.size() * sizeof(macho_relocation_info<P>);
816 }
817
818 template <typename A>
819 void LocalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
820 {
821 memcpy(buffer, &_relocs[0], _relocs.size()*sizeof(macho_relocation_info<P>));
822 }
823
824
825
826
827
828
829 template <typename A>
830 class ExternalRelocationsAtom : public RelocationsAtomAbstract
831 {
832 public:
833 ExternalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
834 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
835
836 // overrides of ld::Atom
837 virtual const char* name() const { return "external relocations"; }
838 virtual uint64_t size() const;
839 virtual void copyRawContent(uint8_t buffer[]) const;
840 // overrides of ClassicLinkEditAtom
841 virtual void encode() {}
842 // overrides of RelocationsAtomAbstract
843 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
844 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
845 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*);
846 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*);
847 virtual uint64_t relocBaseAddress(ld::Internal& state);
848 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
849 const ld::Atom* inAtom, uint32_t offsetInAtom,
850 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
851 const ld::Atom* toTarget, uint64_t toAddend,
852 const ld::Atom* fromTarget, uint64_t fromAddend) { }
853
854
855 private:
856 typedef typename A::P P;
857 typedef typename A::P::E E;
858 typedef typename A::P::uint_t pint_t;
859
860 struct LocAndAtom {
861 LocAndAtom(uint64_t l, const ld::Atom* a) : loc(l), atom(a), symbolIndex(0) {}
862
863 uint64_t loc;
864 const ld::Atom* atom;
865 uint32_t symbolIndex;
866
867 bool operator<(const LocAndAtom& rhs) const {
868 // sort first by symbol number
869 if ( this->symbolIndex != rhs.symbolIndex )
870 return (this->symbolIndex < rhs.symbolIndex);
871 // then sort all uses of the same symbol by address
872 return (this->loc < rhs.loc);
873 }
874
875 };
876
877 static uint32_t pointerReloc();
878 static uint32_t callReloc();
879
880 mutable std::vector<LocAndAtom> _pointerLocations;
881 mutable std::vector<LocAndAtom> _callSiteLocations;
882
883 static ld::Section _s_section;
884 };
885
886 template <typename A>
887 ld::Section ExternalRelocationsAtom<A>::_s_section("__LINKEDIT", "__extrn_relocs", ld::Section::typeLinkEdit, true);
888
889 template <>
890 uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
891 {
892 // for x86_64 the reloc base address starts at __DATA segment
893 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
894 ld::Internal::FinalSection* sect = *sit;
895 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
896 return sect->address;
897 }
898 throw "__DATA segment not found";
899 }
900
901 template <typename A>
902 uint64_t ExternalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
903 {
904 return 0;
905 }
906
907 template <typename A>
908 void ExternalRelocationsAtom<A>::addExternalPointerReloc(uint64_t addr, const ld::Atom* target)
909 {
910 _pointerLocations.push_back(LocAndAtom(addr, target));
911 }
912
913 template <typename A>
914 void ExternalRelocationsAtom<A>::addExternalCallSiteReloc(uint64_t addr, const ld::Atom* target)
915 {
916 _callSiteLocations.push_back(LocAndAtom(addr, target));
917 }
918
919
920 template <typename A>
921 uint64_t ExternalRelocationsAtom<A>::size() const
922 {
923 if ( _options.outputKind() == Options::kStaticExecutable ) {
924 assert(_pointerLocations.size() == 0);
925 assert(_callSiteLocations.size() == 0);
926 }
927 return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info<P>);
928 }
929
930 #if SUPPORT_ARCH_arm64
931 template <> uint32_t ExternalRelocationsAtom<arm64>::pointerReloc() { return ARM64_RELOC_UNSIGNED; }
932 #endif
933 #if SUPPORT_ARCH_arm_any
934 template <> uint32_t ExternalRelocationsAtom<arm>::pointerReloc() { return ARM_RELOC_VANILLA; }
935 #endif
936 template <> uint32_t ExternalRelocationsAtom<x86>::pointerReloc() { return GENERIC_RELOC_VANILLA; }
937 template <> uint32_t ExternalRelocationsAtom<x86_64>::pointerReloc() { return X86_64_RELOC_UNSIGNED; }
938
939
940 template <> uint32_t ExternalRelocationsAtom<x86_64>::callReloc() { return X86_64_RELOC_BRANCH; }
941 template <> uint32_t ExternalRelocationsAtom<x86>::callReloc() { return GENERIC_RELOC_VANILLA; }
942 #if SUPPORT_ARCH_arm64
943 template <> uint32_t ExternalRelocationsAtom<arm64>::callReloc() { return ARM64_RELOC_BRANCH26; }
944 #endif
945
946 template <typename A>
947 uint32_t ExternalRelocationsAtom<A>::callReloc()
948 {
949 assert(0 && "external call relocs not implemented");
950 return 0;
951 }
952
953
954 template <typename A>
955 void ExternalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
956 {
957 macho_relocation_info<P>* r = (macho_relocation_info<P>*)buffer;
958
959 // assign symbol index, now that symbol table is built
960 for (typename std::vector<LocAndAtom>::iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it) {
961 it->symbolIndex = symbolIndex(it->atom);
962 }
963 std::sort(_pointerLocations.begin(), _pointerLocations.end());
964 for (typename std::vector<LocAndAtom>::const_iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it, ++r) {
965 r->set_r_address(it->loc);
966 r->set_r_symbolnum(it->symbolIndex);
967 r->set_r_pcrel(false);
968 r->set_r_length();
969 r->set_r_extern(true);
970 r->set_r_type(this->pointerReloc());
971 }
972
973 for (typename std::vector<LocAndAtom>::iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it) {
974 it->symbolIndex = symbolIndex(it->atom);
975 }
976 std::sort(_callSiteLocations.begin(), _callSiteLocations.end());
977 for (typename std::vector<LocAndAtom>::const_iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it, ++r) {
978 r->set_r_address(it->loc);
979 r->set_r_symbolnum(it->symbolIndex);
980 r->set_r_pcrel(true);
981 r->set_r_length(2);
982 r->set_r_extern(true);
983 r->set_r_type(this->callReloc());
984 }
985 }
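// External relocations carry r_extern == 1, so r_symbolnum is an index into the
// symbol table built earlier; that is why the indexes can only be filled in
// here, once the symbol table has been encoded.  Sorting by symbol index and
// then by address groups all fixups for a given symbol together.  A pointer
// fixup comes out roughly as:
//   r_address   = location of the pointer (relative to relocBaseAddress())
//   r_symbolnum = nlist index of the target symbol
//   r_pcrel     = 0, r_length = pointer-sized (the no-argument set_r_length())
//   r_type      = pointerReloc(): X86_64_RELOC_UNSIGNED, ARM_RELOC_VANILLA, ...
// while a call-site fixup is pc-relative with a 4-byte field and callReloc().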
986
987
988 template <typename A>
989 class SectionRelocationsAtom : public RelocationsAtomAbstract
990 {
991 public:
992 SectionRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
993 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
994
995 // overrides of ld::Atom
996 virtual const char* name() const { return "section relocations"; }
997 virtual uint64_t size() const;
998 virtual void copyRawContent(uint8_t buffer[]) const;
999 // overrides of ClassicLinkEditAtom
1000 virtual void encode();
1001 // overrides of RelocationsAtomAbstract
1002 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
1003 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
1004 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
1005 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
1006 virtual uint64_t relocBaseAddress(ld::Internal& state) { return 0; }
1007 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
1008 const ld::Atom* inAtom, uint32_t offsetInAtom,
1009 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1010 const ld::Atom* toTarget, uint64_t toAddend,
1011 const ld::Atom* fromTarget, uint64_t fromAddend);
1012
1013 private:
1014 typedef typename A::P P;
1015 typedef typename A::P::E E;
1016 typedef typename A::P::uint_t pint_t;
1017
1018
1019 struct Entry {
1020 ld::Fixup::Kind kind;
1021 bool toTargetUsesExternalReloc;
1022 bool fromTargetUsesExternalReloc;
1023 const ld::Atom* inAtom;
1024 uint32_t offsetInAtom;
1025 const ld::Atom* toTarget;
1026 uint64_t toAddend;
1027 const ld::Atom* fromTarget;
1028 uint64_t fromAddend;
1029 };
1030 uint32_t sectSymNum(bool external, const ld::Atom* target);
1031 void encodeSectionReloc(ld::Internal::FinalSection* sect,
1032 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs);
1033
1034 struct SectionAndEntries {
1035 ld::Internal::FinalSection* sect;
1036 std::vector<Entry> entries;
1037 std::vector<macho_relocation_info<P> > relocs;
1038 };
1039
1040 std::vector<SectionAndEntries> _entriesBySection;
1041
1042 static ld::Section _s_section;
1043 };
1044
1045 template <typename A>
1046 ld::Section SectionRelocationsAtom<A>::_s_section("__LINKEDIT", "__sect_relocs", ld::Section::typeLinkEdit, true);
1047
1048
1049
1050
1051 template <typename A>
1052 uint64_t SectionRelocationsAtom<A>::size() const
1053 {
1054 uint32_t count = 0;
1055 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1056 const SectionAndEntries& se = *it;
1057 count += se.relocs.size();
1058 }
1059 return count * sizeof(macho_relocation_info<P>);
1060 }
1061
1062 template <typename A>
1063 void SectionRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1064 {
1065 uint32_t offset = 0;
1066 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1067 const SectionAndEntries& se = *it;
1068 memcpy(&buffer[offset], &se.relocs[0], se.relocs.size()*sizeof(macho_relocation_info<P>));
1069 offset += (se.relocs.size() * sizeof(macho_relocation_info<P>));
1070 }
1071 }
1072
1073
1074 template <>
1075 void SectionRelocationsAtom<x86_64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1076 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1077 {
1078 macho_relocation_info<P> reloc1;
1079 macho_relocation_info<P> reloc2;
1080 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1081 bool external = entry.toTargetUsesExternalReloc;
1082 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1083 bool fromExternal = false;
1084 uint32_t fromSymbolNum = 0;
1085 if ( entry.fromTarget != NULL ) {
1086 fromExternal = entry.fromTargetUsesExternalReloc;
1087 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1088 }
1089
1090
1091 switch ( entry.kind ) {
1092 case ld::Fixup::kindStoreX86BranchPCRel32:
1093 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1094 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1095 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1096 reloc1.set_r_address(address);
1097 reloc1.set_r_symbolnum(symbolNum);
1098 reloc1.set_r_pcrel(true);
1099 reloc1.set_r_length(2);
1100 reloc1.set_r_extern(external);
1101 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1102 relocs.push_back(reloc1);
1103 break;
1104
1105 case ld::Fixup::kindStoreX86BranchPCRel8:
1106 reloc1.set_r_address(address);
1107 reloc1.set_r_symbolnum(symbolNum);
1108 reloc1.set_r_pcrel(true);
1109 reloc1.set_r_length(0);
1110 reloc1.set_r_extern(external);
1111 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1112 relocs.push_back(reloc1);
1113 break;
1114
1115 case ld::Fixup::kindStoreX86PCRel32:
1116 case ld::Fixup::kindStoreTargetAddressX86PCRel32:
1117 reloc1.set_r_address(address);
1118 reloc1.set_r_symbolnum(symbolNum);
1119 reloc1.set_r_pcrel(true);
1120 reloc1.set_r_length(2);
1121 reloc1.set_r_extern(external);
1122 reloc1.set_r_type(X86_64_RELOC_SIGNED);
1123 relocs.push_back(reloc1);
1124 break;
1125
1126 case ld::Fixup::kindStoreX86PCRel32_1:
1127 reloc1.set_r_address(address);
1128 reloc1.set_r_symbolnum(symbolNum);
1129 reloc1.set_r_pcrel(true);
1130 reloc1.set_r_length(2);
1131 reloc1.set_r_extern(external);
1132 reloc1.set_r_type(X86_64_RELOC_SIGNED_1);
1133 relocs.push_back(reloc1);
1134 break;
1135
1136 case ld::Fixup::kindStoreX86PCRel32_2:
1137 reloc1.set_r_address(address);
1138 reloc1.set_r_symbolnum(symbolNum);
1139 reloc1.set_r_pcrel(true);
1140 reloc1.set_r_length(2);
1141 reloc1.set_r_extern(external);
1142 reloc1.set_r_type(X86_64_RELOC_SIGNED_2);
1143 relocs.push_back(reloc1);
1144 break;
1145
1146 case ld::Fixup::kindStoreX86PCRel32_4:
1147 reloc1.set_r_address(address);
1148 reloc1.set_r_symbolnum(symbolNum);
1149 reloc1.set_r_pcrel(true);
1150 reloc1.set_r_length(2);
1151 reloc1.set_r_extern(external);
1152 reloc1.set_r_type(X86_64_RELOC_SIGNED_4);
1153 relocs.push_back(reloc1);
1154 break;
1155
1156 case ld::Fixup::kindStoreX86PCRel32GOTLoad:
1157 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
1158 reloc1.set_r_address(address);
1159 reloc1.set_r_symbolnum(symbolNum);
1160 reloc1.set_r_pcrel(true);
1161 reloc1.set_r_length(2);
1162 reloc1.set_r_extern(external);
1163 reloc1.set_r_type(X86_64_RELOC_GOT_LOAD);
1164 relocs.push_back(reloc1);
1165 break;
1166
1167 case ld::Fixup::kindStoreX86PCRel32GOT:
1168 reloc1.set_r_address(address);
1169 reloc1.set_r_symbolnum(symbolNum);
1170 reloc1.set_r_pcrel(true);
1171 reloc1.set_r_length(2);
1172 reloc1.set_r_extern(external);
1173 reloc1.set_r_type(X86_64_RELOC_GOT);
1174 relocs.push_back(reloc1);
1175 break;
1176
1177 case ld::Fixup::kindStoreLittleEndian64:
1178 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1179 if ( entry.fromTarget != NULL ) {
1180 // this is a pointer-diff
1181 reloc1.set_r_address(address);
1182 reloc1.set_r_symbolnum(symbolNum);
1183 reloc1.set_r_pcrel(false);
1184 reloc1.set_r_length(3);
1185 reloc1.set_r_extern(external);
1186 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1187 reloc2.set_r_address(address);
1188 reloc2.set_r_symbolnum(fromSymbolNum);
1189 reloc2.set_r_pcrel(false);
1190 reloc2.set_r_length(3);
1191 reloc2.set_r_extern(fromExternal);
1192 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1193 relocs.push_back(reloc2);
1194 relocs.push_back(reloc1);
1195 }
1196 else {
1197 // regular pointer
1198 reloc1.set_r_address(address);
1199 reloc1.set_r_symbolnum(symbolNum);
1200 reloc1.set_r_pcrel(false);
1201 reloc1.set_r_length(3);
1202 reloc1.set_r_extern(external);
1203 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1204 relocs.push_back(reloc1);
1205 }
1206 break;
1207
1208 case ld::Fixup::kindStoreLittleEndian32:
1209 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1210 if ( entry.fromTarget != NULL ) {
1211 // this is a pointer-diff
1212 reloc1.set_r_address(address);
1213 reloc1.set_r_symbolnum(symbolNum);
1214 reloc1.set_r_pcrel(false);
1215 reloc1.set_r_length(2);
1216 reloc1.set_r_extern(external);
1217 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1218 reloc2.set_r_address(address);
1219 reloc2.set_r_symbolnum(fromSymbolNum);
1220 reloc2.set_r_pcrel(false);
1221 reloc2.set_r_length(2);
1222 reloc2.set_r_extern(fromExternal);
1223 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1224 relocs.push_back(reloc2);
1225 relocs.push_back(reloc1);
1226 }
1227 else {
1228 // regular pointer
1229 reloc1.set_r_address(address);
1230 reloc1.set_r_symbolnum(symbolNum);
1231 reloc1.set_r_pcrel(false);
1232 reloc1.set_r_length(2);
1233 reloc1.set_r_extern(external);
1234 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1235 relocs.push_back(reloc1);
1236 }
1237 break;
1238 case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad:
1239 reloc1.set_r_address(address);
1240 reloc1.set_r_symbolnum(symbolNum);
1241 reloc1.set_r_pcrel(true);
1242 reloc1.set_r_length(2);
1243 reloc1.set_r_extern(external);
1244 reloc1.set_r_type(X86_64_RELOC_TLV);
1245 relocs.push_back(reloc1);
1246 break;
1247 default:
1248 assert(0 && "need to handle -r reloc");
1249
1250 }
1251
1252 }
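// Pointer-difference fixups ("A - B") become the classic x86_64 pair: an
// X86_64_RELOC_SUBTRACTOR record naming the subtracted symbol, followed
// immediately by an X86_64_RELOC_UNSIGNED record naming the added symbol, both
// at the same r_address; that ordering is why reloc2 is pushed before reloc1
// above.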
1253
1254
1255
1256 template <typename A>
1257 uint32_t SectionRelocationsAtom<A>::sectSymNum(bool external, const ld::Atom* target)
1258 {
1259 if ( target->definition() == ld::Atom::definitionAbsolute )
1260 return R_ABS;
1261 if ( external )
1262 return this->symbolIndex(target); // in external relocations, r_symbolnum field is symbol index
1263 else
1264 return target->machoSection(); // in non-extern relocations, r_symbolnum is mach-o section index of target
1265 }
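// In other words, r_symbolnum is overloaded in -r output: for an external
// relocation it is an index into the symbol table, for a non-external one it is
// the 1-based ordinal of the section holding the target (a target in the fourth
// section gives r_symbolnum == 4), and absolute targets use R_ABS.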
1266
1267 template <>
1268 void SectionRelocationsAtom<x86>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1269 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1270 {
1271 macho_relocation_info<P> reloc1;
1272 macho_relocation_info<P> reloc2;
1273 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1274 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1275 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1276 bool external = entry.toTargetUsesExternalReloc;
1277 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1278 bool fromExternal = false;
1279 uint32_t fromSymbolNum = 0;
1280 if ( entry.fromTarget != NULL ) {
1281 fromExternal = entry.fromTargetUsesExternalReloc;
1282 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1283 }
1284
1285 switch ( entry.kind ) {
1286 case ld::Fixup::kindStoreX86PCRel32:
1287 case ld::Fixup::kindStoreX86BranchPCRel32:
1288 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1289 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1290 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1291 if ( !external && (entry.toAddend != 0) ) {
1292 			// use scattered reloc if target offset is non-zero
1293 sreloc1->set_r_scattered(true);
1294 sreloc1->set_r_pcrel(true);
1295 sreloc1->set_r_length(2);
1296 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1297 sreloc1->set_r_address(address);
1298 sreloc1->set_r_value(entry.toTarget->finalAddress());
1299 }
1300 else {
1301 reloc1.set_r_address(address);
1302 reloc1.set_r_symbolnum(symbolNum);
1303 reloc1.set_r_pcrel(true);
1304 reloc1.set_r_length(2);
1305 reloc1.set_r_extern(external);
1306 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1307 }
1308 relocs.push_back(reloc1);
1309 break;
1310
1311 case ld::Fixup::kindStoreX86BranchPCRel8:
1312 if ( !external && (entry.toAddend != 0) ) {
1313 			// use scattered reloc if target offset is non-zero
1314 sreloc1->set_r_scattered(true);
1315 sreloc1->set_r_pcrel(true);
1316 sreloc1->set_r_length(0);
1317 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1318 sreloc1->set_r_address(address);
1319 sreloc1->set_r_value(entry.toTarget->finalAddress());
1320 }
1321 else {
1322 reloc1.set_r_address(address);
1323 reloc1.set_r_symbolnum(symbolNum);
1324 reloc1.set_r_pcrel(true);
1325 reloc1.set_r_length(0);
1326 reloc1.set_r_extern(external);
1327 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1328 }
1329 relocs.push_back(reloc1);
1330 break;
1331
1332 case ld::Fixup::kindStoreX86PCRel16:
1333 if ( !external && (entry.toAddend != 0) ) {
1334 			// use scattered reloc if target offset is non-zero
1335 sreloc1->set_r_scattered(true);
1336 sreloc1->set_r_pcrel(true);
1337 sreloc1->set_r_length(1);
1338 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1339 sreloc1->set_r_address(address);
1340 sreloc1->set_r_value(entry.toTarget->finalAddress());
1341 }
1342 else {
1343 reloc1.set_r_address(address);
1344 reloc1.set_r_symbolnum(symbolNum);
1345 reloc1.set_r_pcrel(true);
1346 reloc1.set_r_length(1);
1347 reloc1.set_r_extern(external);
1348 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1349 }
1350 relocs.push_back(reloc1);
1351 break;
1352
1353 case ld::Fixup::kindStoreLittleEndian32:
1354 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1355 if ( entry.fromTarget != NULL ) {
1356 // this is a pointer-diff
1357 sreloc1->set_r_scattered(true);
1358 sreloc1->set_r_pcrel(false);
1359 sreloc1->set_r_length(2);
1360 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1361 sreloc1->set_r_type(GENERIC_RELOC_LOCAL_SECTDIFF);
1362 else
1363 sreloc1->set_r_type(GENERIC_RELOC_SECTDIFF);
1364 sreloc1->set_r_address(address);
1365 if ( entry.toTarget == entry.inAtom ) {
1366 if ( entry.toAddend > entry.toTarget->size() )
1367 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1368 else
1369 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1370 }
1371 else
1372 sreloc1->set_r_value(entry.toTarget->finalAddress());
1373 sreloc2->set_r_scattered(true);
1374 sreloc2->set_r_pcrel(false);
1375 sreloc2->set_r_length(2);
1376 sreloc2->set_r_type(GENERIC_RELOC_PAIR);
1377 sreloc2->set_r_address(0);
1378 if ( entry.fromTarget == entry.inAtom ) {
1379 if ( entry.fromAddend > entry.fromTarget->size() )
1380 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.offsetInAtom);
1381 else
1382 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1383 }
1384 else
1385 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1386 relocs.push_back(reloc1);
1387 relocs.push_back(reloc2);
1388 }
1389 else {
1390 // regular pointer
1391 if ( !external && (entry.toAddend != 0) && (entry.toTarget->symbolTableInclusion() != ld::Atom::symbolTableNotIn) ) {
1392 // use scattered reloc if target offset is non-zero into named atom (5658046)
1393 sreloc1->set_r_scattered(true);
1394 sreloc1->set_r_pcrel(false);
1395 sreloc1->set_r_length(2);
1396 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1397 sreloc1->set_r_address(address);
1398 sreloc1->set_r_value(entry.toTarget->finalAddress());
1399 }
1400 else {
1401 reloc1.set_r_address(address);
1402 reloc1.set_r_symbolnum(symbolNum);
1403 reloc1.set_r_pcrel(false);
1404 reloc1.set_r_length(2);
1405 reloc1.set_r_extern(external);
1406 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1407 }
1408 relocs.push_back(reloc1);
1409 }
1410 break;
1411 case ld::Fixup::kindStoreX86PCRel32TLVLoad:
1412 case ld::Fixup::kindStoreX86Abs32TLVLoad:
1413 case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad:
1414 reloc1.set_r_address(address);
1415 reloc1.set_r_symbolnum(symbolNum);
1416 reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad);
1417 reloc1.set_r_length(2);
1418 reloc1.set_r_extern(external);
1419 reloc1.set_r_type(GENERIC_RLEOC_TLV);
1420 relocs.push_back(reloc1);
1421 break;
1422 default:
1423 assert(0 && "need to handle -r reloc");
1424
1425 }
1426 }
1427
1428
1429
1430 #if SUPPORT_ARCH_arm_any
1431 template <>
1432 void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1433 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1434 {
1435 macho_relocation_info<P> reloc1;
1436 macho_relocation_info<P> reloc2;
1437 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1438 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1439 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1440 bool external = entry.toTargetUsesExternalReloc;
1441 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1442 bool fromExternal = false;
1443 uint32_t fromSymbolNum = 0;
1444 if ( entry.fromTarget != NULL ) {
1445 fromExternal = entry.fromTargetUsesExternalReloc;
1446 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1447 }
1448
1449
1450 switch ( entry.kind ) {
1451 case ld::Fixup::kindStoreTargetAddressARMBranch24:
1452 case ld::Fixup::kindStoreARMBranch24:
1453 case ld::Fixup::kindStoreARMDtraceCallSiteNop:
1454 case ld::Fixup::kindStoreARMDtraceIsEnableSiteClear:
1455 if ( !external && (entry.toAddend != 0) ) {
1456 			// use scattered reloc if target offset is non-zero
1457 sreloc1->set_r_scattered(true);
1458 sreloc1->set_r_pcrel(true);
1459 sreloc1->set_r_length(2);
1460 sreloc1->set_r_type(ARM_RELOC_BR24);
1461 sreloc1->set_r_address(address);
1462 sreloc1->set_r_value(entry.toTarget->finalAddress());
1463 }
1464 else {
1465 reloc1.set_r_address(address);
1466 reloc1.set_r_symbolnum(symbolNum);
1467 reloc1.set_r_pcrel(true);
1468 reloc1.set_r_length(2);
1469 reloc1.set_r_extern(external);
1470 reloc1.set_r_type(ARM_RELOC_BR24);
1471 }
1472 relocs.push_back(reloc1);
1473 break;
1474
1475 case ld::Fixup::kindStoreTargetAddressThumbBranch22:
1476 case ld::Fixup::kindStoreThumbBranch22:
1477 case ld::Fixup::kindStoreThumbDtraceCallSiteNop:
1478 case ld::Fixup::kindStoreThumbDtraceIsEnableSiteClear:
1479 if ( !external && (entry.toAddend != 0) ) {
1480 			// use scattered reloc if target offset is non-zero
1481 sreloc1->set_r_scattered(true);
1482 sreloc1->set_r_pcrel(true);
1483 sreloc1->set_r_length(2);
1484 sreloc1->set_r_type(ARM_THUMB_RELOC_BR22);
1485 sreloc1->set_r_address(address);
1486 sreloc1->set_r_value(entry.toTarget->finalAddress());
1487 }
1488 else {
1489 reloc1.set_r_address(address);
1490 reloc1.set_r_symbolnum(symbolNum);
1491 reloc1.set_r_pcrel(true);
1492 reloc1.set_r_length(2);
1493 reloc1.set_r_extern(external);
1494 reloc1.set_r_type(ARM_THUMB_RELOC_BR22);
1495 }
1496 relocs.push_back(reloc1);
1497 break;
1498
1499 case ld::Fixup::kindStoreLittleEndian32:
1500 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1501 if ( entry.fromTarget != NULL ) {
1502 // this is a pointer-diff
1503 sreloc1->set_r_scattered(true);
1504 sreloc1->set_r_pcrel(false);
1505 sreloc1->set_r_length(2);
1506 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1507 sreloc1->set_r_type(ARM_RELOC_LOCAL_SECTDIFF);
1508 else
1509 sreloc1->set_r_type(ARM_RELOC_SECTDIFF);
1510 sreloc1->set_r_address(address);
1511 if ( entry.toTarget == entry.inAtom ) {
1512 if ( entry.toAddend > entry.toTarget->size() )
1513 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1514 else
1515 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1516 }
1517 else {
1518 sreloc1->set_r_value(entry.toTarget->finalAddress());
1519 }
1520 sreloc2->set_r_scattered(true);
1521 sreloc2->set_r_pcrel(false);
1522 sreloc2->set_r_length(2);
1523 sreloc2->set_r_type(ARM_RELOC_PAIR);
1524 sreloc2->set_r_address(0);
1525 if ( entry.fromTarget == entry.inAtom ) {
1526 //unsigned int pcBaseOffset = entry.inAtom->isThumb() ? 4 : 8;
1527 //if ( entry.fromAddend > pcBaseOffset )
1528 // sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend-pcBaseOffset);
1529 //else
1530 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1531 }
1532 else {
1533 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1534 }
1535 relocs.push_back(reloc1);
1536 relocs.push_back(reloc2);
1537 }
1538 else {
1539 // regular pointer
1540 if ( !external && (entry.toAddend != 0) ) {
1541 				// use scattered reloc if target offset is non-zero
1542 sreloc1->set_r_scattered(true);
1543 sreloc1->set_r_pcrel(false);
1544 sreloc1->set_r_length(2);
1545 sreloc1->set_r_type(ARM_RELOC_VANILLA);
1546 sreloc1->set_r_address(address);
1547 sreloc1->set_r_value(entry.toTarget->finalAddress());
1548 }
1549 else {
1550 reloc1.set_r_address(address);
1551 reloc1.set_r_symbolnum(symbolNum);
1552 reloc1.set_r_pcrel(false);
1553 reloc1.set_r_length(2);
1554 reloc1.set_r_extern(external);
1555 reloc1.set_r_type(ARM_RELOC_VANILLA);
1556 }
1557 relocs.push_back(reloc1);
1558 }
1559 break;
1560
1561 case ld::Fixup::kindStoreARMLow16:
1562 case ld::Fixup::kindStoreARMHigh16:
1563 case ld::Fixup::kindStoreThumbLow16:
1564 case ld::Fixup::kindStoreThumbHigh16:
1565 {
1566 int len = 0;
1567 uint32_t otherHalf = 0;
1568 uint32_t value = entry.toTarget->finalAddress()+entry.toAddend;
1569 if ( entry.fromTarget != NULL )
1570 value -= (entry.fromTarget->finalAddress()+entry.fromAddend);
1571 switch ( entry.kind ) {
1572 case ld::Fixup::kindStoreARMLow16:
1573 len = 0;
1574 otherHalf = value >> 16;
1575 break;
1576 case ld::Fixup::kindStoreARMHigh16:
1577 len = 1;
1578 otherHalf = value & 0xFFFF;
1579 break;
1580 case ld::Fixup::kindStoreThumbLow16:
1581 len = 2;
1582 otherHalf = value >> 16;
1583 break;
1584 case ld::Fixup::kindStoreThumbHigh16:
1585 len = 3;
1586 otherHalf = value & 0xFFFF;
1587 break;
1588 default:
1589 break;
1590 }
1591 if ( entry.fromTarget != NULL ) {
1592 // this is a sect-diff
1593 sreloc1->set_r_scattered(true);
1594 sreloc1->set_r_pcrel(false);
1595 sreloc1->set_r_length(len);
1596 sreloc1->set_r_type(ARM_RELOC_HALF_SECTDIFF);
1597 sreloc1->set_r_address(address);
1598 sreloc1->set_r_value(entry.toTarget->finalAddress());
1599 sreloc2->set_r_scattered(true);
1600 sreloc2->set_r_pcrel(false);
1601 sreloc2->set_r_length(len);
1602 sreloc2->set_r_type(ARM_RELOC_PAIR);
1603 sreloc2->set_r_address(otherHalf);
1604 if ( entry.fromTarget == entry.inAtom )
1605 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1606 else
1607 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1608 relocs.push_back(reloc1);
1609 relocs.push_back(reloc2);
1610 }
1611 else {
1612 // this is an absolute address
1613 if ( !external && (entry.toAddend != 0) ) {
1614 // use scattered reloc if target offset is non-zero
1615 sreloc1->set_r_scattered(true);
1616 sreloc1->set_r_pcrel(false);
1617 sreloc1->set_r_length(len);
1618 sreloc1->set_r_type(ARM_RELOC_HALF);
1619 sreloc1->set_r_address(address);
1620 sreloc1->set_r_value(entry.toTarget->finalAddress());
1621 reloc2.set_r_address(otherHalf);
1622 reloc2.set_r_symbolnum(0);
1623 reloc2.set_r_pcrel(false);
1624 reloc2.set_r_length(len);
1625 reloc2.set_r_extern(false);
1626 reloc2.set_r_type(ARM_RELOC_PAIR);
1627 relocs.push_back(reloc1);
1628 relocs.push_back(reloc2);
1629 }
1630 else {
1631 reloc1.set_r_address(address);
1632 reloc1.set_r_symbolnum(symbolNum);
1633 reloc1.set_r_pcrel(false);
1634 reloc1.set_r_length(len);
1635 reloc1.set_r_extern(external);
1636 reloc1.set_r_type(ARM_RELOC_HALF);
1637 reloc2.set_r_address(otherHalf); // other half
1638 reloc2.set_r_symbolnum(0);
1639 reloc2.set_r_pcrel(false);
1640 reloc2.set_r_length(len);
1641 reloc2.set_r_extern(false);
1642 reloc2.set_r_type(ARM_RELOC_PAIR);
1643 relocs.push_back(reloc1);
1644 relocs.push_back(reloc2);
1645 }
1646 }
1647 }
1648 break;
1649
1650 default:
1651 assert(0 && "need to handle -r reloc");
1652
1653 }
1654 }
1655 #endif
1656
1657 #if SUPPORT_ARCH_arm64
1658 template <>
1659 void SectionRelocationsAtom<arm64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1660 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1661 {
1662 macho_relocation_info<P> reloc1;
1663 macho_relocation_info<P> reloc2;
1664 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1665 bool external = entry.toTargetUsesExternalReloc;
1666 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1667 bool fromExternal = false;
1668 uint32_t fromSymbolNum = 0;
1669 if ( entry.fromTarget != NULL ) {
1670 fromExternal = entry.fromTargetUsesExternalReloc;
1671 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1672 }
1673
1674
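// Each Entry is lowered to one or two macho_relocation_info records. On arm64 an
// explicit addend is emitted first as a separate ARM64_RELOC_ADDEND record carrying
// the addend value in its r_symbolnum field, and pointer-diffs become a
// SUBTRACTOR/UNSIGNED pair. r_length of 2 means a 32-bit field, 3 means 64-bit.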
1675 switch ( entry.kind ) {
1676 case ld::Fixup::kindStoreARM64Branch26:
1677 if ( entry.toAddend != 0 ) {
1678 assert(entry.toAddend < 0x400000);
1679 reloc2.set_r_address(address);
1680 reloc2.set_r_symbolnum(entry.toAddend);
1681 reloc2.set_r_pcrel(false);
1682 reloc2.set_r_length(2);
1683 reloc2.set_r_extern(false);
1684 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1685 relocs.push_back(reloc2);
1686 }
1687 // fall through to next case
1688 case ld::Fixup::kindStoreTargetAddressARM64Branch26:
1689 case ld::Fixup::kindStoreARM64DtraceCallSiteNop:
1690 case ld::Fixup::kindStoreARM64DtraceIsEnableSiteClear:
1691 reloc1.set_r_address(address);
1692 reloc1.set_r_symbolnum(symbolNum);
1693 reloc1.set_r_pcrel(true);
1694 reloc1.set_r_length(2);
1695 reloc1.set_r_extern(external);
1696 reloc1.set_r_type(ARM64_RELOC_BRANCH26);
1697 relocs.push_back(reloc1);
1698 break;
1699
1700 case ld::Fixup::kindStoreARM64Page21:
1701 if ( entry.toAddend != 0 ) {
1702 assert(entry.toAddend < 0x400000);
1703 reloc2.set_r_address(address);
1704 reloc2.set_r_symbolnum(entry.toAddend);
1705 reloc2.set_r_pcrel(false);
1706 reloc2.set_r_length(2);
1707 reloc2.set_r_extern(false);
1708 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1709 relocs.push_back(reloc2);
1710 }
1711 // fall through to next case
1712 case ld::Fixup::kindStoreTargetAddressARM64Page21:
1713 reloc1.set_r_address(address);
1714 reloc1.set_r_symbolnum(symbolNum);
1715 reloc1.set_r_pcrel(true);
1716 reloc1.set_r_length(2);
1717 reloc1.set_r_extern(external);
1718 reloc1.set_r_type(ARM64_RELOC_PAGE21);
1719 relocs.push_back(reloc1);
1720 break;
1721
1722 case ld::Fixup::kindStoreARM64PageOff12:
1723 if ( entry.toAddend != 0 ) {
1724 assert(entry.toAddend < 0x400000);
1725 reloc2.set_r_address(address);
1726 reloc2.set_r_symbolnum(entry.toAddend);
1727 reloc2.set_r_pcrel(false);
1728 reloc2.set_r_length(2);
1729 reloc2.set_r_extern(false);
1730 reloc2.set_r_type(ARM64_RELOC_ADDEND);
1731 relocs.push_back(reloc2);
1732 }
1733 // fall through to next case
1734 case ld::Fixup::kindStoreTargetAddressARM64PageOff12:
1735 reloc1.set_r_address(address);
1736 reloc1.set_r_symbolnum(symbolNum);
1737 reloc1.set_r_pcrel(false);
1738 reloc1.set_r_length(2);
1739 reloc1.set_r_extern(external);
1740 reloc1.set_r_type(ARM64_RELOC_PAGEOFF12);
1741 relocs.push_back(reloc1);
1742 break;
1743
1744 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPage21:
1745 case ld::Fixup::kindStoreARM64GOTLoadPage21:
1746 reloc1.set_r_address(address);
1747 reloc1.set_r_symbolnum(symbolNum);
1748 reloc1.set_r_pcrel(true);
1749 reloc1.set_r_length(2);
1750 reloc1.set_r_extern(external);
1751 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGE21);
1752 relocs.push_back(reloc1);
1753 break;
1754
1755 case ld::Fixup::kindStoreTargetAddressARM64GOTLoadPageOff12:
1756 case ld::Fixup::kindStoreARM64GOTLoadPageOff12:
1757 reloc1.set_r_address(address);
1758 reloc1.set_r_symbolnum(symbolNum);
1759 reloc1.set_r_pcrel(false);
1760 reloc1.set_r_length(2);
1761 reloc1.set_r_extern(external);
1762 reloc1.set_r_type(ARM64_RELOC_GOT_LOAD_PAGEOFF12);
1763 relocs.push_back(reloc1);
1764 break;
1765
1766
1767 case ld::Fixup::kindStoreLittleEndian64:
1768 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
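// A 64-bit pointer-diff is encoded as an ARM64_RELOC_SUBTRACTOR for the 'from' symbol
// immediately followed by an ARM64_RELOC_UNSIGNED for the 'to' symbol, both at the
// same r_address; note the subtractor record is pushed first.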
1769 if ( entry.fromTarget != NULL ) {
1770 // this is a pointer-diff
1771 reloc1.set_r_address(address);
1772 reloc1.set_r_symbolnum(symbolNum);
1773 reloc1.set_r_pcrel(false);
1774 reloc1.set_r_length(3);
1775 reloc1.set_r_extern(external);
1776 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1777 reloc2.set_r_address(address);
1778 reloc2.set_r_symbolnum(fromSymbolNum);
1779 reloc2.set_r_pcrel(false);
1780 reloc2.set_r_length(3);
1781 reloc2.set_r_extern(fromExternal);
1782 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1783 relocs.push_back(reloc2);
1784 relocs.push_back(reloc1);
1785 }
1786 else {
1787 // regular pointer
1788 reloc1.set_r_address(address);
1789 reloc1.set_r_symbolnum(symbolNum);
1790 reloc1.set_r_pcrel(false);
1791 reloc1.set_r_length(3);
1792 reloc1.set_r_extern(external);
1793 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1794 relocs.push_back(reloc1);
1795 }
1796 break;
1797
1798 case ld::Fixup::kindStoreLittleEndian32:
1799 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1800 if ( entry.fromTarget != NULL ) {
1801 // this is a pointer-diff
1802 reloc1.set_r_address(address);
1803 reloc1.set_r_symbolnum(symbolNum);
1804 reloc1.set_r_pcrel(false);
1805 reloc1.set_r_length(2);
1806 reloc1.set_r_extern(external);
1807 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1808 reloc2.set_r_address(address);
1809 reloc2.set_r_symbolnum(fromSymbolNum);
1810 reloc2.set_r_pcrel(false);
1811 reloc2.set_r_length(2);
1812 reloc2.set_r_extern(fromExternal);
1813 reloc2.set_r_type(ARM64_RELOC_SUBTRACTOR);
1814 relocs.push_back(reloc2);
1815 relocs.push_back(reloc1);
1816 }
1817 else {
1818 // regular pointer
1819 reloc1.set_r_address(address);
1820 reloc1.set_r_symbolnum(symbolNum);
1821 reloc1.set_r_pcrel(false);
1822 reloc1.set_r_length(2);
1823 reloc1.set_r_extern(external);
1824 reloc1.set_r_type(ARM64_RELOC_UNSIGNED);
1825 relocs.push_back(reloc1);
1826 }
1827 break;
1828
1829 case ld::Fixup::kindStoreARM64PointerToGOT:
1830 reloc1.set_r_address(address);
1831 reloc1.set_r_symbolnum(symbolNum);
1832 reloc1.set_r_pcrel(false);
1833 reloc1.set_r_length(3);
1834 reloc1.set_r_extern(external);
1835 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1836 relocs.push_back(reloc1);
1837 break;
1838
1839 case ld::Fixup::kindStoreARM64PCRelToGOT:
1840 reloc1.set_r_address(address);
1841 reloc1.set_r_symbolnum(symbolNum);
1842 reloc1.set_r_pcrel(true);
1843 reloc1.set_r_length(2);
1844 reloc1.set_r_extern(external);
1845 reloc1.set_r_type(ARM64_RELOC_POINTER_TO_GOT);
1846 relocs.push_back(reloc1);
1847 break;
1848
1849 default:
1850 assert(0 && "need to handle arm64 -r reloc");
1851
1852 }
1853
1854 }
1855 #endif // SUPPORT_ARCH_arm64
1856
1857
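// Records one section-relocation request; encode() later lowers each Entry into one or
// two Mach-O relocation records for -r (object file) output. A hypothetical call for a
// 64-bit pointer-difference fixup might look roughly like:
//   relocsAtom->addSectionReloc(textSect, ld::Fixup::kindStoreLittleEndian64, atom, offset,
//                               /*toExternal=*/true, /*fromExternal=*/false,
//                               toAtom, /*toAddend=*/0, fromAtom, /*fromAddend=*/0);
// (the names above are illustrative, not taken from the actual callers in OutputFile).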
1858 template <typename A>
1859 void SectionRelocationsAtom<A>::addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind kind,
1860 const ld::Atom* inAtom, uint32_t offsetInAtom,
1861 bool toTargetUsesExternalReloc, bool fromTargetExternalReloc,
1862 const ld::Atom* toTarget, uint64_t toAddend,
1863 const ld::Atom* fromTarget, uint64_t fromAddend)
1864 {
1865 Entry entry;
1866 entry.kind = kind;
1867 entry.toTargetUsesExternalReloc = toTargetUsesExternalReloc;
1868 entry.fromTargetUsesExternalReloc = fromTargetExternalReloc;
1869 entry.inAtom = inAtom;
1870 entry.offsetInAtom = offsetInAtom;
1871 entry.toTarget = toTarget;
1872 entry.toAddend = toAddend;
1873 entry.fromTarget = fromTarget;
1874 entry.fromAddend = fromAddend;
1875
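// Cache the most recently used section so repeated adds to the same section skip the
// linear search below. These are function-local statics, so this assumes entries are
// added from a single thread.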
1876 static ld::Internal::FinalSection* lastSection = NULL;
1877 static SectionAndEntries* lastSectionAndEntries = NULL;
1878
1879 if ( sect != lastSection ) {
1880 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1881 if ( sect == it->sect ) {
1882 lastSection = sect;
1883 lastSectionAndEntries = &*it;
1884 break;
1885 }
1886 }
1887 if ( sect != lastSection ) {
1888 SectionAndEntries tmp;
1889 tmp.sect = sect;
1890 _entriesBySection.push_back(tmp);
1891 lastSection = sect;
1892 lastSectionAndEntries = &_entriesBySection.back();
1893 }
1894 }
1895 lastSectionAndEntries->entries.push_back(entry);
1896 }
1897
1898 template <typename A>
1899 void SectionRelocationsAtom<A>::encode()
1900 {
1901 // convert each Entry record to one or two reloc records
1902 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1903 SectionAndEntries& se = *it;
1904 for(typename std::vector<Entry>::iterator eit=se.entries.begin(); eit != se.entries.end(); ++eit) {
1905 encodeSectionReloc(se.sect, *eit, se.relocs);
1906 }
1907 }
1908
1909 // update sections with start index and count of relocs
1910 uint32_t index = 0;
1911 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1912 SectionAndEntries& se = *it;
1913 se.sect->relocStart = index;
1914 se.sect->relocCount = se.relocs.size();
1915 index += se.sect->relocCount;
1916 }
1917
1918 }
1919
1920
1921
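// Builds the __LINKEDIT indirect symbol table: one 32-bit entry per atom in each stub,
// lazy-pointer, and non-lazy-pointer section, giving the symbol-table index that entry
// refers to (or the special INDIRECT_SYMBOL_LOCAL / INDIRECT_SYMBOL_ABS markers).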
1922 template <typename A>
1923 class IndirectSymbolTableAtom : public ClassicLinkEditAtom
1924 {
1925 public:
1926 IndirectSymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1927 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)) { }
1928
1929 // overrides of ld::Atom
1930 virtual const char* name() const { return "indirect symbol table"; }
1931 virtual uint64_t size() const;
1932 virtual void copyRawContent(uint8_t buffer[]) const;
1933 // overrides of ClassicLinkEditAtom
1934 virtual void encode();
1935
1936 private:
1937 typedef typename A::P P;
1938 typedef typename A::P::E E;
1939 typedef typename A::P::uint_t pint_t;
1940
1941 void encodeStubSection(ld::Internal::FinalSection* sect);
1942 void encodeLazyPointerSection(ld::Internal::FinalSection* sect);
1943 void encodeNonLazyPointerSection(ld::Internal::FinalSection* sect);
1944 uint32_t symIndexOfStubAtom(const ld::Atom*);
1945 uint32_t symIndexOfLazyPointerAtom(const ld::Atom*);
1946 uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*);
1947 uint32_t symbolIndex(const ld::Atom*);
1948 bool kextBundlesDontHaveIndirectSymbolTable();
1949
1950
1951 std::vector<uint32_t> _entries;
1952
1953 static ld::Section _s_section;
1954 };
1955
1956 template <typename A>
1957 ld::Section IndirectSymbolTableAtom<A>::_s_section("__LINKEDIT", "__ind_sym_tab", ld::Section::typeLinkEdit, true);
1958
1959
1960
1961
1962 template <typename A>
1963 uint32_t IndirectSymbolTableAtom<A>::symbolIndex(const ld::Atom* atom)
1964 {
1965 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
1966 if ( pos != this->_writer._atomToSymbolIndex.end() )
1967 return pos->second;
1968 //fprintf(stderr, "_atomToSymbolIndex content:\n");
1969 //for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
1970 // fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
1971 //}
1972 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
1973 }
1974
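// A stub has no indirect-table entry of its own name; its entry is the symbol index of
// the lazy target reached through the lazy pointer the stub jumps through.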
1975 template <typename A>
1976 uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
1977 {
1978 for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
1979 if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
1980 assert((fit->u.target->contentType() == ld::Atom::typeLazyPointer)
1981 || (fit->u.target->contentType() == ld::Atom::typeLazyDylibPointer));
1982 return symIndexOfLazyPointerAtom(fit->u.target);
1983 }
1984 }
1985 throw "internal error: stub missing fixup to lazy pointer";
1986 }
1987
1988
1989 template <typename A>
1990 uint32_t IndirectSymbolTableAtom<A>::symIndexOfLazyPointerAtom(const ld::Atom* lpAtom)
1991 {
1992 for (ld::Fixup::iterator fit = lpAtom->fixupsBegin(); fit != lpAtom->fixupsEnd(); ++fit) {
1993 if ( fit->kind == ld::Fixup::kindLazyTarget ) {
1994 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
1995 return symbolIndex(fit->u.target);
1996 }
1997 }
1998 throw "internal error: lazy pointer missing fixupLazyTarget fixup";
1999 }
2000
2001 template <typename A>
2002 uint32_t IndirectSymbolTableAtom<A>::symIndexOfNonLazyPointerAtom(const ld::Atom* nlpAtom)
2003 {
2004 //fprintf(stderr, "symIndexOfNonLazyPointerAtom(%p) %s\n", nlpAtom, nlpAtom->name());
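// Return a real symbol index only when dyld (or a later static link of the .o) still
// has to bind this pointer; otherwise INDIRECT_SYMBOL_LOCAL (already resolved) or, for
// the fixup-less ImageLoader cache slot, INDIRECT_SYMBOL_ABS.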
2005 for (ld::Fixup::iterator fit = nlpAtom->fixupsBegin(); fit != nlpAtom->fixupsEnd(); ++fit) {
2006 // non-lazy-pointer to a stripped symbol => no symbol index
2007 if ( fit->clusterSize != ld::Fixup::k1of1 )
2008 return INDIRECT_SYMBOL_LOCAL;
2009 const ld::Atom* target;
2010 switch ( fit->binding ) {
2011 case ld::Fixup::bindingDirectlyBound:
2012 target = fit->u.target;
2013 break;
2014 case ld::Fixup::bindingsIndirectlyBound:
2015 target = _state.indirectBindingTable[fit->u.bindingIndex];
2016 break;
2017 default:
2018 throw "internal error: unexpected non-lazy pointer binding";
2019 }
2020 bool targetIsGlobal = (target->scope() == ld::Atom::scopeGlobal);
2021 switch ( target->definition() ) {
2022 case ld::Atom::definitionRegular:
2023 if ( targetIsGlobal ) {
2024 if ( _options.outputKind() == Options::kObjectFile ) {
2025 // nlpointer to global symbol uses indirect symbol table in .o files
2026 return symbolIndex(target);
2027 }
2028 else if ( target->combine() == ld::Atom::combineByName ) {
2029 // dyld needs to bind nlpointer to global weak def
2030 return symbolIndex(target);
2031 }
2032 else if ( _options.nameSpace() != Options::kTwoLevelNameSpace ) {
2033 // dyld needs to bind nlpointer to global def linked for flat namespace
2034 return symbolIndex(target);
2035 }
2036 }
2037 break;
2038 case ld::Atom::definitionTentative:
2039 case ld::Atom::definitionAbsolute:
2040 if ( _options.outputKind() == Options::kObjectFile ) {
2041 // tentative def in .o file always uses symbol index
2042 return symbolIndex(target);
2043 }
2044 // dyld needs to bind nlpointer to global def linked for flat namespace
2045 if ( targetIsGlobal && _options.nameSpace() != Options::kTwoLevelNameSpace )
2046 return symbolIndex(target);
2047 break;
2048 case ld::Atom::definitionProxy:
2049 // dyld needs to bind nlpointer to something in another dylib
2050 {
2051 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target->file());
2052 if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
2053 throwf("illegal data reference to %s in lazy loaded dylib %s", target->name(), dylib->path());
2054 }
2055 return symbolIndex(target);
2056 }
2057 }
2058 if ( nlpAtom->fixupsBegin() == nlpAtom->fixupsEnd() ) {
2059 // no fixups means this is the ImageLoader cache slot
2060 return INDIRECT_SYMBOL_ABS;
2061 }
2062
2063 // The magic index INDIRECT_SYMBOL_LOCAL tells dyld that it does not need to bind
2064 // this non-lazy pointer.
2065 return INDIRECT_SYMBOL_LOCAL;
2066 }
2067
2068
2069
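// Stub sections record both where their indirect-table entries start and the per-stub
// element size; the element size is taken from the first atom, which assumes all stubs
// in the section are the same size.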
2070 template <typename A>
2071 void IndirectSymbolTableAtom<A>::encodeStubSection(ld::Internal::FinalSection* sect)
2072 {
2073 sect->indirectSymTabStartIndex = _entries.size();
2074 sect->indirectSymTabElementSize = sect->atoms[0]->size();
2075 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2076 _entries.push_back(symIndexOfStubAtom(*ait));
2077 }
2078 }
2079
2080 template <typename A>
2081 void IndirectSymbolTableAtom<A>::encodeLazyPointerSection(ld::Internal::FinalSection* sect)
2082 {
2083 sect->indirectSymTabStartIndex = _entries.size();
2084 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2085 _entries.push_back(symIndexOfLazyPointerAtom(*ait));
2086 }
2087 }
2088
2089 template <typename A>
2090 void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
2091 {
2092 sect->indirectSymTabStartIndex = _entries.size();
2093 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
2094 _entries.push_back(symIndexOfNonLazyPointerAtom(*ait));
2095 }
2096 }
2097
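// With this generic definition every architecture handled here omits the indirect
// symbol table for kext bundles; an architecture could opt back in via a template
// specialization.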
2098 template <typename A>
2099 bool IndirectSymbolTableAtom<A>::kextBundlesDontHaveIndirectSymbolTable()
2100 {
2101 return true;
2102 }
2103
2104 template <typename A>
2105 void IndirectSymbolTableAtom<A>::encode()
2106 {
2107 // static executables should not have an indirect symbol table, unless PIE
2108 if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
2109 return;
2110
2111 // x86_64 kext bundles should not have an indirect symbol table
2112 if ( (this->_options.outputKind() == Options::kKextBundle) && kextBundlesDontHaveIndirectSymbolTable() )
2113 return;
2114
2115 // slidable static executables (-static -pie) should not have an indirect symbol table
2116 if ( (this->_options.outputKind() == Options::kStaticExecutable) && this->_options.positionIndependentExecutable() )
2117 return;
2118
2119 // find all special sections that need a range of the indirect symbol table section
2120 for (std::vector<ld::Internal::FinalSection*>::iterator sit = this->_state.sections.begin(); sit != this->_state.sections.end(); ++sit) {
2121 ld::Internal::FinalSection* sect = *sit;
2122 switch ( sect->type() ) {
2123 case ld::Section::typeStub:
2124 case ld::Section::typeStubClose:
2125 this->encodeStubSection(sect);
2126 break;
2127 case ld::Section::typeLazyPointerClose:
2128 case ld::Section::typeLazyPointer:
2129 case ld::Section::typeLazyDylibPointer:
2130 this->encodeLazyPointerSection(sect);
2131 break;
2132 case ld::Section::typeNonLazyPointer:
2133 this->encodeNonLazyPointerSection(sect);
2134 break;
2135 default:
2136 break;
2137 }
2138 }
2139 }
2140
2141 template <typename A>
2142 uint64_t IndirectSymbolTableAtom<A>::size() const
2143 {
2144 return _entries.size() * sizeof(uint32_t);
2145 }
2146
2147 template <typename A>
2148 void IndirectSymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
2149 {
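// write each 32-bit entry in the target architecture's byte order (E is A::P::E)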
2150 uint32_t* array = (uint32_t*)buffer;
2151 for(unsigned long i=0; i < _entries.size(); ++i) {
2152 E::set32(array[i], _entries[i]);
2153 }
2154 }
2155
2156
2157
2158
2159
2160
2161
2162
2163 } // namespace tool
2164 } // namespace ld
2165
2166 #endif // __LINKEDIT_CLASSIC_HPP__