1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-*
2 *
3 * Copyright (c) 2009-2010 Apple Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #ifndef __LINKEDIT_CLASSIC_HPP__
26 #define __LINKEDIT_CLASSIC_HPP__
27
28 #include <stdlib.h>
29 #include <sys/types.h>
30 #include <errno.h>
31 #include <limits.h>
32 #include <unistd.h>
33
34 #include <vector>
35 #include <unordered_map>
36
37 #include "Options.h"
38 #include "ld.hpp"
39 #include "Architectures.hpp"
40 #include "MachOFileAbstraction.hpp"
41
42 namespace ld {
43 namespace tool {
44
45
46
47 class ClassicLinkEditAtom : public ld::Atom
48 {
49 public:
50
51 // overrides of ld::Atom
52 virtual ld::File* file() const { return NULL; }
53 virtual uint64_t objectAddress() const { return 0; }
54
55 virtual void encode() = 0;
56 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe) { return false; }
57
58 ClassicLinkEditAtom(const Options& opts, ld::Internal& state,
59 OutputFile& writer, const ld::Section& sect,
60 unsigned int pointerSize)
61 : ld::Atom(sect, ld::Atom::definitionRegular,
62 ld::Atom::combineNever, ld::Atom::scopeTranslationUnit,
63 ld::Atom::typeUnclassified, ld::Atom::symbolTableNotIn,
64 false, false, false, ld::Atom::Alignment(log2(pointerSize))),
65 _options(opts), _state(state), _writer(writer) { }
66 protected:
67 const Options& _options;
68 ld::Internal& _state;
69 OutputFile& _writer;
70 };
71
72
73
74 class StringPoolAtom : public ClassicLinkEditAtom
75 {
76 public:
77 StringPoolAtom(const Options& opts, ld::Internal& state,
78 OutputFile& writer, int pointerSize);
79
80 // overrides of ld::Atom
81 virtual const char* name() const { return "string pool"; }
82 virtual uint64_t size() const;
83 virtual void copyRawContent(uint8_t buffer[]) const;
84 // overrides of ClassicLinkEditAtom
85 virtual void encode() { }
86
87 int32_t add(const char* name);
88 int32_t addUnique(const char* name);
89 int32_t emptyString() { return 1; }
90 const char* stringForIndex(int32_t) const;
91 uint32_t currentOffset();
92
93 private:
94 enum { kBufferSize = 0x01000000 };
95 typedef std::unordered_map<const char*, int32_t, CStringHash, CStringEquals> StringToOffset;
96
97 const uint32_t _pointerSize;
98 std::vector<char*> _fullBuffers;
99 char* _currentBuffer;
100 uint32_t _currentBufferUsed;
101 StringToOffset _uniqueStrings;
102
103 static ld::Section _s_section;
104 };
105
106 ld::Section StringPoolAtom::_s_section("__LINKEDIT", "__string_pool", ld::Section::typeLinkEdit, true);
107
108
109 StringPoolAtom::StringPoolAtom(const Options& opts, ld::Internal& state, OutputFile& writer, int pointerSize)
110 : ClassicLinkEditAtom(opts, state, writer, _s_section, pointerSize),
111 _pointerSize(pointerSize), _currentBuffer(NULL), _currentBufferUsed(0)
112 {
113 _currentBuffer = new char[kBufferSize];
114 // burn first byte of string pool (so zero is never a valid string offset)
115 _currentBuffer[_currentBufferUsed++] = ' ';
116 // make offset 1 always point to an empty string
117 _currentBuffer[_currentBufferUsed++] = '\0';
118 }
119
120 uint64_t StringPoolAtom::size() const
121 {
122 // round size up to a pointer-size multiple
123 return (kBufferSize * _fullBuffers.size() + _currentBufferUsed + _pointerSize-1) & (-_pointerSize);
124 }
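// Note on the rounding idiom above (illustration, not part of ld64): for a
// power-of-two pointer size p, (n + p-1) & (-p) rounds n up to the next
// multiple of p.  With p=8 and n=13: 13+7 = 20 (0b10100), -8 is ...11111000,
// so 20 & -8 = 16.  A minimal sketch of the same idiom, assuming nothing
// beyond <stdint.h>:
#if 0
static inline uint64_t alignUp(uint64_t n, uint64_t p)	// p must be a power of two
{
	return (n + p - 1) & (-p);
}
// alignUp(13, 8) == 16,  alignUp(16, 8) == 16,  alignUp(1, 4) == 4
#endif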
125
126 void StringPoolAtom::copyRawContent(uint8_t buffer[]) const
127 {
128 uint64_t offset = 0;
129 for (unsigned int i=0; i < _fullBuffers.size(); ++i) {
130 memcpy(&buffer[offset], _fullBuffers[i], kBufferSize);
131 offset += kBufferSize;
132 }
133 memcpy(&buffer[offset], _currentBuffer, _currentBufferUsed);
134 // zero fill end to align
135 offset += _currentBufferUsed;
136 while ( (offset % _pointerSize) != 0 )
137 buffer[offset++] = 0;
138 }
139
140 int32_t StringPoolAtom::add(const char* str)
141 {
142 int32_t offset = kBufferSize * _fullBuffers.size() + _currentBufferUsed;
143 int lenNeeded = strlcpy(&_currentBuffer[_currentBufferUsed], str, kBufferSize-_currentBufferUsed)+1;
144 if ( (_currentBufferUsed+lenNeeded) < kBufferSize ) {
145 _currentBufferUsed += lenNeeded;
146 }
147 else {
148 int copied = kBufferSize-_currentBufferUsed-1;
149 // change trailing '\0' that strlcpy added to real char
150 _currentBuffer[kBufferSize-1] = str[copied];
151 // alloc next buffer
152 _fullBuffers.push_back(_currentBuffer);
153 _currentBuffer = new char[kBufferSize];
154 _currentBufferUsed = 0;
155 // append rest of string
156 this->add(&str[copied+1]);
157 }
158 return offset;
159 }
160
161 uint32_t StringPoolAtom::currentOffset()
162 {
163 return kBufferSize * _fullBuffers.size() + _currentBufferUsed;
164 }
165
166
167 int32_t StringPoolAtom::addUnique(const char* str)
168 {
169 StringToOffset::iterator pos = _uniqueStrings.find(str);
170 if ( pos != _uniqueStrings.end() ) {
171 return pos->second;
172 }
173 else {
174 int32_t offset = this->add(str);
175 _uniqueStrings[str] = offset;
176 return offset;
177 }
178 }
179
180
181 const char* StringPoolAtom::stringForIndex(int32_t index) const
182 {
183 int32_t currentBufferStartIndex = kBufferSize * _fullBuffers.size();
184 int32_t maxIndex = currentBufferStartIndex + _currentBufferUsed;
185 // check for out of bounds
186 if ( index > maxIndex )
187 return "";
188 // check for index in _currentBuffer
189 if ( index > currentBufferStartIndex )
190 return &_currentBuffer[index-currentBufferStartIndex];
191 // otherwise index is in a full buffer
192 uint32_t fullBufferIndex = index/kBufferSize;
193 return &_fullBuffers[fullBufferIndex][index-(kBufferSize*fullBufferIndex)];
194 }
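// Usage sketch (illustration only, not part of ld64): offset 0 is burned and
// offset 1 always names the empty string, so every offset handed out by add()
// or addUnique() is >= 1 and can be stored directly in an nlist n_strx field.
// addUnique() memoizes by string content (CStringHash/CStringEquals), so
// repeated names share one pool entry, while add() always appends a new copy.
#if 0
	// 'pool' is a hypothetical StringPoolAtom built with real Options/Internal/OutputFile
	int32_t a = pool->addUnique("_main");
	int32_t b = pool->addUnique("_main");		// same offset as 'a'
	int32_t c = pool->add("_main");				// fresh copy, different offset
	assert( (a == b) && (a != c) );
	assert( strcmp(pool->stringForIndex(a), "_main") == 0 );
	assert( pool->emptyString() == 1 );
#endif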
195
196
197
198 template <typename A>
199 class SymbolTableAtom : public ClassicLinkEditAtom
200 {
201 public:
202 SymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
203 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)),
204 _stabsStringsOffsetStart(0), _stabsStringsOffsetEnd(0),
205 _stabsIndexStart(0), _stabsIndexEnd(0) { }
206
207 // overrides of ld::Atom
208 virtual const char* name() const { return "symbol table"; }
209 virtual uint64_t size() const;
210 virtual void copyRawContent(uint8_t buffer[]) const;
211 // overrides of ClassicLinkEditAtom
212 virtual void encode();
213 virtual bool hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe);
214
215 private:
216 typedef typename A::P P;
217 typedef typename A::P::E E;
218 typedef typename A::P::uint_t pint_t;
219
220 bool addLocal(const ld::Atom* atom, StringPoolAtom* pool);
221 void addGlobal(const ld::Atom* atom, StringPoolAtom* pool);
222 void addImport(const ld::Atom* atom, StringPoolAtom* pool);
223 uint8_t classicOrdinalForProxy(const ld::Atom* atom);
224 uint32_t stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool);
225 uint64_t valueForStab(const ld::relocatable::File::Stab& stab);
226 uint8_t sectionIndexForStab(const ld::relocatable::File::Stab& stab);
227
228
229 mutable std::vector<macho_nlist<P> > _globals;
230 mutable std::vector<macho_nlist<P> > _locals;
231 mutable std::vector<macho_nlist<P> > _imports;
232
233 uint32_t _stabsStringsOffsetStart;
234 uint32_t _stabsStringsOffsetEnd;
235 uint32_t _stabsIndexStart;
236 uint32_t _stabsIndexEnd;
237
238 static ld::Section _s_section;
239 static int _s_anonNameIndex;
240
241 };
242
243 template <typename A>
244 ld::Section SymbolTableAtom<A>::_s_section("__LINKEDIT", "__symbol_table", ld::Section::typeLinkEdit, true);
245
246 template <typename A>
247 int SymbolTableAtom<A>::_s_anonNameIndex = 1;
248
249
250 template <typename A>
251 bool SymbolTableAtom<A>::addLocal(const ld::Atom* atom, StringPoolAtom* pool)
252 {
253 macho_nlist<P> entry;
254 assert(atom->symbolTableInclusion() != ld::Atom::symbolTableNotIn);
255
256 // set n_strx
257 const char* symbolName = atom->name();
258 char anonName[32];
259 if ( this->_options.outputKind() == Options::kObjectFile ) {
260 if ( atom->contentType() == ld::Atom::typeCString ) {
261 if ( atom->combine() == ld::Atom::combineByNameAndContent ) {
262 // don't use 'l' labels for x86_64 strings
263 // <rdar://problem/6605499> x86_64 obj-c runtime confused when static lib is stripped
264 sprintf(anonName, "LC%u", _s_anonNameIndex++);
265 symbolName = anonName;
266 }
267 }
268 else if ( atom->contentType() == ld::Atom::typeCFI ) {
269 if ( _options.removeEHLabels() )
270 return false;
271 // synthesize .eh name
272 if ( strcmp(atom->name(), "CIE") == 0 )
273 symbolName = "EH_Frame1";
274 else
275 symbolName = "func.eh";
276 }
277 else if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
278 // make auto-strip anonymous name for symbol
279 sprintf(anonName, "l%03u", _s_anonNameIndex++);
280 symbolName = anonName;
281 }
282 }
283 entry.set_n_strx(pool->add(symbolName));
284
285 // set n_type
286 uint8_t type = N_SECT;
287 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
288 type = N_ABS;
289 }
290 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
291 && (this->_options.outputKind() == Options::kObjectFile) ) {
292 // __OBJC __class has floating abs symbols for each class data structure
293 type = N_ABS;
294 }
295 if ( atom->scope() == ld::Atom::scopeLinkageUnit )
296 type |= N_PEXT;
297 entry.set_n_type(type);
298
299 // set n_sect (section number of implementation )
300 if ( atom->definition() == ld::Atom::definitionAbsolute )
301 entry.set_n_sect(0);
302 else
303 entry.set_n_sect(atom->machoSection());
304
305 // set n_desc
306 uint16_t desc = 0;
307 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
308 desc |= REFERENCED_DYNAMICALLY;
309 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
310 desc |= N_NO_DEAD_STRIP;
311 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) )
312 desc |= N_WEAK_DEF;
313 if ( atom->isThumb() )
314 desc |= N_ARM_THUMB_DEF;
315 entry.set_n_desc(desc);
316
317 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
318 if ( atom->definition() == ld::Atom::definitionAbsolute )
319 entry.set_n_value(atom->objectAddress());
320 else
321 entry.set_n_value(atom->finalAddress());
322
323 // add to array
324 _locals.push_back(entry);
325 return true;
326 }
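// Worked example (illustration only): for a linkage-unit-scoped atom "_helper"
// placed in mach-o section 2 at 0x100001f30, addLocal() above produces roughly
// this 64-bit nlist (struct nlist_64 from <mach-o/nlist.h>; 'pool' stands for
// the StringPoolAtom passed in):
#if 0
	struct nlist_64 n;
	n.n_un.n_strx = pool->add("_helper");	// string pool offset, always >= 1
	n.n_type      = N_SECT | N_PEXT;		// defined in a section, private extern
	n.n_sect      = 2;						// 1-based section ordinal (machoSection())
	n.n_desc      = 0;						// no REFERENCED_DYNAMICALLY / N_NO_DEAD_STRIP here
	n.n_value     = 0x100001f30;			// finalAddress() of the atom
#endif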
327
328
329 template <typename A>
330 void SymbolTableAtom<A>::addGlobal(const ld::Atom* atom, StringPoolAtom* pool)
331 {
332 macho_nlist<P> entry;
333
334 // set n_strx
335 const char* symbolName = atom->name();
336 char anonName[32];
337 if ( this->_options.outputKind() == Options::kObjectFile ) {
338 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInWithRandomAutoStripLabel ) {
339 // make auto-strip anonymous name for symbol
340 sprintf(anonName, "l%03u", _s_anonNameIndex++);
341 symbolName = anonName;
342 }
343 }
344 entry.set_n_strx(pool->add(symbolName));
345
346 // set n_type
347 if ( atom->definition() == ld::Atom::definitionAbsolute ) {
348 entry.set_n_type(N_EXT | N_ABS);
349 }
350 else if ( (atom->section().type() == ld::Section::typeObjC1Classes)
351 && (this->_options.outputKind() == Options::kObjectFile) ) {
352 // __OBJC __class has floating abs symbols for each class data structure
353 entry.set_n_type(N_EXT | N_ABS);
354 }
355 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
356 entry.set_n_type(N_EXT | N_INDR);
357 }
358 else {
359 entry.set_n_type(N_EXT | N_SECT);
360 if ( (atom->scope() == ld::Atom::scopeLinkageUnit) && (this->_options.outputKind() == Options::kObjectFile) ) {
361 if ( this->_options.keepPrivateExterns() )
362 entry.set_n_type(N_EXT | N_SECT | N_PEXT);
363 }
364 else if ( (atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip)
365 && (atom->section().type() == ld::Section::typeMachHeader)
366 && !_options.positionIndependentExecutable() ) {
367 // the __mh_execute_header is historical magic in non-pie executables and must be an absolute symbol
368 entry.set_n_type(N_EXT | N_ABS);
369 }
370 }
371
372 // set n_sect (section number of implementation)
373 if ( atom->definition() == ld::Atom::definitionAbsolute )
374 entry.set_n_sect(0);
375 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) )
376 entry.set_n_sect(0);
377 else
378 entry.set_n_sect(atom->machoSection());
379
380 // set n_desc
381 uint16_t desc = 0;
382 if ( atom->isThumb() )
383 desc |= N_ARM_THUMB_DEF;
384 if ( atom->symbolTableInclusion() == ld::Atom::symbolTableInAndNeverStrip )
385 desc |= REFERENCED_DYNAMICALLY;
386 if ( (atom->contentType() == ld::Atom::typeResolver) && (this->_options.outputKind() == Options::kObjectFile) )
387 desc |= N_SYMBOL_RESOLVER;
388 if ( atom->dontDeadStrip() && (this->_options.outputKind() == Options::kObjectFile) )
389 desc |= N_NO_DEAD_STRIP;
390 if ( (atom->definition() == ld::Atom::definitionRegular) && (atom->combine() == ld::Atom::combineByName) ) {
391 desc |= N_WEAK_DEF;
392 // <rdar://problem/6783167> support auto hidden weak symbols: .weak_def_can_be_hidden
393 if ( (atom->scope() == ld::Atom::scopeGlobal) && atom->autoHide() && (this->_options.outputKind() == Options::kObjectFile) )
394 desc |= N_WEAK_REF;
395 }
396 entry.set_n_desc(desc);
397
398 // set n_value ( address this symbol will be at if this executable is loaded at its preferred address )
399 if ( atom->definition() == ld::Atom::definitionAbsolute )
400 entry.set_n_value(atom->objectAddress());
401 else if ( (atom->definition() == ld::Atom::definitionProxy) && (atom->scope() == ld::Atom::scopeGlobal) ) {
402 if ( atom->isAlias() ) {
403 // this re-export also renames
404 for (ld::Fixup::iterator fit = atom->fixupsBegin(); fit != atom->fixupsEnd(); ++fit) {
405 if ( fit->kind == ld::Fixup::kindNoneFollowOn ) {
406 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
407 entry.set_n_value(pool->add(fit->u.target->name()));
408 }
409 }
410 }
411 else
412 entry.set_n_value(entry.n_strx());
413 }
414 else
415 entry.set_n_value(atom->finalAddress());
416
417 // add to array
418 _globals.push_back(entry);
419 }
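// Worked example (illustration only): the N_INDR branch above encodes a
// re-export.  For an alias that exports "_new" as a rename of "_old", n_value
// is not an address at all; it is the string-pool offset of the target name
// ('pool' again stands for the StringPoolAtom passed in):
#if 0
	struct nlist_64 n;
	n.n_un.n_strx = pool->add("_new");		// the exported (outer) name
	n.n_type      = N_EXT | N_INDR;
	n.n_sect      = NO_SECT;
	n.n_desc      = 0;
	n.n_value     = pool->add("_old");		// offset of the name being re-exported
#endif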
420
421 template <typename A>
422 uint8_t SymbolTableAtom<A>::classicOrdinalForProxy(const ld::Atom* atom)
423 {
424 assert(atom->definition() == ld::Atom::definitionProxy);
425 // when linking for flat-namespace ordinals are always zero
426 if ( _options.nameSpace() != Options::kTwoLevelNameSpace )
427 return 0;
428 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
429 // when linking -undefined dynamic_lookup, unbound symbols use DYNAMIC_LOOKUP_ORDINAL
430 if ( dylib == NULL ) {
431 if (_options.undefinedTreatment() == Options::kUndefinedDynamicLookup )
432 return DYNAMIC_LOOKUP_ORDINAL;
433 if (_options.allowedUndefined(atom->name()) )
434 return DYNAMIC_LOOKUP_ORDINAL;
435 }
436 assert(dylib != NULL);
437 int ord = this->_writer.dylibToOrdinal(dylib);
438 if ( ord == BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE )
439 return EXECUTABLE_ORDINAL;
440 return ord;
441 }
442
443
444 template <typename A>
445 void SymbolTableAtom<A>::addImport(const ld::Atom* atom, StringPoolAtom* pool)
446 {
447 macho_nlist<P> entry;
448
449 // set n_strx
450 entry.set_n_strx(pool->add(atom->name()));
451
452 // set n_type
453 if ( this->_options.outputKind() == Options::kObjectFile ) {
454 if ( (atom->scope() == ld::Atom::scopeLinkageUnit)
455 && (atom->definition() == ld::Atom::definitionTentative) )
456 entry.set_n_type(N_UNDF | N_EXT | N_PEXT);
457 else
458 entry.set_n_type(N_UNDF | N_EXT);
459 }
460 else {
461 if ( this->_options.prebind() )
462 entry.set_n_type(N_PBUD | N_EXT);
463 else
464 entry.set_n_type(N_UNDF | N_EXT);
465 }
466
467 // set n_sect
468 entry.set_n_sect(0);
469
470 uint16_t desc = 0;
471 if ( this->_options.outputKind() != Options::kObjectFile ) {
472 uint8_t ordinal = this->classicOrdinalForProxy(atom);
473 //fprintf(stderr, "ordinal=%u from reader=%p for symbol=%s\n", ordinal, atom->getFile(), atom->getName());
474 SET_LIBRARY_ORDINAL(desc, ordinal);
475
476 #if 0
477 // set n_desc ( high byte is library ordinal, low byte is reference type )
478 std::map<const ObjectFile::Atom*,ObjectFile::Atom*>::iterator pos = fStubsMap.find(atom);
479 if ( pos != fStubsMap.end() || ( strncmp(atom->getName(), ".objc_class_name_", 17) == 0) )
480 desc |= REFERENCE_FLAG_UNDEFINED_LAZY;
481 else
482 desc |= REFERENCE_FLAG_UNDEFINED_NON_LAZY;
483 #endif
484 }
485 else if ( atom->definition() == ld::Atom::definitionTentative ) {
486 uint8_t align = atom->alignment().powerOf2;
487 // always record custom alignment of common symbols to match what compiler does
488 SET_COMM_ALIGN(desc, align);
489 }
490 if ( (this->_options.outputKind() != Options::kObjectFile)
491 && (atom->definition() == ld::Atom::definitionProxy)
492 && (atom->combine() == ld::Atom::combineByName) ) {
493 desc |= N_REF_TO_WEAK;
494 }
495 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(atom->file());
496 if ( atom->weakImported() || ((dylib != NULL) && dylib->forcedWeakLinked()) )
497 desc |= N_WEAK_REF;
498 entry.set_n_desc(desc);
499
500 // set n_value, zero for import proxy and size for tentative definition
501 if ( atom->definition() == ld::Atom::definitionTentative )
502 entry.set_n_value(atom->size());
503 else
504 entry.set_n_value(0);
505
506 // add to array
507 _imports.push_back(entry);
508 }
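// Worked example (illustration only): for an undefined symbol bound to the
// third dylib on the link line (two-level namespace), addImport() above yields
// an nlist whose n_desc carries the library ordinal in its high byte:
#if 0
	struct nlist_64 n;
	n.n_un.n_strx = pool->add("_objc_msgSend");	// 'pool' is the string pool atom
	n.n_type      = N_UNDF | N_EXT;
	n.n_sect      = NO_SECT;
	n.n_desc      = 0;
	SET_LIBRARY_ORDINAL(n.n_desc, 3);		// high byte = library ordinal 3
	n.n_value     = 0;						// imports get 0; tentative defs get their size
#endif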
509
510 template <typename A>
511 uint8_t SymbolTableAtom<A>::sectionIndexForStab(const ld::relocatable::File::Stab& stab)
512 {
513 // in FUN stabs, n_sect field is 0 for start FUN and 1 for end FUN
514 if ( stab.type == N_FUN )
515 return stab.other;
516 else if ( stab.type == N_GSYM )
517 return 0;
518 else if ( stab.atom != NULL )
519 return stab.atom->machoSection();
520 else
521 return stab.other;
522 }
523
524
525 template <typename A>
526 uint64_t SymbolTableAtom<A>::valueForStab(const ld::relocatable::File::Stab& stab)
527 {
528 switch ( stab.type ) {
529 case N_FUN:
530 if ( stab.atom == NULL ) {
531 // <rdar://problem/5591394> Add support to ld64 for N_FUN stabs when used for symbolic constants
532 return stab.value;
533 }
534 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
535 // end of function N_FUN has size
536 return stab.atom->size();
537 }
538 else {
539 // start of function N_FUN has address
540 return stab.atom->finalAddress();
541 }
542 case N_LBRAC:
543 case N_RBRAC:
544 case N_SLINE:
545 if ( stab.atom == NULL )
546 // some weird assembly files have slines not associated with a function
547 return stab.value;
548 else
549 // all these stab types need their value changed from an offset in the atom to an address
550 return stab.atom->finalAddress() + stab.value;
551 case N_STSYM:
552 case N_LCSYM:
553 case N_BNSYM:
554 // all these need address of atom
555 if ( stab.atom != NULL )
556 return stab.atom->finalAddress();
557 else
558 return 0; // <rdar://problem/7811357> work around for mismatch N_BNSYM
559 case N_ENSYM:
560 return stab.atom->size();
561 case N_SO:
562 if ( stab.atom == NULL ) {
563 return 0;
564 }
565 else {
566 if ( (stab.string == NULL) || (strlen(stab.string) == 0) ) {
567 // end of translation unit N_SO has address of end of last atom
568 return stab.atom->finalAddress() + stab.atom->size();
569 }
570 else {
571 // start of translation unit N_SO has address of first atom
572 return stab.atom->finalAddress();
573 }
574 }
575 break;
576 default:
577 return stab.value;
578 }
579 }
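// Worked example (illustration only): a function "_foo" at 0x1000 with size
// 0x40 is normally described by a pair of N_FUN stabs.  Per the rules above,
// the opening stab (non-empty string) carries the address and the closing stab
// (empty string) carries the size:
//
//     N_FUN  "_foo:F(0,1)"   n_value = 0x1000	(finalAddress)
//     N_FUN  ""              n_value = 0x40	(atom size)
//
// N_SLINE/N_LBRAC/N_RBRAC values are rebased from atom-relative offsets to
// absolute addresses, e.g. an N_SLINE at offset 0x10 inside _foo becomes 0x1010.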
580
581 template <typename A>
582 uint32_t SymbolTableAtom<A>::stringOffsetForStab(const ld::relocatable::File::Stab& stab, StringPoolAtom* pool)
583 {
584 switch (stab.type) {
585 case N_SO:
586 if ( (stab.string == NULL) || stab.string[0] == '\0' ) {
587 return pool->emptyString();
588 break;
589 }
590 // fall through to uniquing case
591 case N_SOL:
592 case N_BINCL:
593 case N_EXCL:
594 return pool->addUnique(stab.string);
595 break;
596 default:
597 if ( stab.string == NULL )
598 return 0;
599 else if ( stab.string[0] == '\0' )
600 return pool->emptyString();
601 else
602 return pool->add(stab.string);
603 }
604 return 0;
605 }
606
607
608
609 template <typename A>
610 bool SymbolTableAtom<A>::hasStabs(uint32_t& ssos, uint32_t& ssoe, uint32_t& sos, uint32_t& soe)
611 {
612 ssos = _stabsStringsOffsetStart;
613 ssoe = _stabsStringsOffsetEnd;
614 sos = _stabsIndexStart * sizeof(macho_nlist<P>);
615 soe = _stabsIndexEnd * sizeof(macho_nlist<P>);
616 return ( (_stabsIndexStart != _stabsIndexEnd) || (_stabsStringsOffsetStart != _stabsStringsOffsetEnd) );
617 }
618
619
620 template <typename A>
621 void SymbolTableAtom<A>::encode()
622 {
623 uint32_t symbolIndex = 0;
624
625 // make nlist entries for all local symbols
626 std::vector<const ld::Atom*>& localAtoms = this->_writer._localAtoms;
627 std::vector<const ld::Atom*>& globalAtoms = this->_writer._exportedAtoms;
628 _locals.reserve(localAtoms.size()+this->_state.stabs.size());
629 this->_writer._localSymbolsStartIndex = 0;
630 // make nlist entries for all debug notes
631 _stabsIndexStart = symbolIndex;
632 _stabsStringsOffsetStart = this->_writer._stringPoolAtom->currentOffset();
633 for (std::vector<ld::relocatable::File::Stab>::const_iterator sit=this->_state.stabs.begin(); sit != this->_state.stabs.end(); ++sit) {
634 macho_nlist<P> entry;
635 entry.set_n_type(sit->type);
636 entry.set_n_sect(sectionIndexForStab(*sit));
637 entry.set_n_desc(sit->desc);
638 entry.set_n_value(valueForStab(*sit));
639 entry.set_n_strx(stringOffsetForStab(*sit, this->_writer._stringPoolAtom));
640 _locals.push_back(entry);
641 ++symbolIndex;
642 }
643 _stabsIndexEnd = symbolIndex;
644 _stabsStringsOffsetEnd = this->_writer._stringPoolAtom->currentOffset();
645 for (std::vector<const ld::Atom*>::const_iterator it=localAtoms.begin(); it != localAtoms.end(); ++it) {
646 const ld::Atom* atom = *it;
647 if ( this->addLocal(atom, this->_writer._stringPoolAtom) )
648 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
649 }
650 this->_writer._localSymbolsCount = symbolIndex;
651
652
653 // make nlist entries for all global symbols
654 _globals.reserve(globalAtoms.size());
655 this->_writer._globalSymbolsStartIndex = symbolIndex;
656 for (std::vector<const ld::Atom*>::const_iterator it=globalAtoms.begin(); it != globalAtoms.end(); ++it) {
657 const ld::Atom* atom = *it;
658 this->addGlobal(atom, this->_writer._stringPoolAtom);
659 this->_writer._atomToSymbolIndex[atom] = symbolIndex++;
660 }
661 this->_writer._globalSymbolsCount = symbolIndex - this->_writer._globalSymbolsStartIndex;
662
663 // make nlist entries for all undefined (imported) symbols
664 std::vector<const ld::Atom*>& importAtoms = this->_writer._importedAtoms;
665 _imports.reserve(importAtoms.size());
666 this->_writer._importSymbolsStartIndex = symbolIndex;
667 for (std::vector<const ld::Atom*>::const_iterator it=importAtoms.begin(); it != importAtoms.end(); ++it) {
668 this->addImport(*it, this->_writer._stringPoolAtom);
669 this->_writer._atomToSymbolIndex[*it] = symbolIndex++;
670 }
671 this->_writer._importSymbolsCount = symbolIndex - this->_writer._importSymbolsStartIndex;
672 }
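// Resulting layout (illustration only): encode() above fills the symbol table
// in the classic order -- debug stabs, then non-stab locals, then exported
// globals, then undefined imports -- and records the index ranges in the
// writer.  Those ranges are what ultimately populate LC_DYSYMTAB, roughly:
#if 0
	struct dysymtab_command dy;				// from <mach-o/loader.h>
	dy.ilocalsym  = 0;						// stabs + locals start at index 0
	dy.nlocalsym  = localSymbolsCount;		// hypothetical counts that mirror
	dy.iextdefsym = globalSymbolsStartIndex;//   the _writer fields set above
	dy.nextdefsym = globalSymbolsCount;
	dy.iundefsym  = importSymbolsStartIndex;
	dy.nundefsym  = importSymbolsCount;
#endif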
673
674 template <typename A>
675 uint64_t SymbolTableAtom<A>::size() const
676 {
677 return sizeof(macho_nlist<P>) * (_locals.size() + _globals.size() + _imports.size());
678 }
679
680 template <typename A>
681 void SymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
682 {
683 memcpy(&buffer[this->_writer._localSymbolsStartIndex*sizeof(macho_nlist<P>)], &_locals[0],
684 this->_writer._localSymbolsCount*sizeof(macho_nlist<P>));
685 memcpy(&buffer[this->_writer._globalSymbolsStartIndex*sizeof(macho_nlist<P>)], &_globals[0],
686 this->_writer._globalSymbolsCount*sizeof(macho_nlist<P>));
687 memcpy(&buffer[this->_writer._importSymbolsStartIndex *sizeof(macho_nlist<P>)], &_imports[0],
688 this->_writer._importSymbolsCount*sizeof(macho_nlist<P>));
689 }
690
691
692
693
694 class RelocationsAtomAbstract : public ClassicLinkEditAtom
695 {
696 public:
697 RelocationsAtomAbstract(const Options& opts, ld::Internal& state,
698 OutputFile& writer, const ld::Section& sect,
699 unsigned int pointerSize)
700 : ClassicLinkEditAtom(opts, state, writer, sect, pointerSize) { }
701
702 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) = 0;
703 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) = 0;
704 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) = 0;
705 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) = 0;
706 virtual uint64_t relocBaseAddress(ld::Internal& state) = 0;
707 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
708 const ld::Atom* inAtom, uint32_t offsetInAtom,
709 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
710 const ld::Atom* toTarget, uint64_t toAddend,
711 const ld::Atom* fromTarget, uint64_t fromAddend) = 0;
712 protected:
713 uint32_t symbolIndex(const ld::Atom* atom) const;
714
715 };
716
717
718
719 uint32_t RelocationsAtomAbstract::symbolIndex(const ld::Atom* atom) const
720 {
721 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
722 if ( pos != this->_writer._atomToSymbolIndex.end() )
723 return pos->second;
724 fprintf(stderr, "_atomToSymbolIndex content:\n");
725 for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
726 fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
727 }
728 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
729 }
730
731
732 template <typename A>
733 class LocalRelocationsAtom : public RelocationsAtomAbstract
734 {
735 public:
736 LocalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
737 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
738
739 // overrides of ld::Atom
740 virtual const char* name() const { return "local relocations"; }
741 virtual uint64_t size() const;
742 virtual void copyRawContent(uint8_t buffer[]) const;
743 // overrides of ClassicLinkEditAtom
744 virtual void encode() {}
745 // overrides of RelocationsAtomAbstract
746 virtual void addPointerReloc(uint64_t addr, uint32_t symNum);
747 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
748 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
749 virtual uint64_t relocBaseAddress(ld::Internal& state);
750 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum);
751 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
752 const ld::Atom* inAtom, uint32_t offsetInAtom,
753 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
754 const ld::Atom* toTarget, uint64_t toAddend,
755 const ld::Atom* fromTarget, uint64_t fromAddend) { }
756
757 private:
758 typedef typename A::P P;
759 typedef typename A::P::E E;
760 typedef typename A::P::uint_t pint_t;
761
762 std::vector<macho_relocation_info<P> > _relocs;
763
764 static ld::Section _s_section;
765 };
766
767 template <typename A>
768 ld::Section LocalRelocationsAtom<A>::_s_section("__LINKEDIT", "__local_relocs", ld::Section::typeLinkEdit, true);
769
770
771 template <>
772 uint64_t LocalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
773 {
774 if ( _options.outputKind() == Options::kKextBundle ) {
775 // for kext bundles the reloc base address starts at __TEXT segment
776 return _options.baseAddress();
777 }
778 // for all other kinds, the x86_64 reloc base address starts at __DATA segment
779 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
780 ld::Internal::FinalSection* sect = *sit;
781 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
782 return sect->address;
783 }
784 throw "__DATA segment not found";
785 }
786
787 template <typename A>
788 uint64_t LocalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
789 {
790 return _options.baseAddress();
791 }
792
793 template <typename A>
794 void LocalRelocationsAtom<A>::addPointerReloc(uint64_t addr, uint32_t symNum)
795 {
796 macho_relocation_info<P> reloc;
797 reloc.set_r_address(addr);
798 reloc.set_r_symbolnum(symNum);
799 reloc.set_r_pcrel(false);
800 reloc.set_r_length();
801 reloc.set_r_extern(false);
802 reloc.set_r_type(GENERIC_RELOC_VANILLA);
803 _relocs.push_back(reloc);
804 }
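// Worked example (illustration only): a pointer slot 0x10 bytes past the
// image's reloc base (see relocBaseAddress) becomes a local relocation entry
// like this (struct relocation_info from <mach-o/reloc.h>); dyld only needs to
// slide it, so no symbol is referenced:
#if 0
	struct relocation_info r;
	r.r_address   = 0x10;					// relative to relocBaseAddress()
	r.r_symbolnum = 3;						// non-extern: section ordinal of the target
	r.r_pcrel     = 0;
	r.r_length    = 3;						// 2^3 = 8 bytes for a 64-bit pointer
	r.r_extern    = 0;
	r.r_type      = GENERIC_RELOC_VANILLA;
#endif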
805
806 template <typename A>
807 void LocalRelocationsAtom<A>::addTextReloc(uint64_t addr, ld::Fixup::Kind kind, uint64_t targetAddr, uint32_t symNum)
808 {
809 }
810
811
812 template <typename A>
813 uint64_t LocalRelocationsAtom<A>::size() const
814 {
815 return _relocs.size() * sizeof(macho_relocation_info<P>);
816 }
817
818 template <typename A>
819 void LocalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
820 {
821 memcpy(buffer, &_relocs[0], _relocs.size()*sizeof(macho_relocation_info<P>));
822 }
823
824
825
826
827
828
829 template <typename A>
830 class ExternalRelocationsAtom : public RelocationsAtomAbstract
831 {
832 public:
833 ExternalRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
834 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
835
836 // overrides of ld::Atom
837 virtual const char* name() const { return "external relocations"; }
838 virtual uint64_t size() const;
839 virtual void copyRawContent(uint8_t buffer[]) const;
840 // overrides of ClassicLinkEditAtom
841 virtual void encode() {}
842 // overrides of RelocationsAtomAbstract
843 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
844 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
845 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*);
846 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*);
847 virtual uint64_t relocBaseAddress(ld::Internal& state);
848 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
849 const ld::Atom* inAtom, uint32_t offsetInAtom,
850 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
851 const ld::Atom* toTarget, uint64_t toAddend,
852 const ld::Atom* fromTarget, uint64_t fromAddend) { }
853
854
855 private:
856 typedef typename A::P P;
857 typedef typename A::P::E E;
858 typedef typename A::P::uint_t pint_t;
859
860 struct LocAndAtom {
861 LocAndAtom(uint64_t l, const ld::Atom* a) : loc(l), atom(a), symbolIndex(0) {}
862
863 uint64_t loc;
864 const ld::Atom* atom;
865 uint32_t symbolIndex;
866
867 bool operator<(const LocAndAtom& rhs) const {
868 // sort first by symbol number
869 if ( this->symbolIndex != rhs.symbolIndex )
870 return (this->symbolIndex < rhs.symbolIndex);
871 // then sort all uses of the same symbol by address
872 return (this->loc < rhs.loc);
873 }
874
875 };
876
877 static uint32_t pointerReloc();
878 static uint32_t callReloc();
879
880 mutable std::vector<LocAndAtom> _pointerLocations;
881 mutable std::vector<LocAndAtom> _callSiteLocations;
882
883 static ld::Section _s_section;
884 };
885
886 template <typename A>
887 ld::Section ExternalRelocationsAtom<A>::_s_section("__LINKEDIT", "__extrn_relocs", ld::Section::typeLinkEdit, true);
888
889 template <>
890 uint64_t ExternalRelocationsAtom<x86_64>::relocBaseAddress(ld::Internal& state)
891 {
892 // for x86_64 the reloc base address starts at __DATA segment
893 for (std::vector<ld::Internal::FinalSection*>::iterator sit = state.sections.begin(); sit != state.sections.end(); ++sit) {
894 ld::Internal::FinalSection* sect = *sit;
895 if ( strcmp(sect->segmentName(), "__DATA") == 0 )
896 return sect->address;
897 }
898 throw "__DATA segment not found";
899 }
900
901 template <typename A>
902 uint64_t ExternalRelocationsAtom<A>::relocBaseAddress(ld::Internal& state)
903 {
904 return 0;
905 }
906
907 template <typename A>
908 void ExternalRelocationsAtom<A>::addExternalPointerReloc(uint64_t addr, const ld::Atom* target)
909 {
910 _pointerLocations.push_back(LocAndAtom(addr, target));
911 }
912
913 template <typename A>
914 void ExternalRelocationsAtom<A>::addExternalCallSiteReloc(uint64_t addr, const ld::Atom* target)
915 {
916 _callSiteLocations.push_back(LocAndAtom(addr, target));
917 }
918
919
920 template <typename A>
921 uint64_t ExternalRelocationsAtom<A>::size() const
922 {
923 if ( _options.outputKind() == Options::kStaticExecutable ) {
924 assert(_pointerLocations.size() == 0);
925 assert(_callSiteLocations.size() == 0);
926 }
927 return (_pointerLocations.size() + _callSiteLocations.size()) * sizeof(macho_relocation_info<P>);
928 }
929
930 #if SUPPORT_ARCH_arm_any
931 template <> uint32_t ExternalRelocationsAtom<arm>::pointerReloc() { return ARM_RELOC_VANILLA; }
932 #endif
933 template <> uint32_t ExternalRelocationsAtom<x86>::pointerReloc() { return GENERIC_RELOC_VANILLA; }
934 template <> uint32_t ExternalRelocationsAtom<x86_64>::pointerReloc() { return X86_64_RELOC_UNSIGNED; }
935
936
937 template <> uint32_t ExternalRelocationsAtom<x86_64>::callReloc() { return X86_64_RELOC_BRANCH; }
938 template <> uint32_t ExternalRelocationsAtom<x86>::callReloc() { return GENERIC_RELOC_VANILLA; }
939 template <typename A>
940 uint32_t ExternalRelocationsAtom<A>::callReloc()
941 {
942 assert(0 && "external call relocs not implemented");
943 return 0;
944 }
945
946
947 template <typename A>
948 void ExternalRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
949 {
950 macho_relocation_info<P>* r = (macho_relocation_info<P>*)buffer;
951
952 // assign symbol index, now that symbol table is built
953 for (typename std::vector<LocAndAtom>::iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it) {
954 it->symbolIndex = symbolIndex(it->atom);
955 }
956 std::sort(_pointerLocations.begin(), _pointerLocations.end());
957 for (typename std::vector<LocAndAtom>::const_iterator it = _pointerLocations.begin(); it != _pointerLocations.end(); ++it, ++r) {
958 r->set_r_address(it->loc);
959 r->set_r_symbolnum(it->symbolIndex);
960 r->set_r_pcrel(false);
961 r->set_r_length();
962 r->set_r_extern(true);
963 r->set_r_type(this->pointerReloc());
964 }
965
966 for (typename std::vector<LocAndAtom>::iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it) {
967 it->symbolIndex = symbolIndex(it->atom);
968 }
969 std::sort(_callSiteLocations.begin(), _callSiteLocations.end());
970 for (typename std::vector<LocAndAtom>::const_iterator it = _callSiteLocations.begin(); it != _callSiteLocations.end(); ++it, ++r) {
971 r->set_r_address(it->loc);
972 r->set_r_symbolnum(it->symbolIndex);
973 r->set_r_pcrel(true);
974 r->set_r_length(2);
975 r->set_r_extern(true);
976 r->set_r_type(this->callReloc());
977 }
978 }
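// Note (illustration only): the two passes above first patch in symbol indices
// (known only after SymbolTableAtom::encode() has run), then sort so that all
// fixups against the same undefined symbol are contiguous and ordered by
// address, the grouping classic dyld binding expects.  For example, entries
// (sym 7, 0x2010), (sym 3, 0x2000), (sym 7, 0x2008) are emitted as:
//
//     (sym 3, 0x2000), (sym 7, 0x2008), (sym 7, 0x2010)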
979
980
981 template <typename A>
982 class SectionRelocationsAtom : public RelocationsAtomAbstract
983 {
984 public:
985 SectionRelocationsAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
986 : RelocationsAtomAbstract(opts, state, writer, _s_section, sizeof(pint_t)) { }
987
988 // overrides of ld::Atom
989 virtual const char* name() const { return "section relocations"; }
990 virtual uint64_t size() const;
991 virtual void copyRawContent(uint8_t buffer[]) const;
992 // overrides of ClassicLinkEditAtom
993 virtual void encode();
994 // overrides of RelocationsAtomAbstract
995 virtual void addPointerReloc(uint64_t addr, uint32_t symNum) {}
996 virtual void addTextReloc(uint64_t addr, ld::Fixup::Kind k, uint64_t targetAddr, uint32_t symNum) {}
997 virtual void addExternalPointerReloc(uint64_t addr, const ld::Atom*) {}
998 virtual void addExternalCallSiteReloc(uint64_t addr, const ld::Atom*) {}
999 virtual uint64_t relocBaseAddress(ld::Internal& state) { return 0; }
1000 virtual void addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind,
1001 const ld::Atom* inAtom, uint32_t offsetInAtom,
1002 bool toTargetUsesExternalReloc ,bool fromTargetExternalReloc,
1003 const ld::Atom* toTarget, uint64_t toAddend,
1004 const ld::Atom* fromTarget, uint64_t fromAddend);
1005
1006 private:
1007 typedef typename A::P P;
1008 typedef typename A::P::E E;
1009 typedef typename A::P::uint_t pint_t;
1010
1011
1012 struct Entry {
1013 ld::Fixup::Kind kind;
1014 bool toTargetUsesExternalReloc;
1015 bool fromTargetUsesExternalReloc;
1016 const ld::Atom* inAtom;
1017 uint32_t offsetInAtom;
1018 const ld::Atom* toTarget;
1019 uint64_t toAddend;
1020 const ld::Atom* fromTarget;
1021 uint64_t fromAddend;
1022 };
1023 uint32_t sectSymNum(bool external, const ld::Atom* target);
1024 void encodeSectionReloc(ld::Internal::FinalSection* sect,
1025 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs);
1026
1027 struct SectionAndEntries {
1028 ld::Internal::FinalSection* sect;
1029 std::vector<Entry> entries;
1030 std::vector<macho_relocation_info<P> > relocs;
1031 };
1032
1033 std::vector<SectionAndEntries> _entriesBySection;
1034
1035 static ld::Section _s_section;
1036 };
1037
1038 template <typename A>
1039 ld::Section SectionRelocationsAtom<A>::_s_section("__LINKEDIT", "__sect_relocs", ld::Section::typeLinkEdit, true);
1040
1041
1042
1043
1044 template <typename A>
1045 uint64_t SectionRelocationsAtom<A>::size() const
1046 {
1047 uint32_t count = 0;
1048 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1049 const SectionAndEntries& se = *it;
1050 count += se.relocs.size();
1051 }
1052 return count * sizeof(macho_relocation_info<P>);
1053 }
1054
1055 template <typename A>
1056 void SectionRelocationsAtom<A>::copyRawContent(uint8_t buffer[]) const
1057 {
1058 uint32_t offset = 0;
1059 for(typename std::vector<SectionAndEntries>::const_iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1060 const SectionAndEntries& se = *it;
1061 memcpy(&buffer[offset], &se.relocs[0], se.relocs.size()*sizeof(macho_relocation_info<P>));
1062 offset += (se.relocs.size() * sizeof(macho_relocation_info<P>));
1063 }
1064 }
1065
1066
1067 template <>
1068 void SectionRelocationsAtom<x86_64>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1069 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1070 {
1071 macho_relocation_info<P> reloc1;
1072 macho_relocation_info<P> reloc2;
1073 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1074 bool external = entry.toTargetUsesExternalReloc;
1075 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1076 bool fromExternal = false;
1077 uint32_t fromSymbolNum = 0;
1078 if ( entry.fromTarget != NULL ) {
1079 fromExternal = entry.fromTargetUsesExternalReloc;
1080 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1081 }
1082
1083
1084 switch ( entry.kind ) {
1085 case ld::Fixup::kindStoreX86BranchPCRel32:
1086 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1087 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1088 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1089 reloc1.set_r_address(address);
1090 reloc1.set_r_symbolnum(symbolNum);
1091 reloc1.set_r_pcrel(true);
1092 reloc1.set_r_length(2);
1093 reloc1.set_r_extern(external);
1094 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1095 relocs.push_back(reloc1);
1096 break;
1097
1098 case ld::Fixup::kindStoreX86BranchPCRel8:
1099 reloc1.set_r_address(address);
1100 reloc1.set_r_symbolnum(symbolNum);
1101 reloc1.set_r_pcrel(true);
1102 reloc1.set_r_length(0);
1103 reloc1.set_r_extern(external);
1104 reloc1.set_r_type(X86_64_RELOC_BRANCH);
1105 relocs.push_back(reloc1);
1106 break;
1107
1108 case ld::Fixup::kindStoreX86PCRel32:
1109 case ld::Fixup::kindStoreTargetAddressX86PCRel32:
1110 reloc1.set_r_address(address);
1111 reloc1.set_r_symbolnum(symbolNum);
1112 reloc1.set_r_pcrel(true);
1113 reloc1.set_r_length(2);
1114 reloc1.set_r_extern(external);
1115 reloc1.set_r_type(X86_64_RELOC_SIGNED);
1116 relocs.push_back(reloc1);
1117 break;
1118
1119 case ld::Fixup::kindStoreX86PCRel32_1:
1120 reloc1.set_r_address(address);
1121 reloc1.set_r_symbolnum(symbolNum);
1122 reloc1.set_r_pcrel(true);
1123 reloc1.set_r_length(2);
1124 reloc1.set_r_extern(external);
1125 reloc1.set_r_type(X86_64_RELOC_SIGNED_1);
1126 relocs.push_back(reloc1);
1127 break;
1128
1129 case ld::Fixup::kindStoreX86PCRel32_2:
1130 reloc1.set_r_address(address);
1131 reloc1.set_r_symbolnum(symbolNum);
1132 reloc1.set_r_pcrel(true);
1133 reloc1.set_r_length(2);
1134 reloc1.set_r_extern(external);
1135 reloc1.set_r_type(X86_64_RELOC_SIGNED_2);
1136 relocs.push_back(reloc1);
1137 break;
1138
1139 case ld::Fixup::kindStoreX86PCRel32_4:
1140 reloc1.set_r_address(address);
1141 reloc1.set_r_symbolnum(symbolNum);
1142 reloc1.set_r_pcrel(true);
1143 reloc1.set_r_length(2);
1144 reloc1.set_r_extern(external);
1145 reloc1.set_r_type(X86_64_RELOC_SIGNED_4);
1146 relocs.push_back(reloc1);
1147 break;
1148
1149 case ld::Fixup::kindStoreX86PCRel32GOTLoad:
1150 case ld::Fixup::kindStoreTargetAddressX86PCRel32GOTLoad:
1151 reloc1.set_r_address(address);
1152 reloc1.set_r_symbolnum(symbolNum);
1153 reloc1.set_r_pcrel(true);
1154 reloc1.set_r_length(2);
1155 reloc1.set_r_extern(external);
1156 reloc1.set_r_type(X86_64_RELOC_GOT_LOAD);
1157 relocs.push_back(reloc1);
1158 break;
1159
1160 case ld::Fixup::kindStoreX86PCRel32GOT:
1161 reloc1.set_r_address(address);
1162 reloc1.set_r_symbolnum(symbolNum);
1163 reloc1.set_r_pcrel(true);
1164 reloc1.set_r_length(2);
1165 reloc1.set_r_extern(external);
1166 reloc1.set_r_type(X86_64_RELOC_GOT);
1167 relocs.push_back(reloc1);
1168 break;
1169
1170 case ld::Fixup::kindStoreLittleEndian64:
1171 case ld::Fixup::kindStoreTargetAddressLittleEndian64:
1172 if ( entry.fromTarget != NULL ) {
1173 // this is a pointer-diff
1174 reloc1.set_r_address(address);
1175 reloc1.set_r_symbolnum(symbolNum);
1176 reloc1.set_r_pcrel(false);
1177 reloc1.set_r_length(3);
1178 reloc1.set_r_extern(external);
1179 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1180 reloc2.set_r_address(address);
1181 reloc2.set_r_symbolnum(fromSymbolNum);
1182 reloc2.set_r_pcrel(false);
1183 reloc2.set_r_length(3);
1184 reloc2.set_r_extern(fromExternal);
1185 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1186 relocs.push_back(reloc2);
1187 relocs.push_back(reloc1);
1188 }
1189 else {
1190 // regular pointer
1191 reloc1.set_r_address(address);
1192 reloc1.set_r_symbolnum(symbolNum);
1193 reloc1.set_r_pcrel(false);
1194 reloc1.set_r_length(3);
1195 reloc1.set_r_extern(external);
1196 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1197 relocs.push_back(reloc1);
1198 }
1199 break;
1200
1201 case ld::Fixup::kindStoreLittleEndian32:
1202 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1203 if ( entry.fromTarget != NULL ) {
1204 // this is a pointer-diff
1205 reloc1.set_r_address(address);
1206 reloc1.set_r_symbolnum(symbolNum);
1207 reloc1.set_r_pcrel(false);
1208 reloc1.set_r_length(2);
1209 reloc1.set_r_extern(external);
1210 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1211 reloc2.set_r_address(address);
1212 reloc2.set_r_symbolnum(fromSymbolNum);
1213 reloc2.set_r_pcrel(false);
1214 reloc2.set_r_length(2);
1215 reloc2.set_r_extern(fromExternal);
1216 reloc2.set_r_type(X86_64_RELOC_SUBTRACTOR);
1217 relocs.push_back(reloc2);
1218 relocs.push_back(reloc1);
1219 }
1220 else {
1221 // regular pointer
1222 reloc1.set_r_address(address);
1223 reloc1.set_r_symbolnum(symbolNum);
1224 reloc1.set_r_pcrel(false);
1225 reloc1.set_r_length(2);
1226 reloc1.set_r_extern(external);
1227 reloc1.set_r_type(X86_64_RELOC_UNSIGNED);
1228 relocs.push_back(reloc1);
1229 }
1230 break;
1231 case ld::Fixup::kindStoreTargetAddressX86PCRel32TLVLoad:
1232 reloc1.set_r_address(address);
1233 reloc1.set_r_symbolnum(symbolNum);
1234 reloc1.set_r_pcrel(true);
1235 reloc1.set_r_length(2);
1236 reloc1.set_r_extern(external);
1237 reloc1.set_r_type(X86_64_RELOC_TLV);
1238 relocs.push_back(reloc1);
1239 break;
1240 default:
1241 assert(0 && "need to handle -r reloc");
1242
1243 }
1244
1245 }
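// Worked example (illustration only): for the pointer-diff case above, a
// 64-bit "_a - _b" stored at section offset 0x20 (both symbols taking external
// relocs) comes out as a pair of entries at the same r_address, SUBTRACTOR first:
//
//     X86_64_RELOC_SUBTRACTOR	r_address=0x20  r_extern=1  r_symbolnum=idx(_b)  r_length=3
//     X86_64_RELOC_UNSIGNED	r_address=0x20  r_extern=1  r_symbolnum=idx(_a)  r_length=3
//
// which matches the encoding an assembler emits for ".quad _a - _b" in a .o file.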
1246
1247
1248
1249 template <typename A>
1250 uint32_t SectionRelocationsAtom<A>::sectSymNum(bool external, const ld::Atom* target)
1251 {
1252 if ( target->definition() == ld::Atom::definitionAbsolute )
1253 return R_ABS;
1254 if ( external )
1255 return this->symbolIndex(target); // in external relocations, r_symbolnum field is symbol index
1256 else
1257 return target->machoSection(); // in non-extern relocations, r_symbolnum is mach-o section index of target
1258 }
1259
1260 template <>
1261 void SectionRelocationsAtom<x86>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1262 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1263 {
1264 macho_relocation_info<P> reloc1;
1265 macho_relocation_info<P> reloc2;
1266 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1267 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1268 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1269 bool external = entry.toTargetUsesExternalReloc;
1270 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1271 bool fromExternal = false;
1272 uint32_t fromSymbolNum = 0;
1273 if ( entry.fromTarget != NULL ) {
1274 fromExternal = entry.fromTargetUsesExternalReloc;
1275 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1276 }
1277
1278 switch ( entry.kind ) {
1279 case ld::Fixup::kindStoreX86PCRel32:
1280 case ld::Fixup::kindStoreX86BranchPCRel32:
1281 case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
1282 case ld::Fixup::kindStoreX86DtraceCallSiteNop:
1283 case ld::Fixup::kindStoreX86DtraceIsEnableSiteClear:
1284 if ( !external && (entry.toAddend != 0) ) {
1285 // use scattered reloc if target offset is non-zero
1286 sreloc1->set_r_scattered(true);
1287 sreloc1->set_r_pcrel(true);
1288 sreloc1->set_r_length(2);
1289 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1290 sreloc1->set_r_address(address);
1291 sreloc1->set_r_value(entry.toTarget->finalAddress());
1292 }
1293 else {
1294 reloc1.set_r_address(address);
1295 reloc1.set_r_symbolnum(symbolNum);
1296 reloc1.set_r_pcrel(true);
1297 reloc1.set_r_length(2);
1298 reloc1.set_r_extern(external);
1299 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1300 }
1301 relocs.push_back(reloc1);
1302 break;
1303
1304 case ld::Fixup::kindStoreX86BranchPCRel8:
1305 if ( !external && (entry.toAddend != 0) ) {
1306 // use scattered reloc if target offset is non-zero
1307 sreloc1->set_r_scattered(true);
1308 sreloc1->set_r_pcrel(true);
1309 sreloc1->set_r_length(0);
1310 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1311 sreloc1->set_r_address(address);
1312 sreloc1->set_r_value(entry.toTarget->finalAddress());
1313 }
1314 else {
1315 reloc1.set_r_address(address);
1316 reloc1.set_r_symbolnum(symbolNum);
1317 reloc1.set_r_pcrel(true);
1318 reloc1.set_r_length(0);
1319 reloc1.set_r_extern(external);
1320 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1321 }
1322 relocs.push_back(reloc1);
1323 break;
1324
1325 case ld::Fixup::kindStoreX86PCRel16:
1326 if ( !external && (entry.toAddend != 0) ) {
1327 // use scattered reloc if target offset is non-zero
1328 sreloc1->set_r_scattered(true);
1329 sreloc1->set_r_pcrel(true);
1330 sreloc1->set_r_length(1);
1331 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1332 sreloc1->set_r_address(address);
1333 sreloc1->set_r_value(entry.toTarget->finalAddress());
1334 }
1335 else {
1336 reloc1.set_r_address(address);
1337 reloc1.set_r_symbolnum(symbolNum);
1338 reloc1.set_r_pcrel(true);
1339 reloc1.set_r_length(1);
1340 reloc1.set_r_extern(external);
1341 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1342 }
1343 relocs.push_back(reloc1);
1344 break;
1345
1346 case ld::Fixup::kindStoreLittleEndian32:
1347 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1348 if ( entry.fromTarget != NULL ) {
1349 // this is a pointer-diff
1350 sreloc1->set_r_scattered(true);
1351 sreloc1->set_r_pcrel(false);
1352 sreloc1->set_r_length(2);
1353 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1354 sreloc1->set_r_type(GENERIC_RELOC_LOCAL_SECTDIFF);
1355 else
1356 sreloc1->set_r_type(GENERIC_RELOC_SECTDIFF);
1357 sreloc1->set_r_address(address);
1358 if ( entry.toTarget == entry.inAtom ) {
1359 if ( entry.toAddend > entry.toTarget->size() )
1360 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1361 else
1362 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1363 }
1364 else
1365 sreloc1->set_r_value(entry.toTarget->finalAddress());
1366 sreloc2->set_r_scattered(true);
1367 sreloc2->set_r_pcrel(false);
1368 sreloc2->set_r_length(2);
1369 sreloc2->set_r_type(GENERIC_RELOC_PAIR);
1370 sreloc2->set_r_address(0);
1371 if ( entry.fromTarget == entry.inAtom ) {
1372 if ( entry.fromAddend > entry.fromTarget->size() )
1373 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.offsetInAtom);
1374 else
1375 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1376 }
1377 else
1378 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1379 relocs.push_back(reloc1);
1380 relocs.push_back(reloc2);
1381 }
1382 else {
1383 // regular pointer
1384 if ( !external && (entry.toAddend != 0) && (entry.toTarget->symbolTableInclusion() != ld::Atom::symbolTableNotIn) ) {
1385 // use scattered reloc if target offset is non-zero into named atom (5658046)
1386 sreloc1->set_r_scattered(true);
1387 sreloc1->set_r_pcrel(false);
1388 sreloc1->set_r_length(2);
1389 sreloc1->set_r_type(GENERIC_RELOC_VANILLA);
1390 sreloc1->set_r_address(address);
1391 sreloc1->set_r_value(entry.toTarget->finalAddress());
1392 }
1393 else {
1394 reloc1.set_r_address(address);
1395 reloc1.set_r_symbolnum(symbolNum);
1396 reloc1.set_r_pcrel(false);
1397 reloc1.set_r_length(2);
1398 reloc1.set_r_extern(external);
1399 reloc1.set_r_type(GENERIC_RELOC_VANILLA);
1400 }
1401 relocs.push_back(reloc1);
1402 }
1403 break;
1404 case ld::Fixup::kindStoreX86PCRel32TLVLoad:
1405 case ld::Fixup::kindStoreX86Abs32TLVLoad:
1406 case ld::Fixup::kindStoreTargetAddressX86Abs32TLVLoad:
1407 reloc1.set_r_address(address);
1408 reloc1.set_r_symbolnum(symbolNum);
1409 reloc1.set_r_pcrel(entry.kind == ld::Fixup::kindStoreX86PCRel32TLVLoad);
1410 reloc1.set_r_length(2);
1411 reloc1.set_r_extern(external);
1412 reloc1.set_r_type(GENERIC_RLEOC_TLV);
1413 relocs.push_back(reloc1);
1414 break;
1415 default:
1416 assert(0 && "need to handle -r reloc");
1417
1418 }
1419 }
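// Note (illustration only): the scattered forms above exist because a classic
// non-extern reloc can only name a section, so "section + non-zero offset"
// would no longer identify the intended atom once atoms are reordered.  A
// scattered reloc records the target's address in r_value instead, e.g. for a
// pointer to "_table + 8" stored at section offset 0x30:
//
//     scattered  GENERIC_RELOC_VANILLA  r_address=0x30  r_value=addr(_table)
//
// with the +8 addend living in the section bytes themselves.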
1420
1421
1422
1423 #if SUPPORT_ARCH_arm_any
1424 template <>
1425 void SectionRelocationsAtom<arm>::encodeSectionReloc(ld::Internal::FinalSection* sect,
1426 const Entry& entry, std::vector<macho_relocation_info<P> >& relocs)
1427 {
1428 macho_relocation_info<P> reloc1;
1429 macho_relocation_info<P> reloc2;
1430 macho_scattered_relocation_info<P>* sreloc1 = (macho_scattered_relocation_info<P>*)&reloc1;
1431 macho_scattered_relocation_info<P>* sreloc2 = (macho_scattered_relocation_info<P>*)&reloc2;
1432 uint64_t address = entry.inAtom->finalAddress()+entry.offsetInAtom - sect->address;
1433 bool external = entry.toTargetUsesExternalReloc;
1434 uint32_t symbolNum = sectSymNum(external, entry.toTarget);
1435 bool fromExternal = false;
1436 uint32_t fromSymbolNum = 0;
1437 if ( entry.fromTarget != NULL ) {
1438 fromExternal = entry.fromTargetUsesExternalReloc;
1439 fromSymbolNum = sectSymNum(fromExternal, entry.fromTarget);
1440 }
1441
1442
1443 switch ( entry.kind ) {
1444 case ld::Fixup::kindStoreTargetAddressARMBranch24:
1445 case ld::Fixup::kindStoreARMBranch24:
1446 case ld::Fixup::kindStoreARMDtraceCallSiteNop:
1447 case ld::Fixup::kindStoreARMDtraceIsEnableSiteClear:
1448 if ( !external && (entry.toAddend != 0) ) {
1449 // use scattered reloc if target offset is non-zero
1450 sreloc1->set_r_scattered(true);
1451 sreloc1->set_r_pcrel(true);
1452 sreloc1->set_r_length(2);
1453 sreloc1->set_r_type(ARM_RELOC_BR24);
1454 sreloc1->set_r_address(address);
1455 sreloc1->set_r_value(entry.toTarget->finalAddress());
1456 }
1457 else {
1458 reloc1.set_r_address(address);
1459 reloc1.set_r_symbolnum(symbolNum);
1460 reloc1.set_r_pcrel(true);
1461 reloc1.set_r_length(2);
1462 reloc1.set_r_extern(external);
1463 reloc1.set_r_type(ARM_RELOC_BR24);
1464 }
1465 relocs.push_back(reloc1);
1466 break;
1467
1468 case ld::Fixup::kindStoreTargetAddressThumbBranch22:
1469 case ld::Fixup::kindStoreThumbBranch22:
1470 case ld::Fixup::kindStoreThumbDtraceCallSiteNop:
1471 case ld::Fixup::kindStoreThumbDtraceIsEnableSiteClear:
1472 if ( !external && (entry.toAddend != 0) ) {
1473 // use scattered reloc if target offset is non-zero
1474 sreloc1->set_r_scattered(true);
1475 sreloc1->set_r_pcrel(true);
1476 sreloc1->set_r_length(2);
1477 sreloc1->set_r_type(ARM_THUMB_RELOC_BR22);
1478 sreloc1->set_r_address(address);
1479 sreloc1->set_r_value(entry.toTarget->finalAddress());
1480 }
1481 else {
1482 reloc1.set_r_address(address);
1483 reloc1.set_r_symbolnum(symbolNum);
1484 reloc1.set_r_pcrel(true);
1485 reloc1.set_r_length(2);
1486 reloc1.set_r_extern(external);
1487 reloc1.set_r_type(ARM_THUMB_RELOC_BR22);
1488 }
1489 relocs.push_back(reloc1);
1490 break;
1491
1492 case ld::Fixup::kindStoreLittleEndian32:
1493 case ld::Fixup::kindStoreTargetAddressLittleEndian32:
1494 if ( entry.fromTarget != NULL ) {
1495 // this is a pointer-diff
1496 sreloc1->set_r_scattered(true);
1497 sreloc1->set_r_pcrel(false);
1498 sreloc1->set_r_length(2);
1499 if ( entry.toTarget->scope() == ld::Atom::scopeTranslationUnit )
1500 sreloc1->set_r_type(ARM_RELOC_LOCAL_SECTDIFF);
1501 else
1502 sreloc1->set_r_type(ARM_RELOC_SECTDIFF);
1503 sreloc1->set_r_address(address);
1504 if ( entry.toTarget == entry.inAtom ) {
1505 if ( entry.toAddend > entry.toTarget->size() )
1506 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.offsetInAtom);
1507 else
1508 sreloc1->set_r_value(entry.toTarget->finalAddress()+entry.toAddend);
1509 }
1510 else {
1511 sreloc1->set_r_value(entry.toTarget->finalAddress());
1512 }
1513 sreloc2->set_r_scattered(true);
1514 sreloc2->set_r_pcrel(false);
1515 sreloc2->set_r_length(2);
1516 sreloc2->set_r_type(ARM_RELOC_PAIR);
1517 sreloc2->set_r_address(0);
1518 if ( entry.fromTarget == entry.inAtom ) {
1519 //unsigned int pcBaseOffset = entry.inAtom->isThumb() ? 4 : 8;
1520 //if ( entry.fromAddend > pcBaseOffset )
1521 // sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend-pcBaseOffset);
1522 //else
1523 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1524 }
1525 else {
1526 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1527 }
1528 relocs.push_back(reloc1);
1529 relocs.push_back(reloc2);
1530 }
1531 else {
1532 // regular pointer
1533 if ( !external && (entry.toAddend != 0) ) {
1534 // use scattered reloc if target offset is non-zero
1535 sreloc1->set_r_scattered(true);
1536 sreloc1->set_r_pcrel(false);
1537 sreloc1->set_r_length(2);
1538 sreloc1->set_r_type(ARM_RELOC_VANILLA);
1539 sreloc1->set_r_address(address);
1540 sreloc1->set_r_value(entry.toTarget->finalAddress());
1541 }
1542 else {
1543 reloc1.set_r_address(address);
1544 reloc1.set_r_symbolnum(symbolNum);
1545 reloc1.set_r_pcrel(false);
1546 reloc1.set_r_length(2);
1547 reloc1.set_r_extern(external);
1548 reloc1.set_r_type(ARM_RELOC_VANILLA);
1549 }
1550 relocs.push_back(reloc1);
1551 }
1552 break;
1553
1554 case ld::Fixup::kindStoreARMLow16:
1555 case ld::Fixup::kindStoreARMHigh16:
1556 case ld::Fixup::kindStoreThumbLow16:
1557 case ld::Fixup::kindStoreThumbHigh16:
1558 {
1559 int len = 0;
1560 uint32_t otherHalf = 0;
1561 uint32_t value = entry.toTarget->finalAddress()+entry.toAddend;
1562 if ( entry.fromTarget != NULL )
1563 value -= (entry.fromTarget->finalAddress()+entry.fromAddend);
1564 switch ( entry.kind ) {
1565 case ld::Fixup::kindStoreARMLow16:
1566 len = 0;
1567 otherHalf = value >> 16;
1568 break;
1569 case ld::Fixup::kindStoreARMHigh16:
1570 len = 1;
1571 otherHalf = value & 0xFFFF;
1572 break;
1573 case ld::Fixup::kindStoreThumbLow16:
1574 len = 2;
1575 otherHalf = value >> 16;
1576 break;
1577 case ld::Fixup::kindStoreThumbHigh16:
1578 len = 3;
1579 otherHalf = value & 0xFFFF;
1580 break;
1581 default:
1582 break;
1583 }
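// for ARM_RELOC_HALF* relocs, r_length encodes which half is stored
// (0=ARM lo16, 1=ARM hi16, 2=Thumb lo16, 3=Thumb hi16) and the paired
// reloc carries the other 16 bits of the value in its r_address field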
1584 if ( entry.fromTarget != NULL ) {
1585 // this is a sect-diff
1586 sreloc1->set_r_scattered(true);
1587 sreloc1->set_r_pcrel(false);
1588 sreloc1->set_r_length(len);
1589 sreloc1->set_r_type(ARM_RELOC_HALF_SECTDIFF);
1590 sreloc1->set_r_address(address);
1591 sreloc1->set_r_value(entry.toTarget->finalAddress());
1592 sreloc2->set_r_scattered(true);
1593 sreloc2->set_r_pcrel(false);
1594 sreloc2->set_r_length(len);
1595 sreloc2->set_r_type(ARM_RELOC_PAIR);
1596 sreloc2->set_r_address(otherHalf);
1597 if ( entry.fromTarget == entry.inAtom )
1598 sreloc2->set_r_value(entry.fromTarget->finalAddress()+entry.fromAddend);
1599 else
1600 sreloc2->set_r_value(entry.fromTarget->finalAddress());
1601 relocs.push_back(reloc1);
1602 relocs.push_back(reloc2);
1603 }
1604 else {
1605 // this is absolute address
1606 if ( !external && (entry.toAddend != 0) ) {
1607 // use scattered reloc if target offset is non-zero
1608 sreloc1->set_r_scattered(true);
1609 sreloc1->set_r_pcrel(false);
1610 sreloc1->set_r_length(len);
1611 sreloc1->set_r_type(ARM_RELOC_HALF);
1612 sreloc1->set_r_address(address);
1613 sreloc1->set_r_value(entry.toTarget->finalAddress());
1614 reloc2.set_r_address(otherHalf);
1615 reloc2.set_r_symbolnum(0);
1616 reloc2.set_r_pcrel(false);
1617 reloc2.set_r_length(len);
1618 reloc2.set_r_extern(false);
1619 reloc2.set_r_type(ARM_RELOC_PAIR);
1620 relocs.push_back(reloc1);
1621 relocs.push_back(reloc2);
1622 }
1623 else {
1624 reloc1.set_r_address(address);
1625 reloc1.set_r_symbolnum(symbolNum);
1626 reloc1.set_r_pcrel(false);
1627 reloc1.set_r_length(len);
1628 reloc1.set_r_extern(external);
1629 reloc1.set_r_type(ARM_RELOC_HALF);
1630 reloc2.set_r_address(otherHalf); // other half
1631 reloc2.set_r_symbolnum(0);
1632 reloc2.set_r_pcrel(false);
1633 reloc2.set_r_length(len);
1634 reloc2.set_r_extern(false);
1635 reloc2.set_r_type(ARM_RELOC_PAIR);
1636 relocs.push_back(reloc1);
1637 relocs.push_back(reloc2);
1638 }
1639 }
1640 }
1641 break;
1642
1643 default:
1644 assert(0 && "need to handle -r reloc");
1645
1646 }
1647 }
1648 #endif
1649
1650
1651
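// Records one Entry per fixup that needs a section (-r style) relocation.
// Entries are grouped by output section; encode() later converts each Entry
// into one or two relocation records.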
1652 template <typename A>
1653 void SectionRelocationsAtom<A>::addSectionReloc(ld::Internal::FinalSection* sect, ld::Fixup::Kind kind,
1654 const ld::Atom* inAtom, uint32_t offsetInAtom,
1655 bool toTargetUsesExternalReloc, bool fromTargetExternalReloc,
1656 const ld::Atom* toTarget, uint64_t toAddend,
1657 const ld::Atom* fromTarget, uint64_t fromAddend)
1658 {
1659 Entry entry;
1660 entry.kind = kind;
1661 entry.toTargetUsesExternalReloc = toTargetUsesExternalReloc;
1662 entry.fromTargetUsesExternalReloc = fromTargetExternalReloc;
1663 entry.inAtom = inAtom;
1664 entry.offsetInAtom = offsetInAtom;
1665 entry.toTarget = toTarget;
1666 entry.toAddend = toAddend;
1667 entry.fromTarget = fromTarget;
1668 entry.fromAddend = fromAddend;
1669
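	// cache the most recently used section so a run of relocs in the same
	// section does not repeat the linear search below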
1670 static ld::Internal::FinalSection* lastSection = NULL;
1671 static SectionAndEntries* lastSectionAndEntries = NULL;
1672
1673 if ( sect != lastSection ) {
1674 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1675 if ( sect == it->sect ) {
1676 lastSection = sect;
1677 lastSectionAndEntries = &*it;
1678 break;
1679 }
1680 }
1681 if ( sect != lastSection ) {
1682 SectionAndEntries tmp;
1683 tmp.sect = sect;
1684 _entriesBySection.push_back(tmp);
1685 lastSection = sect;
1686 lastSectionAndEntries = &_entriesBySection.back();
1687 }
1688 }
1689 lastSectionAndEntries->entries.push_back(entry);
1690 }
1691
1692 template <typename A>
1693 void SectionRelocationsAtom<A>::encode()
1694 {
1695 // convert each Entry record to one or two reloc records
1696 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1697 SectionAndEntries& se = *it;
1698 for(typename std::vector<Entry>::iterator eit=se.entries.begin(); eit != se.entries.end(); ++eit) {
1699 encodeSectionReloc(se.sect, *eit, se.relocs);
1700 }
1701 }
1702
1703 // update sections with start index and count of relocs
1704 uint32_t index = 0;
1705 for(typename std::vector<SectionAndEntries>::iterator it=_entriesBySection.begin(); it != _entriesBySection.end(); ++it) {
1706 SectionAndEntries& se = *it;
1707 se.sect->relocStart = index;
1708 se.sect->relocCount = se.relocs.size();
1709 index += se.sect->relocCount;
1710 }
1711
1712 }
1713
1714
1715
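// Builds the indirect symbol table (__LINKEDIT,__ind_sym_tab): an array of
// 32-bit symbol table indexes, one per atom in each stub, lazy pointer, and
// non-lazy pointer section, referenced from the LC_DYSYMTAB load command.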
1716 template <typename A>
1717 class IndirectSymbolTableAtom : public ClassicLinkEditAtom
1718 {
1719 public:
1720 IndirectSymbolTableAtom(const Options& opts, ld::Internal& state, OutputFile& writer)
1721 : ClassicLinkEditAtom(opts, state, writer, _s_section, sizeof(pint_t)) { }
1722
1723 // overrides of ld::Atom
1724 virtual const char* name() const { return "indirect symbol table"; }
1725 virtual uint64_t size() const;
1726 virtual void copyRawContent(uint8_t buffer[]) const;
1727 // overrides of ClassicLinkEditAtom
1728 virtual void encode();
1729
1730 private:
1731 typedef typename A::P P;
1732 typedef typename A::P::E E;
1733 typedef typename A::P::uint_t pint_t;
1734
1735 void encodeStubSection(ld::Internal::FinalSection* sect);
1736 void encodeLazyPointerSection(ld::Internal::FinalSection* sect);
1737 void encodeNonLazyPointerSection(ld::Internal::FinalSection* sect);
1738 uint32_t symIndexOfStubAtom(const ld::Atom*);
1739 uint32_t symIndexOfLazyPointerAtom(const ld::Atom*);
1740 uint32_t symIndexOfNonLazyPointerAtom(const ld::Atom*);
1741 uint32_t symbolIndex(const ld::Atom*);
1742 bool kextBundlesDontHaveIndirectSymbolTable();
1743
1744
1745 std::vector<uint32_t> _entries;
1746
1747 static ld::Section _s_section;
1748 };
1749
1750 template <typename A>
1751 ld::Section IndirectSymbolTableAtom<A>::_s_section("__LINKEDIT", "__ind_sym_tab", ld::Section::typeLinkEdit, true);
1752
1753
1754
1755
1756 template <typename A>
1757 uint32_t IndirectSymbolTableAtom<A>::symbolIndex(const ld::Atom* atom)
1758 {
1759 std::map<const ld::Atom*, uint32_t>::iterator pos = this->_writer._atomToSymbolIndex.find(atom);
1760 if ( pos != this->_writer._atomToSymbolIndex.end() )
1761 return pos->second;
1762 //fprintf(stderr, "_atomToSymbolIndex content:\n");
1763 //for(std::map<const ld::Atom*, uint32_t>::iterator it = this->_writer._atomToSymbolIndex.begin(); it != this->_writer._atomToSymbolIndex.end(); ++it) {
1764 // fprintf(stderr, "%p(%s) => %d\n", it->first, it->first->name(), it->second);
1765 //}
1766 throwf("internal error: atom not found in symbolIndex(%s)", atom->name());
1767 }
1768
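// A stub's indirect symbol entry is the symbol its lazy pointer ultimately
// binds to, so follow the stub's fixup to its lazy pointer and resolve from there.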
1769 template <typename A>
1770 uint32_t IndirectSymbolTableAtom<A>::symIndexOfStubAtom(const ld::Atom* stubAtom)
1771 {
1772 for (ld::Fixup::iterator fit = stubAtom->fixupsBegin(); fit != stubAtom->fixupsEnd(); ++fit) {
1773 if ( fit->binding == ld::Fixup::bindingDirectlyBound ) {
1774 assert((fit->u.target->contentType() == ld::Atom::typeLazyPointer)
1775 || (fit->u.target->contentType() == ld::Atom::typeLazyDylibPointer));
1776 return symIndexOfLazyPointerAtom(fit->u.target);
1777 }
1778 }
1779 throw "internal error: stub missing fixup to lazy pointer";
1780 }
1781
1782
1783 template <typename A>
1784 uint32_t IndirectSymbolTableAtom<A>::symIndexOfLazyPointerAtom(const ld::Atom* lpAtom)
1785 {
1786 for (ld::Fixup::iterator fit = lpAtom->fixupsBegin(); fit != lpAtom->fixupsEnd(); ++fit) {
1787 if ( fit->kind == ld::Fixup::kindLazyTarget ) {
1788 assert(fit->binding == ld::Fixup::bindingDirectlyBound);
1789 return symbolIndex(fit->u.target);
1790 }
1791 }
1792 throw "internal error: lazy pointer missing fixupLazyTarget fixup";
1793 }
1794
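// Chooses the indirect table entry for a non-lazy pointer: a real symbol index
// when the pointer must be bound symbolically (in .o files, weak definitions,
// flat-namespace globals, or dylib proxies), INDIRECT_SYMBOL_LOCAL when the
// target is already resolved locally, and INDIRECT_SYMBOL_ABS for the
// fixup-less ImageLoader cache slot.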
1795 template <typename A>
1796 uint32_t IndirectSymbolTableAtom<A>::symIndexOfNonLazyPointerAtom(const ld::Atom* nlpAtom)
1797 {
1798 //fprintf(stderr, "symIndexOfNonLazyPointerAtom(%p) %s\n", nlpAtom, nlpAtom->name());
1799 for (ld::Fixup::iterator fit = nlpAtom->fixupsBegin(); fit != nlpAtom->fixupsEnd(); ++fit) {
1800 // non-lazy-pointer to a stripped symbol => no symbol index
1801 if ( fit->clusterSize != ld::Fixup::k1of1 )
1802 return INDIRECT_SYMBOL_LOCAL;
1803 const ld::Atom* target;
1804 switch ( fit->binding ) {
1805 case ld::Fixup::bindingDirectlyBound:
1806 target = fit->u.target;
1807 break;
1808 case ld::Fixup::bindingsIndirectlyBound:
1809 target = _state.indirectBindingTable[fit->u.bindingIndex];
1810 break;
1811 default:
1812 throw "internal error: unexpected non-lazy pointer binding";
1813 }
1814 bool targetIsGlobal = (target->scope() == ld::Atom::scopeGlobal);
1815 switch ( target->definition() ) {
1816 case ld::Atom::definitionRegular:
1817 if ( targetIsGlobal ) {
1818 if ( _options.outputKind() == Options::kObjectFile ) {
1819 // nlpointer to global symbol uses indirect symbol table in .o files
1820 return symbolIndex(target);
1821 }
1822 else if ( target->combine() == ld::Atom::combineByName ) {
1823 // dyld needs to bind nlpointer to global weak def
1824 return symbolIndex(target);
1825 }
1826 else if ( _options.nameSpace() != Options::kTwoLevelNameSpace ) {
1827 // dyld needs to bind nlpointer to global def linked for flat namespace
1828 return symbolIndex(target);
1829 }
1830 }
1831 break;
1832 case ld::Atom::definitionTentative:
1833 case ld::Atom::definitionAbsolute:
1834 if ( _options.outputKind() == Options::kObjectFile ) {
1835 // tentative def in .o file always uses symbol index
1836 return symbolIndex(target);
1837 }
1838 // dyld needs to bind nlpointer to global def linked for flat namespace
1839 if ( targetIsGlobal && _options.nameSpace() != Options::kTwoLevelNameSpace )
1840 return symbolIndex(target);
1841 break;
1842 case ld::Atom::definitionProxy:
1843 // dyld needs to bind nlpointer to something in another dylib
1844 {
1845 const ld::dylib::File* dylib = dynamic_cast<const ld::dylib::File*>(target->file());
1846 if ( (dylib != NULL) && dylib->willBeLazyLoadedDylib() )
1847 throwf("illegal data reference to %s in lazy loaded dylib %s", target->name(), dylib->path());
1848 }
1849 return symbolIndex(target);
1850 }
1851 }
1852 if ( nlpAtom->fixupsBegin() == nlpAtom->fixupsEnd() ) {
1853 // no fixups means this is the ImageLoader cache slot
1854 return INDIRECT_SYMBOL_ABS;
1855 }
1856
1857 // The magic index INDIRECT_SYMBOL_LOCAL tells dyld that it does not need to bind
1858 // this non-lazy pointer.
1859 return INDIRECT_SYMBOL_LOCAL;
1860 }
1861
1862
1863
1864 template <typename A>
1865 void IndirectSymbolTableAtom<A>::encodeStubSection(ld::Internal::FinalSection* sect)
1866 {
1867 sect->indirectSymTabStartIndex = _entries.size();
1868 sect->indirectSymTabElementSize = sect->atoms[0]->size();
1869 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1870 _entries.push_back(symIndexOfStubAtom(*ait));
1871 }
1872 }
1873
1874 template <typename A>
1875 void IndirectSymbolTableAtom<A>::encodeLazyPointerSection(ld::Internal::FinalSection* sect)
1876 {
1877 sect->indirectSymTabStartIndex = _entries.size();
1878 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1879 _entries.push_back(symIndexOfLazyPointerAtom(*ait));
1880 }
1881 }
1882
1883 template <typename A>
1884 void IndirectSymbolTableAtom<A>::encodeNonLazyPointerSection(ld::Internal::FinalSection* sect)
1885 {
1886 sect->indirectSymTabStartIndex = _entries.size();
1887 for (std::vector<const ld::Atom*>::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
1888 _entries.push_back(symIndexOfNonLazyPointerAtom(*ait));
1889 }
1890 }
1891
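// Generic implementation: kext bundles get no indirect symbol table.
// An architecture that does need one could provide a template specialization.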
1892 template <typename A>
1893 bool IndirectSymbolTableAtom<A>::kextBundlesDontHaveIndirectSymbolTable()
1894 {
1895 return true;
1896 }
1897
1898 template <typename A>
1899 void IndirectSymbolTableAtom<A>::encode()
1900 {
1901 // non-PIE static executables should not have an indirect symbol table (the PIE case is handled below)
1902 if ( (this->_options.outputKind() == Options::kStaticExecutable) && !_options.positionIndependentExecutable() )
1903 return;
1904
1905 // x86_64 kext bundles should not have an indirect symbol table
1906 if ( (this->_options.outputKind() == Options::kKextBundle) && kextBundlesDontHaveIndirectSymbolTable() )
1907 return;
1908
1909 // slidable static executables (-static -pie) should not have an indirect symbol table
1910 if ( (this->_options.outputKind() == Options::kStaticExecutable) && this->_options.positionIndependentExecutable() )
1911 return;
1912
1913 // find all special sections that need a range of the indirect symbol table section
1914 for (std::vector<ld::Internal::FinalSection*>::iterator sit = this->_state.sections.begin(); sit != this->_state.sections.end(); ++sit) {
1915 ld::Internal::FinalSection* sect = *sit;
1916 switch ( sect->type() ) {
1917 case ld::Section::typeStub:
1918 case ld::Section::typeStubClose:
1919 this->encodeStubSection(sect);
1920 break;
1921 case ld::Section::typeLazyPointerClose:
1922 case ld::Section::typeLazyPointer:
1923 case ld::Section::typeLazyDylibPointer:
1924 this->encodeLazyPointerSection(sect);
1925 break;
1926 case ld::Section::typeNonLazyPointer:
1927 this->encodeNonLazyPointerSection(sect);
1928 break;
1929 default:
1930 break;
1931 }
1932 }
1933 }
1934
1935 template <typename A>
1936 uint64_t IndirectSymbolTableAtom<A>::size() const
1937 {
1938 return _entries.size() * sizeof(uint32_t);
1939 }
1940
1941 template <typename A>
1942 void IndirectSymbolTableAtom<A>::copyRawContent(uint8_t buffer[]) const
1943 {
1944 uint32_t* array = (uint32_t*)buffer;
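	// E::set32() stores each index using the target architecture's byte order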
1945 for(unsigned long i=0; i < _entries.size(); ++i) {
1946 E::set32(array[i], _entries[i]);
1947 }
1948 }
1949
1950
1951
1952
1953
1954
1955
1956
1957 } // namespace tool
1958 } // namespace ld
1959
1960 #endif // __LINKEDIT_CLASSIC_HPP__