/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2009 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <dlfcn.h>
#include <string.h>   // strcmp
#include <stdio.h>    // fprintf
#include <mach/machine.h>

#include <vector>
#include <map>
#include <algorithm>  // std::remove_if

#include "ld.hpp"
#include "huge.h"

namespace ld {
namespace passes {
namespace huge {

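// Predicate for std::remove_if: matches the NULL placeholders left in a
// section's atom list after an atom has been moved to the __huge section.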
class NullAtom
{
public:
    bool operator()(const ld::Atom* atom) const {
        return (atom == NULL);
    }
};

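// An 8-byte padding atom assigned to __DATA,__data; doPass() adds one when
// dirty-data moves would otherwise leave the __DATA segment with no content.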
class DataPadAtom : public ld::Atom {
public:
    DataPadAtom(ld::Internal& state)
        : ld::Atom(_s_section, ld::Atom::definitionRegular, ld::Atom::combineNever,
                   ld::Atom::scopeTranslationUnit, ld::Atom::typeUnclassified,
                   symbolTableNotIn, false, false, false, ld::Atom::Alignment(3))
        { state.addAtom(*this); }

    virtual const ld::File* file() const            { return NULL; }
    virtual const char*     name() const            { return "padding"; }
    virtual uint64_t        size() const            { return 8; }
    virtual uint64_t        objectAddress() const   { return 0; }
    virtual void            copyRawContent(uint8_t buffer[]) const { }

protected:
    virtual ~DataPadAtom() {}

    static ld::Section      _s_section;
};

ld::Section DataPadAtom::_s_section("__DATA", "__data", ld::Section::typeUnclassified);

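// Pass entry point.  Adds a pad atom when dirty-data moves would otherwise
// leave the __DATA segment empty, then (x86_64 final linked images only)
// checks whether any atom would land more than 2GB past the base address
// and, if so, moves zero-fill atoms larger than 1MB into __DATA,__huge.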
void doPass(const Options& opts, ld::Internal& state)
{
    const bool log = false;

    // <rdar://problem/26015603> add __data section if __DATA segment was gutted by dirty data removal
    if ( (opts.outputKind() == Options::kDynamicLibrary) && opts.useDataConstSegment() && opts.hasDataSymbolMoves() ) {
        uint64_t dataAtomsSize = 0;
        bool foundSegmentDATA_DIRTY = false;
        for (ld::Internal::FinalSection* sect : state.sections) {
            if ( strcmp(sect->segmentName(), "__DATA") == 0 ) {
                for (const ld::Atom* atom : sect->atoms) {
                    dataAtomsSize += atom->size();
                }
            }
            else if ( strcmp(sect->segmentName(), "__DATA_DIRTY") == 0 ) {
                foundSegmentDATA_DIRTY = true;
            }
        }
        if ( foundSegmentDATA_DIRTY && (dataAtomsSize == 0) ) {
            new DataPadAtom(state);
        }
    }

    // only make __huge section in final linked images
    if ( opts.outputKind() == Options::kObjectFile )
        return;

    // only make __huge section for x86_64
    if ( opts.architecture() != CPU_TYPE_X86_64 )
        return;

    // only needed if some (non-linkedit) atoms have an address >2GB from the base
    // address, i.e. beyond the reach of a signed 32-bit displacement
    state.usingHugeSections = false;
    uint64_t address = 0;
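    // Walk the atoms in layout order, summing their sizes to approximate how far
    // each one will end up from the base address (alignment padding is ignored).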
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        if ( sect->type() == ld::Section::typePageZero )
            continue;
        if ( sect->type() == ld::Section::typeStack )
            continue;
        for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
            const ld::Atom* atom = *ait;
            if ( (address > 0x7FFFFFFFLL) && !sect->isSectionHidden() ) {
                state.usingHugeSections = true;
                if (log) fprintf(stderr, "atom: %s is >2GB (0x%09llX), so enabling huge mode\n", atom->name(), address);
                break;
            }
            address += atom->size();
        }
        if ( state.usingHugeSections )
            break;
    }
    if ( !state.usingHugeSections )
        return;

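    // Large zero-fill atoms are typically what pushes the image past the 2GB mark;
    // grouping them into their own zero-fill section lets them be laid out apart
    // from the data that must remain reachable with 32-bit displacements.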
    // move all zero-fill atoms that are >1MB in size to a new __huge section
    ld::Internal::FinalSection* hugeSection = state.getFinalSection(*new ld::Section("__DATA", "__huge", ld::Section::typeZeroFill));
    for (std::vector<ld::Internal::FinalSection*>::iterator sit=state.sections.begin(); sit != state.sections.end(); ++sit) {
        ld::Internal::FinalSection* sect = *sit;
        if ( sect == hugeSection )
            continue;
        if ( sect->type() == ld::Section::typeZeroFill ) {
            bool movedSome = false;
            for (std::vector<const ld::Atom*>::iterator ait=sect->atoms.begin(); ait != sect->atoms.end(); ++ait) {
                const ld::Atom* atom = *ait;
                if ( atom->size() > 1024*1024 ) {
                    hugeSection->atoms.push_back(atom);
                    state.atomToSection[atom] = hugeSection;
                    if (log) fprintf(stderr, "moved to __huge: %s, size=%llu\n", atom->name(), atom->size());
                    *ait = NULL; // change atom to NULL for later bulk removal
                    movedSome = true;
                }
            }
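            // Erase the NULL placeholders left behind for the atoms that were moved.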
            if ( movedSome )
                sect->atoms.erase(std::remove_if(sect->atoms.begin(), sect->atoms.end(), NullAtom()), sect->atoms.end());
        }
    }

}


} // namespace huge
} // namespace passes
} // namespace ld