/* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
 *
 * Copyright (c) 2015 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <dlfcn.h>
#include <string.h>
#include <mach/machine.h>

#include <vector>
#include <map>
#include <algorithm>
#include <unordered_map>
#include <unordered_set>

#include "ld.hpp"
#include "code_dedup.h"

namespace ld {
namespace passes {
namespace dedup {

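// A zero-size stand-in emitted for each duplicate function. It keeps the
// duplicate's name, section, scope, and alignment, but carries a single
// follow-on fixup bound to the surviving copy, so the duplicate's symbol
// resolves to the master's address.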
class DeDupAliasAtom : public ld::Atom
{
public:
    DeDupAliasAtom(const ld::Atom* dupOf, const ld::Atom* replacement) :
        ld::Atom(dupOf->section(), ld::Atom::definitionRegular, ld::Atom::combineNever,
                 dupOf->scope(), dupOf->contentType(), ld::Atom::symbolTableIn,
                 false, false, true, dupOf->alignment()),
        _dedupOf(dupOf),
        _fixup(0, ld::Fixup::k1of1, ld::Fixup::kindNoneFollowOn, ld::Fixup::bindingDirectlyBound, replacement) {
        if ( dupOf->autoHide() )
            setAutoHide();
    }

    virtual const ld::File*     file() const                    { return _dedupOf->file(); }
    virtual const char*         translationUnitSource() const   { return NULL; }
    virtual const char*         name() const                    { return _dedupOf->name(); }
    virtual uint64_t            size() const                    { return 0; }
    virtual uint64_t            objectAddress() const           { return 0; }
    virtual void                copyRawContent(uint8_t buffer[]) const { }
    virtual ld::Fixup::iterator fixupsBegin() const             { return &((ld::Fixup*)&_fixup)[0]; }
    virtual ld::Fixup::iterator fixupsEnd() const               { return &((ld::Fixup*)&_fixup)[1]; }

private:
    const ld::Atom*             _dedupOf;
    ld::Fixup                   _fixup;
};

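// Pass-wide state shared by the helpers below: the current link state (needed to
// resolve indirectly bound fixup targets), a cache of per-atom hashes, and the
// counters used by the commented-out statistics line at the end of doPass().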
namespace {
    typedef std::unordered_map<const ld::Atom*, unsigned long> CachedHashes;

    ld::Internal*   sState = nullptr;
    CachedHashes    sSavedHashes;
    unsigned long   sHashCount = 0;
    unsigned long   sFixupCompareCount = 0;
}

// A helper for std::unordered_map<> that hashes the instructions of a function
struct atom_hashing {

    static unsigned long hash(const ld::Atom* atom) {
        auto pos = sSavedHashes.find(atom);
        if ( pos != sSavedHashes.end() )
            return pos->second;

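        // djb2-style hash (multiply by 33) over the raw instruction bytes; the
        // names of fixup targets are folded in below so that otherwise identical
        // functions calling different non-dedupable targets hash differently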
        const unsigned instructionBytes = atom->size();
        const uint8_t* instructions = atom->rawContentPointer();
        unsigned long hash = instructionBytes;
        for (unsigned i=0; i < instructionBytes; ++i) {
            hash = (hash * 33) + instructions[i];
        }
        for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
            const Atom* target = NULL;
            switch ( fit->binding ) {
                case ld::Fixup::bindingDirectlyBound:
                    target = fit->u.target;
                    break;
                case ld::Fixup::bindingsIndirectlyBound:
                    target = sState->indirectBindingTable[fit->u.bindingIndex];
                    break;
                default:
                    break;
            }
            // don't include calls to auto-hide functions in hash because they might be de-dup'ed
            switch ( fit->kind ) {
#if SUPPORT_ARCH_arm64
                case ld::Fixup::kindStoreTargetAddressARM64Branch26:
#endif
                case ld::Fixup::kindStoreTargetAddressX86BranchPCRel32:
                    if ( target && target->autoHide() )
                        continue; // don't include
                    break;
                default:
                    break;
            }
            if ( target != NULL ) {
                const char* name = target->name();
                if ( target->contentType() == ld::Atom::typeCString )
                    name = (const char*)target->rawContentPointer();
                for (const char* s = name; *s != '\0'; ++s)
                    hash = (hash * 33) + *s;
            }
        }
        ++sHashCount;
        sSavedHashes[atom] = hash;
        return hash;
    }

    unsigned long operator()(const ld::Atom* atom) const {
        return hash(atom);
    }
};


// A helper for std::unordered_map<> that compares functions
struct atom_equal {

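    // Records the pair of atoms being compared at each level of recursion so that
    // mutually recursive functions do not cause equal() to recurse forever.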
    struct BackChain {
        BackChain*   prev;
        const Atom*  inCallChain1;
        const Atom*  inCallChain2;

        bool inCallChain(const Atom* target) {
            for (BackChain* p = this; p->prev != NULL; p = p->prev) {
                if ( (p->inCallChain1 == target) || (p->inCallChain2 == target) )
                    return true;
            }
            return false;
        }
    };

    static bool sameFixups(const ld::Atom* atom1, const ld::Atom* atom2, BackChain& backChain) {
        ++sFixupCompareCount;
        //fprintf(stderr, "sameFixups(%s,%s)\n", atom1->name(), atom2->name());
        Fixup::iterator f1   = atom1->fixupsBegin();
        Fixup::iterator end1 = atom1->fixupsEnd();
        Fixup::iterator f2   = atom2->fixupsBegin();
        Fixup::iterator end2 = atom2->fixupsEnd();
        // two atoms must have same number of fixups
        if ( (end1 - f1) != (end2 - f2) )
            return false;
        // if no fixups, fixups are equal
        if ( f1 == end1 )
            return true;
        // all fixups must be the same
        do {
            if ( f1->offsetInAtom != f2->offsetInAtom )
                return false;
            if ( f1->kind != f2->kind )
                return false;
            if ( f1->clusterSize != f2->clusterSize )
                return false;
            if ( f1->binding != f2->binding )
                return false;
            const Atom* target1 = NULL;
            const Atom* target2 = NULL;
            switch ( f1->binding ) {
                case ld::Fixup::bindingDirectlyBound:
                    target1 = f1->u.target;
                    target2 = f2->u.target;
                    break;
                case ld::Fixup::bindingsIndirectlyBound:
                    target1 = sState->indirectBindingTable[f1->u.bindingIndex];
                    target2 = sState->indirectBindingTable[f2->u.bindingIndex];
                    break;
                case ld::Fixup::bindingNone:
                    break;
                default:
                    return false;
            }
            if ( target1 != target2 ) {
                // targets must match unless they are both calls to functions that will de-dup together
#if SUPPORT_ARCH_arm64
                if ( (f1->kind != ld::Fixup::kindStoreTargetAddressARM64Branch26) && (f1->kind != ld::Fixup::kindStoreTargetAddressX86BranchPCRel32) )
#else
                if ( f1->kind != ld::Fixup::kindStoreTargetAddressX86BranchPCRel32 )
#endif
                    return false;
                if ( target1->section().type() != target2->section().type() )
                    return false;
                if ( target1->section().type() != ld::Section::typeCode )
                    return false;
                // to support co-recursive functions, don't recurse into equal() for targets already in the back chain
                if ( !backChain.inCallChain(target1) || !backChain.inCallChain(target2) ) {
                    BackChain nextBackChain;
                    nextBackChain.prev = &backChain;
                    nextBackChain.inCallChain1 = target1;
                    nextBackChain.inCallChain2 = target2;
                    if ( !equal(target1, target2, nextBackChain) )
                        return false;
                }
            }

            ++f1;
            ++f2;
        } while (f1 != end1);

        return true;
    }

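    // Cheapest checks first: size, then the cached content hash, then a byte-for-byte
    // compare of the instructions, and only then the full fixup comparison above.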
    static bool equal(const ld::Atom* atom1, const ld::Atom* atom2, BackChain& backChain) {
        if ( atom1->size() != atom2->size() )
            return false;
        if ( atom_hashing::hash(atom1) != atom_hashing::hash(atom2) )
            return false;
        if ( memcmp(atom1->rawContentPointer(), atom2->rawContentPointer(), atom1->size()) != 0 )
            return false;
        bool result = sameFixups(atom1, atom2, backChain);
        //fprintf(stderr, "sameFixups(%s,%s) = %d\n", atom1->name(), atom2->name(), result);
        return result;
    }

    bool operator()(const ld::Atom* atom1, const ld::Atom* atom2) const {
        BackChain backChain = { NULL, atom1, atom2 };
        return equal(atom1, atom2, backChain);
    }
};


void doPass(const Options& opts, ld::Internal& state)
{
    const bool log = false;

    // only de-duplicate in final linked images
    if ( opts.outputKind() == Options::kObjectFile )
        return;

    // only de-duplicate for architectures that use relocations which don't store bits in the instructions
    if ( (opts.architecture() != CPU_TYPE_ARM64) && (opts.architecture() != CPU_TYPE_X86_64) )
        return;

    // support -no_deduplicate to suppress this pass
    if ( ! opts.deduplicateFunctions() )
        return;

    const bool verbose = opts.verboseDeduplicate();

    // find __text section
    ld::Internal::FinalSection* textSection = NULL;
    for (ld::Internal::FinalSection* sect : state.sections) {
        if ( (sect->type() == ld::Section::typeCode) && (strcmp(sect->sectionName(), "__text") == 0) ) {
            textSection = sect;
            break;
        }
    }
    if ( textSection == NULL )
        return;

    // build map of auto-hide functions and their duplicates
    // the key for the map is always the first element in the value vector
    // the key is always earlier in the atoms list than the matching other atoms
    sState = &state;
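    // content-based hashing (atom_hashing) and equality (atom_equal) mean every
    // byte-identical function with an equivalent fixup graph lands under the same key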
    std::unordered_map<const ld::Atom*, std::vector<const ld::Atom*>, atom_hashing, atom_equal> map;
    std::unordered_set<const ld::Atom*> masterAtoms;
    for (const ld::Atom* atom : textSection->atoms) {
        // ignore empty (alias) atoms
        if ( atom->size() == 0 )
            continue;
        if ( atom->autoHide() )
            map[atom].push_back(atom);
    }

    if ( log ) {
        for (auto& entry : map) {
            if ( entry.second.size() > 1 ) {
                printf("Found following matching functions:\n");
                for (const ld::Atom* atom : entry.second) {
                    printf(" %p %s\n", atom, atom->name());
                }
            }
        }
        fprintf(stderr, "duplicate sets count:\n");
        for (auto& entry : map)
            fprintf(stderr, " %p -> %lu\n", entry.first, entry.second.size());
    }

    // construct alias atoms to replace atoms found to be duplicates
    unsigned atomsBeingComparedCount = 0;
    uint64_t dedupSavings = 0;
    std::vector<const ld::Atom*>& textAtoms = textSection->atoms;
    std::unordered_map<const ld::Atom*, const ld::Atom*> replacementMap;
    for (auto& entry : map) {
        const ld::Atom* masterAtom = entry.first;
        std::vector<const ld::Atom*>& dups = entry.second;
        atomsBeingComparedCount += dups.size();
        if ( dups.size() == 1 )
            continue;
        if ( verbose ) {
            dedupSavings += ((dups.size() - 1) * masterAtom->size());
            fprintf(stderr, "deduplicate the following %lu functions (%llu bytes apiece):\n", dups.size(), masterAtom->size());
        }
        for (const ld::Atom* dupAtom : dups) {
            if ( verbose )
                fprintf(stderr, " %s\n", dupAtom->name());
            if ( dupAtom == masterAtom )
                continue;
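            // insert a zero-size alias just ahead of the master atom; its follow-on
            // fixup binds it to the master, so the duplicate's symbol ends up at
            // the master's address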
            const ld::Atom* aliasAtom = new DeDupAliasAtom(dupAtom, masterAtom);
            auto pos = std::find(textAtoms.begin(), textAtoms.end(), masterAtom);
            if ( pos != textAtoms.end() ) {
                textAtoms.insert(pos, aliasAtom);
                state.atomToSection[aliasAtom] = textSection;
                replacementMap[dupAtom] = aliasAtom;
            }
        }
    }
    if ( verbose ) {
        fprintf(stderr, "deduplication saved %llu bytes of __text\n", dedupSavings);
    }

    if ( log ) {
        fprintf(stderr, "replacement map:\n");
        for (auto& entry : replacementMap)
            fprintf(stderr, " %p -> %p\n", entry.first, entry.second);
    }

    // walk all atoms and replace references to dups with references to alias
    for (ld::Internal::FinalSection* sect : state.sections) {
        for (const ld::Atom* atom : sect->atoms) {
            for (ld::Fixup::iterator fit = atom->fixupsBegin(), end=atom->fixupsEnd(); fit != end; ++fit) {
                std::unordered_map<const ld::Atom*, const ld::Atom*>::iterator pos;
                switch ( fit->binding ) {
                    case ld::Fixup::bindingsIndirectlyBound:
                        pos = replacementMap.find(state.indirectBindingTable[fit->u.bindingIndex]);
                        if ( pos != replacementMap.end() )
                            state.indirectBindingTable[fit->u.bindingIndex] = pos->second;
                        break;
                    case ld::Fixup::bindingDirectlyBound:
                        pos = replacementMap.find(fit->u.target);
                        if ( pos != replacementMap.end() )
                            fit->u.target = pos->second;
                        break;
                    default:
                        break;
                }
            }
        }
    }

    if ( log ) {
        fprintf(stderr, "atoms before pruning:\n");
        for (const ld::Atom* atom : textSection->atoms)
            fprintf(stderr, " %p (size=%llu) %s\n", atom, atom->size(), atom->name());
    }
    // remove replaced atoms from section
    textSection->atoms.erase(std::remove_if(textSection->atoms.begin(), textSection->atoms.end(),
                                            [&](const ld::Atom* atom) {
                                                return (replacementMap.count(atom) != 0);
                                            }),
                             textSection->atoms.end());

    for (auto& entry : replacementMap)
        state.atomToSection.erase(entry.first);

    if ( log ) {
        fprintf(stderr, "atoms after pruning:\n");
        for (const ld::Atom* atom : textSection->atoms)
            fprintf(stderr, " %p (size=%llu) %s\n", atom, atom->size(), atom->name());
    }

    //fprintf(stderr, "hash-count=%lu, fixup-compares=%lu, atom-count=%u\n", sHashCount, sFixupCompareCount, atomsBeingComparedCount);
}


} // namespace dedup
} // namespace passes
} // namespace ld