1 /* -*- mode: C++; c-basic-offset: 4; tab-width: 4 -*-
2 *
3 * Copyright (c) 2004-2006 Apple Computer, Inc. All rights reserved.
4 *
5 * @APPLE_LICENSE_HEADER_START@
6 *
7 * This file contains Original Code and/or Modifications of Original Code
8 * as defined in and that are subject to the Apple Public Source License
9 * Version 2.0 (the 'License'). You may not use this file except in
10 * compliance with the License. Please obtain a copy of the License at
11 * http://www.opensource.apple.com/apsl/ and read it before using this
12 * file.
13 *
14 * The Original Code and all software distributed under the License are
15 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
16 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
17 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
19 * Please see the License for the specific language governing rights and
20 * limitations under the License.
21 *
22 * @APPLE_LICENSE_HEADER_END@
23 */
24
25 #define __STDC_LIMIT_MACROS
26 #include <stdint.h>
27 #include <errno.h>
28 #include <fcntl.h>
29 #include <mach/mach.h>
30 #include <mach-o/fat.h>
31 #include <sys/types.h>
32 #include <sys/stat.h>
33 #include <sys/mman.h>
34 #include <sys/param.h>
35 #include <sys/mount.h>
36 #include <libkern/OSAtomic.h>
37
38 #include "ImageLoader.h"
39
40 // in libc.a
41 extern "C" void _spin_lock(uint32_t*);
42 extern "C" void _spin_unlock(uint32_t*);
43
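// per-process statistics and address-assignment state shared by all images;
// the counters below feed ImageLoader::printStatistics()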
44 uint32_t ImageLoader::fgImagesUsedFromSharedCache = 0;
45 uint32_t ImageLoader::fgImagesWithUsedPrebinding = 0;
46 uint32_t ImageLoader::fgImagesRequiringNoFixups = 0;
47 uint32_t ImageLoader::fgTotalRebaseFixups = 0;
48 uint32_t ImageLoader::fgTotalBindFixups = 0;
49 uint32_t ImageLoader::fgTotalBindSymbolsResolved = 0;
50 uint32_t ImageLoader::fgTotalBindImageSearches = 0;
51 uint32_t ImageLoader::fgTotalLazyBindFixups = 0;
52 uint32_t ImageLoader::fgTotalPossibleLazyBindFixups = 0;
53 uint32_t ImageLoader::fgTotalSegmentsMapped = 0;
54 uint64_t ImageLoader::fgTotalBytesMapped = 0;
55 uint64_t ImageLoader::fgTotalBytesPreFetched = 0;
56 uint64_t ImageLoader::fgTotalLoadLibrariesTime;
57 uint64_t ImageLoader::fgTotalRebaseTime;
58 uint64_t ImageLoader::fgTotalBindTime;
59 uint64_t ImageLoader::fgTotalInitTime;
60 uintptr_t ImageLoader::fgNextSplitSegAddress = 0x90000000;
61 uint16_t ImageLoader::fgLoadOrdinal = 0;
62 uintptr_t Segment::fgNextPIEDylibAddress = 0;
63
64
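// common initialization used by both constructors: record the identifying
// information for this image (path, fat-file offset, device/inode, mod time)
// and reset all reference counts and state flags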
65 void ImageLoader::init(const char* path, uint64_t offsetInFat, dev_t device, ino_t inode, time_t modDate)
66 {
67 fPathHash = 0;
68 fPath = path;
69 fLogicalPath = NULL;
70 fDevice = device;
71 fInode = inode;
72 fLastModified = modDate;
73 fOffsetInFatFile = offsetInFat;
74 fLibraries = NULL;
75 fLibrariesCount = 0;
76 fDlopenReferenceCount = 0;
77 fStaticReferenceCount = 0;
78 fDynamicReferenceCount = 0;
79 fDynamicReferences = NULL;
80 fDepth = 0;
81 fLoadOrder = fgLoadOrdinal++;
82 fState = 0;
83 fAllLibraryChecksumsAndLoadAddressesMatch = false;
84 fLeaveMapped = false;
85 fNeverUnload = false;
86 fHideSymbols = false;
87 fMatchByInstallName = false;
88 fRegisteredDOF = false;
89 #if IMAGE_NOTIFY_SUPPORT
90 fAnnounced = false;
91 #endif
92 fAllLazyPointersBound = false;
93 fBeingRemoved = false;
94 fAddFuncNotified = false;
95 fPathOwnedByImage = false;
96 #if RECURSIVE_INITIALIZER_LOCK
97 fInitializerRecursiveLock = NULL;
98 #else
99 fInitializerLock = 0;
100 #endif
101 if ( fPath != NULL )
102 fPathHash = hash(fPath);
103 }
104
105
106 ImageLoader::ImageLoader(const char* path, uint64_t offsetInFat, const struct stat& info)
107 {
108 init(path, offsetInFat, info.st_dev, info.st_ino, info.st_mtime);
109 }
110
111 ImageLoader::ImageLoader(const char* moduleName)
112 {
113 init(moduleName, 0, 0, 0, 0);
114 }
115
116
117 ImageLoader::~ImageLoader()
118 {
119 if ( fPathOwnedByImage && (fPath != NULL) )
120 delete [] fPath;
121 if ( fLogicalPath != NULL )
122 delete [] fLogicalPath;
123 if ( fLibraries != NULL ) {
124 for (uint32_t i = 0; i < fLibrariesCount; ++i) {
125 if ( fLibraries[i].image != NULL )
126 fLibraries[i].image->fStaticReferenceCount--;
127 }
128 delete [] fLibraries;
129 }
130 if ( fDynamicReferences != NULL ) {
131 for (std::set<const ImageLoader*>::iterator it = fDynamicReferences->begin(); it != fDynamicReferences->end(); ++it ) {
132 const_cast<ImageLoader*>(*it)->fDynamicReferenceCount--;
133 }
134 delete fDynamicReferences;
135 }
136 }
137
138 void ImageLoader::setMapped(const LinkContext& context)
139 {
140 fState = dyld_image_state_mapped;
141 context.notifySingle(dyld_image_state_mapped, this->machHeader(), fPath, fLastModified);
142 }
143
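// record that this image now dynamically depends on 'target'; each unique
// target is inserted once and its fDynamicReferenceCount bumped to match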
144 void ImageLoader::addDynamicReference(const ImageLoader* target)
145 {
146 if ( fDynamicReferences == NULL )
147 fDynamicReferences = new std::set<const ImageLoader*>();
148 if ( fDynamicReferences->count(target) == 0 ) {
149 fDynamicReferences->insert(target);
150 const_cast<ImageLoader*>(target)->fDynamicReferenceCount++;
151 }
152 //dyld::log("dyld: addDynamicReference() from %s to %s, fDynamicReferences->size()=%lu\n", this->getPath(), target->getPath(), fDynamicReferences->size());
153 }
154
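// ordering used when sorting the image list: images are compared first by
// dependency depth (see recursiveUpdateDepth()), then by load order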
155 int ImageLoader::compare(const ImageLoader* right) const
156 {
157 if ( this->fDepth == right->fDepth ) {
158 if ( this->fLoadOrder == right->fLoadOrder )
159 return 0;
160 else if ( this->fLoadOrder < right->fLoadOrder )
161 return -1;
162 else
163 return 1;
164 }
165 else {
166 if ( this->fDepth < right->fDepth )
167 return -1;
168 else
169 return 1;
170 }
171 }
172
173 void ImageLoader::setPath(const char* path)
174 {
175 if ( fPathOwnedByImage && (fPath != NULL) )
176 delete [] fPath;
177 fPath = new char[strlen(path)+1];
178 strcpy((char*)fPath, path);
179 fPathOwnedByImage = true; // delete fPath when this image is destructed
180 fPathHash = hash(fPath);
181 }
182
183 void ImageLoader::setPathUnowned(const char* path)
184 {
185 if ( fPathOwnedByImage && (fPath != NULL) ) {
186 delete [] fPath;
187 }
188 fPath = path;
189 fPathOwnedByImage = false;
190 fPathHash = hash(fPath);
191 }
192
193 void ImageLoader::setLogicalPath(const char* path)
194 {
195 if ( fPath == NULL ) {
196 // no physical path set yet, so use this path as physical
197 this->setPath(path);
198 }
199 else if ( strcmp(path, fPath) == 0 ) {
200 // do not set logical path because it is the same as the physical path
201 fLogicalPath = NULL;
202 }
203 else {
204 fLogicalPath = new char[strlen(path)+1];
205 strcpy((char*)fLogicalPath, path);
206 }
207 }
208
209 const char* ImageLoader::getLogicalPath() const
210 {
211 if ( fLogicalPath != NULL )
212 return fLogicalPath;
213 else
214 return fPath;
215 }
216
217 uint32_t ImageLoader::hash(const char* path)
218 {
219 // this does not need to be a great hash
220 // it is just used to reduce the number of strcmp() calls
221 // against existing images when loading a new image
222 uint32_t h = 0;
223 for (const char* s=path; *s != '\0'; ++s)
224 h = h*5 + *s;
225 return h;
226 }
227
228 bool ImageLoader::matchInstallPath() const
229 {
230 return fMatchByInstallName;
231 }
232
233 void ImageLoader::setMatchInstallPath(bool match)
234 {
235 fMatchByInstallName = match;
236 }
237
238 bool ImageLoader::statMatch(const struct stat& stat_buf) const
239 {
240 return ( (this->fDevice == stat_buf.st_dev) && (this->fInode == stat_buf.st_ino) );
241 }
242
243 const char* ImageLoader::getShortName() const
244 {
245 // try to return leaf name
246 if ( fPath != NULL ) {
247 const char* s = strrchr(fPath, '/');
248 if ( s != NULL )
249 return &s[1];
250 }
251 return fPath;
252 }
253
254 uint64_t ImageLoader::getOffsetInFatFile() const
255 {
256 return fOffsetInFatFile;
257 }
258
259 void ImageLoader::setLeaveMapped()
260 {
261 fLeaveMapped = true;
262 }
263
264 void ImageLoader::setHideExports(bool hide)
265 {
266 fHideSymbols = hide;
267 }
268
269 bool ImageLoader::hasHiddenExports() const
270 {
271 return fHideSymbols;
272 }
273
274 bool ImageLoader::isLinked() const
275 {
276 return (fState >= dyld_image_state_bound);
277 }
278
279 time_t ImageLoader::lastModified() const
280 {
281 return fLastModified;
282 }
283
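// returns true if 'addr' lies inside an accessible segment of this image;
// unlinked images never report containment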
284 bool ImageLoader::containsAddress(const void* addr) const
285 {
286 if ( ! this->isLinked() )
287 return false;
288 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
289 Segment* seg = *it;
290 const uint8_t* start = (const uint8_t*)seg->getActualLoadAddress(this);
291 const uint8_t* end = start + seg->getSize();
292 if ( (start <= addr) && (addr < end) && !seg->unaccessible() )
293 return true;
294 }
295 return false;
296 }
297
298 bool ImageLoader::overlapsWithAddressRange(const void* start, const void* end) const
299 {
300 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
301 Segment* seg = *it;
302 const uint8_t* segStart = (const uint8_t*)(seg->getActualLoadAddress(this));
303 const uint8_t* segEnd = segStart + seg->getSize();
304 if ( (start <= segStart) && (segStart < end) )
305 return true;
306 if ( (start <= segEnd) && (segEnd < end) )
307 return true;
308 if ( (segStart < start) && (end < segEnd) )
309 return true;
310 }
311 return false;
312 }
313
314 void ImageLoader::getMappedRegions(MappedRegion*& regions) const
315 {
316 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
317 Segment* seg = *it;
318 MappedRegion region;
319 region.address = seg->getActualLoadAddress(this);
320 region.size = seg->getSize();
321 *regions++ = region;
322 }
323 }
324
325
326 static bool notInImgageList(const ImageLoader* image, const ImageLoader** dsiStart, const ImageLoader** dsiCur)
327 {
328 for (const ImageLoader** p = dsiStart; p < dsiCur; ++p)
329 if ( *p == image )
330 return false;
331 return true;
332 }
333
334
335 // private method that handles circular dependencies by only searching any image once
336 const ImageLoader::Symbol* ImageLoader::findExportedSymbolInDependentImagesExcept(const char* name,
337 const ImageLoader** dsiStart, const ImageLoader**& dsiCur, const ImageLoader** dsiEnd, const ImageLoader** foundIn) const
338 {
339 const ImageLoader::Symbol* sym;
340
341 // search self
342 if ( notInImgageList(this, dsiStart, dsiCur) ) {
343 sym = this->findExportedSymbol(name, NULL, false, foundIn);
344 if ( sym != NULL )
345 return sym;
346 *dsiCur++ = this;
347 }
348
349 // search directly dependent libraries
350 for (uint32_t i=0; i < fLibrariesCount; ++i) {
351 ImageLoader* dependentImage = fLibraries[i].image;
352 if ( (dependentImage != NULL) && notInImgageList(dependentImage, dsiStart, dsiCur) ) {
353 const ImageLoader::Symbol* sym = dependentImage->findExportedSymbol(name, NULL, false, foundIn);
354 if ( sym != NULL )
355 return sym;
356 }
357 }
358
359 // search indirectly dependent libraries
360 for (uint32_t i=0; i < fLibrariesCount; ++i) {
361 ImageLoader* dependentImage = fLibraries[i].image;
362 if ( (dependentImage != NULL) && notInImgageList(dependentImage, dsiStart, dsiCur) ) {
363 *dsiCur++ = dependentImage;
364 const ImageLoader::Symbol* sym = dependentImage->findExportedSymbolInDependentImagesExcept(name, dsiStart, dsiCur, dsiEnd, foundIn);
365 if ( sym != NULL )
366 return sym;
367 }
368 }
369
370 return NULL;
371 }
372
373
374 const ImageLoader::Symbol* ImageLoader::findExportedSymbolInDependentImages(const char* name, const LinkContext& context, const ImageLoader** foundIn) const
375 {
376 unsigned int imageCount = context.imageCount();
377 const ImageLoader* dontSearchImages[imageCount];
378 dontSearchImages[0] = this; // don't search this image
379 const ImageLoader** cur = &dontSearchImages[1];
380 return this->findExportedSymbolInDependentImagesExcept(name, &dontSearchImages[0], cur, &dontSearchImages[imageCount], foundIn);
381 }
382
383 const ImageLoader::Symbol* ImageLoader::findExportedSymbolInImageOrDependentImages(const char* name, const LinkContext& context, const ImageLoader** foundIn) const
384 {
385 unsigned int imageCount = context.imageCount();
386 const ImageLoader* dontSearchImages[imageCount];
387 const ImageLoader** cur = &dontSearchImages[0];
388 return this->findExportedSymbolInDependentImagesExcept(name, &dontSearchImages[0], cur, &dontSearchImages[imageCount], foundIn);
389 }
390
391
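// top-level link step for this image and its dependency tree: load dependent
// libraries, assign dependency depths, rebase, bind, register DTrace DOF
// sections, and accumulate the per-phase timing statistics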
392 void ImageLoader::link(const LinkContext& context, bool forceLazysBound, bool preflightOnly, const RPathChain& loaderRPaths)
393 {
394 //dyld::log("ImageLoader::link(%s) refCount=%d, neverUnload=%d\n", this->getPath(), fStaticReferenceCount, fNeverUnload);
395
396 uint64_t t0 = mach_absolute_time();
397 this->recursiveLoadLibraries(context,loaderRPaths);
398 context.notifyBatch(dyld_image_state_dependents_mapped);
399
400 // we only do the loading step for preflights
401 if ( preflightOnly )
402 return;
403
404 uint64_t t1 = mach_absolute_time();
405 context.clearAllDepths();
406 this->recursiveUpdateDepth(context.imageCount());
407
408 uint64_t t2 = mach_absolute_time();
409 this->recursiveRebase(context);
410 context.notifyBatch(dyld_image_state_rebased);
411
412 uint64_t t3 = mach_absolute_time();
413 this->recursiveBind(context, forceLazysBound);
414 context.notifyBatch(dyld_image_state_bound);
415
416 uint64_t t4 = mach_absolute_time();
417 std::vector<DOFInfo> dofs;
418 this->recursiveGetDOFSections(context, dofs);
419 context.registerDOFs(dofs);
420
421
422 fgTotalLoadLibrariesTime += t1 - t0;
423 fgTotalRebaseTime += t3 - t2;
424 fgTotalBindTime += t4 - t3;
425
426 // done with initial dylib loads
427 Segment::fgNextPIEDylibAddress = 0;
428 }
429
430
431 void ImageLoader::printReferenceCounts()
432 {
433 dyld::log(" dlopen=%d, static=%d, dynamic=%d for %s\n",
434 fDlopenReferenceCount, fStaticReferenceCount, fDynamicReferenceCount, getPath() );
435 }
436
437
438 bool ImageLoader::decrementDlopenReferenceCount()
439 {
440 if ( fDlopenReferenceCount == 0 )
441 return true;
442 --fDlopenReferenceCount;
443 return false;
444 }
445
446 void ImageLoader::runInitializers(const LinkContext& context)
447 {
448 #if IMAGE_NOTIFY_SUPPORT
449 ImageLoader* newImages[context.imageCount()];
450 ImageLoader** end = newImages;
451 this->recursiveImageAnnouncement(context, end); // build bottom-up list of images being added
452 context.notifyAdding(newImages, end-newImages); // tell anyone who cares about these
453 #endif
454
455 uint64_t t1 = mach_absolute_time();
456 this->recursiveInitialization(context, mach_thread_self());
457 context.notifyBatch(dyld_image_state_initialized);
458 uint64_t t2 = mach_absolute_time();
459 fgTotalInitTime += (t2 - t1);
460 }
461
462
463 void ImageLoader::bindAllLazyPointers(const LinkContext& context, bool recursive)
464 {
465 if ( ! fAllLazyPointersBound ) {
466 fAllLazyPointersBound = true;
467
468 if ( recursive ) {
469 // bind lower level libraries first
470 for(unsigned int i=0; i < fLibrariesCount; ++i){
471 DependentLibrary& libInfo = fLibraries[i];
472 if ( libInfo.image != NULL )
473 libInfo.image->bindAllLazyPointers(context, recursive);
474 }
475 }
476 // bind lazies in this image
477 this->doBindJustLazies(context);
478 }
479 }
480
481
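// pick load addresses for this image's segments: if the image slides as a
// unit, reserve one contiguous range and return the slide; if it cannot
// slide, reserve each segment at its preferred address (or throw)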
482 intptr_t ImageLoader::assignSegmentAddresses(const LinkContext& context)
483 {
484 // preflight and calculate slide if needed
485 intptr_t slide = 0;
486 if ( this->segmentsCanSlide() && this->segmentsMustSlideTogether() ) {
487 bool needsToSlide = false;
488 uintptr_t lowAddr = UINTPTR_MAX;
489 uintptr_t highAddr = 0;
490 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
491 Segment* seg = *it;
492 const uintptr_t segLow = seg->getPreferredLoadAddress();
493 const uintptr_t segHigh = (segLow + seg->getSize() + 4095) & -4096;
494 if ( segLow < lowAddr )
495 lowAddr = segLow;
496 if ( segHigh > highAddr )
497 highAddr = segHigh;
498
499 if ( !seg->hasPreferredLoadAddress() || !Segment::reserveAddressRange(seg->getPreferredLoadAddress(), seg->getSize()) )
500 needsToSlide = true;
501 }
502 if ( needsToSlide ) {
503 // find a chunk of address space to hold all segments
504 uintptr_t addr = Segment::reserveAnAddressRange(highAddr-lowAddr, context);
505 slide = addr - lowAddr;
506 }
507 }
508 else if ( ! this->segmentsCanSlide() ) {
509 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
510 Segment* seg = *it;
511 if ( strcmp(seg->getName(), "__PAGEZERO") == 0 )
512 continue;
513 if ( !Segment::reserveAddressRange(seg->getPreferredLoadAddress(), seg->getSize()) )
514 throw "can't map";
515 }
516 }
517 else {
518 // mach-o does not support independently sliding segments
519 }
520 return slide;
521 }
522
523
524 void ImageLoader::mapSegments(int fd, uint64_t offsetInFat, uint64_t lenInFat, uint64_t fileLen, const LinkContext& context)
525 {
526 if ( context.verboseMapping )
527 dyld::log("dyld: Mapping %s\n", this->getPath());
528 // find address range for image
529 intptr_t slide = this->assignSegmentAddresses(context);
530 // map in all segments
531 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
532 Segment* seg = *it;
533 seg->map(fd, offsetInFat, slide, this, context);
534 }
535 // update slide to reflect load location
536 this->setSlide(slide);
537 }
538
539 void ImageLoader::mapSegments(const void* memoryImage, uint64_t imageLen, const LinkContext& context)
540 {
541 if ( context.verboseMapping )
542 dyld::log("dyld: Mapping memory %p\n", memoryImage);
543 // find address range for image
544 intptr_t slide = this->assignSegmentAddresses(context);
545 // map in all segments
546 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
547 Segment* seg = *it;
548 seg->map(memoryImage, slide, this, context);
549 }
550 // update slide to reflect load location
551 this->setSlide(slide);
552 // set each segment's declared permissions at its slid location
553 for(ImageLoader::SegmentIterator it = this->beginSegments(); it != this->endSegments(); ++it ) {
554 Segment* seg = *it;
555 seg->setPermissions(context, this);
556 }
557 }
558
559 bool ImageLoader::allDependentLibrariesAsWhenPreBound() const
560 {
561 return fAllLibraryChecksumsAndLoadAddressesMatch;
562 }
563
564
565 unsigned int ImageLoader::recursiveUpdateDepth(unsigned int maxDepth)
566 {
567 // the purpose of this phase is to make the images sortable such that
568 // in a sorted list of images, every image that an image depends on
569 // occurs in the list before it.
570 if ( fDepth == 0 ) {
571 // break cycles
572 fDepth = maxDepth;
573
574 // get depth of dependents
575 unsigned int minDependentDepth = maxDepth;
576 for(unsigned int i=0; i < fLibrariesCount; ++i) {
577 DependentLibrary& libInfo = fLibraries[i];
578 if ( libInfo.image != NULL ) {
579 unsigned int d = libInfo.image->recursiveUpdateDepth(maxDepth);
580 if ( d < minDependentDepth )
581 minDependentDepth = d;
582 }
583 }
584
585 // make me less deep than all my dependents
586 fDepth = minDependentDepth - 1;
587 }
588
589 return fDepth;
590 }
591
592
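// depth-first load of every library this image depends on, recording for
// each whether it is required, re-exported, and whether its checksum and
// load address still match what was recorded at prebinding time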
593 void ImageLoader::recursiveLoadLibraries(const LinkContext& context, const RPathChain& loaderRPaths)
594 {
595 if ( fState < dyld_image_state_dependents_mapped ) {
596 // break cycles
597 fState = dyld_image_state_dependents_mapped;
598
599 // get list of libraries this image needs
600 fLibrariesCount = this->doGetDependentLibraryCount();
601 fLibraries = new DependentLibrary[fLibrariesCount];
602 bzero(fLibraries, sizeof(DependentLibrary)*fLibrariesCount);
603 DependentLibraryInfo libraryInfos[fLibrariesCount];
604 this->doGetDependentLibraries(libraryInfos);
605
606 // get list of rpaths that this image adds
607 std::vector<const char*> rpathsFromThisImage;
608 this->getRPaths(context, rpathsFromThisImage);
609 const RPathChain thisRPaths(&loaderRPaths, &rpathsFromThisImage);
610
611 // try to load each
612 bool canUsePrelinkingInfo = true;
613 for(unsigned int i=0; i < fLibrariesCount; ++i){
614 DependentLibrary& requiredLib = fLibraries[i];
615 DependentLibraryInfo& requiredLibInfo = libraryInfos[i];
616 try {
617 bool depNamespace = false;
618 requiredLib.image = context.loadLibrary(requiredLibInfo.name, true, depNamespace, this->getPath(), &thisRPaths);
619 if ( requiredLib.image == this ) {
620 // found circular reference, perhaps DYLD_LIBRARY_PATH is causing this rdar://problem/3684168
621 requiredLib.image = context.loadLibrary(requiredLibInfo.name, false, depNamespace, NULL, NULL);
622 if ( requiredLib.image != this )
623 dyld::warn("DYLD_ setting caused circular dependency in %s\n", this->getPath());
624 }
625 if ( fNeverUnload )
626 requiredLib.image->setNeverUnload();
627 requiredLib.image->fStaticReferenceCount += 1;
628 LibraryInfo actualInfo = requiredLib.image->doGetLibraryInfo();
629 requiredLib.required = requiredLibInfo.required;
630 requiredLib.checksumMatches = ( actualInfo.checksum == requiredLibInfo.info.checksum );
631 requiredLib.isReExported = requiredLibInfo.reExported;
632 if ( ! requiredLib.isReExported ) {
633 requiredLib.isSubFramework = requiredLib.image->isSubframeworkOf(context, this);
634 requiredLib.isReExported = requiredLib.isSubFramework || this->hasSubLibrary(context, requiredLib.image);
635 }
636 // check found library version is compatible
637 if ( actualInfo.minVersion < requiredLibInfo.info.minVersion ) {
638 dyld::throwf("Incompatible library version: %s requires version %d.%d.%d or later, but %s provides version %d.%d.%d",
639 this->getShortName(), requiredLibInfo.info.minVersion >> 16, (requiredLibInfo.info.minVersion >> 8) & 0xff, requiredLibInfo.info.minVersion & 0xff,
640 requiredLib.image->getShortName(), actualInfo.minVersion >> 16, (actualInfo.minVersion >> 8) & 0xff, actualInfo.minVersion & 0xff);
641 }
642 // prebinding for this image disabled if any dependent library changed or slid
643 if ( !requiredLib.checksumMatches || (requiredLib.image->getSlide() != 0) )
644 canUsePrelinkingInfo = false;
645 //if ( context.verbosePrebinding ) {
646 // if ( !requiredLib.checksumMatches )
647 // fprintf(stderr, "dyld: checksum mismatch, (%u v %u) for %s referencing %s\n",
648 // requiredLibInfo.info.checksum, actualInfo.checksum, this->getPath(), requiredLib.image->getPath());
649 // if ( requiredLib.image->getSlide() != 0 )
650 // fprintf(stderr, "dyld: dependent library slid for %s referencing %s\n", this->getPath(), requiredLib.image->getPath());
651 //}
652 }
653 catch (const char* msg) {
654 //if ( context.verbosePrebinding )
655 // fprintf(stderr, "dyld: exception during processing for %s referencing %s\n", this->getPath(), requiredLib.image->getPath());
656 if ( requiredLibInfo.required ) {
657 fState = dyld_image_state_mapped;
658 dyld::throwf("Library not loaded: %s\n Referenced from: %s\n Reason: %s", requiredLibInfo.name, this->getPath(), msg);
659 }
660 // ok if weak library not found
661 requiredLib.image = NULL;
662 canUsePrelinkingInfo = false; // this disables all prebinding; we may want to just slam import vectors for this lib to zero
663 }
664 }
665 fAllLibraryChecksumsAndLoadAddressesMatch = canUsePrelinkingInfo;
666
667 // tell each to load its dependents
668 for(unsigned int i=0; i < fLibrariesCount; ++i){
669 DependentLibrary& lib = fLibraries[i];
670 if ( lib.image != NULL ) {
671 lib.image->recursiveLoadLibraries(context, thisRPaths);
672 }
673 }
674
675 // do deep prebind check
676 if ( fAllLibraryChecksumsAndLoadAddressesMatch ) {
677 for(unsigned int i=0; i < fLibrariesCount; ++i){
678 const DependentLibrary& libInfo = fLibraries[i];
679 if ( libInfo.image != NULL ) {
680 if ( !libInfo.image->allDependentLibrariesAsWhenPreBound() )
681 fAllLibraryChecksumsAndLoadAddressesMatch = false;
682 }
683 }
684 }
685
686 // free rpaths (getRPaths() malloc'ed each string)
687 for(std::vector<const char*>::iterator it=rpathsFromThisImage.begin(); it != rpathsFromThisImage.end(); ++it) {
688 const char* str = *it;
689 free((void*)str);
690 }
691
692 }
693 }
694
695 void ImageLoader::recursiveRebase(const LinkContext& context)
696 {
697 if ( fState < dyld_image_state_rebased ) {
698 // break cycles
699 fState = dyld_image_state_rebased;
700
701 try {
702 // rebase lower level libraries first
703 for(unsigned int i=0; i < fLibrariesCount; ++i){
704 DependentLibrary& libInfo = fLibraries[i];
705 if ( libInfo.image != NULL )
706 libInfo.image->recursiveRebase(context);
707 }
708
709 // rebase this image
710 doRebase(context);
711
712 // notify
713 context.notifySingle(dyld_image_state_rebased, this->machHeader(), fPath, fLastModified);
714 }
715 catch (const char* msg) {
716 // this image is not rebased
717 fState = dyld_image_state_dependents_mapped;
718 throw;
719 }
720 }
721 }
722
723
724
725
726 void ImageLoader::recursiveBind(const LinkContext& context, bool forceLazysBound)
727 {
728 // Normally just non-lazy pointers are bound immediately.
729 // The exceptions are:
730 // 1) DYLD_BIND_AT_LAUNCH will cause lazy pointers to be bound immediately
731 // 2) some APIs (e.g. RTLD_NOW) can cause lazy pointers to be bound immediately
732 if ( fState < dyld_image_state_bound ) {
733 // break cycles
734 fState = dyld_image_state_bound;
735
736 try {
737 // bind lower level libraries first
738 for(unsigned int i=0; i < fLibrariesCount; ++i){
739 DependentLibrary& libInfo = fLibraries[i];
740 if ( libInfo.image != NULL )
741 libInfo.image->recursiveBind(context, forceLazysBound);
742 }
743 // bind this image
744 this->doBind(context, forceLazysBound);
745 this->doUpdateMappingPermissions(context);
746 // mark if lazies are also bound
747 if ( forceLazysBound || this->usablePrebinding(context) )
748 fAllLazyPointersBound = true;
749
750 context.notifySingle(dyld_image_state_bound, this->machHeader(), fPath, fLastModified);
751 }
752 catch (const char* msg) {
753 // restore state
754 fState = dyld_image_state_rebased;
755 throw;
756 }
757 }
758 }
759
760
761 #if IMAGE_NOTIFY_SUPPORT
762 void ImageLoader::recursiveImageAnnouncement(const LinkContext& context, ImageLoader**& newImages)
763 {
764 if ( ! fAnnounced ) {
765 // break cycles
766 fAnnounced = true;
767
768 // announce lower level libraries first
769 for(unsigned int i=0; i < fLibrariesCount; ++i){
770 DependentLibrary& libInfo = fLibraries[i];
771 if ( libInfo.image != NULL )
772 libInfo.image->recursiveImageAnnouncement(context, newImages);
773 }
774
775 // add to list of images to notify about
776 *newImages++ = this;
777 //dyld::log("next size = %d\n", newImages.size());
778
779 // remember that this image wants to be notified about other images
780 if ( this->hasImageNotification() )
781 context.addImageNeedingNotification(this);
782 }
783 }
784 #endif
785
786 void ImageLoader::recursiveGetDOFSections(const LinkContext& context, std::vector<DOFInfo>& dofs)
787 {
788 if ( ! fRegisteredDOF ) {
789 // break cycles
790 fRegisteredDOF = true;
791
792 // gather lower level libraries first
793 for(unsigned int i=0; i < fLibrariesCount; ++i){
794 DependentLibrary& libInfo = fLibraries[i];
795 if ( libInfo.image != NULL )
796 libInfo.image->recursiveGetDOFSections(context, dofs);
797 }
798 this->doGetDOFSections(context, dofs);
799 }
800 }
801
802
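// per-image recursive initializer lock: the owning thread may re-enter and
// just bumps the count, while any other thread spins until the owner clears
// fInitializerRecursiveLock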
803 void ImageLoader::recursiveSpinLock(recursive_lock& rlock)
804 {
805 // try to set image's ivar fInitializerRecursiveLock to point to this lock_info
806 // keep trying until success (spin)
807 while ( ! OSAtomicCompareAndSwapPtrBarrier(NULL, &rlock, (void**)&fInitializerRecursiveLock) ) {
808 // if fInitializerRecursiveLock already points to a different lock_info and it is for
809 // the same thread we are on, then increment the lock count, otherwise continue to spin
810 if ( (fInitializerRecursiveLock != NULL) && (fInitializerRecursiveLock->thread == rlock.thread) )
811 break;
812 }
813 ++(fInitializerRecursiveLock->count);
814 }
815
816 void ImageLoader::recursiveSpinUnLock()
817 {
818 if ( --(fInitializerRecursiveLock->count) == 0 )
819 fInitializerRecursiveLock = NULL;
820 }
821
822
823 void ImageLoader::recursiveInitialization(const LinkContext& context, mach_port_t this_thread)
824 {
825 #if RECURSIVE_INITIALIZER_LOCK
826 recursive_lock lock_info(this_thread);
827 recursiveSpinLock(lock_info);
828 #else
829 _spin_lock(&fInitializerLock);
830 #endif
831
832 if ( fState < dyld_image_state_dependents_initialized-1 ) {
833 uint8_t oldState = fState;
834 // break cycles
835 fState = dyld_image_state_dependents_initialized-1;
836
837 try {
838 // initialize lower level libraries first
839 for(unsigned int i=0; i < fLibrariesCount; ++i){
840 DependentLibrary& libInfo = fLibraries[i];
841 // don't try to initialize stuff "above" me
842 if ( (libInfo.image != NULL) && (libInfo.image->fDepth >= fDepth) )
843 libInfo.image->recursiveInitialization(context, this_thread);
844 }
845
846 // record termination order
847 if ( this->needsTermination() )
848 context.terminationRecorder(this);
849
850 // let objc know we are about to initialize this image
851 fState = dyld_image_state_dependents_initialized;
852 oldState = fState;
853 context.notifySingle(dyld_image_state_dependents_initialized, this->machHeader(), fPath, fLastModified);
854
855 // initialize this image
856 this->doInitialization(context);
857 // let anyone know we finished initializing this image
858 fState = dyld_image_state_initialized;
859 oldState = fState;
860 context.notifySingle(dyld_image_state_initialized, this->machHeader(), fPath, fLastModified);
861 }
862 catch (const char* msg) {
863 // this image is not initialized
864 fState = oldState;
865 #if RECURSIVE_INITIALIZER_LOCK
866 recursiveSpinUnLock();
867 #else
868 _spin_unlock(&fInitializerLock);
869 #endif
870 throw;
871 }
872 }
873
874 #if RECURSIVE_INITIALIZER_LOCK
875 recursiveSpinUnLock();
876 #else
877 _spin_unlock(&fInitializerLock);
878 #endif
879 }
880
881
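// log 'msg' with partTime converted from mach_absolute_time() units into
// milliseconds or seconds, plus its share of totalTime as a percentage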
882 static void printTime(const char* msg, uint64_t partTime, uint64_t totalTime)
883 {
884 static uint64_t sUnitsPerSecond = 0;
885 if ( sUnitsPerSecond == 0 ) {
886 struct mach_timebase_info timeBaseInfo;
887 if ( mach_timebase_info(&timeBaseInfo) == KERN_SUCCESS ) {
888 sUnitsPerSecond = 1000000000ULL * timeBaseInfo.denom / timeBaseInfo.numer;
889 }
890 }
891 if ( partTime < sUnitsPerSecond ) {
892 uint32_t milliSecondsTimeTen = (partTime*10000)/sUnitsPerSecond;
893 uint32_t milliSeconds = milliSecondsTimeTen/10;
894 uint32_t percentTimesTen = (partTime*1000)/totalTime;
895 uint32_t percent = percentTimesTen/10;
896 dyld::log("%s: %u.%u milliseconds (%u.%u%%)\n", msg, milliSeconds, milliSecondsTimeTen-milliSeconds*10, percent, percentTimesTen-percent*10);
897 }
898 else {
899 uint32_t secondsTimeTen = (partTime*10)/sUnitsPerSecond;
900 uint32_t seconds = secondsTimeTen/10;
901 uint32_t percentTimesTen = (partTime*1000)/totalTime;
902 uint32_t percent = percentTimesTen/10;
903 dyld::log("%s: %u.%u seconds (%u.%u%%)\n", msg, seconds, secondsTimeTen-seconds*10, percent, percentTimesTen-percent*10);
904 }
905 }
906
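// format 'in' with comma thousands separators, building the string backwards
// from out[32] in the caller-supplied buffer; returns a pointer to the first
// digit, e.g. commatize(1234567, buf) yields "1,234,567"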
907 static char* commatize(uint64_t in, char* out)
908 {
909 uint64_t div10 = in / 10;
910 uint8_t delta = in - div10*10;
911 char* s = &out[32];
912 int digitCount = 1;
913 *s = '\0';
914 *(--s) = '0' + delta;
915 in = div10;
916 while ( in != 0 ) {
917 if ( (digitCount % 3) == 0 )
918 *(--s) = ',';
919 div10 = in / 10;
920 delta = in - div10*10;
921 *(--s) = '0' + delta;
922 in = div10;
923 ++digitCount;
924 }
925 return s;
926 }
927
928
929
930 void ImageLoader::printStatistics(unsigned int imageCount)
931 {
932 uint64_t totalTime = fgTotalLoadLibrariesTime + fgTotalRebaseTime + fgTotalBindTime + fgTotalInitTime;
933 char commaNum1[40];
934 char commaNum2[40];
935
936 printTime("total time", totalTime, totalTime);
937 dyld::log("total images loaded: %d (%u from dyld shared cache, %u needed no fixups)\n", imageCount, fgImagesUsedFromSharedCache, fgImagesRequiringNoFixups);
938 dyld::log("total segments mapped: %u, into %llu pages with %llu pages pre-fetched\n", fgTotalSegmentsMapped, fgTotalBytesMapped/4096, fgTotalBytesPreFetched/4096);
939 printTime("total images loading time", fgTotalLoadLibrariesTime, totalTime);
940 dyld::log("total rebase fixups: %s\n", commatize(fgTotalRebaseFixups, commaNum1));
941 printTime("total rebase fixups time", fgTotalRebaseTime, totalTime);
942 dyld::log("total binding fixups: %s\n", commatize(fgTotalBindFixups, commaNum1));
943 if ( fgTotalBindSymbolsResolved != 0 ) {
944 uint32_t avgTimesTen = (fgTotalBindImageSearches * 10) / fgTotalBindSymbolsResolved;
945 uint32_t avgInt = fgTotalBindImageSearches / fgTotalBindSymbolsResolved;
946 uint32_t avgTenths = avgTimesTen - (avgInt*10);
947 dyld::log("total binding symbol lookups: %s, average images searched per symbol: %u.%u\n",
948 commatize(fgTotalBindSymbolsResolved, commaNum1), avgInt, avgTenths);
949 }
950 printTime("total binding fixups time", fgTotalBindTime, totalTime);
951 dyld::log("total bindings lazily fixed up: %s of %s\n", commatize(fgTotalLazyBindFixups, commaNum1), commatize(fgTotalPossibleLazyBindFixups, commaNum2));
952 printTime("total init time", fgTotalInitTime, totalTime);
953 }
954
955
956 //
957 // copy path and add suffix to result
958 //
959 // /path/foo.dylib _debug => /path/foo_debug.dylib
960 // foo.dylib _debug => foo_debug.dylib
961 // foo _debug => foo_debug
962 // /path/bar _debug => /path/bar_debug
963 // /path/bar.A.dylib _debug => /path/bar.A_debug.dylib
964 //
965 void ImageLoader::addSuffix(const char* path, const char* suffix, char* result)
966 {
967 strcpy(result, path);
968
969 char* start = strrchr(result, '/');
970 if ( start != NULL )
971 start++;
972 else
973 start = result;
974
975 char* dot = strrchr(start, '.');
976 if ( dot != NULL ) {
977 strcpy(dot, suffix);
978 strcat(&dot[strlen(suffix)], &path[dot-result]);
979 }
980 else {
981 strcat(result, suffix);
982 }
983 }
984
985
986 void Segment::map(int fd, uint64_t offsetInFatWrapper, intptr_t slide, const ImageLoader* image, const ImageLoader::LinkContext& context)
987 {
988 vm_offset_t fileOffset = this->getFileOffset() + offsetInFatWrapper;
989 vm_size_t size = this->getFileSize();
990 void* requestedLoadAddress = (void*)(this->getPreferredLoadAddress() + slide);
991 int protection = 0;
992 if ( !this->unaccessible() ) {
993 if ( this->executable() )
994 protection |= PROT_EXEC;
995 if ( this->readable() )
996 protection |= PROT_READ;
997 if ( this->writeable() )
998 protection |= PROT_WRITE;
999 }
1000 #if __i386__
1001 // initially map __IMPORT segments R/W so dyld can update them
1002 if ( this->readOnlyImportStubs() )
1003 protection |= PROT_WRITE;
1004 #endif
1005 // wholly zero-fill segments have nothing to mmap() in
1006 if ( size > 0 ) {
1007 void* loadAddress = mmap(requestedLoadAddress, size, protection, MAP_FIXED | MAP_PRIVATE, fd, fileOffset);
1008 if ( loadAddress == ((void*)(-1)) )
1009 dyld::throwf("mmap() error %d at address=0x%08lX, size=0x%08lX segment=%s in Segment::map() mapping %s", errno, (uintptr_t)requestedLoadAddress, (uintptr_t)size, this->getName(), image->getPath());
1010 }
1011 // update stats
1012 ++ImageLoader::fgTotalSegmentsMapped;
1013 ImageLoader::fgTotalBytesMapped += size;
1014 if ( context.verboseMapping )
1015 dyld::log("%18s at %p->%p with permissions %c%c%c\n", this->getName(), requestedLoadAddress, (char*)requestedLoadAddress+this->getFileSize()-1,
1016 (protection & PROT_READ) ? 'r' : '.', (protection & PROT_WRITE) ? 'w' : '.', (protection & PROT_EXEC) ? 'x' : '.' );
1017 }
1018
1019 void Segment::map(const void* memoryImage, intptr_t slide, const ImageLoader* image, const ImageLoader::LinkContext& context)
1020 {
1021 vm_address_t loadAddress = this->getPreferredLoadAddress() + slide;
1022 vm_address_t srcAddr = (uintptr_t)memoryImage + this->getFileOffset();
1023 vm_size_t size = this->getFileSize();
1024 kern_return_t r = vm_copy(mach_task_self(), srcAddr, size, loadAddress);
1025 if ( r != KERN_SUCCESS )
1026 throw "can't map segment";
1027
1028 if ( context.verboseMapping )
1029 dyld::log("%18s at %p->%p\n", this->getName(), (char*)loadAddress, (char*)loadAddress+this->getFileSize()-1);
1030 }
1031
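// apply this segment's declared read/write/execute protections to its
// mapped (slid) address range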
1032 void Segment::setPermissions(const ImageLoader::LinkContext& context, const ImageLoader* image)
1033 {
1034 vm_prot_t protection = 0;
1035 if ( !this->unaccessible() ) {
1036 if ( this->executable() )
1037 protection |= VM_PROT_EXECUTE;
1038 if ( this->readable() )
1039 protection |= VM_PROT_READ;
1040 if ( this->writeable() )
1041 protection |= VM_PROT_WRITE;
1042 }
1043 vm_address_t addr = this->getActualLoadAddress(image);
1044 vm_size_t size = this->getSize();
1045 const bool setCurrentPermissions = false;
1046 kern_return_t r = vm_protect(mach_task_self(), addr, size, setCurrentPermissions, protection);
1047 if ( r != KERN_SUCCESS )
1048 throw "can't set vm permissions for mapped segment";
1049 if ( context.verboseMapping ) {
1050 dyld::log("%18s at %p->%p altered permissions to %c%c%c\n", this->getName(), (char*)addr, (char*)addr+this->getFileSize()-1,
1051 (protection & PROT_READ) ? 'r' : '.', (protection & PROT_WRITE) ? 'w' : '.', (protection & PROT_EXEC) ? 'x' : '.' );
1052 }
1053 }
1054
1055 void Segment::tempWritable(const ImageLoader::LinkContext& context, const ImageLoader* image)
1056 {
1057 vm_address_t addr = this->getActualLoadAddress(image);
1058 vm_size_t size = this->getSize();
1059 const bool setCurrentPermissions = false;
1060 vm_prot_t protection = VM_PROT_WRITE | VM_PROT_READ;
1061 if ( this->executable() )
1062 protection |= VM_PROT_EXECUTE;
1063 kern_return_t r = vm_protect(mach_task_self(), addr, size, setCurrentPermissions, protection);
1064 if ( r != KERN_SUCCESS )
1065 throw "can't set vm permissions for mapped segment";
1066 if ( context.verboseMapping ) {
1067 dyld::log("%18s at %p->%p altered permissions to %c%c%c\n", this->getName(), (char*)addr, (char*)addr+this->getFileSize()-1,
1068 (protection & PROT_READ) ? 'r' : '.', (protection & PROT_WRITE) ? 'w' : '.', (protection & PROT_EXEC) ? 'x' : '.' );
1069 }
1070 }
1071
1072
1073 bool Segment::hasTrailingZeroFill()
1074 {
1075 return ( this->writeable() && (this->getSize() > this->getFileSize()) );
1076 }
1077
1078
1079 uintptr_t Segment::reserveAnAddressRange(size_t length, const ImageLoader::LinkContext& context)
1080 {
1081 vm_address_t addr = 0;
1082 vm_size_t size = length;
1083 // in PIE programs, load initial dylibs after main executable so they don't have fixed addresses either
1084 if ( fgNextPIEDylibAddress != 0 ) {
1085 addr = fgNextPIEDylibAddress + (arc4random() & 0x3) * 4096; // add small random padding between dylibs
1086 kern_return_t r = vm_allocate(mach_task_self(), &addr, size, VM_FLAGS_FIXED);
1087 if ( r == KERN_SUCCESS ) {
1088 fgNextPIEDylibAddress = addr + size;
1089 return addr;
1090 }
1091 fgNextPIEDylibAddress = 0;
1092 }
1093 kern_return_t r = vm_allocate(mach_task_self(), &addr, size, VM_FLAGS_ANYWHERE);
1094 if ( r != KERN_SUCCESS )
1095 throw "out of address space";
1096
1097 return addr;
1098 }
1099
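// try to reserve exactly the range [start, start+length); returns false if
// that part of the address space is unavailable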
1100 bool Segment::reserveAddressRange(uintptr_t start, size_t length)
1101 {
1102 vm_address_t addr = start;
1103 vm_size_t size = length;
1104 kern_return_t r = vm_allocate(mach_task_self(), &addr, size, false /*only this range*/);
1105 if ( r != KERN_SUCCESS )
1106 return false;
1107 return true;
1108 }
1109
1110
1111