dyld-551.3.tar.gz
[apple/dyld.git] / dyld3 / LaunchCacheReader.cpp
1 /*
2 * Copyright (c) 2017 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24
25 #include <stdint.h>
26 #include <assert.h>
27 #include <uuid/uuid.h>
28 #include <unistd.h>
29 #include <limits.h>
30
31 #include "LaunchCacheFormat.h"
32 #include "LaunchCache.h"
33 #include "MachOParser.h"
34 #include "DyldCacheParser.h"
35
36 namespace dyld {
37 extern void log(const char* format, ...) __attribute__((format(printf, 1, 2)));
38 }
39
40 namespace dyld3 {
41 namespace launch_cache {
42
// Decodes one unsigned LEB128 value from the byte stream at 'p', advancing
// 'p' just past the encoded bytes.  'end' bounds the readable range.
// Returns the decoded value truncated to uintptr_t.
//
// Fix: the original wrote assert("message"), which asserts on a string
// literal — always true, so the malformed-input and overflow checks could
// never fire.  They are now real assertions; in NDEBUG builds the loop
// still bails out early as before.
static uintptr_t read_uleb128(const uint8_t*& p, const uint8_t* end)
{
    uint64_t result = 0;
    int      bit    = 0;
    do {
        // never read past the caller-supplied buffer
        assert((p != end) && "malformed uleb128");
        if ( p == end )
            break;
        uint64_t slice = *p & 0x7f;

        // a uint64_t holds at most ten 7-bit slices
        assert((bit <= 63) && "uleb128 too big for uint64");
        if ( bit > 63 )
            break;
        result |= (slice << bit);
        bit += 7;
    } while (*p++ & 0x80);
    return (uintptr_t)result;
}
65
66
67 bool MemoryRange::contains(const MemoryRange& other) const
68 {
69 if ( this->address > other.address )
70 return false;
71 const uint8_t* thisEnd = (uint8_t*)address + size;
72 const uint8_t* otherEnd = (uint8_t*)other.address + other.size;
73 return (thisEnd >= otherEnd);
74 }
75
76 bool MemoryRange::intersects(const MemoryRange& other) const
77 {
78 const uint8_t* thisEnd = (uint8_t*)address + size;
79 const uint8_t* otherEnd = (uint8_t*)other.address + other.size;
80 if ( otherEnd < this->address )
81 return false;
82 return ( other.address < thisEnd );
83 }
84
85
86 //////////////////////////// SlowLoadSet ////////////////////////////////////////
87
88 bool SlowLoadSet::contains(const BinaryImageData* image)
89 {
90 for (const BinaryImageData** p=_start; p < _current; ++p) {
91 if ( *p == image )
92 return true;
93 }
94 return false;
95 }
96
97 bool SlowLoadSet::add(const BinaryImageData* image)
98 {
99 if ( _current < _end ) {
100 *_current++ = image;
101 return true;
102 }
103 return false;
104 }
105
106 void SlowLoadSet::forEach(void (^handler)(const BinaryImageData*))
107 {
108 for (const BinaryImageData** p=_start; p < _current; ++p) {
109 handler(*p);
110 }
111 }
112
113 void SlowLoadSet::forEach(void (^handler)(const BinaryImageData*, bool& stop))
114 {
115 bool stop = false;
116 for (const BinaryImageData** p=_start; p < _current; ++p) {
117 handler(*p, stop);
118 if ( stop )
119 break;
120 }
121 }
122
123
124 long SlowLoadSet::count() const
125 {
126 return (_current - _start);
127 }
128
129
130 //////////////////////////// TargetSymbolValue ////////////////////////////////////////
131
132
133 #if DYLD_IN_PROCESS
134
// Resolves this packed target descriptor to an absolute address in the
// current process, using the images already tracked in 'images'.
// For dynamic (dlsym-style) targets, returns 0 and leaves diag clean for
// weak imports, or sets diag.error() when the symbol cannot be found.
// NOTE(review): the function falls off the end if the final assert is
// compiled out (NDEBUG) — consider an explicit return 0.
uintptr_t TargetSymbolValue::resolveTarget(Diagnostics& diag, const ImageGroup& inGroup, LoadedImages& images) const
{
    // this block is only used if findExportedSymbol() needs to trace re-exported dylibs to find a symbol;
    // it maps a dependent's load path back to the mach_header of the already-loaded image with that path
    MachOParser::DependentFinder reExportFollower = ^(uint32_t depIndex, const char* depLoadPath, void* extra, const mach_header** foundMH, void** foundExtra) {
        *foundMH = nullptr;
        images.forEachImage(^(uint32_t idx, const BinaryImageData* binImage, const mach_header* mh, bool& stop) {
            Image anImage(binImage);
            if ( strcmp(depLoadPath, anImage.path()) == 0 ) {
                *foundMH = mh;
                stop = true;
            }
        });
        return (*foundMH != nullptr);
    };

    uintptr_t offset;
    switch ( _data.sharedCache.kind ) {

        case TargetSymbolValue::kindSharedCache:
            // target lives at a fixed offset in the dyld shared cache
            assert(_data.sharedCache.offsetIntoCache != 0);
            return (uintptr_t)(images.dyldCacheLoadAddressForImage() + _data.sharedCache.offsetIntoCache);

        case TargetSymbolValue::kindAbsolute:
            offset = (uintptr_t)_data.absolute.value;
            // sign extend 62-bit value (the packed field's sign bit is bit 61, per the masks below)
            if ( offset & 0x2000000000000000ULL )
                offset |= 0xC000000000000000ULL;
            return offset;

        case TargetSymbolValue::kindGroup: {
            // target is (group, index-in-group, signed offset) relative to a loaded image
            uint32_t groupNum = _data.group.isIndirectGroup ? inGroup.indirectGroupNum(_data.group.groupNum) : _data.group.groupNum;
            uintptr_t targetImageLoadAddress = (uintptr_t)(images.loadAddressFromGroupAndIndex(groupNum, _data.group.indexInGroup));
            if ( targetImageLoadAddress == 0 )
                diag.error("image for groupNum=%d, indexInGroup=%d not found", groupNum, _data.group.indexInGroup);
            offset = (uintptr_t)_data.group.offsetInImage;
            // sign extend 42 bit offset
            if ( offset & 0x0000020000000000ULL )
                offset |= 0xFFFFFC0000000000ULL;
            return targetImageLoadAddress + offset;
        }

        case TargetSymbolValue::kindDynamicGroup: {
            // target is looked up by name at launch; the image "path" selects the search policy
            const char* imagePath  = inGroup.stringFromPool(_data.dynamicGroup.imagePathOffset);
            const char* symbolName = inGroup.stringFromPool(_data.dynamicGroup.symbolNameOffset);
            __block uintptr_t result = 0;
            __block bool      found  = false;
            if ( strcmp(imagePath, "@flat") == 0 ) {
                // search all images in load order
                images.forEachImage(^(uint32_t idx, const BinaryImageData* binImage, const mach_header* mh, bool& stop) {
                    Diagnostics findSymbolDiag;
                    dyld3::MachOParser parser(mh);
                    dyld3::MachOParser::FoundSymbol foundInfo;
                    if ( parser.findExportedSymbol(findSymbolDiag, symbolName, nullptr, foundInfo, ^(uint32_t, const char* depLoadPath, void*, const mach_header** foundMH, void**) {
                        // <rdar://problem/31921090> need to follow re-exported symbols to support libc renamed and reexported symbols
                        *foundMH = nullptr;
                        images.forEachImage(^(uint32_t innerIndex, const BinaryImageData* innerBinImage, const mach_header* innerMH, bool& innerStop) {
                            Image innerImage(innerBinImage);
                            if ( strcmp(depLoadPath, innerImage.path()) == 0 ) {
                                *foundMH = innerMH;
                                innerStop = true;
                            }
                        });
                        return (*foundMH != nullptr);
                    }) ) {
                        switch (foundInfo.kind) {
                            case MachOParser::FoundSymbol::Kind::headerOffset:
                            case MachOParser::FoundSymbol::Kind::resolverOffset:
                                result = ((uintptr_t)(foundInfo.foundInDylib) + (uintptr_t)foundInfo.value);
                                break;
                            case MachOParser::FoundSymbol::Kind::absolute:
                                result = (uintptr_t)foundInfo.value;
                                break;
                        }
                        // the providing image is now referenced by a bound pointer, so pin it
                        images.setAsNeverUnload(idx);
                        found = true;
                        stop  = true;
                    }
                });
                // <rdar://problem/31944092> bind unfound flat symbols to NULL to support lazy binding semantics
                if ( !found ) {
                    result = 0;
                    found  = true;
                }
            }
            else if ( strcmp(imagePath, "@main") == 0 ) {
                // search only main executable
                images.forEachImage(^(uint32_t idx, const BinaryImageData* binImage, const mach_header* mh, bool& stop) {
                    if ( mh->filetype == MH_EXECUTE ) {
                        Diagnostics findSymbolDiag;
                        dyld3::MachOParser parser(mh);
                        dyld3::MachOParser::FoundSymbol foundInfo;
                        if ( parser.findExportedSymbol(findSymbolDiag, symbolName, nullptr, foundInfo, nullptr) ) {
                            switch (foundInfo.kind) {
                                case MachOParser::FoundSymbol::Kind::headerOffset:
                                case MachOParser::FoundSymbol::Kind::resolverOffset:
                                    result = ((uintptr_t)(foundInfo.foundInDylib) + (uintptr_t)foundInfo.value);
                                    break;
                                case MachOParser::FoundSymbol::Kind::absolute:
                                    result = (uintptr_t)foundInfo.value;
                                    break;
                            }
                            found = true;
                            stop  = true;
                        }
                    }
                });
            }
            else if ( strcmp(imagePath, "@weak_def") == 0 ) {
                // search images with weak definitions in load order
                images.forEachImage(^(uint32_t idx, const BinaryImageData* binImage, const mach_header* mh, bool& stop) {
                    Image anImage(binImage);
                    if ( anImage.hasWeakDefs() ) {
                        Diagnostics findSymbolDiag;
                        dyld3::MachOParser parser(mh);
                        dyld3::MachOParser::FoundSymbol foundInfo;
                        if ( parser.findExportedSymbol(findSymbolDiag, symbolName, nullptr, foundInfo, nullptr) ) {
                            switch (foundInfo.kind) {
                                case MachOParser::FoundSymbol::Kind::headerOffset:
                                case MachOParser::FoundSymbol::Kind::resolverOffset:
                                    result = ((uintptr_t)(foundInfo.foundInDylib) + (uintptr_t)foundInfo.value);
                                    break;
                                case MachOParser::FoundSymbol::Kind::absolute:
                                    result = (uintptr_t)foundInfo.value;
                                    break;
                            }
                            found = true;
                            images.setAsNeverUnload(idx);
                            stop  = true;
                        }
                    }
                });
            }
            else {
                // search only image the matches supplied path (following re-exports)
                images.forEachImage(^(uint32_t idx, const BinaryImageData* binImage, const mach_header* mh, bool& stop) {
                    Image anImage(binImage);
                    if ( strcmp(anImage.path(), imagePath) == 0 ) {
                        Diagnostics findSymbolDiag;
                        dyld3::MachOParser parser(mh);
                        dyld3::MachOParser::FoundSymbol foundInfo;
                        if ( parser.findExportedSymbol(findSymbolDiag, symbolName, nullptr, foundInfo, reExportFollower) ) {
                            switch (foundInfo.kind) {
                                case MachOParser::FoundSymbol::Kind::headerOffset:
                                case MachOParser::FoundSymbol::Kind::resolverOffset:
                                    result = ((uintptr_t)(foundInfo.foundInDylib) + (uintptr_t)foundInfo.value);
                                    break;
                                case MachOParser::FoundSymbol::Kind::absolute:
                                    result = (uintptr_t)foundInfo.value;
                                    break;
                            }
                            found = true;
                            stop  = true;
                        }
                    }
                });
            }
            if ( found )
                return result;
            // weak imports are allowed to be missing: bind to NULL without an error
            if ( _data.dynamicGroup.weakImport )
                return 0;
            diag.error("dynamic symbol '%s' not found for %s", symbolName, imagePath);
            return 0;
        }
    }
    assert(0 && "resolveTarget() not reachable");
}
301
302 #else
303
304 TargetSymbolValue::TargetSymbolValue()
305 {
306 _data.raw = 0;
307 }
308
309 TargetSymbolValue TargetSymbolValue::makeInvalid()
310 {
311 return TargetSymbolValue();
312 }
313
314 TargetSymbolValue TargetSymbolValue::makeSharedCacheOffset(uint32_t offset)
315 {
316 TargetSymbolValue t;
317 t._data.sharedCache.kind = kindSharedCache;
318 t._data.sharedCache.offsetIntoCache = offset;
319 return t;
320 }
321
322 TargetSymbolValue TargetSymbolValue::makeAbsolute(uint64_t value)
323 {
324 TargetSymbolValue t;
325 t._data.absolute.kind = kindAbsolute;
326 t._data.absolute.value = value;
327 return t;
328 }
329
330 TargetSymbolValue TargetSymbolValue::makeGroupValue(uint32_t groupIndex, uint32_t imageIndexInGroup, uint64_t offsetInImage, bool isIndirectGroupNum)
331 {
332 assert(groupIndex != 0 || isIndirectGroupNum);
333 assert(groupIndex < 128);
334 assert(imageIndexInGroup < 4096);
335 TargetSymbolValue t;
336 t._data.group.kind = kindGroup;
337 t._data.group.isIndirectGroup = isIndirectGroupNum;
338 t._data.group.groupNum = groupIndex;
339 t._data.group.indexInGroup = imageIndexInGroup;
340 t._data.group.offsetInImage = offsetInImage;
341 return t;
342 }
343
344 TargetSymbolValue TargetSymbolValue::makeDynamicGroupValue(uint32_t imagePathPoolOffset, uint32_t imageSymbolPoolOffset, bool weakImport)
345 {
346 TargetSymbolValue t;
347 t._data.dynamicGroup.kind = kindDynamicGroup;
348 t._data.dynamicGroup.weakImport = weakImport;
349 t._data.dynamicGroup.imagePathOffset = imagePathPoolOffset;
350 t._data.dynamicGroup.symbolNameOffset = imageSymbolPoolOffset;
351 return t;
352 }
353
354 bool TargetSymbolValue::isSharedCacheTarget(uint64_t& offsetInCache) const
355 {
356 if ( _data.sharedCache.kind != kindSharedCache )
357 return false;
358 offsetInCache = _data.sharedCache.offsetIntoCache;
359 return true;
360 }
361
362 bool TargetSymbolValue::isGroupImageTarget(uint32_t& groupNum, uint32_t& indexInGroup, uint64_t& offsetInImage) const
363 {
364 if ( _data.sharedCache.kind != kindGroup )
365 return false;
366 // This is only used for interposing, so refuse to allow indirect for group 2
367 assert(!_data.group.isIndirectGroup);
368 groupNum = _data.group.groupNum;
369 indexInGroup = _data.group.indexInGroup;
370 offsetInImage = _data.group.offsetInImage;
371 return true;
372 }
373
374 bool TargetSymbolValue::isInvalid() const
375 {
376 return (_data.raw == 0);
377 }
378
// Formats 'value' as a 0x-prefixed, zero-padded (min 8 digits), upper-case
// hex string.  Uses snprintf so the buffer bound is enforced.
static std::string hex8(uint64_t value) {
    char buff[64];
    snprintf(buff, sizeof(buff), "0x%08llX", value);
    return buff;
}

// Formats 'value' as a decimal string.
static std::string decimal(uint64_t value) {
    char buff[64];
    snprintf(buff, sizeof(buff), "%llu", value);
    return buff;
}
390
// Renders this target descriptor as a human-readable string for closure
// dumping tools.  'group' supplies the string pool / indirect-group table.
std::string TargetSymbolValue::asString(ImageGroup group) const
{
    int64_t offset;
    switch ( _data.sharedCache.kind ) {
        case kindSharedCache:
            // offset 0 is reserved as the "invalid" encoding
            if ( _data.sharedCache.offsetIntoCache == 0 )
                return "{invalid target}";
            else
                return "{cache+" + hex8(_data.sharedCache.offsetIntoCache) + "}";
        case kindAbsolute:
            // NOTE(review): cast to uintptr_t narrows on a 32-bit build —
            // presumably this tool-side code only runs 64-bit; confirm.
            offset = (uintptr_t)_data.absolute.value;
            // sign extend 62-bit value (sign bit is bit 61, per the mask)
            if ( offset & 0x2000000000000000ULL )
                offset |= 0xC000000000000000ULL;
            return "{absolute:" + hex8(offset) + "}";
        case kindGroup:
            offset = _data.group.offsetInImage;
            // sign extend 42 bit offset
            if ( offset & 0x0000020000000000ULL )
                offset |= 0xFFFFFC0000000000ULL;
            // group 1 = other dylibs, group 2 = the closure itself,
            // higher groups = dlopen'ed groups (renumbered from 1)
            if ( _data.group.groupNum == 1 )
                return "{otherDylib[" + decimal(_data.group.indexInGroup) +"]+" + hex8(offset) + "}";
            if ( _data.group.groupNum == 2 )
                return "{closure[" + decimal(_data.group.indexInGroup) +"]+" + hex8(offset) + "}";
            else {
                uint32_t groupNum = _data.group.isIndirectGroup ? group.indirectGroupNum(_data.group.groupNum) : _data.group.groupNum;
                return "{dlopen-group-" + decimal(groupNum-2) + "[" + decimal(_data.group.indexInGroup) +"]+" + hex8(offset) + "}";
            }
        case kindDynamicGroup:
            return "{dynamic image='" + std::string(group.stringFromPool(_data.dynamicGroup.imagePathOffset))
                 + "' symbol='" + std::string(group.stringFromPool(_data.dynamicGroup.symbolNameOffset)) + "'}";
    }
    assert(0 && "unreachable");
    return "xx";
}
426
427 #endif
428
429 //////////////////////////// ImageRef ////////////////////////////////////////
430
431 binary_format::ImageRef binary_format::ImageRef::weakImportMissing()
432 {
433 ImageRef missing(0xFFFFFFFF);
434 return missing;
435 }
436
437
438
439 //////////////////////////// Closure ////////////////////////////////////////
440
441 Closure::Closure(const binary_format::Closure* closure)
442 : _binaryData(closure)
443 {
444 assert(closure->magic == binary_format::Closure::magicV1);
445 }
446
447 size_t Closure::size() const
448 {
449 return _binaryData->stringPoolOffset + _binaryData->stringPoolSize;
450 }
451
452 const ImageGroup Closure::group() const
453 {
454 return ImageGroup(&_binaryData->group);
455 }
456
457 void Closure::forEachEnvVar(void (^handler)(const char* keyEqualValue, bool& stop)) const
458 {
459 const uint32_t* envVarStringOffsets = (uint32_t*)((uint8_t*)_binaryData + _binaryData->dyldEnvVarsOffset);
460 const char* stringPool = (char*)_binaryData + _binaryData->stringPoolOffset;
461 bool stop = false;
462 for (uint32_t i=0; i < _binaryData->dyldEnvVarsCount; ++i) {
463 handler(&stringPool[envVarStringOffsets[i]], stop);
464 if ( stop )
465 break;
466 }
467 }
468
// Walks the paths that had to be missing when this closure was built.
// Encoding: a sequence of string-pool offsets, one per path component;
// a 0 offset terminates each path, and a second consecutive 0 terminates
// the whole list.  Each path is reassembled as "/comp1/comp2/..." into a
// stack buffer before being handed to 'handler'.
void Closure::forEachMustBeMissingFile(void (^handler)(const char* path, bool& stop)) const
{
    const uint16_t* offsets = (uint16_t*)((uint8_t*)_binaryData + _binaryData->missingFileComponentsOffset);
    if ( *offsets == 0 )
        return;     // empty list
    const char* stringPool = (char*)_binaryData + _binaryData->stringPoolOffset;
    bool stop = false;
    while ( !stop ) {
        char path[PATH_MAX];
        path[0] = '\0';
        // concatenate this path's components, each prefixed with '/'
        while ( *offsets != 0 ) {
            const char* component = &stringPool[*offsets++];
            strlcat(path, "/", PATH_MAX);
            strlcat(path, component, PATH_MAX);
        }
        handler(path, stop);
        ++offsets; // move to next path
        if ( *offsets == 0 ) // if no next path, then end of list of strings
            stop = true;
    }
}
490
491 const uuid_t* Closure::dyldCacheUUID() const
492 {
493 return &(_binaryData->dyldCacheUUID);
494 }
495
496
497 const uint8_t* Closure::cdHash() const
498 {
499 return _binaryData->mainExecutableCdHash;
500 }
501
502
503 uint32_t Closure::initialImageCount() const
504 {
505 return _binaryData->initialImageCount;
506 }
507
508
509 uint32_t Closure::mainExecutableImageIndex() const
510 {
511 return _binaryData->mainExecutableIndexInGroup;
512 }
513
514
515 uint32_t Closure::mainExecutableEntryOffset() const
516 {
517 return _binaryData->mainExecutableEntryOffset;
518 }
519
520 bool Closure::mainExecutableUsesCRT() const
521 {
522 return _binaryData->usesCRT;
523 }
524
525 bool Closure::isRestricted() const
526 {
527 return _binaryData->isRestricted;
528 }
529
530 bool Closure::usesLibraryValidation() const
531 {
532 return _binaryData->usesLibraryValidation;
533 }
534
535 uint32_t Closure::libdyldVectorOffset() const
536 {
537 return _binaryData->libdyldVectorOffset;
538 }
539
540 const BinaryImageData* Closure::libSystem(const ImageGroupList& groups)
541 {
542 return Image::resolveImageRef(groups, _binaryData->libSystemRef).binaryData();
543 }
544
545 const BinaryImageData* Closure::libDyld(const ImageGroupList& groups)
546 {
547 return Image::resolveImageRef(groups, _binaryData->libDyldRef).binaryData();
548 }
549
550
551 //////////////////////////// ImageGroup ////////////////////////////////////////
552
553 size_t ImageGroup::size() const
554 {
555 return (_binaryData->stringsPoolOffset + _binaryData->stringsPoolSize + 3) & (-4);
556 }
557
558 uint32_t ImageGroup::groupNum() const
559 {
560 return _binaryData->groupNum;
561 }
562
563 bool ImageGroup::dylibsExpectedOnDisk() const
564 {
565 return _binaryData->dylibsExpectedOnDisk;
566 }
567
568 uint32_t ImageGroup::imageCount() const
569 {
570 return _binaryData->imagesPoolCount;
571 }
572
573 const binary_format::Image* ImageGroup::imageBinary(uint32_t index) const
574 {
575 assert(index <_binaryData->imagesPoolCount);
576 return (binary_format::Image*)((char*)_binaryData + _binaryData->imagesPoolOffset + (index * _binaryData->imagesEntrySize));
577 }
578
579
580 const Image ImageGroup::image(uint32_t index) const
581 {
582 return Image(imageBinary(index));
583 }
584
585 uint32_t ImageGroup::indexInGroup(const binary_format::Image* img) const
586 {
587 long delta = (char*)img - ((char*)_binaryData + _binaryData->imagesPoolOffset);
588 uint32_t index = (uint32_t)(delta /_binaryData->imagesEntrySize);
589 assert(image(index)._binaryData == img);
590 return index;
591 }
592
// Finds an image in this group by install path.  First compares the
// precomputed path hash of each image (cheap) before doing a full strcmp,
// then falls back to the group's alias table (symlink-style alternate
// paths).  On success sets 'foundIndex' and returns the image record;
// returns nullptr when no valid image matches.
const binary_format::Image* ImageGroup::findImageByPath(const char* path, uint32_t& foundIndex) const
{
    // check path of each image in group
    uint32_t targetHash = hashFunction(path);
    const uint8_t* p = (uint8_t*)_binaryData + _binaryData->imagesPoolOffset;
    for (uint32_t i=0; i < _binaryData->imagesPoolCount; ++i) {
        const binary_format::Image* binImage = (binary_format::Image*)p;
        if ( binImage->pathHash == targetHash ) {
            // hash match is only a hint; confirm with a real string compare
            Image img(binImage);
            if ( !img.isInvalid() && (strcmp(img.path(), path) == 0) ) {
                foundIndex = i;
                return binImage;
            }
        }
        p += _binaryData->imagesEntrySize;
    }
    // check each alias
    const binary_format::AliasEntry* aliasEntries = (binary_format::AliasEntry*)((uint8_t*)_binaryData + _binaryData->imageAliasOffset);
    for (uint32_t i=0; i < _binaryData->imageAliasCount; ++i) {
        const char* aliasPath = stringFromPool(aliasEntries[i].aliasOffsetInStringPool);
        if ( aliasEntries[i].aliasHash == targetHash ) {
            if ( strcmp(aliasPath, path) == 0 ) {
                Image img = image(aliasEntries[i].imageIndexInGroup);
                if ( !img.isInvalid() ) {
                    foundIndex = aliasEntries[i].imageIndexInGroup;
                    return img.binaryData();
                }
            }
        }
    }
    return nullptr;
}
625
// Maps a vm offset within the dyld shared cache back to the image that
// contains it (group 0 only).  Returns the image record, the cache offset
// of its mach_header (first segment), and the permissions of the segment
// that contained 'cacheVmOffset'; nullptr if no segment covers the offset.
const binary_format::Image* ImageGroup::findImageByCacheOffset(size_t cacheVmOffset, uint32_t& mhCacheOffset, uint8_t& foundPermissions) const
{
    assert(groupNum() == 0);

    const binary_format::DyldCacheSegment* cacheSegs = (binary_format::DyldCacheSegment*)segmentPool(0);
    const binary_format::Image* image = (binary_format::Image*)((char*)_binaryData + _binaryData->imagesPoolOffset);
    // most address lookups are in TEXT, so just search first segment in first pass
    for (uint32_t imageIndex=0; imageIndex < _binaryData->imagesPoolCount; ++imageIndex) {
        const binary_format::DyldCacheSegment* segInfo = &cacheSegs[image->segmentsArrayStartIndex];
        if ( (cacheVmOffset >= segInfo->cacheOffset) && (cacheVmOffset < (segInfo->cacheOffset + segInfo->size)) ) {
            // first segment is TEXT, so its cache offset is also the mach_header's
            mhCacheOffset    = segInfo->cacheOffset;
            foundPermissions = segInfo->permissions;
            return image;
        }
        image = (binary_format::Image*)((char*)image + _binaryData->imagesEntrySize);
    }
    // second pass, skip TEXT segment
    image = (binary_format::Image*)((char*)_binaryData + _binaryData->imagesPoolOffset);
    for (uint32_t imageIndex=0; imageIndex < _binaryData->imagesPoolCount; ++imageIndex) {
        for (uint32_t segIndex=1; segIndex < image->segmentsArrayCount; ++segIndex) {
            const binary_format::DyldCacheSegment* segInfo = &cacheSegs[image->segmentsArrayStartIndex+segIndex];
            if ( (cacheVmOffset >= segInfo->cacheOffset) && (cacheVmOffset < (segInfo->cacheOffset + segInfo->size)) ) {
                // report the header (segment 0) offset, not the matching segment's
                mhCacheOffset    = cacheSegs[image->segmentsArrayStartIndex].cacheOffset;
                foundPermissions = segInfo->permissions;
                return image;
            }
        }
        image = (binary_format::Image*)((char*)image + _binaryData->imagesEntrySize);
    }
    return nullptr;
}
657
658 void ImageGroup::forEachAliasOf(uint32_t imageIndex, void (^handler)(const char* aliasPath, uint32_t aliasPathHash, bool& stop)) const
659 {
660 bool stop = false;
661 const binary_format::AliasEntry* aliasEntries = (binary_format::AliasEntry*)((uint8_t*)_binaryData + _binaryData->imageAliasOffset);
662 for (uint32_t i=0; i < _binaryData->imageAliasCount; ++i) {
663 if ( aliasEntries[i].imageIndexInGroup == imageIndex ) {
664 const char* aliasPath = stringFromPool(aliasEntries[i].aliasOffsetInStringPool);
665 handler(aliasPath, aliasEntries[i].aliasHash, stop);
666 if ( stop )
667 break;
668 }
669 }
670 }
671
672 const char* ImageGroup::stringPool() const
673 {
674 return (char*)_binaryData + _binaryData->stringsPoolOffset;
675 }
676
677 const char* ImageGroup::stringFromPool(uint32_t offset) const
678 {
679 assert(offset < _binaryData->stringsPoolSize);
680 return (char*)_binaryData + _binaryData->stringsPoolOffset + offset;
681 }
682
683 uint32_t ImageGroup::stringPoolSize() const
684 {
685 return _binaryData->stringsPoolSize;;
686 }
687
688 binary_format::ImageRef ImageGroup::dependentPool(uint32_t index) const
689 {
690 assert(index < _binaryData->dependentsPoolCount);
691 const binary_format::ImageRef* depArray = (binary_format::ImageRef*)((char*)_binaryData + _binaryData->dependentsPoolOffset);
692 return depArray[index];
693 }
694
695 const uint64_t* ImageGroup::segmentPool(uint32_t index) const
696 {
697 assert(index < _binaryData->segmentsPoolCount);
698 const uint64_t* segArray = (uint64_t*)((char*)_binaryData + _binaryData->segmentsPoolOffset);
699 return &segArray[index];
700 }
701
702
703 const uint32_t* ImageGroup::initializerOffsetsPool() const
704 {
705 return (uint32_t*)((char*)_binaryData + _binaryData->intializerOffsetPoolOffset);
706 }
707
708 const uint32_t ImageGroup::initializerOffsetsCount() const
709 {
710 return _binaryData->intializerOffsetPoolCount;
711 }
712
713 const binary_format::ImageRef* ImageGroup::intializerListPool() const
714 {
715 return (binary_format::ImageRef*)((char*)_binaryData + _binaryData->intializerListPoolOffset);
716 }
717
718 const uint32_t ImageGroup::intializerListPoolCount() const
719 {
720 return _binaryData->intializerListPoolCount;
721 }
722
723 const binary_format::AllFixupsBySegment* ImageGroup::fixUps(uint32_t offset) const
724 {
725 return (binary_format::AllFixupsBySegment*)((char*)_binaryData + _binaryData->fixupsOffset + offset);
726 }
727
728 const TargetSymbolValue* ImageGroup::targetValuesArray() const
729 {
730 return (TargetSymbolValue*)((char*)_binaryData + _binaryData->targetsOffset);
731 }
732
733 uint32_t ImageGroup::targetValuesCount() const
734 {
735 return _binaryData->targetsPoolCount;
736 }
737
738
739 const uint32_t* ImageGroup::dofOffsetsPool() const
740 {
741 return (uint32_t*)((char*)_binaryData + _binaryData->dofOffsetPoolOffset);
742 }
743
744 const uint32_t ImageGroup::dofOffsetsCount() const
745 {
746 return _binaryData->dofOffsetPoolCount;
747 }
748
749
750 const uint32_t* ImageGroup::indirectGroupNumsPool() const
751 {
752 return (uint32_t*)((char*)_binaryData + _binaryData->indirectGroupNumPoolOffset);
753 }
754
755 const uint32_t ImageGroup::indirectGroupNumsCount() const
756 {
757 return _binaryData->indirectGroupNumPoolCount;
758 }
759
760 uint32_t ImageGroup::indirectGroupNum(uint32_t offset) const
761 {
762 assert(offset < _binaryData->indirectGroupNumPoolCount);
763 return indirectGroupNumsPool()[offset];
764 }
765
766 uint32_t ImageGroup::hashFunction(const char* str)
767 {
768 uint32_t h = 0;
769 for (const char* s=str; *s != '\0'; ++s)
770 h = h*5 + *s;
771 return h;
772 }
773
774
// Walks all cache locations that must be patched for patch-table entry
// 'patchTargetIndex' (group 0 only).  Each patch entry points at a run of
// PatchOffset records; the run ends at the record whose 'last' bit is set.
// The handler receives the target's cache offset and each pointer's cache
// offset (data-region offsets rebased by 'cacheDataVmOffset').
void ImageGroup::forEachDyldCachePatch(uint32_t patchTargetIndex, uint32_t cacheDataVmOffset, void (^handler)(uint32_t targetCacheOffset, uint32_t usePointersCacheOffset, bool hasAddend, bool& stop)) const
{
    assert(_binaryData->imagesEntrySize == sizeof(binary_format::CachedImage) && "only callable on group-0 in shared cache");
    assert(patchTargetIndex < _binaryData->cachePatchTableCount);
    const binary_format::PatchTable* patches = (binary_format::PatchTable*)((char*)_binaryData + _binaryData->cachePatchTableOffset);
    uint32_t offsetsIndex      = patches[patchTargetIndex].offsetsStartIndex;
    uint32_t targetCacheOffset = patches[patchTargetIndex].targetCacheOffset;
    const binary_format::PatchOffset* patchLocationOffsets = (binary_format::PatchOffset*)((char*)_binaryData + _binaryData->cachePatchOffsetsOffset);
    bool stop = false;
    while ( !stop ) {
        assert(offsetsIndex < _binaryData->cachePatchOffsetsCount);
        binary_format::PatchOffset entry = patchLocationOffsets[offsetsIndex];
        ++offsetsIndex;
        handler(targetCacheOffset, cacheDataVmOffset+entry.dataRegionOffset, entry.hasAddend, stop);
        if ( entry.last )   // 'last' bit terminates the run
            stop = true;
    }
}
793
794 void ImageGroup::forEachImageRefOverride(void (^handler)(binary_format::ImageRef standardDylibRef, binary_format::ImageRef overrideDylibRef, bool& stop)) const
795 {
796 bool stop = false;
797 const binary_format::ImageRefOverride* entries = (binary_format::ImageRefOverride*)((char*)_binaryData + _binaryData->imageOverrideTableOffset);
798 for (uint32_t i=0; (i < _binaryData->imageOverrideTableCount) && !stop; ++i) {
799 handler(entries[i].standardDylib, entries[i].overrideDylib, stop);
800 }
801 }
802
803 void ImageGroup::forEachImageRefOverride(const ImageGroupList& groupList, void (^handler)(Image standardDylib, Image overrideDylib, bool& stop)) const
804 {
805 forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef, binary_format::ImageRef overrideDylibRef, bool& stop) {
806 Image standardDylib = Image::resolveImageRef(groupList, standardDylibRef, false);
807 Image overrideDylib = Image::resolveImageRef(groupList, overrideDylibRef, false);
808 handler(standardDylib, overrideDylib, stop);
809 });
810 }
811
812
813 #if DYLD_IN_PROCESS
814
// In-process variant: converts each patch record into a concrete pointer
// inside the mapped dyld cache.  Recovers any inline addend by comparing
// the pointer's current value to the unpatched target address; the
// assert bounds addends to < 32 (larger deltas indicate corruption).
void ImageGroup::forEachDyldCachePatchLocation(const void* dyldCacheLoadAddress, uint32_t patchTargetIndex, void (^handler)(uintptr_t* locationToPatch, uintptr_t addend, bool&)) const
{
    DyldCacheParser cacheParser((DyldSharedCache*)dyldCacheLoadAddress, false);
    uint32_t cacheDataVmOffset = (uint32_t)cacheParser.dataRegionRuntimeVmOffset();
    forEachDyldCachePatch(patchTargetIndex, cacheDataVmOffset, ^(uint32_t targetCacheOffset, uint32_t usePointersCacheOffset, bool hasAddend, bool& stop) {
        uintptr_t addend = 0;
        uintptr_t* fixupLoc = (uintptr_t*)((char*)dyldCacheLoadAddress + usePointersCacheOffset);
        if ( hasAddend ) {
            // addend = how far the stored pointer is past the plain target
            uintptr_t currentValue  = *fixupLoc;
            uintptr_t expectedValue = (uintptr_t)dyldCacheLoadAddress + targetCacheOffset;
            uintptr_t delta         = currentValue - expectedValue;
            assert(delta < 32);
            addend = delta;
        }
        handler(fixupLoc, addend, stop);
    });
}

// Walks the symbol-override table, resolving each entry's image index to
// its binary image record in this group.
void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler)(uint32_t patchTableIndex, const BinaryImageData* image, uint32_t imageOffset, bool& stop)) const
{
    bool stop = false;
    const binary_format::DyldCacheOverride* entries = (binary_format::DyldCacheOverride*)((char*)_binaryData + _binaryData->symbolOverrideTableOffset);
    for (uint32_t i=0; (i < _binaryData->symbolOverrideTableCount) && !stop; ++i) {
        handler(entries[i].patchTableIndex, imageBinary(entries[i].imageIndex), entries[i].imageOffset, stop);
    }
}
841
842 #else
843
844 void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler)(uint32_t patchTableIndex, uint32_t imageIndexInClosure, uint32_t imageOffset, bool& stop)) const
845 {
846 bool stop = false;
847 const binary_format::DyldCacheOverride* entries = (binary_format::DyldCacheOverride*)((char*)_binaryData + _binaryData->symbolOverrideTableOffset);
848 for (uint32_t i=0; (i < _binaryData->symbolOverrideTableCount) && !stop; ++i) {
849 handler(entries[i].patchTableIndex, entries[i].imageIndex, entries[i].imageOffset, stop);
850 }
851 }
852
853 void ImageGroup::forEachDyldCachePatchLocation(const DyldCacheParser& cacheParser, void (^handler)(uint32_t targetCacheOffset, const std::vector<uint32_t>& usesPointersCacheOffsets, bool& stop)) const
854 {
855 uint32_t cacheDataVmOffset = (uint32_t)cacheParser.dataRegionRuntimeVmOffset();
856 __block std::vector<uint32_t> pointerCacheOffsets;
857 bool stop = false;
858 for (uint32_t patchIndex=0; patchIndex < _binaryData->cachePatchTableCount; ++patchIndex) {
859 pointerCacheOffsets.clear();
860 __block uint32_t targetCacheOffset = 0;
861 forEachDyldCachePatch(patchIndex, cacheDataVmOffset, ^(uint32_t targetCacheOff, uint32_t usePointersCacheOffset, bool hasAddend, bool&) {
862 targetCacheOffset = targetCacheOff;
863 pointerCacheOffsets.push_back(usePointersCacheOffset);
864 });
865 std::sort(pointerCacheOffsets.begin(), pointerCacheOffsets.end(), [&](uint32_t a, uint32_t b) { return a < b; });
866 handler(targetCacheOffset, pointerCacheOffsets, stop);
867 if ( stop )
868 break;
869 }
870 }
871
872 bool ImageGroup::hasPatchTableIndex(uint32_t targetCacheOffset, uint32_t& foundIndex) const
873 {
874 const binary_format::PatchTable* patches = (binary_format::PatchTable*)((char*)_binaryData + _binaryData->cachePatchTableOffset);
875 for (uint32_t i=0; i < _binaryData->cachePatchTableCount; ++i) {
876 if ( patches[i].targetCacheOffset == targetCacheOffset ) {
877 foundIndex = i;
878 return true;
879 }
880 }
881 return false;
882 }
883
884 #endif
885
886
887 //////////////////////////// Image ////////////////////////////////////////
888
889
890
891 const ImageGroup Image::group() const
892 {
893 return ImageGroup((binary_format::ImageGroup*)(((char*)_binaryData) + (_binaryData->groupOffset)));
894 }
895
896 uint32_t Image::maxLoadCount() const
897 {
898 return _binaryData->maxLoadCount;
899 }
900
901 const char* Image::path() const
902 {
903 return group().stringFromPool(_binaryData->pathPoolOffset);
904 }
905
906 uint32_t Image::pathHash() const
907 {
908 return _binaryData->pathHash;
909 }
910
911 const char* Image::leafName() const
912 {
913 const char* path = group().stringFromPool(_binaryData->pathPoolOffset);
914 const char* lastSlash = strrchr(path, '/');
915 if ( lastSlash != nullptr )
916 return lastSlash+1;
917 else
918 return path;
919 }
920
// Returns a pointer to the UUID stored in the binary image data.
const uuid_t* Image::uuid() const
{
    return &_binaryData->uuid;
}
925
926 bool Image::isInvalid() const
927 {
928 return (_binaryData == nullptr) || _binaryData->isInvalid;
929 }
930
931 bool Image::hasObjC() const
932 {
933 return _binaryData->hasObjC;
934 }
935
936 bool Image::isBundle() const
937 {
938 return _binaryData->isBundle;
939 }
940
941 bool Image::hasWeakDefs() const
942 {
943 return _binaryData->hasWeakDefs;
944 }
945
946 bool Image::mayHavePlusLoads() const
947 {
948 return _binaryData->mayHavePlusLoads;
949 }
950
951 bool Image::hasTextRelocs() const
952 {
953 return _binaryData->hasTextRelocs;
954 }
955
956 bool Image::neverUnload() const
957 {
958 return _binaryData->neverUnload;
959 }
960
961 bool Image::cwdMustBeThisDir() const
962 {
963 return _binaryData->cwdSameAsThis;
964 }
965
966 bool Image::isPlatformBinary() const
967 {
968 return _binaryData->isPlatformBinary;
969 }
970
971 bool Image::overridableDylib() const
972 {
973 return _binaryData->overridableDylib;
974 }
975
976 void Image::forEachDependentImage(const ImageGroupList& groups, void (^handler)(uint32_t depIndex, Image depImage, LinkKind kind, bool& stop)) const
977 {
978 assert(!_binaryData->isInvalid);
979 binary_format::ImageRef missingRef = binary_format::ImageRef::weakImportMissing();
980 __block bool stop = false;
981 for (uint32_t depIndex=0; (depIndex < _binaryData->dependentsArrayCount) && !stop; ++depIndex) {
982 binary_format::ImageRef ref = group().dependentPool(_binaryData->dependentsArrayStartIndex + depIndex);
983 if ( ref != missingRef ) {
984 Image depImage(resolveImageRef(groups, ref));
985 handler(depIndex, depImage, (LinkKind)ref.kind(), stop);
986 }
987 }
988 }
989
990
991 #if !DYLD_IN_PROCESS
992 bool Image::recurseAllDependentImages(const ImageGroupList& groups, std::unordered_set<const BinaryImageData*>& allDependents) const
993 {
994 if ( isInvalid() )
995 return false;
996 __block bool result = true;
997 forEachDependentImage(groups, ^(uint32_t depIndex, Image depImage, LinkKind kind, bool& stop) {
998 if ( allDependents.count(depImage.binaryData()) == 0 ) {
999 allDependents.insert(depImage.binaryData());
1000 if ( !depImage.recurseAllDependentImages(groups, allDependents) ) {
1001 result = false;
1002 stop = true;
1003 }
1004 }
1005 });
1006 return result;
1007 }
1008 #endif
1009
1010 bool Image::recurseAllDependentImages(const ImageGroupList& groups, SlowLoadSet& allDependents, bool& stopped,
1011 void (^handler)(const dyld3::launch_cache::binary_format::Image* aBinImage, bool& stop)) const
1012 {
1013 __block bool result = true;
1014 // breadth first, add all directly dependent images
1015 const dyld3::launch_cache::binary_format::Image* needToProcessArray[_binaryData->dependentsArrayCount];
1016 memset((void*)needToProcessArray, 0, _binaryData->dependentsArrayCount * sizeof(*needToProcessArray));
1017 const dyld3::launch_cache::binary_format::Image** const needToProcess = needToProcessArray;
1018 forEachDependentImage(groups, ^(uint32_t depIndex, Image depImage, LinkKind kind, bool& stop) {
1019 const dyld3::launch_cache::binary_format::Image* depImageData = depImage.binaryData();
1020 if ( allDependents.contains(depImageData) ) {
1021 needToProcess[depIndex] = nullptr;
1022 }
1023 else {
1024 needToProcess[depIndex] = depImageData;
1025 if ( !allDependents.add(depImageData) ) {
1026 result = false;
1027 stop = true;
1028 return;
1029 }
1030 if (handler) {
1031 handler(depImageData, stop);
1032 if ( stop )
1033 stopped = true;
1034 }
1035 }
1036 });
1037
1038 // recurse on each dependent image
1039 for (int i=0; !stopped && (i < _binaryData->dependentsArrayCount); ++i) {
1040 if ( const dyld3::launch_cache::binary_format::Image* depImageData = needToProcess[i] ) {
1041 Image depImage(depImageData);
1042 if ( !depImage.recurseAllDependentImages(groups, allDependents, stopped, handler) ) {
1043 return false;
1044 }
1045 }
1046 }
1047
1048 return result;
1049 }
1050
1051 bool Image::recurseAllDependentImages(const ImageGroupList& groups, SlowLoadSet& allDependents,
1052 void (^handler)(const dyld3::launch_cache::binary_format::Image* aBinImage, bool& stop)) const
1053 {
1054 bool stopped = false;
1055 return recurseAllDependentImages(groups, allDependents, stopped, handler);
1056 }
1057
// Walks each of this disk image's segments, reporting for each: its file
// offset/size and its vm offset/size relative to the image load address.
//   - fileOffset/fileSize: location of the segment's bytes in the file;
//     fileOffset is reported as 0 for segments with no file content
//   - vmOffset: signed offset from the load address; segments that precede the
//     first file-backed segment (e.g. PAGEZERO) make the starting value negative
//   - permissions: protection bits as stored in the DiskSegment entry
// Entries flagged paddingNotSeg consume address/file space but are not reported,
// and segIndex counts only the reported segments.
void Image::forEachDiskSegment(void (^handler)(uint32_t segIndex, uint32_t fileOffset, uint32_t fileSize, int64_t vmOffset, uint64_t vmSize, uint8_t permissions, bool& stop)) const
{
    assert(isDiskImage());
    // page size is a per-image property recorded in the binary data
    const uint32_t pageSize = (_binaryData->has16KBpages ? 0x4000 : 0x1000);
    const uint64_t* rawSegs = group().segmentPool(_binaryData->segmentsArrayStartIndex);
    const binary_format::DiskSegment* diskSegs = (binary_format::DiskSegment*)rawSegs;
    uint32_t segIndex = 0;
    uint32_t fileOffset = 0;
    int64_t vmOffset = 0;
    // decrement vmOffset by all segments before TEXT (e.g. PAGEZERO)
    for (uint32_t i=0; i < _binaryData->segmentsArrayCount; ++i) {
        const binary_format::DiskSegment* seg = &diskSegs[i];
        if ( seg->filePageCount != 0 ) {
            break;   // reached the first file-backed segment; vmOffset is now set
        }
        vmOffset -= (uint64_t)seg->vmPageCount * pageSize;
    }
    // walk each segment and call handler
    for (uint32_t i=0; i < _binaryData->segmentsArrayCount; ++i) {
        const binary_format::DiskSegment* seg = &diskSegs[i];
        uint64_t vmSize = (uint64_t)seg->vmPageCount * pageSize;
        uint32_t fileSize = seg->filePageCount * pageSize;
        if ( !seg->paddingNotSeg ) {
            bool stop = false;
            handler(segIndex, ( fileSize == 0) ? 0 : fileOffset, fileSize, vmOffset, vmSize, seg->permissions, stop);
            ++segIndex;
            if ( stop )
                break;
        }
        // padding and reported segments both advance the running offsets
        vmOffset += vmSize;
        fileOffset += fileSize;
    }
}
1091
1092 void Image::forEachCacheSegment(void (^handler)(uint32_t segIndex, uint64_t vmOffset, uint64_t vmSize, uint8_t permissions, bool& stop)) const
1093 {
1094 assert(!isDiskImage());
1095 const uint64_t* rawSegs = group().segmentPool(_binaryData->segmentsArrayStartIndex);
1096 const binary_format::DyldCacheSegment* cacheSegs = (binary_format::DyldCacheSegment*)rawSegs;
1097 bool stop = false;
1098 for (uint32_t i=0; i < _binaryData->segmentsArrayCount; ++i) {
1099 uint64_t vmOffset = cacheSegs[i].cacheOffset - cacheSegs[0].cacheOffset;
1100 uint64_t vmSize = cacheSegs[i].size;
1101 uint8_t permissions = cacheSegs[i].permissions;
1102 handler(i, vmOffset, vmSize, permissions, stop);
1103 if ( stop )
1104 break;
1105 }
1106 }
1107
1108 bool Image::segmentHasFixups(uint32_t segIndex) const
1109 {
1110 return (segmentFixups(segIndex) != nullptr);
1111 }
1112
// Returns true if `addr` falls within one of this image's segments when the
// image is loaded at `imageLoadAddress`; if so, optionally reports that
// segment's permissions through `permissions`.
// NOTE(review): for disk images the per-segment vmOffset is int64_t and can be
// negative (segments before TEXT); the comparison against the unsigned
// offsetInImage converts it to a huge unsigned value so those segments never
// match — presumably intended (such segments are not accessible), confirm.
bool Image::containsAddress(const void* addr, const void* imageLoadAddress, uint8_t* permissions) const
{
    if ( addr < imageLoadAddress )
        return false;

    __block bool found = false;
    // distance of the queried address from the image's load address
    uint64_t offsetInImage = (char*)addr - (char*)imageLoadAddress;
    if ( _binaryData->isDiskImage ) {
        forEachDiskSegment(^(uint32_t segIterIndex, uint32_t fileOffset, uint32_t fileSize, int64_t vmOffset, uint64_t vmSize, uint8_t segPerms, bool& stop) {
            if ( (offsetInImage >= vmOffset) && (offsetInImage < vmOffset+vmSize) ) {
                if ( permissions != nullptr )
                    *permissions = segPerms;
                found = true;
                stop = true;
            }
        });
    }
    else {
        forEachCacheSegment(^(uint32_t segIterIndex, uint64_t vmOffset, uint64_t vmSize, uint8_t segPerms, bool& stop) {
            if ( (offsetInImage >= vmOffset) && (offsetInImage < vmOffset+vmSize) ) {
                if ( permissions != nullptr )
                    *permissions = segPerms;
                found = true;
                stop = true;
            }
        });
    }
    return found;
}
1142
1143 void Image::forEachInitializer(const void* imageLoadAddress, void (^handler)(const void* initializer)) const
1144 {
1145 const uint32_t initCount = _binaryData->initOffsetsArrayCount;
1146 const uint32_t startIndex = _binaryData->initOffsetsArrayStartIndex;
1147 const uint32_t* initOffsets = group().initializerOffsetsPool();
1148 assert(startIndex + initCount <= group().initializerOffsetsCount());
1149 for (uint32_t i=0; i < initCount; ++i) {
1150 uint32_t anOffset = initOffsets[startIndex+i];
1151 const void* func = (char*)imageLoadAddress + anOffset;
1152 handler(func);
1153 }
1154 }
1155
1156 void Image::forEachInitBefore(void (^handler)(binary_format::ImageRef imageToInit)) const
1157 {
1158 const uint32_t initCount = _binaryData->initBeforeArrayCount;
1159 const uint32_t startIndex = _binaryData->initBeforeArrayStartIndex;
1160 const uint32_t endIndex = group().intializerListPoolCount();
1161 const binary_format::ImageRef* initRefs = group().intializerListPool();
1162 assert(startIndex + initCount <= endIndex);
1163 for (uint32_t i=0; i < initCount; ++i) {
1164 binary_format::ImageRef ref = initRefs[startIndex+i];
1165 handler(ref);
1166 }
1167 }
1168
1169 void Image::forEachDOF(const void* imageLoadAddress, void (^handler)(const void* section)) const
1170 {
1171 const uint32_t dofCount = _binaryData->dofOffsetsArrayCount;
1172 const uint32_t startIndex = _binaryData->dofOffsetsArrayStartIndex;
1173 const uint32_t* dofOffsets = group().dofOffsetsPool();
1174 assert(startIndex + dofCount <= group().dofOffsetsCount());
1175 for (uint32_t i=0; i < dofCount; ++i) {
1176 uint32_t anOffset = dofOffsets[startIndex+i];
1177 const void* section = (char*)imageLoadAddress + anOffset;
1178 handler(section);
1179 }
1180 }
1181
1182 Image Image::resolveImageRef(const ImageGroupList& groups, binary_format::ImageRef ref, bool applyOverrides)
1183 {
1184 // first look if ref image is overridden in closure
1185 __block binary_format::ImageRef targetRef = ref;
1186 if ( applyOverrides ) {
1187 binary_format::ImageRef refToMatch = ref;
1188 refToMatch.clearKind();
1189 for (int i=0; i < groups.count(); ++i) {
1190 ImageGroup aGroup(groups[i]);
1191 if ( aGroup.groupNum() >= 2 ) {
1192 aGroup.forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef, binary_format::ImageRef overrideDylibRef, bool &stop) {
1193 if ( refToMatch == standardDylibRef ) {
1194 targetRef = overrideDylibRef;
1195 stop = true;
1196 }
1197 });
1198 }
1199 }
1200 }
1201 // create Image object from targetRef
1202 for (int i=0; i < groups.count(); ++i) {
1203 ImageGroup aGroup(groups[i]);
1204 if ( aGroup.groupNum() == targetRef.groupNum() ) {
1205 return aGroup.image(targetRef.indexInGroup());
1206 }
1207 }
1208 //assert(0 && "invalid ImageRef");
1209 return Image(nullptr);
1210 }
1211
1212 void Image::forEachInitBefore(const ImageGroupList& groups, void (^handler)(Image imageToInit)) const
1213 {
1214 forEachInitBefore(^(binary_format::ImageRef ref) {
1215 handler(resolveImageRef(groups, ref));
1216 });
1217 }
1218
1219 bool Image::validateUsingModTimeAndInode() const
1220 {
1221 return !group().binaryData()->imageFileInfoIsCdHash;
1222 }
1223
1224 bool Image::validateUsingCdHash() const
1225 {
1226 // don't have cdHash info if union has modtime info in it
1227 if ( !group().binaryData()->imageFileInfoIsCdHash )
1228 return false;
1229
1230 // don't have codesign blob in dyld cache
1231 if ( !_binaryData->isDiskImage )
1232 return false;
1233
1234 // return true if image is code signed and cdHash16 is non-zero
1235 const binary_format::DiskImage* diskImage = asDiskImage();
1236 if ( diskImage->codeSignFileOffset == 0 )
1237 return false;
1238
1239 uint8_t zeros[16];
1240 bzero(zeros, 16);
1241 return (memcmp(cdHash16(), zeros, 16) != 0);
1242 }
1243
1244 const uint8_t* Image::cdHash16() const
1245 {
1246 return _binaryData->fileInfo.cdHash16.bytes;
1247 }
1248
1249 uint64_t Image::fileModTime() const
1250 {
1251 return _binaryData->fileInfo.statInfo.mtime;
1252 }
1253
1254 uint64_t Image::fileINode() const
1255 {
1256 return _binaryData->fileInfo.statInfo.inode;
1257 }
1258
1259
1260 bool Image::isDiskImage() const
1261 {
1262 return _binaryData->isDiskImage;
1263 }
1264
1265 const binary_format::DiskImage* Image::asDiskImage() const
1266 {
1267 assert(_binaryData->isDiskImage);
1268 return (binary_format::DiskImage*)_binaryData;
1269 }
1270
1271 const binary_format::CachedImage* Image::asCachedImage() const
1272 {
1273 assert(!_binaryData->isDiskImage);
1274 return (binary_format::CachedImage*)_binaryData;
1275 }
1276
1277 uint32_t Image::pageSize() const
1278 {
1279 return (_binaryData->has16KBpages ? 0x4000 : 0x1000);
1280 }
1281
1282 uint32_t Image::cacheOffset() const
1283 {
1284 assert(!_binaryData->isDiskImage);
1285 const uint64_t* rawSegs = group().segmentPool(_binaryData->segmentsArrayStartIndex);
1286 const binary_format::DyldCacheSegment* cacheSegs = (binary_format::DyldCacheSegment*)rawSegs;
1287 return cacheSegs[0].cacheOffset;
1288 }
1289
1290 uint32_t Image::patchStartIndex() const
1291 {
1292 return asCachedImage()->patchStartIndex;
1293 }
1294
1295 uint32_t Image::patchCount() const
1296 {
1297 return asCachedImage()->patchCount;
1298 }
1299
1300 uint64_t Image::sliceOffsetInFile() const
1301 {
1302 return asDiskImage()->sliceOffsetIn4K * 4096;
1303 }
1304
1305 bool Image::hasCodeSignature(uint32_t& fileOffset, uint32_t& size) const
1306 {
1307 const binary_format::DiskImage* diskImage = asDiskImage();
1308 if ( diskImage->codeSignFileOffset != 0 ) {
1309 fileOffset = diskImage->codeSignFileOffset;
1310 size = diskImage->codeSignFileSize;
1311 return true;
1312 }
1313 return false;
1314 }
1315
1316 bool Image::isFairPlayEncrypted(uint32_t& textOffset, uint32_t& size) const
1317 {
1318 const binary_format::DiskImage* diskImage = asDiskImage();
1319 if ( diskImage->fairPlayTextPageCount != 0 ) {
1320 textOffset = diskImage->fairPlayTextStartPage * pageSize();
1321 size = diskImage->fairPlayTextPageCount * pageSize();
1322 return true;
1323 }
1324 return false;
1325 }
1326
1327 uint64_t Image::vmSizeToMap() const
1328 {
1329 return asDiskImage()->totalVmPages * pageSize();
1330 }
1331
// Interprets one page's fixup opcode stream and invokes `handler` for every
// rebase/bind location it encodes.
//   pageFixups - opcode stream, terminated by a 0 (FixUpOpcode::done) byte
//   segContent - segment content pointer; unused here except passed through on
//                recursion (repeat case)
//   offset     - in/out: current byte offset within the page
//   ordinal    - in/out: current target-symbol ordinal for bind fixups
// Encoding: the high nibble of each opcode byte selects the operation; the low
// nibble is a small immediate, with 0 meaning "a uleb128 operand follows".
// NOTE(review): a stop requested by the handler inside the `repeat` recursion
// sets the callee's local `stop`, not this frame's, so replayed patterns may
// not terminate the outer loop early — confirm intended.
void Image::forEachFixup(const uint8_t* pageFixups, const void* segContent, uint32_t& offset, uint32_t& ordinal,
                         void (^handler)(uint32_t pageOffset, FixupKind kind, uint32_t ordinal, bool& stop))
{
    bool stop = false;
    for (const uint8_t* p = pageFixups; (*p != 0) && !stop;) {
        binary_format::FixUpOpcode fullOp = (binary_format::FixUpOpcode)(*p);          // full 8-bit opcode
        binary_format::FixUpOpcode majorOp = (binary_format::FixUpOpcode)(*p & 0xF0);  // high-nibble operation
        uint8_t low4 = (*p & 0x0F);                                                    // low-nibble immediate
        switch ( majorOp ) {
            case binary_format::FixUpOpcode::done:
                return;
            case binary_format::FixUpOpcode::rebase32:    // apply group: fullOp selects the exact fixup kind
                switch ( fullOp ) {
                    case binary_format::FixUpOpcode::bind64:
                        handler(offset, FixupKind::bind64, ordinal, stop);
                        offset += 8;    // 64-bit location consumed
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::bind32:
                        handler(offset, FixupKind::bind32, ordinal, stop);
                        offset += 4;    // 32-bit location consumed
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::rebase64:
                        handler(offset, FixupKind::rebase64, 0, stop);   // rebases carry no target ordinal
                        offset += 8;
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::rebase32:
                        handler(offset, FixupKind::rebase32, 0, stop);
                        offset += 4;
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::rebaseText32:
                        handler(offset, FixupKind::rebaseText32, 0, stop);
                        offset += 4;
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::bindText32:
                        handler(offset, FixupKind::bindText32, ordinal, stop);
                        offset += 4;
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::bindTextRel32:
                        handler(offset, FixupKind::bindTextRel32, ordinal, stop);
                        offset += 4;
                        ++p;
                        break;
                    case binary_format::FixUpOpcode::bindImportJmp32:
                        handler(offset, FixupKind::bindImportJmp32, ordinal, stop);
                        offset += 5;    // 5-byte location for this kind
                        ++p;
                        break;
                    //case binary_format::FixUpOpcode::fixupChain64:
                    //    assert(0 && "rebase/bind chain support not implemented yet");
                    //    break;
                    default:
                        assert(0 && "bad opcode");
                        break;
                }
                break;
            case binary_format::FixUpOpcode::incPageOffset:
                // advance offset by low4*4 bytes, or by uleb128*4 when low4 == 0
                if ( low4 == 0 ) {
                    ++p;
                    offset += read_uleb128(p, p+8)*4;
                }
                else {
                    offset += (low4*4);
                    ++p;
                }
                break;
            case binary_format::FixUpOpcode::setPageOffset:
                // set offset to low4, or to a following uleb128 when low4 == 0
                if ( low4 == 0 ) {
                    ++p;
                    offset = (uint32_t)read_uleb128(p, p+8);
                }
                else {
                    offset = low4;
                    ++p;
                }
                break;
            case binary_format::FixUpOpcode::incOrdinal:
                // advance ordinal by low4, or by a following uleb128 when low4 == 0
                if ( low4 == 0 ) {
                    ++p;
                    ordinal += read_uleb128(p, p+8);
                }
                else {
                    ordinal += low4;
                    ++p;
                }
                break;
            case binary_format::FixUpOpcode::setOrdinal:
                // set ordinal to low4, or to a following uleb128 when low4 == 0
                if ( low4 == 0 ) {
                    ++p;
                    ordinal = (uint32_t)read_uleb128(p, p+8);
                }
                else {
                    ordinal = low4;
                    ++p;
                }
                break;
            case binary_format::FixUpOpcode::repeat: {
                // low4 = byte length of the sub-pattern that follows the uleb128
                // count; the pattern is copied, 'done'-terminated, and replayed
                // count times via recursion
                ++p;
                uint32_t count = (uint32_t)read_uleb128(p, p+8);
                uint8_t pattern[32];
                for (int j=0; j < low4; ++j) {
                    pattern[j] = *p++;
                }
                pattern[low4] = (uint8_t)binary_format::FixUpOpcode::done;
                for (int j=0; j < count; ++j) {
                    forEachFixup(&pattern[0], segContent, offset, ordinal, handler);
                    if ( stop )
                        break;
                }
                }
                break;
            default:
                assert(0 && "bad opcode");
                break;
        }
    }
}
1454
1455 const binary_format::SegmentFixupsByPage* Image::segmentFixups(uint32_t segIndex) const
1456 {
1457 const binary_format::DiskImage* diskImage = asDiskImage();
1458 //const BinaryImageGroupData* g = group().binaryData();
1459 uint32_t segCountWithFixups = diskImage->fixupsPoolSegCount;
1460 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d), group=%p, segCountWithFixup=%d\n", _binaryData, segIndex, g, segCountWithFixups);
1461 const binary_format::AllFixupsBySegment* allFixups = group().fixUps(diskImage->fixupsPoolOffset);
1462 for (uint32_t i=0; i < segCountWithFixups; ++i) {
1463 if ( allFixups[i].segIndex == segIndex ) {
1464 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) allFixups=%p, allFixups[%d].segIndex=%d, allFixups[%d].offset=%d\n", _binaryData, segIndex, allFixups, i, allFixups[i].segIndex, i, allFixups[i].offset);
1465 return (binary_format::SegmentFixupsByPage*)((char*)allFixups + allFixups[i].offset);
1466 }
1467 }
1468 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) => nullptr\n", _binaryData, segIndex);
1469 return nullptr;
1470 }
1471
1472 void Image::forEachFixup(uint32_t segIndex, MemoryRange segContent, void (^handler)(uint64_t segOffset, FixupKind, TargetSymbolValue, bool& stop)) const
1473 {
1474 const binary_format::SegmentFixupsByPage* segFixups = segmentFixups(segIndex);
1475 if ( segFixups == nullptr )
1476 return;
1477
1478 assert(segFixups->pageCount*segFixups->pageSize <= segContent.size);
1479
1480 const uint32_t ordinalsIndexInGroupPool = asDiskImage()->targetsArrayStartIndex;
1481 const uint32_t maxOrdinal = asDiskImage()->targetsArrayCount;
1482 const TargetSymbolValue* groupArray = group().targetValuesArray();
1483 assert(ordinalsIndexInGroupPool < group().targetValuesCount());
1484 const TargetSymbolValue* targetOrdinalArray = &groupArray[ordinalsIndexInGroupPool];
1485
1486 for (uint32_t pageIndex=0; pageIndex < segFixups->pageCount; ++pageIndex) {
1487 const uint8_t* opcodes = (uint8_t*)(segFixups) + segFixups->pageInfoOffsets[pageIndex];
1488 uint64_t pageStartOffet = pageIndex * segFixups->pageSize;
1489 uint32_t curOffset = 0;
1490 uint32_t curOrdinal = 0;
1491 forEachFixup(opcodes, segContent.address, curOffset, curOrdinal, ^(uint32_t pageOffset, FixupKind kind, uint32_t targetOrdinal, bool& stop) {
1492 assert(targetOrdinal < maxOrdinal);
1493 handler(pageStartOffet + pageOffset, kind, targetOrdinalArray[targetOrdinal], stop);
1494 });
1495 }
1496 }
1497
1498
1499 } // namespace launch_cache
1500 } // namespace dyld3
1501
1502
1503