2 * Copyright (c) 2017 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
27 #include <uuid/uuid.h>
31 #include "LaunchCacheFormat.h"
32 #include "LaunchCache.h"
33 #include "MachOParser.h"
34 #include "DyldCacheParser.h"
37 extern void log(const char* format
, ...) __attribute__((format(printf
, 1, 2)));
41 namespace launch_cache
{
// Decodes a ULEB128-encoded unsigned integer from the byte stream at 'p',
// advancing 'p' past the bytes consumed.  'end' bounds the readable range.
// Returns the decoded value truncated to uintptr_t.
static uintptr_t read_uleb128(const uint8_t*& p, const uint8_t* end)
{
    uint64_t result = 0;
    int      bit    = 0;
    do {
        if ( p == end ) {
            // Ran off the end of the buffer: malformed encoding.
            // Note: assert(0 && "...") actually fires in debug builds;
            // a bare assert("string") is always true and never triggers.
            assert(0 && "malformed uleb128");
            break;
        }
        uint64_t slice = *p & 0x7f;
        if ( bit > 63 ) {
            // More than 64 bits of payload cannot fit in the result.
            assert(0 && "uleb128 too big for uint64");
            break;
        }
        else {
            result |= (slice << bit);
            bit += 7;
        }
    } while (*p++ & 0x80);   // high bit set means another byte follows
    return (uintptr_t)result;
}
67 bool MemoryRange::contains(const MemoryRange
& other
) const
69 if ( this->address
> other
.address
)
71 const uint8_t* thisEnd
= (uint8_t*)address
+ size
;
72 const uint8_t* otherEnd
= (uint8_t*)other
.address
+ other
.size
;
73 return (thisEnd
>= otherEnd
);
76 bool MemoryRange::intersects(const MemoryRange
& other
) const
78 const uint8_t* thisEnd
= (uint8_t*)address
+ size
;
79 const uint8_t* otherEnd
= (uint8_t*)other
.address
+ other
.size
;
80 if ( otherEnd
< this->address
)
82 return ( other
.address
< thisEnd
);
86 //////////////////////////// SlowLoadSet ////////////////////////////////////////
88 bool SlowLoadSet::contains(const BinaryImageData
* image
)
90 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
97 bool SlowLoadSet::add(const BinaryImageData
* image
)
99 if ( _current
< _end
) {
106 void SlowLoadSet::forEach(void (^handler
)(const BinaryImageData
*))
108 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
113 void SlowLoadSet::forEach(void (^handler
)(const BinaryImageData
*, bool& stop
))
116 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
124 long SlowLoadSet::count() const
126 return (_current
- _start
);
130 //////////////////////////// TargetSymbolValue ////////////////////////////////////////
135 uintptr_t TargetSymbolValue::resolveTarget(Diagnostics
& diag
, const ImageGroup
& inGroup
, LoadedImages
& images
) const
137 // this block is only used if findExportedSymbol() needs to trace re-exported dylibs to find a symbol
138 MachOParser::DependentFinder reExportFollower
= ^(uint32_t depIndex
, const char* depLoadPath
, void* extra
, const mach_header
** foundMH
, void** foundExtra
) {
140 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
141 Image
anImage(binImage
);
142 if ( strcmp(depLoadPath
, anImage
.path()) == 0 ) {
147 return (*foundMH
!= nullptr);
151 switch ( _data
.sharedCache
.kind
) {
153 case TargetSymbolValue::kindSharedCache
:
154 assert(_data
.sharedCache
.offsetIntoCache
!= 0);
155 return (uintptr_t)(images
.dyldCacheLoadAddressForImage() + _data
.sharedCache
.offsetIntoCache
);
157 case TargetSymbolValue::kindAbsolute
:
158 offset
= (uintptr_t)_data
.absolute
.value
;
159 // sign extend 42 bit value
160 if ( offset
& 0x2000000000000000ULL
)
161 offset
|= 0xC000000000000000ULL
;
164 case TargetSymbolValue::kindGroup
: {
165 uint32_t groupNum
= _data
.group
.isIndirectGroup
? inGroup
.indirectGroupNum(_data
.group
.groupNum
) : _data
.group
.groupNum
;
166 uintptr_t targetImageLoadAddress
= (uintptr_t)(images
.loadAddressFromGroupAndIndex(groupNum
, _data
.group
.indexInGroup
));
167 if ( targetImageLoadAddress
== 0 )
168 diag
.error("image for groupNum=%d, indexInGroup=%d not found", groupNum
, _data
.group
.indexInGroup
);
169 offset
= (uintptr_t)_data
.group
.offsetInImage
;
170 // sign extend 42 bit offset
171 if ( offset
& 0x0000020000000000ULL
)
172 offset
|= 0xFFFFFC0000000000ULL
;
173 return targetImageLoadAddress
+ offset
;
176 case TargetSymbolValue::kindDynamicGroup
: {
177 const char* imagePath
= inGroup
.stringFromPool(_data
.dynamicGroup
.imagePathOffset
);
178 const char* symbolName
= inGroup
.stringFromPool(_data
.dynamicGroup
.symbolNameOffset
);
179 __block
uintptr_t result
= 0;
180 __block
bool found
= false;
181 if ( strcmp(imagePath
, "@flat") == 0 ) {
182 // search all images in load order
183 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
184 Diagnostics findSymbolDiag
;
185 dyld3::MachOParser
parser(mh
);
186 dyld3::MachOParser::FoundSymbol foundInfo
;
187 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, ^(uint32_t, const char* depLoadPath
, void*, const mach_header
** foundMH
, void**) {
188 // <rdar://problem/31921090> need to follow re-exported symbols to support libc renamed and reexported symbols
190 images
.forEachImage(^(uint32_t innerIndex
, const BinaryImageData
* innerBinImage
, const mach_header
* innerMH
, bool& innerStop
) {
191 Image
innerImage(innerBinImage
);
192 if ( strcmp(depLoadPath
, innerImage
.path()) == 0 ) {
197 return (*foundMH
!= nullptr);
199 switch (foundInfo
.kind
) {
200 case MachOParser::FoundSymbol::Kind::headerOffset
:
201 case MachOParser::FoundSymbol::Kind::resolverOffset
:
202 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
204 case MachOParser::FoundSymbol::Kind::absolute
:
205 result
= (uintptr_t)foundInfo
.value
;
208 images
.setAsNeverUnload(idx
);
213 // <rdar://problem/31944092> bind unfound flat symbols to NULL to support lazy binding semantics
219 else if ( strcmp(imagePath
, "@main") == 0 ) {
220 // search only main executable
221 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
222 if ( mh
->filetype
== MH_EXECUTE
) {
223 Diagnostics findSymbolDiag
;
224 dyld3::MachOParser
parser(mh
);
225 dyld3::MachOParser::FoundSymbol foundInfo
;
226 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, nullptr) ) {
227 switch (foundInfo
.kind
) {
228 case MachOParser::FoundSymbol::Kind::headerOffset
:
229 case MachOParser::FoundSymbol::Kind::resolverOffset
:
230 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
232 case MachOParser::FoundSymbol::Kind::absolute
:
233 result
= (uintptr_t)foundInfo
.value
;
242 else if ( strcmp(imagePath
, "@weak_def") == 0 ) {
243 // search images with weak definitions in load order
244 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
245 Image
anImage(binImage
);
246 if ( anImage
.hasWeakDefs() ) {
247 Diagnostics findSymbolDiag
;
248 dyld3::MachOParser
parser(mh
);
249 dyld3::MachOParser::FoundSymbol foundInfo
;
250 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, nullptr) ) {
251 switch (foundInfo
.kind
) {
252 case MachOParser::FoundSymbol::Kind::headerOffset
:
253 case MachOParser::FoundSymbol::Kind::resolverOffset
:
254 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
256 case MachOParser::FoundSymbol::Kind::absolute
:
257 result
= (uintptr_t)foundInfo
.value
;
261 images
.setAsNeverUnload(idx
);
268 // search only image the matches supplied path
269 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
270 Image
anImage(binImage
);
271 if ( strcmp(anImage
.path(), imagePath
) == 0 ) {
272 Diagnostics findSymbolDiag
;
273 dyld3::MachOParser
parser(mh
);
274 dyld3::MachOParser::FoundSymbol foundInfo
;
275 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, reExportFollower
) ) {
276 switch (foundInfo
.kind
) {
277 case MachOParser::FoundSymbol::Kind::headerOffset
:
278 case MachOParser::FoundSymbol::Kind::resolverOffset
:
279 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
281 case MachOParser::FoundSymbol::Kind::absolute
:
282 result
= (uintptr_t)foundInfo
.value
;
293 if ( _data
.dynamicGroup
.weakImport
)
295 diag
.error("dynamic symbol '%s' not found for %s", symbolName
, imagePath
);
299 assert(0 && "resolveTarget() not reachable");
304 TargetSymbolValue::TargetSymbolValue()
309 TargetSymbolValue
TargetSymbolValue::makeInvalid()
311 return TargetSymbolValue();
314 TargetSymbolValue
TargetSymbolValue::makeSharedCacheOffset(uint32_t offset
)
317 t
._data
.sharedCache
.kind
= kindSharedCache
;
318 t
._data
.sharedCache
.offsetIntoCache
= offset
;
322 TargetSymbolValue
TargetSymbolValue::makeAbsolute(uint64_t value
)
325 t
._data
.absolute
.kind
= kindAbsolute
;
326 t
._data
.absolute
.value
= value
;
330 TargetSymbolValue
TargetSymbolValue::makeGroupValue(uint32_t groupIndex
, uint32_t imageIndexInGroup
, uint64_t offsetInImage
, bool isIndirectGroupNum
)
332 assert(groupIndex
!= 0 || isIndirectGroupNum
);
333 assert(groupIndex
< 128);
334 assert(imageIndexInGroup
< 4096);
336 t
._data
.group
.kind
= kindGroup
;
337 t
._data
.group
.isIndirectGroup
= isIndirectGroupNum
;
338 t
._data
.group
.groupNum
= groupIndex
;
339 t
._data
.group
.indexInGroup
= imageIndexInGroup
;
340 t
._data
.group
.offsetInImage
= offsetInImage
;
344 TargetSymbolValue
TargetSymbolValue::makeDynamicGroupValue(uint32_t imagePathPoolOffset
, uint32_t imageSymbolPoolOffset
, bool weakImport
)
347 t
._data
.dynamicGroup
.kind
= kindDynamicGroup
;
348 t
._data
.dynamicGroup
.weakImport
= weakImport
;
349 t
._data
.dynamicGroup
.imagePathOffset
= imagePathPoolOffset
;
350 t
._data
.dynamicGroup
.symbolNameOffset
= imageSymbolPoolOffset
;
354 bool TargetSymbolValue::isSharedCacheTarget(uint64_t& offsetInCache
) const
356 if ( _data
.sharedCache
.kind
!= kindSharedCache
)
358 offsetInCache
= _data
.sharedCache
.offsetIntoCache
;
362 bool TargetSymbolValue::isGroupImageTarget(uint32_t& groupNum
, uint32_t& indexInGroup
, uint64_t& offsetInImage
) const
364 if ( _data
.sharedCache
.kind
!= kindGroup
)
366 // This is only used for interposing, so refuse to allow indirect for group 2
367 assert(!_data
.group
.isIndirectGroup
);
368 groupNum
= _data
.group
.groupNum
;
369 indexInGroup
= _data
.group
.indexInGroup
;
370 offsetInImage
= _data
.group
.offsetInImage
;
374 bool TargetSymbolValue::isInvalid() const
376 return (_data
.raw
== 0);
// Formats 'value' as a zero-padded hex string, e.g. 0x00001234.
static std::string hex8(uint64_t value) {
    char buff[32];
    // snprintf bounds the write; the cast keeps %llX correct on LP64
    // targets where uint64_t may be 'unsigned long'.
    snprintf(buff, sizeof(buff), "0x%08llX", (unsigned long long)value);
    return buff;
}
// Formats 'value' as an unsigned decimal string.
static std::string decimal(uint64_t value) {
    char buff[32];
    // snprintf bounds the write; cast keeps %llu portable across LP64/LLP64.
    snprintf(buff, sizeof(buff), "%llu", (unsigned long long)value);
    return buff;
}
391 std::string
TargetSymbolValue::asString(ImageGroup group
) const
394 switch ( _data
.sharedCache
.kind
) {
395 case kindSharedCache
:
396 if ( _data
.sharedCache
.offsetIntoCache
== 0 )
397 return "{invalid target}";
399 return "{cache+" + hex8(_data
.sharedCache
.offsetIntoCache
) + "}";
401 offset
= (uintptr_t)_data
.absolute
.value
;
402 // sign extend 42 bit value
403 if ( offset
& 0x2000000000000000ULL
)
404 offset
|= 0xC000000000000000ULL
;
405 return "{absolute:" + hex8(offset
) + "}";
407 offset
= _data
.group
.offsetInImage
;
408 // sign extend 42 bit offset
409 if ( offset
& 0x0000020000000000ULL
)
410 offset
|= 0xFFFFFC0000000000ULL
;
411 if ( _data
.group
.groupNum
== 1 )
412 return "{otherDylib[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
413 if ( _data
.group
.groupNum
== 2 )
414 return "{closure[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
416 uint32_t groupNum
= _data
.group
.isIndirectGroup
? group
.indirectGroupNum(_data
.group
.groupNum
) : _data
.group
.groupNum
;
417 return "{dlopen-group-" + decimal(groupNum
-2) + "[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
419 case kindDynamicGroup
:
420 return "{dynamic image='" + std::string(group
.stringFromPool(_data
.dynamicGroup
.imagePathOffset
))
421 + "' symbol='" + std::string(group
.stringFromPool(_data
.dynamicGroup
.symbolNameOffset
)) + "'}";
423 assert(0 && "unreachable");
429 //////////////////////////// ImageRef ////////////////////////////////////////
431 binary_format::ImageRef
binary_format::ImageRef::weakImportMissing()
433 ImageRef
missing(0xFFFFFFFF);
439 //////////////////////////// Closure ////////////////////////////////////////
441 Closure::Closure(const binary_format::Closure
* closure
)
442 : _binaryData(closure
)
444 assert(closure
->magic
== binary_format::Closure::magicV1
);
447 size_t Closure::size() const
449 return _binaryData
->stringPoolOffset
+ _binaryData
->stringPoolSize
;
452 const ImageGroup
Closure::group() const
454 return ImageGroup(&_binaryData
->group
);
457 void Closure::forEachEnvVar(void (^handler
)(const char* keyEqualValue
, bool& stop
)) const
459 const uint32_t* envVarStringOffsets
= (uint32_t*)((uint8_t*)_binaryData
+ _binaryData
->dyldEnvVarsOffset
);
460 const char* stringPool
= (char*)_binaryData
+ _binaryData
->stringPoolOffset
;
462 for (uint32_t i
=0; i
< _binaryData
->dyldEnvVarsCount
; ++i
) {
463 handler(&stringPool
[envVarStringOffsets
[i
]], stop
);
469 void Closure::forEachMustBeMissingFile(void (^handler
)(const char* path
, bool& stop
)) const
471 const uint16_t* offsets
= (uint16_t*)((uint8_t*)_binaryData
+ _binaryData
->missingFileComponentsOffset
);
474 const char* stringPool
= (char*)_binaryData
+ _binaryData
->stringPoolOffset
;
479 while ( *offsets
!= 0 ) {
480 const char* component
= &stringPool
[*offsets
++];
481 strlcat(path
, "/", PATH_MAX
);
482 strlcat(path
, component
, PATH_MAX
);
485 ++offsets
; // move to next path
486 if ( *offsets
== 0 ) // if no next path, then end of list of strings
491 const uuid_t
* Closure::dyldCacheUUID() const
493 return &(_binaryData
->dyldCacheUUID
);
497 const uint8_t* Closure::cdHash() const
499 return _binaryData
->mainExecutableCdHash
;
503 uint32_t Closure::initialImageCount() const
505 return _binaryData
->initialImageCount
;
509 uint32_t Closure::mainExecutableImageIndex() const
511 return _binaryData
->mainExecutableIndexInGroup
;
515 uint32_t Closure::mainExecutableEntryOffset() const
517 return _binaryData
->mainExecutableEntryOffset
;
520 bool Closure::mainExecutableUsesCRT() const
522 return _binaryData
->usesCRT
;
525 bool Closure::isRestricted() const
527 return _binaryData
->isRestricted
;
530 bool Closure::usesLibraryValidation() const
532 return _binaryData
->usesLibraryValidation
;
535 uint32_t Closure::libdyldVectorOffset() const
537 return _binaryData
->libdyldVectorOffset
;
540 const BinaryImageData
* Closure::libSystem(const ImageGroupList
& groups
)
542 return Image::resolveImageRef(groups
, _binaryData
->libSystemRef
).binaryData();
545 const BinaryImageData
* Closure::libDyld(const ImageGroupList
& groups
)
547 return Image::resolveImageRef(groups
, _binaryData
->libDyldRef
).binaryData();
551 //////////////////////////// ImageGroup ////////////////////////////////////////
553 size_t ImageGroup::size() const
555 return (_binaryData
->stringsPoolOffset
+ _binaryData
->stringsPoolSize
+ 3) & (-4);
558 uint32_t ImageGroup::groupNum() const
560 return _binaryData
->groupNum
;
563 bool ImageGroup::dylibsExpectedOnDisk() const
565 return _binaryData
->dylibsExpectedOnDisk
;
568 uint32_t ImageGroup::imageCount() const
570 return _binaryData
->imagesPoolCount
;
573 const binary_format::Image
* ImageGroup::imageBinary(uint32_t index
) const
575 assert(index
<_binaryData
->imagesPoolCount
);
576 return (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
+ (index
* _binaryData
->imagesEntrySize
));
580 const Image
ImageGroup::image(uint32_t index
) const
582 return Image(imageBinary(index
));
585 uint32_t ImageGroup::indexInGroup(const binary_format::Image
* img
) const
587 long delta
= (char*)img
- ((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
588 uint32_t index
= (uint32_t)(delta
/_binaryData
->imagesEntrySize
);
589 assert(image(index
)._binaryData
== img
);
593 const binary_format::Image
* ImageGroup::findImageByPath(const char* path
, uint32_t& foundIndex
) const
595 // check path of each image in group
596 uint32_t targetHash
= hashFunction(path
);
597 const uint8_t* p
= (uint8_t*)_binaryData
+ _binaryData
->imagesPoolOffset
;
598 for (uint32_t i
=0; i
< _binaryData
->imagesPoolCount
; ++i
) {
599 const binary_format::Image
* binImage
= (binary_format::Image
*)p
;
600 if ( binImage
->pathHash
== targetHash
) {
602 if ( !img
.isInvalid() && (strcmp(img
.path(), path
) == 0) ) {
607 p
+= _binaryData
->imagesEntrySize
;
610 const binary_format::AliasEntry
* aliasEntries
= (binary_format::AliasEntry
*)((uint8_t*)_binaryData
+ _binaryData
->imageAliasOffset
);
611 for (uint32_t i
=0; i
< _binaryData
->imageAliasCount
; ++i
) {
612 const char* aliasPath
= stringFromPool(aliasEntries
[i
].aliasOffsetInStringPool
);
613 if ( aliasEntries
[i
].aliasHash
== targetHash
) {
614 if ( strcmp(aliasPath
, path
) == 0 ) {
615 Image img
= image(aliasEntries
[i
].imageIndexInGroup
);
616 if ( !img
.isInvalid() ) {
617 foundIndex
= aliasEntries
[i
].imageIndexInGroup
;
618 return img
.binaryData();
626 const binary_format::Image
* ImageGroup::findImageByCacheOffset(size_t cacheVmOffset
, uint32_t& mhCacheOffset
, uint8_t& foundPermissions
) const
628 assert(groupNum() == 0);
630 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)segmentPool(0);
631 const binary_format::Image
* image
= (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
632 // most address lookups are in TEXT, so just search first segment in first pass
633 for (uint32_t imageIndex
=0; imageIndex
< _binaryData
->imagesPoolCount
; ++imageIndex
) {
634 const binary_format::DyldCacheSegment
* segInfo
= &cacheSegs
[image
->segmentsArrayStartIndex
];
635 if ( (cacheVmOffset
>= segInfo
->cacheOffset
) && (cacheVmOffset
< (segInfo
->cacheOffset
+ segInfo
->size
)) ) {
636 mhCacheOffset
= segInfo
->cacheOffset
;
637 foundPermissions
= segInfo
->permissions
;
640 image
= (binary_format::Image
*)((char*)image
+ _binaryData
->imagesEntrySize
);
642 // second pass, skip TEXT segment
643 image
= (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
644 for (uint32_t imageIndex
=0; imageIndex
< _binaryData
->imagesPoolCount
; ++imageIndex
) {
645 for (uint32_t segIndex
=1; segIndex
< image
->segmentsArrayCount
; ++segIndex
) {
646 const binary_format::DyldCacheSegment
* segInfo
= &cacheSegs
[image
->segmentsArrayStartIndex
+segIndex
];
647 if ( (cacheVmOffset
>= segInfo
->cacheOffset
) && (cacheVmOffset
< (segInfo
->cacheOffset
+ segInfo
->size
)) ) {
648 mhCacheOffset
= cacheSegs
[image
->segmentsArrayStartIndex
].cacheOffset
;
649 foundPermissions
= segInfo
->permissions
;
653 image
= (binary_format::Image
*)((char*)image
+ _binaryData
->imagesEntrySize
);
658 void ImageGroup::forEachAliasOf(uint32_t imageIndex
, void (^handler
)(const char* aliasPath
, uint32_t aliasPathHash
, bool& stop
)) const
661 const binary_format::AliasEntry
* aliasEntries
= (binary_format::AliasEntry
*)((uint8_t*)_binaryData
+ _binaryData
->imageAliasOffset
);
662 for (uint32_t i
=0; i
< _binaryData
->imageAliasCount
; ++i
) {
663 if ( aliasEntries
[i
].imageIndexInGroup
== imageIndex
) {
664 const char* aliasPath
= stringFromPool(aliasEntries
[i
].aliasOffsetInStringPool
);
665 handler(aliasPath
, aliasEntries
[i
].aliasHash
, stop
);
672 const char* ImageGroup::stringPool() const
674 return (char*)_binaryData
+ _binaryData
->stringsPoolOffset
;
677 const char* ImageGroup::stringFromPool(uint32_t offset
) const
679 assert(offset
< _binaryData
->stringsPoolSize
);
680 return (char*)_binaryData
+ _binaryData
->stringsPoolOffset
+ offset
;
683 uint32_t ImageGroup::stringPoolSize() const
685 return _binaryData
->stringsPoolSize
;;
688 binary_format::ImageRef
ImageGroup::dependentPool(uint32_t index
) const
690 assert(index
< _binaryData
->dependentsPoolCount
);
691 const binary_format::ImageRef
* depArray
= (binary_format::ImageRef
*)((char*)_binaryData
+ _binaryData
->dependentsPoolOffset
);
692 return depArray
[index
];
695 const uint64_t* ImageGroup::segmentPool(uint32_t index
) const
697 assert(index
< _binaryData
->segmentsPoolCount
);
698 const uint64_t* segArray
= (uint64_t*)((char*)_binaryData
+ _binaryData
->segmentsPoolOffset
);
699 return &segArray
[index
];
703 const uint32_t* ImageGroup::initializerOffsetsPool() const
705 return (uint32_t*)((char*)_binaryData
+ _binaryData
->intializerOffsetPoolOffset
);
708 const uint32_t ImageGroup::initializerOffsetsCount() const
710 return _binaryData
->intializerOffsetPoolCount
;
713 const binary_format::ImageRef
* ImageGroup::intializerListPool() const
715 return (binary_format::ImageRef
*)((char*)_binaryData
+ _binaryData
->intializerListPoolOffset
);
718 const uint32_t ImageGroup::intializerListPoolCount() const
720 return _binaryData
->intializerListPoolCount
;
723 const binary_format::AllFixupsBySegment
* ImageGroup::fixUps(uint32_t offset
) const
725 return (binary_format::AllFixupsBySegment
*)((char*)_binaryData
+ _binaryData
->fixupsOffset
+ offset
);
728 const TargetSymbolValue
* ImageGroup::targetValuesArray() const
730 return (TargetSymbolValue
*)((char*)_binaryData
+ _binaryData
->targetsOffset
);
733 uint32_t ImageGroup::targetValuesCount() const
735 return _binaryData
->targetsPoolCount
;
739 const uint32_t* ImageGroup::dofOffsetsPool() const
741 return (uint32_t*)((char*)_binaryData
+ _binaryData
->dofOffsetPoolOffset
);
744 const uint32_t ImageGroup::dofOffsetsCount() const
746 return _binaryData
->dofOffsetPoolCount
;
750 const uint32_t* ImageGroup::indirectGroupNumsPool() const
752 return (uint32_t*)((char*)_binaryData
+ _binaryData
->indirectGroupNumPoolOffset
);
755 const uint32_t ImageGroup::indirectGroupNumsCount() const
757 return _binaryData
->indirectGroupNumPoolCount
;
760 uint32_t ImageGroup::indirectGroupNum(uint32_t offset
) const
762 assert(offset
< _binaryData
->indirectGroupNumPoolCount
);
763 return indirectGroupNumsPool()[offset
];
766 uint32_t ImageGroup::hashFunction(const char* str
)
769 for (const char* s
=str
; *s
!= '\0'; ++s
)
775 void ImageGroup::forEachDyldCachePatch(uint32_t patchTargetIndex
, uint32_t cacheDataVmOffset
, void (^handler
)(uint32_t targetCacheOffset
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool& stop
)) const
777 assert(_binaryData
->imagesEntrySize
== sizeof(binary_format::CachedImage
) && "only callable on group-0 in shared cache");
778 assert(patchTargetIndex
< _binaryData
->cachePatchTableCount
);
779 const binary_format::PatchTable
* patches
= (binary_format::PatchTable
*)((char*)_binaryData
+ _binaryData
->cachePatchTableOffset
);
780 uint32_t offsetsIndex
= patches
[patchTargetIndex
].offsetsStartIndex
;
781 uint32_t targetCacheOffset
= patches
[patchTargetIndex
].targetCacheOffset
;
782 const binary_format::PatchOffset
* patchLocationOffsets
= (binary_format::PatchOffset
*)((char*)_binaryData
+ _binaryData
->cachePatchOffsetsOffset
);
785 assert(offsetsIndex
< _binaryData
->cachePatchOffsetsCount
);
786 binary_format::PatchOffset entry
= patchLocationOffsets
[offsetsIndex
];
788 handler(targetCacheOffset
, cacheDataVmOffset
+entry
.dataRegionOffset
, entry
.hasAddend
, stop
);
794 void ImageGroup::forEachImageRefOverride(void (^handler
)(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool& stop
)) const
797 const binary_format::ImageRefOverride
* entries
= (binary_format::ImageRefOverride
*)((char*)_binaryData
+ _binaryData
->imageOverrideTableOffset
);
798 for (uint32_t i
=0; (i
< _binaryData
->imageOverrideTableCount
) && !stop
; ++i
) {
799 handler(entries
[i
].standardDylib
, entries
[i
].overrideDylib
, stop
);
803 void ImageGroup::forEachImageRefOverride(const ImageGroupList
& groupList
, void (^handler
)(Image standardDylib
, Image overrideDylib
, bool& stop
)) const
805 forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool& stop
) {
806 Image standardDylib
= Image::resolveImageRef(groupList
, standardDylibRef
, false);
807 Image overrideDylib
= Image::resolveImageRef(groupList
, overrideDylibRef
, false);
808 handler(standardDylib
, overrideDylib
, stop
);
815 void ImageGroup::forEachDyldCachePatchLocation(const void* dyldCacheLoadAddress
, uint32_t patchTargetIndex
, void (^handler
)(uintptr_t* locationToPatch
, uintptr_t addend
, bool&)) const
817 DyldCacheParser
cacheParser((DyldSharedCache
*)dyldCacheLoadAddress
, false);
818 uint32_t cacheDataVmOffset
= (uint32_t)cacheParser
.dataRegionRuntimeVmOffset();
819 forEachDyldCachePatch(patchTargetIndex
, cacheDataVmOffset
, ^(uint32_t targetCacheOffset
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool& stop
) {
820 uintptr_t addend
= 0;
821 uintptr_t* fixupLoc
= (uintptr_t*)((char*)dyldCacheLoadAddress
+ usePointersCacheOffset
);
823 uintptr_t currentValue
= *fixupLoc
;
824 uintptr_t expectedValue
= (uintptr_t)dyldCacheLoadAddress
+ targetCacheOffset
;
825 uintptr_t delta
= currentValue
- expectedValue
;
829 handler(fixupLoc
, addend
, stop
);
833 void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler
)(uint32_t patchTableIndex
, const BinaryImageData
* image
, uint32_t imageOffset
, bool& stop
)) const
836 const binary_format::DyldCacheOverride
* entries
= (binary_format::DyldCacheOverride
*)((char*)_binaryData
+ _binaryData
->symbolOverrideTableOffset
);
837 for (uint32_t i
=0; (i
< _binaryData
->symbolOverrideTableCount
) && !stop
; ++i
) {
838 handler(entries
[i
].patchTableIndex
, imageBinary(entries
[i
].imageIndex
), entries
[i
].imageOffset
, stop
);
844 void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler
)(uint32_t patchTableIndex
, uint32_t imageIndexInClosure
, uint32_t imageOffset
, bool& stop
)) const
847 const binary_format::DyldCacheOverride
* entries
= (binary_format::DyldCacheOverride
*)((char*)_binaryData
+ _binaryData
->symbolOverrideTableOffset
);
848 for (uint32_t i
=0; (i
< _binaryData
->symbolOverrideTableCount
) && !stop
; ++i
) {
849 handler(entries
[i
].patchTableIndex
, entries
[i
].imageIndex
, entries
[i
].imageOffset
, stop
);
853 void ImageGroup::forEachDyldCachePatchLocation(const DyldCacheParser
& cacheParser
, void (^handler
)(uint32_t targetCacheOffset
, const std::vector
<uint32_t>& usesPointersCacheOffsets
, bool& stop
)) const
855 uint32_t cacheDataVmOffset
= (uint32_t)cacheParser
.dataRegionRuntimeVmOffset();
856 __block
std::vector
<uint32_t> pointerCacheOffsets
;
858 for (uint32_t patchIndex
=0; patchIndex
< _binaryData
->cachePatchTableCount
; ++patchIndex
) {
859 pointerCacheOffsets
.clear();
860 __block
uint32_t targetCacheOffset
= 0;
861 forEachDyldCachePatch(patchIndex
, cacheDataVmOffset
, ^(uint32_t targetCacheOff
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool&) {
862 targetCacheOffset
= targetCacheOff
;
863 pointerCacheOffsets
.push_back(usePointersCacheOffset
);
865 std::sort(pointerCacheOffsets
.begin(), pointerCacheOffsets
.end(), [&](uint32_t a
, uint32_t b
) { return a
< b
; });
866 handler(targetCacheOffset
, pointerCacheOffsets
, stop
);
872 bool ImageGroup::hasPatchTableIndex(uint32_t targetCacheOffset
, uint32_t& foundIndex
) const
874 const binary_format::PatchTable
* patches
= (binary_format::PatchTable
*)((char*)_binaryData
+ _binaryData
->cachePatchTableOffset
);
875 for (uint32_t i
=0; i
< _binaryData
->cachePatchTableCount
; ++i
) {
876 if ( patches
[i
].targetCacheOffset
== targetCacheOffset
) {
887 //////////////////////////// Image ////////////////////////////////////////
891 const ImageGroup
Image::group() const
893 return ImageGroup((binary_format::ImageGroup
*)(((char*)_binaryData
) + (_binaryData
->groupOffset
)));
896 uint32_t Image::maxLoadCount() const
898 return _binaryData
->maxLoadCount
;
901 const char* Image::path() const
903 return group().stringFromPool(_binaryData
->pathPoolOffset
);
906 uint32_t Image::pathHash() const
908 return _binaryData
->pathHash
;
911 const char* Image::leafName() const
913 const char* path
= group().stringFromPool(_binaryData
->pathPoolOffset
);
914 const char* lastSlash
= strrchr(path
, '/');
915 if ( lastSlash
!= nullptr )
921 const uuid_t
* Image::uuid() const
923 return &(_binaryData
->uuid
);
926 bool Image::isInvalid() const
928 return (_binaryData
== nullptr) || _binaryData
->isInvalid
;
931 bool Image::hasObjC() const
933 return _binaryData
->hasObjC
;
936 bool Image::isBundle() const
938 return _binaryData
->isBundle
;
941 bool Image::hasWeakDefs() const
943 return _binaryData
->hasWeakDefs
;
946 bool Image::mayHavePlusLoads() const
948 return _binaryData
->mayHavePlusLoads
;
951 bool Image::hasTextRelocs() const
953 return _binaryData
->hasTextRelocs
;
956 bool Image::neverUnload() const
958 return _binaryData
->neverUnload
;
961 bool Image::cwdMustBeThisDir() const
963 return _binaryData
->cwdSameAsThis
;
966 bool Image::isPlatformBinary() const
968 return _binaryData
->isPlatformBinary
;
971 bool Image::overridableDylib() const
973 return _binaryData
->overridableDylib
;
976 void Image::forEachDependentImage(const ImageGroupList
& groups
, void (^handler
)(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
)) const
978 assert(!_binaryData
->isInvalid
);
979 binary_format::ImageRef missingRef
= binary_format::ImageRef::weakImportMissing();
980 __block
bool stop
= false;
981 for (uint32_t depIndex
=0; (depIndex
< _binaryData
->dependentsArrayCount
) && !stop
; ++depIndex
) {
982 binary_format::ImageRef ref
= group().dependentPool(_binaryData
->dependentsArrayStartIndex
+ depIndex
);
983 if ( ref
!= missingRef
) {
984 Image
depImage(resolveImageRef(groups
, ref
));
985 handler(depIndex
, depImage
, (LinkKind
)ref
.kind(), stop
);
992 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, std::unordered_set
<const BinaryImageData
*>& allDependents
) const
996 __block
bool result
= true;
997 forEachDependentImage(groups
, ^(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
) {
998 if ( allDependents
.count(depImage
.binaryData()) == 0 ) {
999 allDependents
.insert(depImage
.binaryData());
1000 if ( !depImage
.recurseAllDependentImages(groups
, allDependents
) ) {
// Breadth-first variant of the dependents walk using a SlowLoadSet: first adds
// all direct dependents (calling `handler` on each new one), then recurses into
// each newly added dependent. `stopped` is an in/out flag that ends the walk.
// Presumably returns false when SlowLoadSet::add() fails (set full) or a
// recursive call fails — those branch bodies are elided in this extraction.
// NOTE(review): this chunk is a token-per-line extraction with brace/short
// lines elided; the tokens below are kept verbatim, only comments are added.
1010 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, SlowLoadSet
& allDependents
, bool& stopped
,
1011 void (^handler
)(const dyld3::launch_cache::binary_format::Image
* aBinImage
, bool& stop
)) const
// result starts optimistic; cleared on failure (in elided branches).
1013 __block
bool result
= true;
1014 // breadth first, add all directly dependent images
// VLA scratch buffer: one slot per direct dependent, nulled out up front.
1015 const dyld3::launch_cache::binary_format::Image
* needToProcessArray
[_binaryData
->dependentsArrayCount
];
1016 memset((void*)needToProcessArray
, 0, _binaryData
->dependentsArrayCount
* sizeof(*needToProcessArray
));
// const alias so the block below captures a plain pointer, not the VLA itself.
1017 const dyld3::launch_cache::binary_format::Image
** const needToProcess
= needToProcessArray
;
1018 forEachDependentImage(groups
, ^(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
) {
1019 const dyld3::launch_cache::binary_format::Image
* depImageData
= depImage
.binaryData();
// already-seen dependents are not queued for recursion
1020 if ( allDependents
.contains(depImageData
) ) {
1021 needToProcess
[depIndex
] = nullptr;
// new dependent: queue it and record it in the load set
1024 needToProcess
[depIndex
] = depImageData
;
// add() failing (elided branch) presumably sets result=false and stops — confirm.
1025 if ( !allDependents
.add(depImageData
) ) {
1031 handler(depImageData
, stop
);
1038 // recurse on each dependent image
1039 for (int i
=0; !stopped
&& (i
< _binaryData
->dependentsArrayCount
); ++i
) {
// nullptr slots were already-visited dependents
1040 if ( const dyld3::launch_cache::binary_format::Image
* depImageData
= needToProcess
[i
] ) {
1041 Image
depImage(depImageData
);
1042 if ( !depImage
.recurseAllDependentImages(groups
, allDependents
, stopped
, handler
) ) {
1051 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, SlowLoadSet
& allDependents
,
1052 void (^handler
)(const dyld3::launch_cache::binary_format::Image
* aBinImage
, bool& stop
)) const
1054 bool stopped
= false;
1055 return recurseAllDependentImages(groups
, allDependents
, stopped
, handler
);
1058 void Image::forEachDiskSegment(void (^handler
)(uint32_t segIndex
, uint32_t fileOffset
, uint32_t fileSize
, int64_t vmOffset
, uint64_t vmSize
, uint8_t permissions
, bool& stop
)) const
1060 assert(isDiskImage());
1061 const uint32_t pageSize
= (_binaryData
->has16KBpages
? 0x4000 : 0x1000);
1062 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1063 const binary_format::DiskSegment
* diskSegs
= (binary_format::DiskSegment
*)rawSegs
;
1064 uint32_t segIndex
= 0;
1065 uint32_t fileOffset
= 0;
1066 int64_t vmOffset
= 0;
1067 // decrement vmOffset by all segments before TEXT (e.g. PAGEZERO)
1068 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1069 const binary_format::DiskSegment
* seg
= &diskSegs
[i
];
1070 if ( seg
->filePageCount
!= 0 ) {
1073 vmOffset
-= (uint64_t)seg
->vmPageCount
* pageSize
;
1075 // walk each segment and call handler
1076 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1077 const binary_format::DiskSegment
* seg
= &diskSegs
[i
];
1078 uint64_t vmSize
= (uint64_t)seg
->vmPageCount
* pageSize
;
1079 uint32_t fileSize
= seg
->filePageCount
* pageSize
;
1080 if ( !seg
->paddingNotSeg
) {
1082 handler(segIndex
, ( fileSize
== 0) ? 0 : fileOffset
, fileSize
, vmOffset
, vmSize
, seg
->permissions
, stop
);
1088 fileOffset
+= fileSize
;
1092 void Image::forEachCacheSegment(void (^handler
)(uint32_t segIndex
, uint64_t vmOffset
, uint64_t vmSize
, uint8_t permissions
, bool& stop
)) const
1094 assert(!isDiskImage());
1095 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1096 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)rawSegs
;
1098 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1099 uint64_t vmOffset
= cacheSegs
[i
].cacheOffset
- cacheSegs
[0].cacheOffset
;
1100 uint64_t vmSize
= cacheSegs
[i
].size
;
1101 uint8_t permissions
= cacheSegs
[i
].permissions
;
1102 handler(i
, vmOffset
, vmSize
, permissions
, stop
);
1108 bool Image::segmentHasFixups(uint32_t segIndex
) const
1110 return (segmentFixups(segIndex
) != nullptr);
1113 bool Image::containsAddress(const void* addr
, const void* imageLoadAddress
, uint8_t* permissions
) const
1115 if ( addr
< imageLoadAddress
)
1118 __block
bool found
= false;
1119 uint64_t offsetInImage
= (char*)addr
- (char*)imageLoadAddress
;
1120 if ( _binaryData
->isDiskImage
) {
1121 forEachDiskSegment(^(uint32_t segIterIndex
, uint32_t fileOffset
, uint32_t fileSize
, int64_t vmOffset
, uint64_t vmSize
, uint8_t segPerms
, bool& stop
) {
1122 if ( (offsetInImage
>= vmOffset
) && (offsetInImage
< vmOffset
+vmSize
) ) {
1123 if ( permissions
!= nullptr )
1124 *permissions
= segPerms
;
1131 forEachCacheSegment(^(uint32_t segIterIndex
, uint64_t vmOffset
, uint64_t vmSize
, uint8_t segPerms
, bool& stop
) {
1132 if ( (offsetInImage
>= vmOffset
) && (offsetInImage
< vmOffset
+vmSize
) ) {
1133 if ( permissions
!= nullptr )
1134 *permissions
= segPerms
;
1143 void Image::forEachInitializer(const void* imageLoadAddress
, void (^handler
)(const void* initializer
)) const
1145 const uint32_t initCount
= _binaryData
->initOffsetsArrayCount
;
1146 const uint32_t startIndex
= _binaryData
->initOffsetsArrayStartIndex
;
1147 const uint32_t* initOffsets
= group().initializerOffsetsPool();
1148 assert(startIndex
+ initCount
<= group().initializerOffsetsCount());
1149 for (uint32_t i
=0; i
< initCount
; ++i
) {
1150 uint32_t anOffset
= initOffsets
[startIndex
+i
];
1151 const void* func
= (char*)imageLoadAddress
+ anOffset
;
1156 void Image::forEachInitBefore(void (^handler
)(binary_format::ImageRef imageToInit
)) const
1158 const uint32_t initCount
= _binaryData
->initBeforeArrayCount
;
1159 const uint32_t startIndex
= _binaryData
->initBeforeArrayStartIndex
;
1160 const uint32_t endIndex
= group().intializerListPoolCount();
1161 const binary_format::ImageRef
* initRefs
= group().intializerListPool();
1162 assert(startIndex
+ initCount
<= endIndex
);
1163 for (uint32_t i
=0; i
< initCount
; ++i
) {
1164 binary_format::ImageRef ref
= initRefs
[startIndex
+i
];
1169 void Image::forEachDOF(const void* imageLoadAddress
, void (^handler
)(const void* section
)) const
1171 const uint32_t dofCount
= _binaryData
->dofOffsetsArrayCount
;
1172 const uint32_t startIndex
= _binaryData
->dofOffsetsArrayStartIndex
;
1173 const uint32_t* dofOffsets
= group().dofOffsetsPool();
1174 assert(startIndex
+ dofCount
<= group().dofOffsetsCount());
1175 for (uint32_t i
=0; i
< dofCount
; ++i
) {
1176 uint32_t anOffset
= dofOffsets
[startIndex
+i
];
1177 const void* section
= (char*)imageLoadAddress
+ anOffset
;
1182 Image
Image::resolveImageRef(const ImageGroupList
& groups
, binary_format::ImageRef ref
, bool applyOverrides
)
1184 // first look if ref image is overridden in closure
1185 __block
binary_format::ImageRef targetRef
= ref
;
1186 if ( applyOverrides
) {
1187 binary_format::ImageRef refToMatch
= ref
;
1188 refToMatch
.clearKind();
1189 for (int i
=0; i
< groups
.count(); ++i
) {
1190 ImageGroup
aGroup(groups
[i
]);
1191 if ( aGroup
.groupNum() >= 2 ) {
1192 aGroup
.forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool &stop
) {
1193 if ( refToMatch
== standardDylibRef
) {
1194 targetRef
= overrideDylibRef
;
1201 // create Image object from targetRef
1202 for (int i
=0; i
< groups
.count(); ++i
) {
1203 ImageGroup
aGroup(groups
[i
]);
1204 if ( aGroup
.groupNum() == targetRef
.groupNum() ) {
1205 return aGroup
.image(targetRef
.indexInGroup());
1208 //assert(0 && "invalid ImageRef");
1209 return Image(nullptr);
1212 void Image::forEachInitBefore(const ImageGroupList
& groups
, void (^handler
)(Image imageToInit
)) const
1214 forEachInitBefore(^(binary_format::ImageRef ref
) {
1215 handler(resolveImageRef(groups
, ref
));
1219 bool Image::validateUsingModTimeAndInode() const
1221 return !group().binaryData()->imageFileInfoIsCdHash
;
1224 bool Image::validateUsingCdHash() const
1226 // don't have cdHash info if union has modtime info in it
1227 if ( !group().binaryData()->imageFileInfoIsCdHash
)
1230 // don't have codesign blob in dyld cache
1231 if ( !_binaryData
->isDiskImage
)
1234 // return true if image is code signed and cdHash16 is non-zero
1235 const binary_format::DiskImage
* diskImage
= asDiskImage();
1236 if ( diskImage
->codeSignFileOffset
== 0 )
1241 return (memcmp(cdHash16(), zeros
, 16) != 0);
1244 const uint8_t* Image::cdHash16() const
1246 return _binaryData
->fileInfo
.cdHash16
.bytes
;
1249 uint64_t Image::fileModTime() const
1251 return _binaryData
->fileInfo
.statInfo
.mtime
;
1254 uint64_t Image::fileINode() const
1256 return _binaryData
->fileInfo
.statInfo
.inode
;
1260 bool Image::isDiskImage() const
1262 return _binaryData
->isDiskImage
;
1265 const binary_format::DiskImage
* Image::asDiskImage() const
1267 assert(_binaryData
->isDiskImage
);
1268 return (binary_format::DiskImage
*)_binaryData
;
1271 const binary_format::CachedImage
* Image::asCachedImage() const
1273 assert(!_binaryData
->isDiskImage
);
1274 return (binary_format::CachedImage
*)_binaryData
;
1277 uint32_t Image::pageSize() const
1279 return (_binaryData
->has16KBpages
? 0x4000 : 0x1000);
1282 uint32_t Image::cacheOffset() const
1284 assert(!_binaryData
->isDiskImage
);
1285 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1286 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)rawSegs
;
1287 return cacheSegs
[0].cacheOffset
;
1290 uint32_t Image::patchStartIndex() const
1292 return asCachedImage()->patchStartIndex
;
1295 uint32_t Image::patchCount() const
1297 return asCachedImage()->patchCount
;
1300 uint64_t Image::sliceOffsetInFile() const
1302 return asDiskImage()->sliceOffsetIn4K
* 4096;
1305 bool Image::hasCodeSignature(uint32_t& fileOffset
, uint32_t& size
) const
1307 const binary_format::DiskImage
* diskImage
= asDiskImage();
1308 if ( diskImage
->codeSignFileOffset
!= 0 ) {
1309 fileOffset
= diskImage
->codeSignFileOffset
;
1310 size
= diskImage
->codeSignFileSize
;
1316 bool Image::isFairPlayEncrypted(uint32_t& textOffset
, uint32_t& size
) const
1318 const binary_format::DiskImage
* diskImage
= asDiskImage();
1319 if ( diskImage
->fairPlayTextPageCount
!= 0 ) {
1320 textOffset
= diskImage
->fairPlayTextStartPage
* pageSize();
1321 size
= diskImage
->fairPlayTextPageCount
* pageSize();
1327 uint64_t Image::vmSizeToMap() const
1329 return asDiskImage()->totalVmPages
* pageSize();
// Interpreter for one page's compressed fixup opcode stream. Each byte's high
// nibble selects the action (majorOp); the low nibble (low4) is a small inline
// operand. `offset` and `ordinal` are cursor state shared across calls; the
// `repeat` opcode re-enters this function on a copied sub-pattern.
// NOTE(review): this chunk is a token-per-line extraction; short lines
// (per-opcode `offset`/`p` advances, `break;`, braces, else-branches) are
// elided. Tokens below are kept verbatim; only comments are added.
1332 void Image::forEachFixup(const uint8_t* pageFixups
, const void* segContent
, uint32_t& offset
, uint32_t& ordinal
,
1333 void (^handler
)(uint32_t pageOffset
, FixupKind kind
, uint32_t ordinal
, bool& stop
))
// walk opcodes until a 0 byte (done) or the handler asks to stop
1336 for (const uint8_t* p
= pageFixups
; (*p
!= 0) && !stop
;) {
// full byte = exact opcode; high nibble = opcode family; low nibble = inline operand
1337 binary_format::FixUpOpcode fullOp
= (binary_format::FixUpOpcode
)(*p
);
1338 binary_format::FixUpOpcode majorOp
= (binary_format::FixUpOpcode
)(*p
& 0xF0);
1339 uint8_t low4
= (*p
& 0x0F);
1340 switch ( majorOp
) {
1341 case binary_format::FixUpOpcode::done
:
// the "apply" family: dispatch on the full opcode to report one fixup.
// The per-kind advance of `offset` (by the fixup's width) is elided here.
1343 case binary_format::FixUpOpcode::rebase32
: // apply
1345 case binary_format::FixUpOpcode::bind64
:
1346 handler(offset
, FixupKind::bind64
, ordinal
, stop
);
1350 case binary_format::FixUpOpcode::bind32
:
1351 handler(offset
, FixupKind::bind32
, ordinal
, stop
);
// rebases carry no symbol, so ordinal 0 is passed
1355 case binary_format::FixUpOpcode::rebase64
:
1356 handler(offset
, FixupKind::rebase64
, 0, stop
);
1360 case binary_format::FixUpOpcode::rebase32
:
1361 handler(offset
, FixupKind::rebase32
, 0, stop
);
1365 case binary_format::FixUpOpcode::rebaseText32
:
1366 handler(offset
, FixupKind::rebaseText32
, 0, stop
);
1370 case binary_format::FixUpOpcode::bindText32
:
1371 handler(offset
, FixupKind::bindText32
, ordinal
, stop
);
1375 case binary_format::FixUpOpcode::bindTextRel32
:
1376 handler(offset
, FixupKind::bindTextRel32
, ordinal
, stop
);
1380 case binary_format::FixUpOpcode::bindImportJmp32
:
1381 handler(offset
, FixupKind::bindImportJmp32
, ordinal
, stop
);
1385 //case binary_format::FixUpOpcode::fixupChain64:
1386 // assert(0 && "rebase/bind chain support not implemented yet");
1389 assert(0 && "bad opcode");
// cursor opcodes: low4 carries a small inline value; a low4 of 0 presumably
// switches to the uleb128 form shown below (elided else-branches) — confirm.
1393 case binary_format::FixUpOpcode::incPageOffset
:
// uleb operand is in 4-byte units, hence the *4
1396 offset
+= read_uleb128(p
, p
+8)*4;
1403 case binary_format::FixUpOpcode::setPageOffset
:
1406 offset
= (uint32_t)read_uleb128(p
, p
+8);
1413 case binary_format::FixUpOpcode::incOrdinal
:
1416 ordinal
+= read_uleb128(p
, p
+8);
1423 case binary_format::FixUpOpcode::setOrdinal
:
1426 ordinal
= (uint32_t)read_uleb128(p
, p
+8);
// repeat: copy the next low4 opcode bytes into a scratch pattern, terminate it
// with `done`, then re-run the interpreter on it `count` times
1433 case binary_format::FixUpOpcode::repeat
: {
1435 uint32_t count
= (uint32_t)read_uleb128(p
, p
+8);
1436 uint8_t pattern
[32];
1437 for (int j
=0; j
< low4
; ++j
) {
1440 pattern
[low4
] = (uint8_t)binary_format::FixUpOpcode::done
;
1441 for (int j
=0; j
< count
; ++j
) {
1442 forEachFixup(&pattern
[0], segContent
, offset
, ordinal
, handler
);
1449 assert(0 && "bad opcode");
1455 const binary_format::SegmentFixupsByPage
* Image::segmentFixups(uint32_t segIndex
) const
1457 const binary_format::DiskImage
* diskImage
= asDiskImage();
1458 //const BinaryImageGroupData* g = group().binaryData();
1459 uint32_t segCountWithFixups
= diskImage
->fixupsPoolSegCount
;
1460 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d), group=%p, segCountWithFixup=%d\n", _binaryData, segIndex, g, segCountWithFixups);
1461 const binary_format::AllFixupsBySegment
* allFixups
= group().fixUps(diskImage
->fixupsPoolOffset
);
1462 for (uint32_t i
=0; i
< segCountWithFixups
; ++i
) {
1463 if ( allFixups
[i
].segIndex
== segIndex
) {
1464 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) allFixups=%p, allFixups[%d].segIndex=%d, allFixups[%d].offset=%d\n", _binaryData, segIndex, allFixups, i, allFixups[i].segIndex, i, allFixups[i].offset);
1465 return (binary_format::SegmentFixupsByPage
*)((char*)allFixups
+ allFixups
[i
].offset
);
1468 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) => nullptr\n", _binaryData, segIndex);
1472 void Image::forEachFixup(uint32_t segIndex
, MemoryRange segContent
, void (^handler
)(uint64_t segOffset
, FixupKind
, TargetSymbolValue
, bool& stop
)) const
1474 const binary_format::SegmentFixupsByPage
* segFixups
= segmentFixups(segIndex
);
1475 if ( segFixups
== nullptr )
1478 assert(segFixups
->pageCount
*segFixups
->pageSize
<= segContent
.size
);
1480 const uint32_t ordinalsIndexInGroupPool
= asDiskImage()->targetsArrayStartIndex
;
1481 const uint32_t maxOrdinal
= asDiskImage()->targetsArrayCount
;
1482 const TargetSymbolValue
* groupArray
= group().targetValuesArray();
1483 assert(ordinalsIndexInGroupPool
< group().targetValuesCount());
1484 const TargetSymbolValue
* targetOrdinalArray
= &groupArray
[ordinalsIndexInGroupPool
];
1486 for (uint32_t pageIndex
=0; pageIndex
< segFixups
->pageCount
; ++pageIndex
) {
1487 const uint8_t* opcodes
= (uint8_t*)(segFixups
) + segFixups
->pageInfoOffsets
[pageIndex
];
1488 uint64_t pageStartOffet
= pageIndex
* segFixups
->pageSize
;
1489 uint32_t curOffset
= 0;
1490 uint32_t curOrdinal
= 0;
1491 forEachFixup(opcodes
, segContent
.address
, curOffset
, curOrdinal
, ^(uint32_t pageOffset
, FixupKind kind
, uint32_t targetOrdinal
, bool& stop
) {
1492 assert(targetOrdinal
< maxOrdinal
);
1493 handler(pageStartOffet
+ pageOffset
, kind
, targetOrdinalArray
[targetOrdinal
], stop
);
1499 } // namespace launch_cache
1500 } // namespace dyld3