2 * Copyright (c) 2017 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
27 #include <uuid/uuid.h>
31 #include "LaunchCacheFormat.h"
32 #include "LaunchCache.h"
33 #include "MachOParser.h"
34 #include "DyldCacheParser.h"
37 extern void log(const char* format
, ...) __attribute__((format(printf
, 1, 2)));
41 namespace launch_cache
{
// Decodes one ULEB128-encoded unsigned integer starting at 'p', advancing 'p'
// past the consumed bytes.  'end' bounds the readable buffer.
// Returns the decoded value truncated to uintptr_t.
static uintptr_t read_uleb128(const uint8_t*& p, const uint8_t* end)
{
    uint64_t result = 0;
    int      bit    = 0;
    do {
        if ( p == end ) {
            // BUG FIX: assert("string literal") is a non-null pointer and is
            // always true, so it could never fire; assert(0 && "...") aborts.
            assert(0 && "malformed uleb128");
            break;
        }
        uint64_t slice = *p & 0x7f;
        if ( bit > 63 ) {
            assert(0 && "uleb128 too big for uint64");
            break;
        }
        else {
            result |= (slice << bit);
            bit += 7;
        }
    } while (*p++ & 0x80);
    return (uintptr_t)result;
}
67 bool MemoryRange::contains(const MemoryRange
& other
) const
69 if ( this->address
> other
.address
)
71 const uint8_t* thisEnd
= (uint8_t*)address
+ size
;
72 const uint8_t* otherEnd
= (uint8_t*)other
.address
+ other
.size
;
73 return (thisEnd
>= otherEnd
);
76 bool MemoryRange::intersects(const MemoryRange
& other
) const
78 const uint8_t* thisEnd
= (uint8_t*)address
+ size
;
79 const uint8_t* otherEnd
= (uint8_t*)other
.address
+ other
.size
;
80 if ( otherEnd
< this->address
)
82 return ( other
.address
< thisEnd
);
86 //////////////////////////// SlowLoadSet ////////////////////////////////////////
88 bool SlowLoadSet::contains(const BinaryImageData
* image
)
90 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
97 bool SlowLoadSet::add(const BinaryImageData
* image
)
99 if ( _current
< _end
) {
106 void SlowLoadSet::forEach(void (^handler
)(const BinaryImageData
*))
108 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
113 void SlowLoadSet::forEach(void (^handler
)(const BinaryImageData
*, bool& stop
))
116 for (const BinaryImageData
** p
=_start
; p
< _current
; ++p
) {
124 long SlowLoadSet::count() const
126 return (_current
- _start
);
130 //////////////////////////// TargetSymbolValue ////////////////////////////////////////
135 uintptr_t TargetSymbolValue::resolveTarget(Diagnostics
& diag
, const ImageGroup
& inGroup
, LoadedImages
& images
) const
137 // this block is only used if findExportedSymbol() needs to trace re-exported dylibs to find a symbol
138 MachOParser::DependentFinder reExportFollower
= ^(uint32_t depIndex
, const char* depLoadPath
, void* extra
, const mach_header
** foundMH
, void** foundExtra
) {
140 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
141 Image
anImage(binImage
);
142 if ( strcmp(depLoadPath
, anImage
.path()) == 0 ) {
147 return (*foundMH
!= nullptr);
151 switch ( _data
.sharedCache
.kind
) {
153 case TargetSymbolValue::kindSharedCache
:
154 assert(_data
.sharedCache
.offsetIntoCache
!= 0);
155 return (uintptr_t)(images
.dyldCacheLoadAddressForImage() + _data
.sharedCache
.offsetIntoCache
);
157 case TargetSymbolValue::kindAbsolute
:
158 offset
= (uintptr_t)_data
.absolute
.value
;
159 // sign extend 42 bit value
160 if ( offset
& 0x2000000000000000ULL
)
161 offset
|= 0xC000000000000000ULL
;
164 case TargetSymbolValue::kindGroup
: {
165 uint32_t groupNum
= _data
.group
.isIndirectGroup
? inGroup
.indirectGroupNum(_data
.group
.groupNum
) : _data
.group
.groupNum
;
166 uintptr_t targetImageLoadAddress
= (uintptr_t)(images
.loadAddressFromGroupAndIndex(groupNum
, _data
.group
.indexInGroup
));
167 if ( targetImageLoadAddress
== 0 )
168 diag
.error("image for groupNum=%d, indexInGroup=%d not found", groupNum
, _data
.group
.indexInGroup
);
169 offset
= (uintptr_t)_data
.group
.offsetInImage
;
170 // sign extend 42 bit offset
171 if ( offset
& 0x0000020000000000ULL
)
172 offset
|= 0xFFFFFC0000000000ULL
;
173 return targetImageLoadAddress
+ offset
;
176 case TargetSymbolValue::kindDynamicGroup
: {
177 const char* imagePath
= inGroup
.stringFromPool(_data
.dynamicGroup
.imagePathOffset
);
178 const char* symbolName
= inGroup
.stringFromPool(_data
.dynamicGroup
.symbolNameOffset
);
179 __block
uintptr_t result
= 0;
180 __block
bool found
= false;
181 if ( strcmp(imagePath
, "@flat") == 0 ) {
182 // search all images in load order
183 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
184 Diagnostics findSymbolDiag
;
185 dyld3::MachOParser
parser(mh
);
186 dyld3::MachOParser::FoundSymbol foundInfo
;
187 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, ^(uint32_t, const char* depLoadPath
, void*, const mach_header
** foundMH
, void**) {
188 // <rdar://problem/31921090> need to follow re-exported symbols to support libc renamed and reexported symbols
190 images
.forEachImage(^(uint32_t innerIndex
, const BinaryImageData
* innerBinImage
, const mach_header
* innerMH
, bool& innerStop
) {
191 Image
innerImage(innerBinImage
);
192 if ( strcmp(depLoadPath
, innerImage
.path()) == 0 ) {
197 return (*foundMH
!= nullptr);
199 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
200 images
.setAsNeverUnload(idx
);
205 // <rdar://problem/31944092> bind unfound flat symbols to NULL to support lazy binding semantics
211 else if ( strcmp(imagePath
, "@main") == 0 ) {
212 // search only main executable
213 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
214 if ( mh
->filetype
== MH_EXECUTE
) {
215 Diagnostics findSymbolDiag
;
216 dyld3::MachOParser
parser(mh
);
217 dyld3::MachOParser::FoundSymbol foundInfo
;
218 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, nullptr) ) {
219 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
226 else if ( strcmp(imagePath
, "@weak_def") == 0 ) {
227 // search images with weak definitions in load order
228 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
229 Image
anImage(binImage
);
230 if ( anImage
.hasWeakDefs() ) {
231 Diagnostics findSymbolDiag
;
232 dyld3::MachOParser
parser(mh
);
233 dyld3::MachOParser::FoundSymbol foundInfo
;
234 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, nullptr) ) {
235 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
237 images
.setAsNeverUnload(idx
);
244 // search only image the matches supplied path
245 images
.forEachImage(^(uint32_t idx
, const BinaryImageData
* binImage
, const mach_header
* mh
, bool& stop
) {
246 Image
anImage(binImage
);
247 if ( strcmp(anImage
.path(), imagePath
) == 0 ) {
248 Diagnostics findSymbolDiag
;
249 dyld3::MachOParser
parser(mh
);
250 dyld3::MachOParser::FoundSymbol foundInfo
;
251 if ( parser
.findExportedSymbol(findSymbolDiag
, symbolName
, nullptr, foundInfo
, reExportFollower
) ) {
252 result
= ((uintptr_t)(foundInfo
.foundInDylib
) + (uintptr_t)foundInfo
.value
);
261 if ( _data
.dynamicGroup
.weakImport
)
263 diag
.error("dynamic symbol '%s' not found for %s", symbolName
, imagePath
);
267 assert(0 && "resolveTarget() not reachable");
272 TargetSymbolValue::TargetSymbolValue()
277 TargetSymbolValue
TargetSymbolValue::makeInvalid()
279 return TargetSymbolValue();
282 TargetSymbolValue
TargetSymbolValue::makeSharedCacheOffset(uint32_t offset
)
285 t
._data
.sharedCache
.kind
= kindSharedCache
;
286 t
._data
.sharedCache
.offsetIntoCache
= offset
;
290 TargetSymbolValue
TargetSymbolValue::makeAbsolute(uint64_t value
)
293 t
._data
.absolute
.kind
= kindAbsolute
;
294 t
._data
.absolute
.value
= value
;
298 TargetSymbolValue
TargetSymbolValue::makeGroupValue(uint32_t groupIndex
, uint32_t imageIndexInGroup
, uint64_t offsetInImage
, bool isIndirectGroupNum
)
300 assert(groupIndex
!= 0 || isIndirectGroupNum
);
301 assert(groupIndex
< 128);
302 assert(imageIndexInGroup
< 4096);
304 t
._data
.group
.kind
= kindGroup
;
305 t
._data
.group
.isIndirectGroup
= isIndirectGroupNum
;
306 t
._data
.group
.groupNum
= groupIndex
;
307 t
._data
.group
.indexInGroup
= imageIndexInGroup
;
308 t
._data
.group
.offsetInImage
= offsetInImage
;
312 TargetSymbolValue
TargetSymbolValue::makeDynamicGroupValue(uint32_t imagePathPoolOffset
, uint32_t imageSymbolPoolOffset
, bool weakImport
)
315 t
._data
.dynamicGroup
.kind
= kindDynamicGroup
;
316 t
._data
.dynamicGroup
.weakImport
= weakImport
;
317 t
._data
.dynamicGroup
.imagePathOffset
= imagePathPoolOffset
;
318 t
._data
.dynamicGroup
.symbolNameOffset
= imageSymbolPoolOffset
;
322 bool TargetSymbolValue::isSharedCacheTarget(uint64_t& offsetInCache
) const
324 if ( _data
.sharedCache
.kind
!= kindSharedCache
)
326 offsetInCache
= _data
.sharedCache
.offsetIntoCache
;
330 bool TargetSymbolValue::isGroupImageTarget(uint32_t& groupNum
, uint32_t& indexInGroup
, uint64_t& offsetInImage
) const
332 if ( _data
.sharedCache
.kind
!= kindGroup
)
334 // This is only used for interposing, so refuse to allow indirect for group 2
335 assert(!_data
.group
.isIndirectGroup
);
336 groupNum
= _data
.group
.groupNum
;
337 indexInGroup
= _data
.group
.indexInGroup
;
338 offsetInImage
= _data
.group
.offsetInImage
;
342 bool TargetSymbolValue::isInvalid() const
344 return (_data
.raw
== 0);
// Formats 'value' as a 0x-prefixed, zero-padded (min 8 digit) uppercase hex
// string.  Uses snprintf instead of sprintf so the stack buffer can never be
// overrun regardless of value width.
static std::string hex8(uint64_t value) {
    char buff[32];
    snprintf(buff, sizeof(buff), "0x%08llX", (unsigned long long)value);
    return buff;
}
// Formats 'value' as an unsigned decimal string.  snprintf (not sprintf)
// bounds the write to the stack buffer; 32 chars covers any uint64_t.
static std::string decimal(uint64_t value) {
    char buff[32];
    snprintf(buff, sizeof(buff), "%llu", (unsigned long long)value);
    return buff;
}
359 std::string
TargetSymbolValue::asString(ImageGroup group
) const
362 switch ( _data
.sharedCache
.kind
) {
363 case kindSharedCache
:
364 if ( _data
.sharedCache
.offsetIntoCache
== 0 )
365 return "{invalid target}";
367 return "{cache+" + hex8(_data
.sharedCache
.offsetIntoCache
) + "}";
369 offset
= (uintptr_t)_data
.absolute
.value
;
370 // sign extend 42 bit value
371 if ( offset
& 0x2000000000000000ULL
)
372 offset
|= 0xC000000000000000ULL
;
373 return "{absolute:" + hex8(offset
) + "}";
375 offset
= _data
.group
.offsetInImage
;
376 // sign extend 42 bit offset
377 if ( offset
& 0x0000020000000000ULL
)
378 offset
|= 0xFFFFFC0000000000ULL
;
379 if ( _data
.group
.groupNum
== 1 )
380 return "{otherDylib[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
381 if ( _data
.group
.groupNum
== 2 )
382 return "{closure[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
384 uint32_t groupNum
= _data
.group
.isIndirectGroup
? group
.indirectGroupNum(_data
.group
.groupNum
) : _data
.group
.groupNum
;
385 return "{dlopen-group-" + decimal(groupNum
-2) + "[" + decimal(_data
.group
.indexInGroup
) +"]+" + hex8(offset
) + "}";
387 case kindDynamicGroup
:
388 return "{dynamic image='" + std::string(group
.stringFromPool(_data
.dynamicGroup
.imagePathOffset
))
389 + "' symbol='" + std::string(group
.stringFromPool(_data
.dynamicGroup
.symbolNameOffset
)) + "'}";
391 assert(0 && "unreachable");
397 //////////////////////////// ImageRef ////////////////////////////////////////
399 binary_format::ImageRef
binary_format::ImageRef::weakImportMissing()
401 ImageRef
missing(0xFFFFFFFF);
407 //////////////////////////// Closure ////////////////////////////////////////
409 Closure::Closure(const binary_format::Closure
* closure
)
410 : _binaryData(closure
)
412 assert(closure
->magic
== binary_format::Closure::magicV1
);
415 size_t Closure::size() const
417 return _binaryData
->stringPoolOffset
+ _binaryData
->stringPoolSize
;
420 const ImageGroup
Closure::group() const
422 return ImageGroup(&_binaryData
->group
);
425 void Closure::forEachEnvVar(void (^handler
)(const char* keyEqualValue
, bool& stop
)) const
427 const uint32_t* envVarStringOffsets
= (uint32_t*)((uint8_t*)_binaryData
+ _binaryData
->dyldEnvVarsOffset
);
428 const char* stringPool
= (char*)_binaryData
+ _binaryData
->stringPoolOffset
;
430 for (uint32_t i
=0; i
< _binaryData
->dyldEnvVarsCount
; ++i
) {
431 handler(&stringPool
[envVarStringOffsets
[i
]], stop
);
437 void Closure::forEachMustBeMissingFile(void (^handler
)(const char* path
, bool& stop
)) const
439 const uint16_t* offsets
= (uint16_t*)((uint8_t*)_binaryData
+ _binaryData
->missingFileComponentsOffset
);
442 const char* stringPool
= (char*)_binaryData
+ _binaryData
->stringPoolOffset
;
447 while ( *offsets
!= 0 ) {
448 const char* component
= &stringPool
[*offsets
++];
449 strlcat(path
, "/", PATH_MAX
);
450 strlcat(path
, component
, PATH_MAX
);
453 ++offsets
; // move to next path
454 if ( *offsets
== 0 ) // if no next path, then end of list of strings
459 const uuid_t
* Closure::dyldCacheUUID() const
461 return &(_binaryData
->dyldCacheUUID
);
465 const uint8_t* Closure::cdHash() const
467 return _binaryData
->mainExecutableCdHash
;
471 uint32_t Closure::initialImageCount() const
473 return _binaryData
->initialImageCount
;
477 uint32_t Closure::mainExecutableImageIndex() const
479 return _binaryData
->mainExecutableIndexInGroup
;
483 uint32_t Closure::mainExecutableEntryOffset() const
485 return _binaryData
->mainExecutableEntryOffset
;
488 bool Closure::mainExecutableUsesCRT() const
490 return _binaryData
->usesCRT
;
493 bool Closure::isRestricted() const
495 return _binaryData
->isRestricted
;
498 bool Closure::usesLibraryValidation() const
500 return _binaryData
->usesLibraryValidation
;
503 uint32_t Closure::libdyldVectorOffset() const
505 return _binaryData
->libdyldVectorOffset
;
508 const BinaryImageData
* Closure::libSystem(const ImageGroupList
& groups
)
510 return Image::resolveImageRef(groups
, _binaryData
->libSystemRef
).binaryData();
513 const BinaryImageData
* Closure::libDyld(const ImageGroupList
& groups
)
515 return Image::resolveImageRef(groups
, _binaryData
->libDyldRef
).binaryData();
519 //////////////////////////// ImageGroup ////////////////////////////////////////
521 size_t ImageGroup::size() const
523 return (_binaryData
->stringsPoolOffset
+ _binaryData
->stringsPoolSize
+ 3) & (-4);
526 uint32_t ImageGroup::groupNum() const
528 return _binaryData
->groupNum
;
531 bool ImageGroup::dylibsExpectedOnDisk() const
533 return _binaryData
->dylibsExpectedOnDisk
;
536 uint32_t ImageGroup::imageCount() const
538 return _binaryData
->imagesPoolCount
;
541 const binary_format::Image
* ImageGroup::imageBinary(uint32_t index
) const
543 assert(index
<_binaryData
->imagesPoolCount
);
544 return (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
+ (index
* _binaryData
->imagesEntrySize
));
548 const Image
ImageGroup::image(uint32_t index
) const
550 return Image(imageBinary(index
));
553 uint32_t ImageGroup::indexInGroup(const binary_format::Image
* img
) const
555 long delta
= (char*)img
- ((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
556 uint32_t index
= (uint32_t)(delta
/_binaryData
->imagesEntrySize
);
557 assert(image(index
)._binaryData
== img
);
561 const binary_format::Image
* ImageGroup::findImageByPath(const char* path
, uint32_t& foundIndex
) const
563 // check path of each image in group
564 uint32_t targetHash
= hashFunction(path
);
565 const uint8_t* p
= (uint8_t*)_binaryData
+ _binaryData
->imagesPoolOffset
;
566 for (uint32_t i
=0; i
< _binaryData
->imagesPoolCount
; ++i
) {
567 const binary_format::Image
* binImage
= (binary_format::Image
*)p
;
568 if ( binImage
->pathHash
== targetHash
) {
570 if ( !img
.isInvalid() && (strcmp(img
.path(), path
) == 0) ) {
575 p
+= _binaryData
->imagesEntrySize
;
578 const binary_format::AliasEntry
* aliasEntries
= (binary_format::AliasEntry
*)((uint8_t*)_binaryData
+ _binaryData
->imageAliasOffset
);
579 for (uint32_t i
=0; i
< _binaryData
->imageAliasCount
; ++i
) {
580 const char* aliasPath
= stringFromPool(aliasEntries
[i
].aliasOffsetInStringPool
);
581 if ( aliasEntries
[i
].aliasHash
== targetHash
) {
582 if ( strcmp(aliasPath
, path
) == 0 ) {
583 Image img
= image(aliasEntries
[i
].imageIndexInGroup
);
584 if ( !img
.isInvalid() ) {
585 foundIndex
= aliasEntries
[i
].imageIndexInGroup
;
586 return img
.binaryData();
594 const binary_format::Image
* ImageGroup::findImageByCacheOffset(size_t cacheVmOffset
, uint32_t& mhCacheOffset
, uint8_t& foundPermissions
) const
596 assert(groupNum() == 0);
598 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)segmentPool(0);
599 const binary_format::Image
* image
= (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
600 // most address lookups are in TEXT, so just search first segment in first pass
601 for (uint32_t imageIndex
=0; imageIndex
< _binaryData
->imagesPoolCount
; ++imageIndex
) {
602 const binary_format::DyldCacheSegment
* segInfo
= &cacheSegs
[image
->segmentsArrayStartIndex
];
603 if ( (cacheVmOffset
>= segInfo
->cacheOffset
) && (cacheVmOffset
< (segInfo
->cacheOffset
+ segInfo
->size
)) ) {
604 mhCacheOffset
= segInfo
->cacheOffset
;
605 foundPermissions
= segInfo
->permissions
;
608 image
= (binary_format::Image
*)((char*)image
+ _binaryData
->imagesEntrySize
);
610 // second pass, skip TEXT segment
611 image
= (binary_format::Image
*)((char*)_binaryData
+ _binaryData
->imagesPoolOffset
);
612 for (uint32_t imageIndex
=0; imageIndex
< _binaryData
->imagesPoolCount
; ++imageIndex
) {
613 for (uint32_t segIndex
=1; segIndex
< image
->segmentsArrayCount
; ++segIndex
) {
614 const binary_format::DyldCacheSegment
* segInfo
= &cacheSegs
[image
->segmentsArrayStartIndex
+segIndex
];
615 if ( (cacheVmOffset
>= segInfo
->cacheOffset
) && (cacheVmOffset
< (segInfo
->cacheOffset
+ segInfo
->size
)) ) {
616 mhCacheOffset
= cacheSegs
[image
->segmentsArrayStartIndex
].cacheOffset
;
617 foundPermissions
= segInfo
->permissions
;
621 image
= (binary_format::Image
*)((char*)image
+ _binaryData
->imagesEntrySize
);
626 void ImageGroup::forEachAliasOf(uint32_t imageIndex
, void (^handler
)(const char* aliasPath
, uint32_t aliasPathHash
, bool& stop
)) const
629 const binary_format::AliasEntry
* aliasEntries
= (binary_format::AliasEntry
*)((uint8_t*)_binaryData
+ _binaryData
->imageAliasOffset
);
630 for (uint32_t i
=0; i
< _binaryData
->imageAliasCount
; ++i
) {
631 if ( aliasEntries
[i
].imageIndexInGroup
== imageIndex
) {
632 const char* aliasPath
= stringFromPool(aliasEntries
[i
].aliasOffsetInStringPool
);
633 handler(aliasPath
, aliasEntries
[i
].aliasHash
, stop
);
640 const char* ImageGroup::stringPool() const
642 return (char*)_binaryData
+ _binaryData
->stringsPoolOffset
;
645 const char* ImageGroup::stringFromPool(uint32_t offset
) const
647 assert(offset
< _binaryData
->stringsPoolSize
);
648 return (char*)_binaryData
+ _binaryData
->stringsPoolOffset
+ offset
;
651 uint32_t ImageGroup::stringPoolSize() const
653 return _binaryData
->stringsPoolSize
;;
656 binary_format::ImageRef
ImageGroup::dependentPool(uint32_t index
) const
658 assert(index
< _binaryData
->dependentsPoolCount
);
659 const binary_format::ImageRef
* depArray
= (binary_format::ImageRef
*)((char*)_binaryData
+ _binaryData
->dependentsPoolOffset
);
660 return depArray
[index
];
663 const uint64_t* ImageGroup::segmentPool(uint32_t index
) const
665 assert(index
< _binaryData
->segmentsPoolCount
);
666 const uint64_t* segArray
= (uint64_t*)((char*)_binaryData
+ _binaryData
->segmentsPoolOffset
);
667 return &segArray
[index
];
671 const uint32_t* ImageGroup::initializerOffsetsPool() const
673 return (uint32_t*)((char*)_binaryData
+ _binaryData
->intializerOffsetPoolOffset
);
676 const uint32_t ImageGroup::initializerOffsetsCount() const
678 return _binaryData
->intializerOffsetPoolCount
;
681 const binary_format::ImageRef
* ImageGroup::intializerListPool() const
683 return (binary_format::ImageRef
*)((char*)_binaryData
+ _binaryData
->intializerListPoolOffset
);
686 const uint32_t ImageGroup::intializerListPoolCount() const
688 return _binaryData
->intializerListPoolCount
;
691 const binary_format::AllFixupsBySegment
* ImageGroup::fixUps(uint32_t offset
) const
693 return (binary_format::AllFixupsBySegment
*)((char*)_binaryData
+ _binaryData
->fixupsOffset
+ offset
);
696 const TargetSymbolValue
* ImageGroup::targetValuesArray() const
698 return (TargetSymbolValue
*)((char*)_binaryData
+ _binaryData
->targetsOffset
);
701 uint32_t ImageGroup::targetValuesCount() const
703 return _binaryData
->targetsPoolCount
;
707 const uint32_t* ImageGroup::dofOffsetsPool() const
709 return (uint32_t*)((char*)_binaryData
+ _binaryData
->dofOffsetPoolOffset
);
712 const uint32_t ImageGroup::dofOffsetsCount() const
714 return _binaryData
->dofOffsetPoolCount
;
718 const uint32_t* ImageGroup::indirectGroupNumsPool() const
720 return (uint32_t*)((char*)_binaryData
+ _binaryData
->indirectGroupNumPoolOffset
);
723 const uint32_t ImageGroup::indirectGroupNumsCount() const
725 return _binaryData
->indirectGroupNumPoolCount
;
728 uint32_t ImageGroup::indirectGroupNum(uint32_t offset
) const
730 assert(offset
< _binaryData
->indirectGroupNumPoolCount
);
731 return indirectGroupNumsPool()[offset
];
734 uint32_t ImageGroup::hashFunction(const char* str
)
737 for (const char* s
=str
; *s
!= '\0'; ++s
)
743 void ImageGroup::forEachDyldCachePatch(uint32_t patchTargetIndex
, uint32_t cacheDataVmOffset
, void (^handler
)(uint32_t targetCacheOffset
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool& stop
)) const
745 assert(_binaryData
->imagesEntrySize
== sizeof(binary_format::CachedImage
) && "only callable on group-0 in shared cache");
746 assert(patchTargetIndex
< _binaryData
->cachePatchTableCount
);
747 const binary_format::PatchTable
* patches
= (binary_format::PatchTable
*)((char*)_binaryData
+ _binaryData
->cachePatchTableOffset
);
748 uint32_t offsetsIndex
= patches
[patchTargetIndex
].offsetsStartIndex
;
749 uint32_t targetCacheOffset
= patches
[patchTargetIndex
].targetCacheOffset
;
750 const binary_format::PatchOffset
* patchLocationOffsets
= (binary_format::PatchOffset
*)((char*)_binaryData
+ _binaryData
->cachePatchOffsetsOffset
);
753 assert(offsetsIndex
< _binaryData
->cachePatchOffsetsCount
);
754 binary_format::PatchOffset entry
= patchLocationOffsets
[offsetsIndex
];
756 handler(targetCacheOffset
, cacheDataVmOffset
+entry
.dataRegionOffset
, entry
.hasAddend
, stop
);
762 void ImageGroup::forEachImageRefOverride(void (^handler
)(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool& stop
)) const
765 const binary_format::ImageRefOverride
* entries
= (binary_format::ImageRefOverride
*)((char*)_binaryData
+ _binaryData
->imageOverrideTableOffset
);
766 for (uint32_t i
=0; (i
< _binaryData
->imageOverrideTableCount
) && !stop
; ++i
) {
767 handler(entries
[i
].standardDylib
, entries
[i
].overrideDylib
, stop
);
771 void ImageGroup::forEachImageRefOverride(const ImageGroupList
& groupList
, void (^handler
)(Image standardDylib
, Image overrideDylib
, bool& stop
)) const
773 forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool& stop
) {
774 Image standardDylib
= Image::resolveImageRef(groupList
, standardDylibRef
, false);
775 Image overrideDylib
= Image::resolveImageRef(groupList
, overrideDylibRef
, false);
776 handler(standardDylib
, overrideDylib
, stop
);
783 void ImageGroup::forEachDyldCachePatchLocation(const void* dyldCacheLoadAddress
, uint32_t patchTargetIndex
, void (^handler
)(uintptr_t* locationToPatch
, uintptr_t addend
, bool&)) const
785 DyldCacheParser
cacheParser((DyldSharedCache
*)dyldCacheLoadAddress
, false);
786 uint32_t cacheDataVmOffset
= (uint32_t)cacheParser
.dataRegionRuntimeVmOffset();
787 forEachDyldCachePatch(patchTargetIndex
, cacheDataVmOffset
, ^(uint32_t targetCacheOffset
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool& stop
) {
788 uintptr_t addend
= 0;
789 uintptr_t* fixupLoc
= (uintptr_t*)((char*)dyldCacheLoadAddress
+ usePointersCacheOffset
);
791 uintptr_t currentValue
= *fixupLoc
;
792 uintptr_t expectedValue
= (uintptr_t)dyldCacheLoadAddress
+ targetCacheOffset
;
793 uintptr_t delta
= currentValue
- expectedValue
;
797 handler(fixupLoc
, addend
, stop
);
801 void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler
)(uint32_t patchTableIndex
, const BinaryImageData
* image
, uint32_t imageOffset
, bool& stop
)) const
804 const binary_format::DyldCacheOverride
* entries
= (binary_format::DyldCacheOverride
*)((char*)_binaryData
+ _binaryData
->symbolOverrideTableOffset
);
805 for (uint32_t i
=0; (i
< _binaryData
->symbolOverrideTableCount
) && !stop
; ++i
) {
806 handler(entries
[i
].patchTableIndex
, imageBinary(entries
[i
].imageIndex
), entries
[i
].imageOffset
, stop
);
812 void ImageGroup::forEachDyldCacheSymbolOverride(void (^handler
)(uint32_t patchTableIndex
, uint32_t imageIndexInClosure
, uint32_t imageOffset
, bool& stop
)) const
815 const binary_format::DyldCacheOverride
* entries
= (binary_format::DyldCacheOverride
*)((char*)_binaryData
+ _binaryData
->symbolOverrideTableOffset
);
816 for (uint32_t i
=0; (i
< _binaryData
->symbolOverrideTableCount
) && !stop
; ++i
) {
817 handler(entries
[i
].patchTableIndex
, entries
[i
].imageIndex
, entries
[i
].imageOffset
, stop
);
821 void ImageGroup::forEachDyldCachePatchLocation(const DyldCacheParser
& cacheParser
, void (^handler
)(uint32_t targetCacheOffset
, const std::vector
<uint32_t>& usesPointersCacheOffsets
, bool& stop
)) const
823 uint32_t cacheDataVmOffset
= (uint32_t)cacheParser
.dataRegionRuntimeVmOffset();
824 __block
std::vector
<uint32_t> pointerCacheOffsets
;
826 for (uint32_t patchIndex
=0; patchIndex
< _binaryData
->cachePatchTableCount
; ++patchIndex
) {
827 pointerCacheOffsets
.clear();
828 __block
uint32_t targetCacheOffset
= 0;
829 forEachDyldCachePatch(patchIndex
, cacheDataVmOffset
, ^(uint32_t targetCacheOff
, uint32_t usePointersCacheOffset
, bool hasAddend
, bool&) {
830 targetCacheOffset
= targetCacheOff
;
831 pointerCacheOffsets
.push_back(usePointersCacheOffset
);
833 std::sort(pointerCacheOffsets
.begin(), pointerCacheOffsets
.end(), [&](uint32_t a
, uint32_t b
) { return a
< b
; });
834 handler(targetCacheOffset
, pointerCacheOffsets
, stop
);
840 bool ImageGroup::hasPatchTableIndex(uint32_t targetCacheOffset
, uint32_t& foundIndex
) const
842 const binary_format::PatchTable
* patches
= (binary_format::PatchTable
*)((char*)_binaryData
+ _binaryData
->cachePatchTableOffset
);
843 for (uint32_t i
=0; i
< _binaryData
->cachePatchTableCount
; ++i
) {
844 if ( patches
[i
].targetCacheOffset
== targetCacheOffset
) {
855 //////////////////////////// Image ////////////////////////////////////////
859 const ImageGroup
Image::group() const
861 return ImageGroup((binary_format::ImageGroup
*)(((char*)_binaryData
) + (_binaryData
->groupOffset
)));
864 uint32_t Image::maxLoadCount() const
866 return _binaryData
->maxLoadCount
;
869 const char* Image::path() const
871 return group().stringFromPool(_binaryData
->pathPoolOffset
);
874 uint32_t Image::pathHash() const
876 return _binaryData
->pathHash
;
879 const char* Image::leafName() const
881 const char* path
= group().stringFromPool(_binaryData
->pathPoolOffset
);
882 const char* lastSlash
= strrchr(path
, '/');
883 if ( lastSlash
!= nullptr )
889 const uuid_t
* Image::uuid() const
891 return &(_binaryData
->uuid
);
894 bool Image::isInvalid() const
896 return (_binaryData
== nullptr) || _binaryData
->isInvalid
;
899 bool Image::hasObjC() const
901 return _binaryData
->hasObjC
;
904 bool Image::isBundle() const
906 return _binaryData
->isBundle
;
909 bool Image::hasWeakDefs() const
911 return _binaryData
->hasWeakDefs
;
914 bool Image::mayHavePlusLoads() const
916 return _binaryData
->mayHavePlusLoads
;
919 bool Image::hasTextRelocs() const
921 return _binaryData
->hasTextRelocs
;
924 bool Image::neverUnload() const
926 return _binaryData
->neverUnload
;
929 bool Image::cwdMustBeThisDir() const
931 return _binaryData
->cwdSameAsThis
;
934 bool Image::isPlatformBinary() const
936 return _binaryData
->isPlatformBinary
;
939 bool Image::overridableDylib() const
941 return _binaryData
->overridableDylib
;
944 void Image::forEachDependentImage(const ImageGroupList
& groups
, void (^handler
)(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
)) const
946 assert(!_binaryData
->isInvalid
);
947 binary_format::ImageRef missingRef
= binary_format::ImageRef::weakImportMissing();
948 __block
bool stop
= false;
949 for (uint32_t depIndex
=0; (depIndex
< _binaryData
->dependentsArrayCount
) && !stop
; ++depIndex
) {
950 binary_format::ImageRef ref
= group().dependentPool(_binaryData
->dependentsArrayStartIndex
+ depIndex
);
951 if ( ref
!= missingRef
) {
952 Image
depImage(resolveImageRef(groups
, ref
));
953 handler(depIndex
, depImage
, (LinkKind
)ref
.kind(), stop
);
960 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, std::unordered_set
<const BinaryImageData
*>& allDependents
) const
964 __block
bool result
= true;
965 forEachDependentImage(groups
, ^(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
) {
966 if ( allDependents
.count(depImage
.binaryData()) == 0 ) {
967 allDependents
.insert(depImage
.binaryData());
968 if ( !depImage
.recurseAllDependentImages(groups
, allDependents
) ) {
978 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, SlowLoadSet
& allDependents
, bool& stopped
,
979 void (^handler
)(const dyld3::launch_cache::binary_format::Image
* aBinImage
, bool& stop
)) const
981 __block
bool result
= true;
982 // breadth first, add all directly dependent images
983 const dyld3::launch_cache::binary_format::Image
* needToProcessArray
[_binaryData
->dependentsArrayCount
];
984 memset((void*)needToProcessArray
, 0, _binaryData
->dependentsArrayCount
* sizeof(*needToProcessArray
));
985 const dyld3::launch_cache::binary_format::Image
** const needToProcess
= needToProcessArray
;
986 forEachDependentImage(groups
, ^(uint32_t depIndex
, Image depImage
, LinkKind kind
, bool& stop
) {
987 const dyld3::launch_cache::binary_format::Image
* depImageData
= depImage
.binaryData();
988 if ( allDependents
.contains(depImageData
) ) {
989 needToProcess
[depIndex
] = nullptr;
992 needToProcess
[depIndex
] = depImageData
;
993 if ( !allDependents
.add(depImageData
) ) {
999 handler(depImageData
, stop
);
1006 // recurse on each dependent image
1007 for (int i
=0; !stopped
&& (i
< _binaryData
->dependentsArrayCount
); ++i
) {
1008 if ( const dyld3::launch_cache::binary_format::Image
* depImageData
= needToProcess
[i
] ) {
1009 Image
depImage(depImageData
);
1010 if ( !depImage
.recurseAllDependentImages(groups
, allDependents
, stopped
, handler
) ) {
1019 bool Image::recurseAllDependentImages(const ImageGroupList
& groups
, SlowLoadSet
& allDependents
,
1020 void (^handler
)(const dyld3::launch_cache::binary_format::Image
* aBinImage
, bool& stop
)) const
1022 bool stopped
= false;
1023 return recurseAllDependentImages(groups
, allDependents
, stopped
, handler
);
1026 void Image::forEachDiskSegment(void (^handler
)(uint32_t segIndex
, uint32_t fileOffset
, uint32_t fileSize
, int64_t vmOffset
, uint64_t vmSize
, uint8_t permissions
, bool& stop
)) const
1028 assert(isDiskImage());
1029 const uint32_t pageSize
= (_binaryData
->has16KBpages
? 0x4000 : 0x1000);
1030 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1031 const binary_format::DiskSegment
* diskSegs
= (binary_format::DiskSegment
*)rawSegs
;
1032 uint32_t segIndex
= 0;
1033 uint32_t fileOffset
= 0;
1034 int64_t vmOffset
= 0;
1035 // decrement vmOffset by all segments before TEXT (e.g. PAGEZERO)
1036 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1037 const binary_format::DiskSegment
* seg
= &diskSegs
[i
];
1038 if ( seg
->filePageCount
!= 0 ) {
1041 vmOffset
-= (uint64_t)seg
->vmPageCount
* pageSize
;
1043 // walk each segment and call handler
1044 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1045 const binary_format::DiskSegment
* seg
= &diskSegs
[i
];
1046 uint64_t vmSize
= (uint64_t)seg
->vmPageCount
* pageSize
;
1047 uint32_t fileSize
= seg
->filePageCount
* pageSize
;
1048 if ( !seg
->paddingNotSeg
) {
1050 handler(segIndex
, ( fileSize
== 0) ? 0 : fileOffset
, fileSize
, vmOffset
, vmSize
, seg
->permissions
, stop
);
1056 fileOffset
+= fileSize
;
1060 void Image::forEachCacheSegment(void (^handler
)(uint32_t segIndex
, uint64_t vmOffset
, uint64_t vmSize
, uint8_t permissions
, bool& stop
)) const
1062 assert(!isDiskImage());
1063 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1064 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)rawSegs
;
1066 for (uint32_t i
=0; i
< _binaryData
->segmentsArrayCount
; ++i
) {
1067 uint64_t vmOffset
= cacheSegs
[i
].cacheOffset
- cacheSegs
[0].cacheOffset
;
1068 uint64_t vmSize
= cacheSegs
[i
].size
;
1069 uint8_t permissions
= cacheSegs
[i
].permissions
;
1070 handler(i
, vmOffset
, vmSize
, permissions
, stop
);
1076 bool Image::segmentHasFixups(uint32_t segIndex
) const
1078 return (segmentFixups(segIndex
) != nullptr);
1081 bool Image::containsAddress(const void* addr
, const void* imageLoadAddress
, uint8_t* permissions
) const
1083 if ( addr
< imageLoadAddress
)
1086 __block
bool found
= false;
1087 uint64_t offsetInImage
= (char*)addr
- (char*)imageLoadAddress
;
1088 if ( _binaryData
->isDiskImage
) {
1089 forEachDiskSegment(^(uint32_t segIterIndex
, uint32_t fileOffset
, uint32_t fileSize
, int64_t vmOffset
, uint64_t vmSize
, uint8_t segPerms
, bool& stop
) {
1090 if ( (offsetInImage
>= vmOffset
) && (offsetInImage
< vmOffset
+vmSize
) ) {
1091 if ( permissions
!= nullptr )
1092 *permissions
= segPerms
;
1099 forEachCacheSegment(^(uint32_t segIterIndex
, uint64_t vmOffset
, uint64_t vmSize
, uint8_t segPerms
, bool& stop
) {
1100 if ( (offsetInImage
>= vmOffset
) && (offsetInImage
< vmOffset
+vmSize
) ) {
1101 if ( permissions
!= nullptr )
1102 *permissions
= segPerms
;
1111 void Image::forEachInitializer(const void* imageLoadAddress
, void (^handler
)(const void* initializer
)) const
1113 const uint32_t initCount
= _binaryData
->initOffsetsArrayCount
;
1114 const uint32_t startIndex
= _binaryData
->initOffsetsArrayStartIndex
;
1115 const uint32_t* initOffsets
= group().initializerOffsetsPool();
1116 assert(startIndex
+ initCount
<= group().initializerOffsetsCount());
1117 for (uint32_t i
=0; i
< initCount
; ++i
) {
1118 uint32_t anOffset
= initOffsets
[startIndex
+i
];
1119 const void* func
= (char*)imageLoadAddress
+ anOffset
;
1124 void Image::forEachInitBefore(void (^handler
)(binary_format::ImageRef imageToInit
)) const
1126 const uint32_t initCount
= _binaryData
->initBeforeArrayCount
;
1127 const uint32_t startIndex
= _binaryData
->initBeforeArrayStartIndex
;
1128 const uint32_t endIndex
= group().intializerListPoolCount();
1129 const binary_format::ImageRef
* initRefs
= group().intializerListPool();
1130 assert(startIndex
+ initCount
<= endIndex
);
1131 for (uint32_t i
=0; i
< initCount
; ++i
) {
1132 binary_format::ImageRef ref
= initRefs
[startIndex
+i
];
1137 void Image::forEachDOF(const void* imageLoadAddress
, void (^handler
)(const void* section
)) const
1139 const uint32_t dofCount
= _binaryData
->dofOffsetsArrayCount
;
1140 const uint32_t startIndex
= _binaryData
->dofOffsetsArrayStartIndex
;
1141 const uint32_t* dofOffsets
= group().dofOffsetsPool();
1142 assert(startIndex
+ dofCount
<= group().dofOffsetsCount());
1143 for (uint32_t i
=0; i
< dofCount
; ++i
) {
1144 uint32_t anOffset
= dofOffsets
[startIndex
+i
];
1145 const void* section
= (char*)imageLoadAddress
+ anOffset
;
1150 Image
Image::resolveImageRef(const ImageGroupList
& groups
, binary_format::ImageRef ref
, bool applyOverrides
)
1152 // first look if ref image is overridden in closure
1153 __block
binary_format::ImageRef targetRef
= ref
;
1154 if ( applyOverrides
) {
1155 binary_format::ImageRef refToMatch
= ref
;
1156 refToMatch
.clearKind();
1157 for (int i
=0; i
< groups
.count(); ++i
) {
1158 ImageGroup
aGroup(groups
[i
]);
1159 if ( aGroup
.groupNum() >= 2 ) {
1160 aGroup
.forEachImageRefOverride(^(binary_format::ImageRef standardDylibRef
, binary_format::ImageRef overrideDylibRef
, bool &stop
) {
1161 if ( refToMatch
== standardDylibRef
) {
1162 targetRef
= overrideDylibRef
;
1169 // create Image object from targetRef
1170 for (int i
=0; i
< groups
.count(); ++i
) {
1171 ImageGroup
aGroup(groups
[i
]);
1172 if ( aGroup
.groupNum() == targetRef
.groupNum() ) {
1173 return aGroup
.image(targetRef
.indexInGroup());
1176 //assert(0 && "invalid ImageRef");
1177 return Image(nullptr);
1180 void Image::forEachInitBefore(const ImageGroupList
& groups
, void (^handler
)(Image imageToInit
)) const
1182 forEachInitBefore(^(binary_format::ImageRef ref
) {
1183 handler(resolveImageRef(groups
, ref
));
1187 bool Image::validateUsingModTimeAndInode() const
1189 return !group().binaryData()->imageFileInfoIsCdHash
;
1192 bool Image::validateUsingCdHash() const
1194 // don't have cdHash info if union has modtime info in it
1195 if ( !group().binaryData()->imageFileInfoIsCdHash
)
1198 // don't have codesign blob in dyld cache
1199 if ( !_binaryData
->isDiskImage
)
1202 // return true if image is code signed and cdHash16 is non-zero
1203 const binary_format::DiskImage
* diskImage
= asDiskImage();
1204 if ( diskImage
->codeSignFileOffset
== 0 )
1209 return (memcmp(cdHash16(), zeros
, 16) != 0);
1212 const uint8_t* Image::cdHash16() const
1214 return _binaryData
->fileInfo
.cdHash16
.bytes
;
1217 uint64_t Image::fileModTime() const
1219 return _binaryData
->fileInfo
.statInfo
.mtime
;
1222 uint64_t Image::fileINode() const
1224 return _binaryData
->fileInfo
.statInfo
.inode
;
1228 bool Image::isDiskImage() const
1230 return _binaryData
->isDiskImage
;
1233 const binary_format::DiskImage
* Image::asDiskImage() const
1235 assert(_binaryData
->isDiskImage
);
1236 return (binary_format::DiskImage
*)_binaryData
;
1239 const binary_format::CachedImage
* Image::asCachedImage() const
1241 assert(!_binaryData
->isDiskImage
);
1242 return (binary_format::CachedImage
*)_binaryData
;
1245 uint32_t Image::pageSize() const
1247 return (_binaryData
->has16KBpages
? 0x4000 : 0x1000);
1250 uint32_t Image::cacheOffset() const
1252 assert(!_binaryData
->isDiskImage
);
1253 const uint64_t* rawSegs
= group().segmentPool(_binaryData
->segmentsArrayStartIndex
);
1254 const binary_format::DyldCacheSegment
* cacheSegs
= (binary_format::DyldCacheSegment
*)rawSegs
;
1255 return cacheSegs
[0].cacheOffset
;
1258 uint32_t Image::patchStartIndex() const
1260 return asCachedImage()->patchStartIndex
;
1263 uint32_t Image::patchCount() const
1265 return asCachedImage()->patchCount
;
1268 uint64_t Image::sliceOffsetInFile() const
1270 return asDiskImage()->sliceOffsetIn4K
* 4096;
1273 bool Image::hasCodeSignature(uint32_t& fileOffset
, uint32_t& size
) const
1275 const binary_format::DiskImage
* diskImage
= asDiskImage();
1276 if ( diskImage
->codeSignFileOffset
!= 0 ) {
1277 fileOffset
= diskImage
->codeSignFileOffset
;
1278 size
= diskImage
->codeSignFileSize
;
1284 bool Image::isFairPlayEncrypted(uint32_t& textOffset
, uint32_t& size
) const
1286 const binary_format::DiskImage
* diskImage
= asDiskImage();
1287 if ( diskImage
->fairPlayTextPageCount
!= 0 ) {
1288 textOffset
= diskImage
->fairPlayTextStartPage
* pageSize();
1289 size
= diskImage
->fairPlayTextPageCount
* pageSize();
1295 uint64_t Image::vmSizeToMap() const
1297 return asDiskImage()->totalVmPages
* pageSize();
1300 void Image::forEachFixup(const uint8_t* pageFixups
, const void* segContent
, uint32_t& offset
, uint32_t& ordinal
,
1301 void (^handler
)(uint32_t pageOffset
, FixupKind kind
, uint32_t ordinal
, bool& stop
))
1304 for (const uint8_t* p
= pageFixups
; (*p
!= 0) && !stop
;) {
1305 binary_format::FixUpOpcode fullOp
= (binary_format::FixUpOpcode
)(*p
);
1306 binary_format::FixUpOpcode majorOp
= (binary_format::FixUpOpcode
)(*p
& 0xF0);
1307 uint8_t low4
= (*p
& 0x0F);
1308 switch ( majorOp
) {
1309 case binary_format::FixUpOpcode::done
:
1311 case binary_format::FixUpOpcode::rebase32
: // apply
1313 case binary_format::FixUpOpcode::bind64
:
1314 handler(offset
, FixupKind::bind64
, ordinal
, stop
);
1318 case binary_format::FixUpOpcode::bind32
:
1319 handler(offset
, FixupKind::bind32
, ordinal
, stop
);
1323 case binary_format::FixUpOpcode::rebase64
:
1324 handler(offset
, FixupKind::rebase64
, 0, stop
);
1328 case binary_format::FixUpOpcode::rebase32
:
1329 handler(offset
, FixupKind::rebase32
, 0, stop
);
1333 case binary_format::FixUpOpcode::rebaseText32
:
1334 handler(offset
, FixupKind::rebaseText32
, 0, stop
);
1338 case binary_format::FixUpOpcode::bindText32
:
1339 handler(offset
, FixupKind::bindText32
, ordinal
, stop
);
1343 case binary_format::FixUpOpcode::bindTextRel32
:
1344 handler(offset
, FixupKind::bindTextRel32
, ordinal
, stop
);
1348 case binary_format::FixUpOpcode::bindImportJmp32
:
1349 handler(offset
, FixupKind::bindImportJmp32
, ordinal
, stop
);
1353 //case binary_format::FixUpOpcode::fixupChain64:
1354 // assert(0 && "rebase/bind chain support not implemented yet");
1357 assert(0 && "bad opcode");
1361 case binary_format::FixUpOpcode::incPageOffset
:
1364 offset
+= read_uleb128(p
, p
+8)*4;
1371 case binary_format::FixUpOpcode::setPageOffset
:
1374 offset
= (uint32_t)read_uleb128(p
, p
+8);
1381 case binary_format::FixUpOpcode::incOrdinal
:
1384 ordinal
+= read_uleb128(p
, p
+8);
1391 case binary_format::FixUpOpcode::setOrdinal
:
1394 ordinal
= (uint32_t)read_uleb128(p
, p
+8);
1401 case binary_format::FixUpOpcode::repeat
: {
1403 uint32_t count
= (uint32_t)read_uleb128(p
, p
+8);
1404 uint8_t pattern
[32];
1405 for (int j
=0; j
< low4
; ++j
) {
1408 pattern
[low4
] = (uint8_t)binary_format::FixUpOpcode::done
;
1409 for (int j
=0; j
< count
; ++j
) {
1410 forEachFixup(&pattern
[0], segContent
, offset
, ordinal
, handler
);
1417 assert(0 && "bad opcode");
1423 const binary_format::SegmentFixupsByPage
* Image::segmentFixups(uint32_t segIndex
) const
1425 const binary_format::DiskImage
* diskImage
= asDiskImage();
1426 //const BinaryImageGroupData* g = group().binaryData();
1427 uint32_t segCountWithFixups
= diskImage
->fixupsPoolSegCount
;
1428 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d), group=%p, segCountWithFixup=%d\n", _binaryData, segIndex, g, segCountWithFixups);
1429 const binary_format::AllFixupsBySegment
* allFixups
= group().fixUps(diskImage
->fixupsPoolOffset
);
1430 for (uint32_t i
=0; i
< segCountWithFixups
; ++i
) {
1431 if ( allFixups
[i
].segIndex
== segIndex
) {
1432 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) allFixups=%p, allFixups[%d].segIndex=%d, allFixups[%d].offset=%d\n", _binaryData, segIndex, allFixups, i, allFixups[i].segIndex, i, allFixups[i].offset);
1433 return (binary_format::SegmentFixupsByPage
*)((char*)allFixups
+ allFixups
[i
].offset
);
1436 //fprintf(stderr,"segmentFixups(binImage=%p, segIndex=%d) => nullptr\n", _binaryData, segIndex);
1440 void Image::forEachFixup(uint32_t segIndex
, MemoryRange segContent
, void (^handler
)(uint64_t segOffset
, FixupKind
, TargetSymbolValue
, bool& stop
)) const
1442 const binary_format::SegmentFixupsByPage
* segFixups
= segmentFixups(segIndex
);
1443 if ( segFixups
== nullptr )
1446 assert(segFixups
->pageCount
*segFixups
->pageSize
<= segContent
.size
);
1448 const uint32_t ordinalsIndexInGroupPool
= asDiskImage()->targetsArrayStartIndex
;
1449 const uint32_t maxOrdinal
= asDiskImage()->targetsArrayCount
;
1450 const TargetSymbolValue
* groupArray
= group().targetValuesArray();
1451 assert(ordinalsIndexInGroupPool
< group().targetValuesCount());
1452 const TargetSymbolValue
* targetOrdinalArray
= &groupArray
[ordinalsIndexInGroupPool
];
1454 for (uint32_t pageIndex
=0; pageIndex
< segFixups
->pageCount
; ++pageIndex
) {
1455 const uint8_t* opcodes
= (uint8_t*)(segFixups
) + segFixups
->pageInfoOffsets
[pageIndex
];
1456 uint64_t pageStartOffet
= pageIndex
* segFixups
->pageSize
;
1457 uint32_t curOffset
= 0;
1458 uint32_t curOrdinal
= 0;
1459 forEachFixup(opcodes
, segContent
.address
, curOffset
, curOrdinal
, ^(uint32_t pageOffset
, FixupKind kind
, uint32_t targetOrdinal
, bool& stop
) {
1460 assert(targetOrdinal
< maxOrdinal
);
1461 handler(pageStartOffet
+ pageOffset
, kind
, targetOrdinalArray
[targetOrdinal
], stop
);
1467 } // namespace launch_cache
1468 } // namespace dyld3