dyld-551.3.tar.gz
[apple/dyld.git] / dyld3 / SharedCacheRuntime.cpp
1 /*
2 * Copyright (c) 2017 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24
25
26 #include <stdint.h>
27 #include <string.h>
28 #include <unistd.h>
29 #include <fcntl.h>
30 #include <sys/param.h>
31 #include <sys/types.h>
32 #include <sys/stat.h>
33 #include <sys/syscall.h>
34 #include <sys/syslog.h>
35 #include <sys/sysctl.h>
36 #include <sys/mman.h>
37 #include <mach/mach.h>
38 #include <mach-o/fat.h>
39 #include <mach-o/loader.h>
40 #include <mach-o/ldsyms.h>
41 #include <mach/shared_region.h>
42 #include <mach/mach.h>
43 #include <Availability.h>
44 #include <TargetConditionals.h>
45
46 #include "dyld_cache_format.h"
47 #include "SharedCacheRuntime.h"
48 #include "LaunchCache.h"
49 #include "LaunchCacheFormat.h"
50 #include "Loading.h"
51
52 #define ENABLE_DYLIBS_TO_OVERRIDE_CACHE_SIZE 1024
53
54 // should be in mach/shared_region.h
55 extern "C" int __shared_region_check_np(uint64_t* startaddress);
56 extern "C" int __shared_region_map_and_slide_np(int fd, uint32_t count, const shared_file_mapping_np mappings[], long slide, const dyld_cache_slide_info2* slideInfo, size_t slideInfoSize);
57
58
59 namespace dyld {
60 extern int my_stat(const char* path, struct stat* buf);
61 extern int my_open(const char* path, int flag, int other);
62 extern void log(const char*, ...);
63 }
64
65
66 namespace dyld3 {
67
68
// Everything preflightCacheFile() gathers that is needed to actually map the
// cache: the open file descriptor, the three mapping descriptors, the slide
// info location, and the shared region bounds.
struct CacheInfo
{
    int                     fd;                        // open cache file; caller is responsible for closing
    shared_file_mapping_np  mappings[3];               // kernel-format descriptors for the three cache regions
    uint64_t                slideInfoAddressUnslid;    // unslid address of the slide info blob (in third mapping)
    size_t                  slideInfoSize;             // 0 means cache has no slide info (no ASLR)
    uint64_t                cachedDylibsGroupUnslid;   // unslid address of dylibs ImageGroup; 0 for old-format caches
    uint64_t                sharedRegionStart;         // base address of the shared region for this cache
    uint64_t                sharedRegionSize;
    uint64_t                maxSlide;                  // largest slide that keeps the cache inside the region
};
80
81
82
83
84 #if __i386__
85 #define ARCH_NAME "i386"
86 #define ARCH_CACHE_MAGIC "dyld_v1 i386"
87 #elif __x86_64__
88 #define ARCH_NAME "x86_64"
89 #define ARCH_CACHE_MAGIC "dyld_v1 x86_64"
90 #define ARCH_NAME_H "x86_64h"
91 #define ARCH_CACHE_MAGIC_H "dyld_v1 x86_64h"
92 #elif __ARM_ARCH_7K__
93 #define ARCH_NAME "armv7k"
94 #define ARCH_CACHE_MAGIC "dyld_v1 armv7k"
95 #elif __ARM_ARCH_7A__
96 #define ARCH_NAME "armv7"
97 #define ARCH_CACHE_MAGIC "dyld_v1 armv7"
98 #elif __ARM_ARCH_7S__
99 #define ARCH_NAME "armv7s"
100 #define ARCH_CACHE_MAGIC "dyld_v1 armv7s"
101 #elif __arm64e__
102 #define ARCH_NAME "arm64e"
103 #define ARCH_CACHE_MAGIC "dyld_v1 arm64e"
104 #elif __arm64__
105 #define ARCH_NAME "arm64"
106 #define ARCH_CACHE_MAGIC "dyld_v1 arm64"
107 #endif
108
109
110
111 static void rebaseChain(uint8_t* pageContent, uint16_t startOffset, uintptr_t slideAmount, const dyld_cache_slide_info2* slideInfo)
112 {
113 const uintptr_t deltaMask = (uintptr_t)(slideInfo->delta_mask);
114 const uintptr_t valueMask = ~deltaMask;
115 const uintptr_t valueAdd = (uintptr_t)(slideInfo->value_add);
116 const unsigned deltaShift = __builtin_ctzll(deltaMask) - 2;
117
118 uint32_t pageOffset = startOffset;
119 uint32_t delta = 1;
120 while ( delta != 0 ) {
121 uint8_t* loc = pageContent + pageOffset;
122 uintptr_t rawValue = *((uintptr_t*)loc);
123 delta = (uint32_t)((rawValue & deltaMask) >> deltaShift);
124 uintptr_t value = (rawValue & valueMask);
125 if ( value != 0 ) {
126 value += valueAdd;
127 value += slideAmount;
128 }
129 *((uintptr_t*)loc) = value;
130 //dyld::log(" pageOffset=0x%03X, loc=%p, org value=0x%08llX, new value=0x%08llX, delta=0x%X\n", pageOffset, loc, (uint64_t)rawValue, (uint64_t)value, delta);
131 pageOffset += delta;
132 }
133 }
134
135
136 static void getCachePath(const SharedCacheOptions& options, size_t pathBufferSize, char pathBuffer[])
137 {
138 // set cache dir
139 if ( options.cacheDirOverride != nullptr ) {
140 strlcpy(pathBuffer, options.cacheDirOverride, pathBufferSize);
141 }
142 else {
143 #if __IPHONE_OS_VERSION_MIN_REQUIRED
144 strlcpy(pathBuffer, IPHONE_DYLD_SHARED_CACHE_DIR, sizeof(IPHONE_DYLD_SHARED_CACHE_DIR));
145 #else
146 strlcpy(pathBuffer, MACOSX_DYLD_SHARED_CACHE_DIR, sizeof(MACOSX_DYLD_SHARED_CACHE_DIR));
147 #endif
148 }
149
150 // append file component of cache file
151 if ( pathBuffer[strlen(pathBuffer)-1] != '/' )
152 strlcat(pathBuffer, "/", pathBufferSize);
153 #if __x86_64__ && !__IPHONE_OS_VERSION_MIN_REQUIRED
154 if ( options.useHaswell ) {
155 size_t len = strlen(pathBuffer);
156 struct stat haswellStatBuf;
157 strlcat(pathBuffer, DYLD_SHARED_CACHE_BASE_NAME ARCH_NAME_H, pathBufferSize);
158 if ( dyld::my_stat(pathBuffer, &haswellStatBuf) == 0 )
159 return;
160 // no haswell cache file, use regular x86_64 cache
161 pathBuffer[len] = '\0';
162 }
163 #endif
164 strlcat(pathBuffer, DYLD_SHARED_CACHE_BASE_NAME ARCH_NAME, pathBufferSize);
165
166 #if __IPHONE_OS_VERSION_MIN_REQUIRED && !TARGET_IPHONE_SIMULATOR
167 // use .development cache if it exists
168 struct stat enableStatBuf;
169 struct stat devCacheStatBuf;
170 struct stat optCacheStatBuf;
171 bool enableFileExists = (dyld::my_stat(IPHONE_DYLD_SHARED_CACHE_DIR "enable-dylibs-to-override-cache", &enableStatBuf) == 0);
172 bool devCacheExists = (dyld::my_stat(IPHONE_DYLD_SHARED_CACHE_DIR DYLD_SHARED_CACHE_BASE_NAME ARCH_NAME DYLD_SHARED_CACHE_DEVELOPMENT_EXT, &devCacheStatBuf) == 0);
173 bool optCacheExists = (dyld::my_stat(IPHONE_DYLD_SHARED_CACHE_DIR DYLD_SHARED_CACHE_BASE_NAME ARCH_NAME, &optCacheStatBuf) == 0);
174 if ( (enableFileExists && (enableStatBuf.st_size < ENABLE_DYLIBS_TO_OVERRIDE_CACHE_SIZE) && devCacheExists) || !optCacheExists )
175 strlcat(pathBuffer, DYLD_SHARED_CACHE_DEVELOPMENT_EXT, pathBufferSize);
176 #endif
177
178 }
179
180
181 int openSharedCacheFile(const SharedCacheOptions& options, SharedCacheLoadInfo* results)
182 {
183 getCachePath(options, sizeof(results->path), results->path);
184 return dyld::my_open(results->path, O_RDONLY, 0);
185 }
186
187 static bool validMagic(const SharedCacheOptions& options, const DyldSharedCache* cache)
188 {
189 if ( strcmp(cache->header.magic, ARCH_CACHE_MAGIC) == 0 )
190 return true;
191
192 #if __x86_64__
193 if ( options.useHaswell ) {
194 if ( strcmp(cache->header.magic, ARCH_CACHE_MAGIC_H) == 0 )
195 return true;
196 }
197 #endif
198 return false;
199 }
200
201
202 static bool validPlatform(const SharedCacheOptions& options, const DyldSharedCache* cache)
203 {
204 // grandfather in old cache that does not have platform in header
205 if ( cache->header.mappingOffset < 0xE0 )
206 return true;
207
208 if ( cache->header.platform != (uint32_t)MachOParser::currentPlatform() )
209 return false;
210
211 #if TARGET_IPHONE_SIMULATOR
212 if ( cache->header.simulator == 0 )
213 return false;
214 #else
215 if ( cache->header.simulator != 0 )
216 return false;
217 #endif
218
219 return true;
220 }
221
222
223 static void verboseSharedCacheMappings(const shared_file_mapping_np mappings[3])
224 {
225 for (int i=0; i < 3; ++i) {
226 dyld::log(" 0x%08llX->0x%08llX init=%x, max=%x %s%s%s\n",
227 mappings[i].sfm_address, mappings[i].sfm_address+mappings[i].sfm_size-1,
228 mappings[i].sfm_init_prot, mappings[i].sfm_init_prot,
229 ((mappings[i].sfm_init_prot & VM_PROT_READ) ? "read " : ""),
230 ((mappings[i].sfm_init_prot & VM_PROT_WRITE) ? "write " : ""),
231 ((mappings[i].sfm_init_prot & VM_PROT_EXECUTE) ? "execute " : ""));
232 }
233 }
234
// Opens the shared cache file and validates its header, mapping layout, and
// code signature, filling in 'info' with everything needed to map it.
// On success returns true with info->fd left OPEN (caller must close it).
// On failure returns false with results->errorMessage set and the fd closed.
static bool preflightCacheFile(const SharedCacheOptions& options, SharedCacheLoadInfo* results, CacheInfo* info)
{
    // find and open shared cache file
    int fd = openSharedCacheFile(options, results);
    if ( fd == -1 ) {
        results->errorMessage = "shared cache file cannot be opened";
        return false;
    }
    // stat() for the file length, needed below to verify the code signature
    // extends to the end of the file
    struct stat cacheStatBuf;
    if ( dyld::my_stat(results->path, &cacheStatBuf) != 0 ) {
        results->errorMessage = "shared cache file cannot be stat()ed";
        ::close(fd);
        return false;
    }
    size_t cacheFileLength = (size_t)(cacheStatBuf.st_size);

    // sanity check header and mappings
    uint8_t firstPage[0x4000];
    if ( ::pread(fd, firstPage, sizeof(firstPage), 0) != sizeof(firstPage) ) {
        results->errorMessage = "shared cache header could not be read";
        ::close(fd);
        return false;
    }
    const DyldSharedCache* cache = (DyldSharedCache*)firstPage;
    if ( !validMagic(options, cache) ) {
        results->errorMessage = "shared cache file has wrong magic";
        ::close(fd);
        return false;
    }
    if ( !validPlatform(options, cache) ) {
        results->errorMessage = "shared cache file is for a different platform";
        ::close(fd);
        return false;
    }
    // exactly three mappings are expected; the mappingOffset bound keeps the
    // mapping array inside the page read above
    if ( (cache->header.mappingCount != 3) || (cache->header.mappingOffset > 0x120) ) {
        results->errorMessage = "shared cache file mappings are invalid";
        ::close(fd);
        return false;
    }
    // mappings must be contiguous in the file, non-overlapping in memory, the
    // code signature must end exactly at EOF, and each region must carry the
    // expected max protections (r-x, rw-, r--)
    const dyld_cache_mapping_info* const fileMappings = (dyld_cache_mapping_info*)&firstPage[cache->header.mappingOffset];
    if ( (fileMappings[0].fileOffset != 0)
        || ((fileMappings[0].address + fileMappings[0].size) > fileMappings[1].address)
        || ((fileMappings[1].address + fileMappings[1].size) > fileMappings[2].address)
        || ((fileMappings[0].fileOffset + fileMappings[0].size) != fileMappings[1].fileOffset)
        || ((fileMappings[1].fileOffset + fileMappings[1].size) != fileMappings[2].fileOffset)
        || ((cache->header.codeSignatureOffset + cache->header.codeSignatureSize) != cacheFileLength)
        || (fileMappings[0].maxProt != (VM_PROT_READ|VM_PROT_EXECUTE))
        || (fileMappings[1].maxProt != (VM_PROT_READ|VM_PROT_WRITE))
        || (fileMappings[2].maxProt != VM_PROT_READ) ) {
        results->errorMessage = "shared cache file mappings are invalid";
        ::close(fd);
        return false;
    }

    // newer caches (mappingOffset >= 0xF8) record their own region bounds in
    // the header; older caches must fit the platform's fixed shared region
    if ( cache->header.mappingOffset >= 0xF8 ) {
        if ( (fileMappings[0].address != cache->header.sharedRegionStart) || ((fileMappings[2].address + fileMappings[2].size) > (cache->header.sharedRegionStart+cache->header.sharedRegionSize)) ) {
            results->errorMessage = "shared cache file mapping addressses invalid";
            ::close(fd);
            return false;
        }
    }
    else {
        if ( (fileMappings[0].address != SHARED_REGION_BASE) || ((fileMappings[2].address + fileMappings[2].size) > (SHARED_REGION_BASE+SHARED_REGION_SIZE)) ) {
            results->errorMessage = "shared cache file mapping addressses invalid";
            ::close(fd);
            return false;
        }
    }

    // register code signature of cache file
    fsignatures_t siginfo;
    siginfo.fs_file_start = 0;  // cache always starts at beginning of file
    siginfo.fs_blob_start = (void*)cache->header.codeSignatureOffset;
    siginfo.fs_blob_size  = (size_t)(cache->header.codeSignatureSize);
    int result = fcntl(fd, F_ADDFILESIGS_RETURN, &siginfo);
    if ( result == -1 ) {
        results->errorMessage = "code signature registration for shared cache failed";
        ::close(fd);
        return false;
    }

    // <rdar://problem/23188073> validate code signature covers entire shared cache
    // NOTE(review): with F_ADDFILESIGS_RETURN, fs_file_start appears to hold
    // the length of the file range covered by the signature on return
    uint64_t codeSignedLength = siginfo.fs_file_start;
    if ( codeSignedLength < cache->header.codeSignatureOffset ) {
        results->errorMessage = "code signature does not cover entire shared cache file";
        ::close(fd);
        return false;
    }
    // mmap the first page executable and compare to what pread() returned —
    // confirms the kernel accepts the registered signature for exec mappings
    // and that the page content is stable
    void* mappedData = ::mmap(NULL, sizeof(firstPage), PROT_READ|PROT_EXEC, MAP_PRIVATE, fd, 0);
    if ( mappedData == MAP_FAILED ) {
        results->errorMessage = "first page of shared cache not mmap()able";
        ::close(fd);
        return false;
    }
    if ( memcmp(mappedData, firstPage, sizeof(firstPage)) != 0 ) {
        results->errorMessage = "first page of shared cache not mmap()able";
        ::close(fd);
        return false;
    }
    ::munmap(mappedData, sizeof(firstPage));

    // fill out results
    info->fd = fd;
    for (int i=0; i < 3; ++i) {
        info->mappings[i].sfm_address     = fileMappings[i].address;
        info->mappings[i].sfm_size        = fileMappings[i].size;
        info->mappings[i].sfm_file_offset = fileMappings[i].fileOffset;
        info->mappings[i].sfm_max_prot    = fileMappings[i].maxProt;
        info->mappings[i].sfm_init_prot   = fileMappings[i].initProt;
    }
    // mark the second (writable) mapping as slidable
    info->mappings[1].sfm_max_prot  |= VM_PROT_SLIDE;
    info->mappings[1].sfm_init_prot |= VM_PROT_SLIDE;
    // slide info lives in the third mapping; convert its file offset to an unslid address
    info->slideInfoAddressUnslid = fileMappings[2].address + cache->header.slideInfoOffset - fileMappings[2].fileOffset;
    info->slideInfoSize = (long)cache->header.slideInfoSize;
    // dylibsImageGroupAddr field only exists when mappingOffset > 0xD0
    if ( cache->header.mappingOffset > 0xD0 )
        info->cachedDylibsGroupUnslid = cache->header.dylibsImageGroupAddr;
    else
        info->cachedDylibsGroupUnslid = 0;
    if ( cache->header.mappingOffset >= 0xf8 ) {
        info->sharedRegionStart = cache->header.sharedRegionStart;
        info->sharedRegionSize  = cache->header.sharedRegionSize;
        info->maxSlide          = cache->header.maxSlide;
    }
    else {
        // old cache: derive region bounds and max slide from platform constants
        info->sharedRegionStart = SHARED_REGION_BASE;
        info->sharedRegionSize  = SHARED_REGION_SIZE;
        info->maxSlide          = SHARED_REGION_SIZE - (fileMappings[2].address + fileMappings[2].size - fileMappings[0].address);
    }
    return true;
}
365
366 #if !TARGET_IPHONE_SIMULATOR
// Checks whether another process has already mapped the shared cache into the
// shared region.  Returns true if a cache is resident: results are filled in
// when it is compatible, otherwise results->errorMessage is set.  Returns
// false when no cache is mapped yet.
static bool reuseExistingCache(const SharedCacheOptions& options, SharedCacheLoadInfo* results)
{
    uint64_t cacheBaseAddress;
#if __i386__
    // on i386 the check is made by raw syscall number (294) rather than
    // through __shared_region_check_np() — presumably no i386 stub; confirm
    if ( syscall(294, &cacheBaseAddress) == 0 ) {
#else
    if ( __shared_region_check_np(&cacheBaseAddress) == 0 ) {
#endif
        const DyldSharedCache* existingCache = (DyldSharedCache*)cacheBaseAddress;
        if ( validMagic(options, existingCache) ) {
            const dyld_cache_mapping_info* const fileMappings = (dyld_cache_mapping_info*)(cacheBaseAddress + existingCache->header.mappingOffset);
            results->loadAddress = existingCache;
            // slide = where the cache actually landed minus where it was built to load
            results->slide = (long)(cacheBaseAddress - fileMappings[0].address);
            if ( (existingCache->header.mappingOffset > 0xD0) && (existingCache->header.dylibsImageGroupAddr != 0) )
                results->cachedDylibsGroup = (const launch_cache::binary_format::ImageGroup*)(existingCache->header.dylibsImageGroupAddr + results->slide);
            else
                results->cachedDylibsGroup = nullptr;
            // we don't know the path this cache was previously loaded from, assume default
            getCachePath(options, sizeof(results->path), results->path);
            if ( options.verbose ) {
                const shared_file_mapping_np* const mappings = (shared_file_mapping_np*)(cacheBaseAddress + existingCache->header.mappingOffset);
                dyld::log("re-using existing shared cache (%s):\n", results->path);
                // log slid addresses, not the unslid values stored in the header
                shared_file_mapping_np slidMappings[3];
                for (int i=0; i < 3; ++i) {
                    slidMappings[i] = mappings[i];
                    slidMappings[i].sfm_address += results->slide;
                }
                verboseSharedCacheMappings(slidMappings);
            }
        }
        else {
            results->errorMessage = "existing shared cache in memory is not compatible";
        }
        return true;
    }
    return false;
}
404
405 static long pickCacheASLR(CacheInfo& info)
406 {
407 // choose new random slide
408 #if __IPHONE_OS_VERSION_MIN_REQUIRED
409 // <rdar://problem/20848977> change shared cache slide for 32-bit arm to always be 16k aligned
410 long slide = ((arc4random() % info.maxSlide) & (-16384));
411 #else
412 long slide = ((arc4random() % info.maxSlide) & (-4096));
413 #endif
414
415 // <rdar://problem/32031197> respect -disable_aslr boot-arg
416 if ( dyld3::loader::bootArgsContains("-disable_aslr") )
417 slide = 0;
418
419 // update mappings
420 for (uint32_t i=0; i < 3; ++i) {
421 info.mappings[i].sfm_address += slide;
422 }
423
424 return slide;
425 }
426
427 static bool mapCacheSystemWide(const SharedCacheOptions& options, SharedCacheLoadInfo* results)
428 {
429 CacheInfo info;
430 if ( !preflightCacheFile(options, results, &info) )
431 return false;
432
433 const dyld_cache_slide_info2* slideInfo = nullptr;
434 if ( info.slideInfoSize != 0 ) {
435 results->slide = pickCacheASLR(info);
436 slideInfo = (dyld_cache_slide_info2*)(info.slideInfoAddressUnslid + results->slide);
437 }
438 if ( info.cachedDylibsGroupUnslid != 0 )
439 results->cachedDylibsGroup = (const launch_cache::binary_format::ImageGroup*)(info.cachedDylibsGroupUnslid + results->slide);
440 else
441 results->cachedDylibsGroup = nullptr;
442
443 int result = __shared_region_map_and_slide_np(info.fd, 3, info.mappings, results->slide, slideInfo, info.slideInfoSize);
444 ::close(info.fd);
445 if ( result == 0 ) {
446 results->loadAddress = (const DyldSharedCache*)(info.mappings[0].sfm_address);
447 }
448 else {
449 // could be another process beat us to it
450 if ( reuseExistingCache(options, results) )
451 return true;
452 // if cache does not exist, then really is an error
453 results->errorMessage = "syscall to map cache into shared region failed";
454 return false;
455 }
456
457 if ( options.verbose ) {
458 dyld::log("mapped dyld cache file system wide: %s\n", results->path);
459 verboseSharedCacheMappings(info.mappings);
460 }
461 return true;
462 }
463 #endif
464
465 static bool mapCachePrivate(const SharedCacheOptions& options, SharedCacheLoadInfo* results)
466 {
467 // open and validate cache file
468 CacheInfo info;
469 if ( !preflightCacheFile(options, results, &info) )
470 return false;
471
472 // compute ALSR slide
473 results->slide = 0;
474 const dyld_cache_slide_info2* slideInfo = nullptr;
475 #if !TARGET_IPHONE_SIMULATOR // simulator caches do not support sliding
476 if ( info.slideInfoSize != 0 ) {
477 results->slide = pickCacheASLR(info);
478 slideInfo = (dyld_cache_slide_info2*)(info.slideInfoAddressUnslid + results->slide);
479 }
480 #endif
481 results->loadAddress = (const DyldSharedCache*)(info.mappings[0].sfm_address);
482 if ( info.cachedDylibsGroupUnslid != 0 )
483 results->cachedDylibsGroup = (const launch_cache::binary_format::ImageGroup*)(info.cachedDylibsGroupUnslid + results->slide);
484 else
485 results->cachedDylibsGroup = nullptr;
486
487 // remove the shared region sub-map
488 vm_deallocate(mach_task_self(), (vm_address_t)info.sharedRegionStart, (vm_size_t)info.sharedRegionSize);
489
490 // map cache just for this process with mmap()
491 for (int i=0; i < 3; ++i) {
492 void* mmapAddress = (void*)(uintptr_t)(info.mappings[i].sfm_address);
493 size_t size = (size_t)(info.mappings[i].sfm_size);
494 //dyld::log("dyld: mapping address %p with size 0x%08lX\n", mmapAddress, size);
495 int protection = 0;
496 if ( info.mappings[i].sfm_init_prot & VM_PROT_EXECUTE )
497 protection |= PROT_EXEC;
498 if ( info.mappings[i].sfm_init_prot & VM_PROT_READ )
499 protection |= PROT_READ;
500 if ( info.mappings[i].sfm_init_prot & VM_PROT_WRITE )
501 protection |= PROT_WRITE;
502 off_t offset = info.mappings[i].sfm_file_offset;
503 if ( ::mmap(mmapAddress, size, protection, MAP_FIXED | MAP_PRIVATE, info.fd, offset) != mmapAddress ) {
504 // failed to map some chunk of this shared cache file
505 // clear shared region
506 vm_deallocate(mach_task_self(), (vm_address_t)info.sharedRegionStart, (vm_size_t)info.sharedRegionSize);
507 // return failure
508 results->loadAddress = nullptr;
509 results->cachedDylibsGroup = nullptr;
510 results->errorMessage = "could not mmap() part of dyld cache";
511 return false;
512 }
513 }
514
515 // update all __DATA pages with slide info
516 const dyld_cache_slide_info* slideInfoHeader = (dyld_cache_slide_info*)slideInfo;
517 if ( slideInfoHeader != nullptr ) {
518 if ( slideInfoHeader->version != 2 ) {
519 results->errorMessage = "invalide slide info in cache file";
520 return false;
521 }
522 const dyld_cache_slide_info2* slideHeader = (dyld_cache_slide_info2*)slideInfo;
523 const uint32_t page_size = slideHeader->page_size;
524 const uint16_t* page_starts = (uint16_t*)((long)(slideInfo) + slideHeader->page_starts_offset);
525 const uint16_t* page_extras = (uint16_t*)((long)(slideInfo) + slideHeader->page_extras_offset);
526 const uintptr_t dataPagesStart = (uintptr_t)info.mappings[1].sfm_address;
527 for (int i=0; i < slideHeader->page_starts_count; ++i) {
528 uint8_t* page = (uint8_t*)(long)(dataPagesStart + (page_size*i));
529 uint16_t pageEntry = page_starts[i];
530 //dyld::log("page[%d]: page_starts[i]=0x%04X\n", i, pageEntry);
531 if ( pageEntry == DYLD_CACHE_SLIDE_PAGE_ATTR_NO_REBASE )
532 continue;
533 if ( pageEntry & DYLD_CACHE_SLIDE_PAGE_ATTR_EXTRA ) {
534 uint16_t chainIndex = (pageEntry & 0x3FFF);
535 bool done = false;
536 while ( !done ) {
537 uint16_t pInfo = page_extras[chainIndex];
538 uint16_t pageStartOffset = (pInfo & 0x3FFF)*4;
539 //dyld::log(" chain[%d] pageOffset=0x%03X\n", chainIndex, pageStartOffset);
540 rebaseChain(page, pageStartOffset, results->slide, slideInfo);
541 done = (pInfo & DYLD_CACHE_SLIDE_PAGE_ATTR_END);
542 ++chainIndex;
543 }
544 }
545 else {
546 uint32_t pageOffset = pageEntry * 4;
547 //dyld::log(" start pageOffset=0x%03X\n", pageOffset);
548 rebaseChain(page, pageOffset, results->slide, slideInfo);
549 }
550 }
551 }
552
553 if ( options.verbose ) {
554 dyld::log("mapped dyld cache file private to process (%s):\n", results->path);
555 verboseSharedCacheMappings(info.mappings);
556 }
557 return true;
558 }
559
560
561
562 bool loadDyldCache(const SharedCacheOptions& options, SharedCacheLoadInfo* results)
563 {
564 results->loadAddress = 0;
565 results->slide = 0;
566 results->cachedDylibsGroup = nullptr;
567 results->errorMessage = nullptr;
568
569 #if TARGET_IPHONE_SIMULATOR
570 // simulator only supports mmap()ing cache privately into process
571 return mapCachePrivate(options, results);
572 #else
573 if ( options.forcePrivate ) {
574 // mmap cache into this process only
575 return mapCachePrivate(options, results);
576 }
577 else {
578 // fast path: when cache is already mapped into shared region
579 if ( reuseExistingCache(options, results) )
580 return (results->errorMessage != nullptr);
581
582 // slow path: this is first process to load cache
583 return mapCacheSystemWide(options, results);
584 }
585 #endif
586 }
587
588
589 bool findInSharedCacheImage(const SharedCacheLoadInfo& loadInfo, const char* dylibPathToFind, SharedCacheFindDylibResults* results)
590 {
591 if ( loadInfo.loadAddress == nullptr )
592 return false;
593
594 // HACK: temp support for old caches
595 if ( (loadInfo.cachedDylibsGroup == nullptr) || (loadInfo.loadAddress->header.formatVersion != launch_cache::binary_format::kFormatVersion) ) {
596 const dyld_cache_image_info* const start = (dyld_cache_image_info*)((uint8_t*)loadInfo.loadAddress + loadInfo.loadAddress->header.imagesOffset);
597 const dyld_cache_image_info* const end = &start[loadInfo.loadAddress->header.imagesCount];
598 for (const dyld_cache_image_info* p = start; p != end; ++p) {
599 const char* aPath = (char*)loadInfo.loadAddress + p->pathFileOffset;
600 if ( strcmp(aPath, dylibPathToFind) == 0 ) {
601 results->mhInCache = (const mach_header*)(p->address+loadInfo.slide);
602 results->pathInCache = aPath;
603 results->slideInCache = loadInfo.slide;
604 results->imageData = nullptr;
605 return true;
606 }
607 }
608 return false;
609 }
610 // HACK: end
611
612 launch_cache::ImageGroup dylibsGroup(loadInfo.cachedDylibsGroup);
613 uint32_t foundIndex;
614 const launch_cache::binary_format::Image* imageData = dylibsGroup.findImageByPath(dylibPathToFind, foundIndex);
615 #if __MAC_OS_X_VERSION_MIN_REQUIRED
616 // <rdar://problem/32740215> handle symlink to cached dylib
617 if ( imageData == nullptr ) {
618 char resolvedPath[PATH_MAX];
619 if ( realpath(dylibPathToFind, resolvedPath) != nullptr )
620 imageData = dylibsGroup.findImageByPath(resolvedPath, foundIndex);
621 }
622 #endif
623 if ( imageData == nullptr )
624 return false;
625
626 launch_cache::Image image(imageData);
627 results->mhInCache = (const mach_header*)((uintptr_t)loadInfo.loadAddress + image.cacheOffset());
628 results->pathInCache = image.path();
629 results->slideInCache = loadInfo.slide;
630 results->imageData = imageData;
631 return true;
632 }
633
634
635 bool pathIsInSharedCacheImage(const SharedCacheLoadInfo& loadInfo, const char* dylibPathToFind)
636 {
637 if ( (loadInfo.loadAddress == nullptr) || (loadInfo.cachedDylibsGroup == nullptr) || (loadInfo.loadAddress->header.formatVersion != launch_cache::binary_format::kFormatVersion) )
638 return false;
639
640 launch_cache::ImageGroup dylibsGroup(loadInfo.cachedDylibsGroup);
641 uint32_t foundIndex;
642 const launch_cache::binary_format::Image* imageData = dylibsGroup.findImageByPath(dylibPathToFind, foundIndex);
643 return (imageData != nullptr);
644 }
645
646
647 } // namespace dyld3
648