/*
 * Copyright (c) 2012 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*
  Management of optimizations in the dyld shared cache
*/

#include "objc-private.h"
#include "objc-file.h"

#if !SUPPORT_PREOPT
// Preoptimization not supported on this platform.

bool isPreoptimized(void) { return NO; }
bool noMissingWeakSuperclasses(void) { return NO; }
bool header_info::isPreoptimized() const { return NO; }
bool header_info::hasPreoptimizedSelectors() const { return NO; }
bool header_info::hasPreoptimizedClasses() const { return NO; }
bool header_info::hasPreoptimizedProtocols() const { return NO; }
Protocol *getPreoptimizedProtocol(const char *name) { return nil; }
unsigned int getPreoptimizedClassUnreasonableCount() { return 0; }
Class getPreoptimizedClass(const char *name) { return nil; }

Class* copyPreoptimizedClasses(const char *name, int *outCount)
{
    *outCount = 0;
    return nil;
}

header_info *preoptimizedHinfoForHeader(const headerType *mhdr) { return nil; }
header_info_rw *getPreoptimizedHeaderRW(const struct header_info *const hdr) { return nil; }

void preopt_init(void)
{
    disableSharedCacheOptimizations();

    if (PrintPreopt) {
        _objc_inform("PREOPTIMIZATION: is DISABLED "
                     "(not supported on this platform)");
    }
}

#else
// SUPPORT_PREOPT

#include <objc-shared-cache.h>

using objc_opt::objc_stringhash_offset_t;
using objc_opt::objc_protocolopt2_t;
using objc_opt::objc_clsopt_t;
using objc_opt::objc_headeropt_ro_t;
using objc_opt::objc_headeropt_rw_t;
using objc_opt::objc_opt_t;


// preopt: the actual opt used at runtime (nil or &_objc_opt_data)
// _objc_opt_data: opt data possibly written by dyld
// opt is initialized to ~0 to detect incorrect use before preopt_init()

static const objc_opt_t *opt = (objc_opt_t *)~0;
static bool preoptimized;

extern const objc_opt_t _objc_opt_data;  // in __TEXT, __objc_opt_ro

namespace objc_opt {
struct objc_headeropt_ro_t {
    uint32_t count;
    uint32_t entsize;
    header_info headers[0];  // sorted by mhdr address

    header_info& getOrEnd(uint32_t i) const {
        return *(header_info *)((uint8_t *)&headers + (i * entsize));
    }

    header_info& get(uint32_t i) const {
        return *(header_info *)((uint8_t *)&headers + (i * entsize));
    }

    uint32_t index(const header_info* hi) const {
        const header_info* begin = &get(0);
        const header_info* end = &getOrEnd(count);
        ASSERT(hi >= begin && hi < end);
        return (uint32_t)(((uintptr_t)hi - (uintptr_t)begin) / entsize);
    }

    header_info *get(const headerType *mhdr)
    {
        // Binary search the entries, which are sorted by mhdr address.
        int32_t start = 0;
        int32_t end = count - 1;
        while (start <= end) {
            int32_t i = (start+end)/2;
            header_info &hi = get(i);
            if (mhdr == hi.mhdr()) return &hi;
            else if (mhdr < hi.mhdr()) end = i-1;
            else start = i+1;
        }

#if DEBUG
        for (uint32_t i = 0; i < count; i++) {
            header_info &hi = get(i);
            if (mhdr == hi.mhdr()) {
                _objc_fatal("failed to find header %p (%d/%d)",
                            mhdr, i, count);
            }
        }
#endif

        return nil;
    }
};

struct objc_headeropt_rw_t {
    uint32_t count;
    uint32_t entsize;
    header_info_rw headers[0];  // sorted by mhdr address
};
};

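// Layout note: both tables above are indexed by a table-provided stride
// (entsize) rather than by sizeof(header_info) / sizeof(header_info_rw), so a
// newer shared cache can append fields to each entry without breaking an
// older runtime; index() recovers an image's slot from a header_info pointer
// by dividing its byte distance from the table start by that stride.
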
/***********************************************************************
* Return YES if we have a valid optimized shared cache.
**********************************************************************/
bool isPreoptimized(void)
{
    return preoptimized;
}


/***********************************************************************
* Return YES if the shared cache does not have any classes with
* missing weak superclasses.
**********************************************************************/
bool noMissingWeakSuperclasses(void)
{
    if (!preoptimized) return NO;  // might have missing weak superclasses
    return opt->flags & objc_opt::NoMissingWeakSuperclasses;
}

/***********************************************************************
* Return YES if this image's dyld shared cache optimizations are valid.
**********************************************************************/
bool header_info::isPreoptimized() const
{
    // preoptimization disabled for some reason
    if (!preoptimized) return NO;

    // image not from shared cache, or not fixed inside shared cache
    if (!info()->optimizedByDyld()) return NO;

    return YES;
}

bool header_info::hasPreoptimizedSelectors() const
{
    // preoptimization disabled for some reason
    if (!preoptimized) return NO;

    return info()->optimizedByDyld() || info()->optimizedByDyldClosure();
}

bool header_info::hasPreoptimizedClasses() const
{
    // preoptimization disabled for some reason
    if (!preoptimized) return NO;

    return info()->optimizedByDyld() || info()->optimizedByDyldClosure();
}

bool header_info::hasPreoptimizedProtocols() const
{
    // preoptimization disabled for some reason
    if (!preoptimized) return NO;

    return info()->optimizedByDyld() || info()->optimizedByDyldClosure();
}

bool header_info::hasPreoptimizedSectionLookups() const
{
    objc_opt::objc_headeropt_ro_t *hinfoRO = opt->headeropt_ro();
    if (hinfoRO->entsize == (2 * sizeof(intptr_t)))
        return NO;

    return YES;
}

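// Note: an entsize of exactly two pointer-sized offsets is taken to mean the
// older header_info layout (just the mhdr and info offsets), i.e. a shared
// cache built before the precomputed section offsets used by nlclslist(),
// nlcatlist(), catlist() and catlist2() below were added.
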
const classref_t *header_info::nlclslist(size_t *outCount) const
{
    // This field is new, so temporarily be resilient to the shared cache
    // not generating it
    if (isPreoptimized() && hasPreoptimizedSectionLookups()) {
        *outCount = nlclslist_count;
        const classref_t *list = (const classref_t *)(((intptr_t)&nlclslist_offset) + nlclslist_offset);
#if DEBUG
        size_t debugCount;
        assert((list == _getObjc2NonlazyClassList(mhdr(), &debugCount)) && (*outCount == debugCount));
#endif
        return list;
    }
    return _getObjc2NonlazyClassList(mhdr(), outCount);
}

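// The *_offset fields read above are self-relative: the stored value is the
// distance from the field's own address to the section it describes, so the
// precomputed list can be located without any pointer fixups. nlcatlist(),
// catlist() and catlist2() below use the same encoding.
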
category_t * const *header_info::nlcatlist(size_t *outCount) const
{
    // This field is new, so temporarily be resilient to the shared cache
    // not generating it
    if (isPreoptimized() && hasPreoptimizedSectionLookups()) {
        *outCount = nlcatlist_count;
        category_t * const *list = (category_t * const *)(((intptr_t)&nlcatlist_offset) + nlcatlist_offset);
#if DEBUG
        size_t debugCount;
        assert((list == _getObjc2NonlazyCategoryList(mhdr(), &debugCount)) && (*outCount == debugCount));
#endif
        return list;
    }
    return _getObjc2NonlazyCategoryList(mhdr(), outCount);
}

category_t * const *header_info::catlist(size_t *outCount) const
{
    // This field is new, so temporarily be resilient to the shared cache
    // not generating it
    if (isPreoptimized() && hasPreoptimizedSectionLookups()) {
        *outCount = catlist_count;
        category_t * const *list = (category_t * const *)(((intptr_t)&catlist_offset) + catlist_offset);
#if DEBUG
        size_t debugCount;
        assert((list == _getObjc2CategoryList(mhdr(), &debugCount)) && (*outCount == debugCount));
#endif
        return list;
    }
    return _getObjc2CategoryList(mhdr(), outCount);
}

category_t * const *header_info::catlist2(size_t *outCount) const
{
    // This field is new, so temporarily be resilient to the shared cache
    // not generating it
    if (isPreoptimized() && hasPreoptimizedSectionLookups()) {
        *outCount = catlist2_count;
        category_t * const *list = (category_t * const *)(((intptr_t)&catlist2_offset) + catlist2_offset);
#if DEBUG
        size_t debugCount;
        assert((list == _getObjc2CategoryList2(mhdr(), &debugCount)) && (*outCount == debugCount));
#endif
        return list;
    }
    return _getObjc2CategoryList2(mhdr(), outCount);
}

Protocol *getSharedCachePreoptimizedProtocol(const char *name)
{
    objc_protocolopt2_t *protocols = opt ? opt->protocolopt2() : nil;
    if (!protocols) return nil;

    // Note, we have to pass the lambda directly here as otherwise we would try
    // message copy and autorelease.
    return (Protocol *)protocols->getProtocol(name, [](const void* hi) -> bool {
        return ((header_info *)hi)->isLoaded();
    });
}


Protocol *getPreoptimizedProtocol(const char *name)
{
    objc_protocolopt2_t *protocols = opt ? opt->protocolopt2() : nil;
    if (!protocols) return nil;

    // Try table from dyld closure first. It was built to ignore the dupes it
    // knows will come from the cache, so anything left in here was there when
    // we launched.
    Protocol *result = nil;
    // Note, we have to pass the lambda directly here as otherwise we would try
    // message copy and autorelease.
    _dyld_for_each_objc_protocol(name, [&result](void* protocolPtr, bool isLoaded, bool* stop) {
        // Skip images which aren't loaded. This supports the case where dyld
        // might soft link an image from the main binary so it's possibly not
        // loaded yet.
        if (!isLoaded) return;

        // Found a loaded image with this protocol name, so stop the search.
        result = (Protocol *)protocolPtr;
        *stop = true;
    });
    if (result) return result;

    return getSharedCachePreoptimizedProtocol(name);
}

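// Illustrative caller (hypothetical, not part of this file): the fast path
// asks the preoptimized tables first and only falls back to the runtime's own
// protocol table when the cache has no loaded match.
//
//   static Protocol *findProtocol_example(const char *name) {
//       if (Protocol *p = getPreoptimizedProtocol(name)) return p;
//       return nil;  // a real caller would continue with the slow-path lookup
//   }
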
unsigned int getPreoptimizedClassUnreasonableCount()
{
    objc_clsopt_t *classes = opt ? opt->clsopt() : nil;
    if (!classes) return 0;

    // This is an overestimate: each set of duplicates
    // gets double-counted in `capacity` as well.
    return classes->capacity + classes->duplicateCount();
}

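// Example of the bound (hypothetical numbers): a table with capacity 4096 and
// 10 duplicated names reports 4106. Callers appear to use this only as a
// "can't possibly be more than this" sanity limit, so overestimating is fine.
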
Class getPreoptimizedClass(const char *name)
{
    objc_clsopt_t *classes = opt ? opt->clsopt() : nil;
    if (!classes) return nil;

    // Try table from dyld closure first. It was built to ignore the dupes it
    // knows will come from the cache, so anything left in here was there when
    // we launched.
    Class result = nil;
    // Note, we have to pass the lambda directly here as otherwise we would try
    // message copy and autorelease.
    _dyld_for_each_objc_class(name, [&result](void* classPtr, bool isLoaded, bool* stop) {
        // Skip images which aren't loaded. This supports the case where dyld
        // might soft link an image from the main binary so it's possibly not
        // loaded yet.
        if (!isLoaded) return;

        // Found a loaded image with this class name, so stop the search.
        result = (Class)classPtr;
        *stop = true;
    });
    if (result) return result;

    void *cls;
    void *hi;
    uint32_t count = classes->getClassAndHeader(name, cls, hi);
    if (count == 1 && ((header_info *)hi)->isLoaded()) {
        // exactly one matching class, and its image is loaded
        return (Class)cls;
    }
    else if (count > 1) {
        // more than one matching class - find one that is loaded
        void *clslist[count];
        void *hilist[count];
        classes->getClassesAndHeaders(name, clslist, hilist);
        for (uint32_t i = 0; i < count; i++) {
            if (((header_info *)hilist[i])->isLoaded()) {
                return (Class)clslist[i];
            }
        }
    }

    // no match that is loaded
    return nil;
}

Class* copyPreoptimizedClasses(const char *name, int *outCount)
{
    *outCount = 0;

    objc_clsopt_t *classes = opt ? opt->clsopt() : nil;
    if (!classes) return nil;

    void *cls;
    void *hi;
    uint32_t count = classes->getClassAndHeader(name, cls, hi);
    if (count == 0) return nil;

    Class *result = (Class *)calloc(count, sizeof(Class));
    if (count == 1 && ((header_info *)hi)->isLoaded()) {
        // exactly one matching class, and its image is loaded
        result[(*outCount)++] = (Class)cls;
        return result;
    }
    else if (count > 1) {
        // more than one matching class - find those that are loaded
        void *clslist[count];
        void *hilist[count];
        classes->getClassesAndHeaders(name, clslist, hilist);
        for (uint32_t i = 0; i < count; i++) {
            if (((header_info *)hilist[i])->isLoaded()) {
                result[(*outCount)++] = (Class)clslist[i];
            }
        }

        if (*outCount == 0) {
            // found multiple classes with that name, but none are loaded
            free(result);
            return nil;
        }
        return result;
    }

    // no match that is loaded
    free(result);
    return nil;
}

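// Illustrative caller (hypothetical): the result array is calloc'd above, so
// the consumer owns it and must free() it.
//
//   int n = 0;
//   Class *matches = copyPreoptimizedClasses("NSObject", &n);
//   if (matches) {
//       // ... inspect matches[0 .. n-1] ...
//       free(matches);
//   }
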
header_info *preoptimizedHinfoForHeader(const headerType *mhdr)
{
#if !__OBJC2__
    // fixme old ABI shared cache doesn't prepare these properly
    if (!opt) return nil;
#endif

    objc_headeropt_ro_t *hinfos = opt ? opt->headeropt_ro() : nil;
    if (hinfos) return hinfos->get(mhdr);
    else return nil;
}


header_info_rw *getPreoptimizedHeaderRW(const struct header_info *const hdr)
{
#if !__OBJC2__
    // fixme old ABI shared cache doesn't prepare these properly
    if (!opt) return nil;
#endif

    objc_headeropt_ro_t *hinfoRO = opt ? opt->headeropt_ro() : nil;
    objc_headeropt_rw_t *hinfoRW = opt ? opt->headeropt_rw() : nil;
    if (!hinfoRO || !hinfoRW) {
        _objc_fatal("preoptimized header_info missing for %s (%p %p %p)",
                    hdr->fname(), hdr, hinfoRO, hinfoRW);
    }
    int32_t index = hinfoRO->index(hdr);
    ASSERT(hinfoRW->entsize == sizeof(header_info_rw));
    return &hinfoRW->headers[index];
}

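// Note: headeropt_ro and headeropt_rw are parallel arrays sorted identically,
// so the index computed from the read-only table also selects the image's
// mutable runtime state (e.g. its "loaded" flag) in the read/write table that
// dyld maps for the runtime.
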
void preopt_init(void)
{
    // Get the memory region occupied by the shared cache.
    size_t length;
    const uintptr_t start = (uintptr_t)_dyld_get_shared_cache_range(&length);

    if (start) {
        objc::dataSegmentsRanges.setSharedCacheRange(start, start + length);
    }

    // `opt` not set at compile time in order to detect too-early usage
    const char *failure = nil;
    opt = &_objc_opt_data;

    if (DisablePreopt) {
        // OBJC_DISABLE_PREOPTIMIZATION is set
        // If opt->version != VERSION then you continue at your own risk.
        failure = "(by OBJC_DISABLE_PREOPTIMIZATION)";
    }
    else if (opt->version != objc_opt::VERSION) {
        // This shouldn't happen. You probably forgot to edit objc-sel-table.s.
        // If dyld really did write the wrong optimization version,
        // then we must halt because we don't know what bits dyld twiddled.
        _objc_fatal("bad objc preopt version (want %d, got %d)",
                    objc_opt::VERSION, opt->version);
    }
    else if (!opt->selopt() || !opt->headeropt_ro()) {
        // One of the tables is missing.
        failure = "(dyld shared cache is absent or out of date)";
    }

    if (failure) {
        // All preoptimized selector references are invalid.
        preoptimized = NO;
        opt = nil;
        disableSharedCacheOptimizations();

        if (PrintPreopt) {
            _objc_inform("PREOPTIMIZATION: is DISABLED %s", failure);
        }
    }
    else {
        // Valid optimization data written by dyld shared cache
        preoptimized = YES;

        if (PrintPreopt) {
            _objc_inform("PREOPTIMIZATION: is ENABLED "
                         "(version %d)", opt->version);
        }
    }
}

// SUPPORT_PREOPT
#endif
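
// Usage note (assumptions, not verified against the rest of the tree):
// preopt_init() is expected to run exactly once, before the runtime processes
// its first batch of mapped images; PrintPreopt and DisablePreopt are the
// usual environment-variable switches (OBJC_PRINT_PREOPTIMIZATION and
// OBJC_DISABLE_PREOPTIMIZATION) parsed elsewhere in the runtime.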