// Source: Apple objc4-787.1, runtime/objc-runtime-new.mm
// (retrieved via the git.saurik.com apple/objc4 mirror)
1 /*
2 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.m
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "DenseMapExtras.h"
32 #include "objc-private.h"
33 #include "objc-runtime-new.h"
34 #include "objc-file.h"
35 #include "objc-cache.h"
36 #include "objc-zalloc.h"
37 #include <Block.h>
38 #include <objc/message.h>
39 #include <mach/shared_region.h>
40
41 #define newprotocol(p) ((protocol_t *)p)
42
43 static void disableTaggedPointers();
44 static void detach_class(Class cls, bool isMeta);
45 static void free_class(Class cls);
46 static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
47 static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
48 static method_t *search_method_list(const method_list_t *mlist, SEL sel);
49 template<typename T> static bool method_lists_contains_any(T *mlists, T *end,
50 SEL sels[], size_t selcount);
51 static void flushCaches(Class cls);
52 static void initializeTaggedPointerObfuscator(void);
53 #if SUPPORT_FIXUP
54 static void fixupMessageRef(message_ref_t *msg);
55 #endif
56 static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
57 static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);
58
59 struct locstamped_category_t {
60 category_t *cat;
61 struct header_info *hi;
62 };
63 enum {
64 ATTACH_CLASS = 1 << 0,
65 ATTACH_METACLASS = 1 << 1,
66 ATTACH_CLASS_AND_METACLASS = 1 << 2,
67 ATTACH_EXISTING = 1 << 3,
68 };
69 static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);
70
71
72 /***********************************************************************
73 * Lock management
74 **********************************************************************/
75 mutex_t runtimeLock;
76 mutex_t selLock;
77 #if CONFIG_USE_CACHE_LOCK
78 mutex_t cacheUpdateLock;
79 #endif
80 recursive_mutex_t loadMethodLock;
81
82 /***********************************************************************
83 * Class structure decoding
84 **********************************************************************/
85
86 const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;
87
88
89 /***********************************************************************
90 * Non-pointer isa decoding
91 **********************************************************************/
92 #if SUPPORT_INDEXED_ISA
93
94 // Indexed non-pointer isa.
95
96 // These are used to mask the ISA and see if its got an index or not.
97 const uintptr_t objc_debug_indexed_isa_magic_mask = ISA_INDEX_MAGIC_MASK;
98 const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;
99
100 // die if masks overlap
101 STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);
102
103 // die if magic is wrong
104 STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);
105
106 // Then these are used to extract the index from the ISA.
107 const uintptr_t objc_debug_indexed_isa_index_mask = ISA_INDEX_MASK;
108 const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;
109
110 asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
111 "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
112 asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
113 "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
114 asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
115 "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
116 asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
117 "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));
118
119
120 // And then we can use that index to get the class from this array. Note
121 // the size is provided so that clients can ensure the index they get is in
122 // bounds and not read off the end of the array.
123 // Defined in the objc-msg-*.s files
124 // const Class objc_indexed_classes[]
125
126 // When we don't have enough bits to store a class*, we can instead store an
127 // index in to this array. Classes are added here when they are realized.
128 // Note, an index of 0 is illegal.
129 uintptr_t objc_indexed_classes_count = 0;
130
131 // SUPPORT_INDEXED_ISA
132 #else
133 // not SUPPORT_INDEXED_ISA
134
135 // These variables exist but are all set to 0 so that they are ignored.
136 const uintptr_t objc_debug_indexed_isa_magic_mask = 0;
137 const uintptr_t objc_debug_indexed_isa_magic_value = 0;
138 const uintptr_t objc_debug_indexed_isa_index_mask = 0;
139 const uintptr_t objc_debug_indexed_isa_index_shift = 0;
140 Class objc_indexed_classes[1] = { nil };
141 uintptr_t objc_indexed_classes_count = 0;
142
143 // not SUPPORT_INDEXED_ISA
144 #endif
145
146
147 #if SUPPORT_PACKED_ISA
148
149 // Packed non-pointer isa.
150
151 asm("\n .globl _objc_absolute_packed_isa_class_mask" \
152 "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));
153
154 const uintptr_t objc_debug_isa_class_mask = ISA_MASK;
155 const uintptr_t objc_debug_isa_magic_mask = ISA_MAGIC_MASK;
156 const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;
157
158 // die if masks overlap
159 STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);
160
161 // die if magic is wrong
162 STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);
163
164 // die if virtual address space bound goes up
165 STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0 ||
166 ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);
167
168 // SUPPORT_PACKED_ISA
169 #else
170 // not SUPPORT_PACKED_ISA
171
172 // These variables exist but enforce pointer alignment only.
173 const uintptr_t objc_debug_isa_class_mask = (~WORD_MASK);
174 const uintptr_t objc_debug_isa_magic_mask = WORD_MASK;
175 const uintptr_t objc_debug_isa_magic_value = 0;
176
177 // not SUPPORT_PACKED_ISA
178 #endif
179
180
181 /***********************************************************************
182 * Swift marker bits
183 **********************************************************************/
184 const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;
185
186
187 /***********************************************************************
188 * allocatedClasses
189 * A table of all classes (and metaclasses) which have been allocated
190 * with objc_allocateClassPair.
191 **********************************************************************/
192 namespace objc {
193 static ExplicitInitDenseSet<Class> allocatedClasses;
194 }
195
196 /***********************************************************************
197 * _firstRealizedClass
198 * The root of all realized classes
199 **********************************************************************/
200 static Class _firstRealizedClass = nil;
201
202 /***********************************************************************
203 * didInitialAttachCategories
204 * Whether the initial attachment of categories present at startup has
205 * been done.
206 **********************************************************************/
207 static bool didInitialAttachCategories = false;
208
209 /***********************************************************************
210 * didCallDyldNotifyRegister
211 * Whether the call to _dyld_objc_notify_register has completed.
212 **********************************************************************/
213 bool didCallDyldNotifyRegister = false;
214
215
216 /***********************************************************************
217 * smallMethodIMPMap
218 * The map from small method pointers to replacement IMPs.
219 *
220 * Locking: runtimeLock must be held when accessing this map.
221 **********************************************************************/
222 namespace objc {
223 static objc::LazyInitDenseMap<const method_t *, IMP> smallMethodIMPMap;
224 }
225
// Look up the replacement IMP recorded for a small method, if any.
// Returns nullptr when no remapping has been registered.
// Locking: runtimeLock must already be held by the caller.
static IMP method_t_remappedImp_nolock(const method_t *m) {
    runtimeLock.assertLocked();
    if (auto *map = objc::smallMethodIMPMap.get(false)) {
        auto found = map->find(m);
        if (found != map->end())
            return found->second;
    }
    return nullptr;
}
236
// Return the remapped IMP for this small method, taking runtimeLock
// first when the caller does not already hold it.
IMP method_t::remappedImp(bool needsLock) const {
    ASSERT(isSmall());
    if (!needsLock) {
        return method_t_remappedImp_nolock(this);
    }
    mutex_locker_t guard(runtimeLock);
    return method_t_remappedImp_nolock(this);
}
246
// Record `imp` as the replacement implementation for this small method.
// Creates the map on first use.
// Locking: runtimeLock must be held by the caller.
void method_t::remapImp(IMP imp) {
    ASSERT(isSmall());
    runtimeLock.assertLocked();
    auto &slot = (*objc::smallMethodIMPMap.get(true))[this];
    slot = imp;
}
253
// Return a heap-allocated objc_method_description for a small method.
// Descriptions are cached (one per method, allocated once and never
// freed) in a lazily created map keyed by the method_t pointer.
// Locking: acquires runtimeLock.
objc_method_description *method_t::getSmallDescription() const {
    static objc::LazyInitDenseMap<const method_t *, objc_method_description *> map;

    mutex_locker_t guard(runtimeLock);

    // Fetch (or default-construct as nullptr) the cache slot for this method.
    auto &ptr = (*map.get(true))[this];
    if (!ptr) {
        ptr = (objc_method_description *)malloc(sizeof *ptr);
        ptr->name = name();
        ptr->types = (char *)types();
    }
    return ptr;
}
267
268 /*
269 Low two bits of mlist->entsize is used as the fixed-up marker.
270 PREOPTIMIZED VERSION:
271 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
272 (Protocol method lists are not sorted because of their extra parallel data)
273 Runtime fixed-up method lists get 3.
274 UN-PREOPTIMIZED VERSION:
275 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted)
276 Shared cache's sorting and uniquing are not trusted, but do affect the
277 location of the selector name string.
278 Runtime fixed-up method lists get 2.
279
280 High two bits of protocol->flags is used as the fixed-up marker.
281 PREOPTIMIZED VERSION:
282 Protocols from shared cache are 1<<30.
283 Runtime fixed-up protocols get 1<<30.
284 UN-PREOPTIMIZED VERSION:
285 Protocols from shared cache are 1<<30.
286 Shared cache's fixups are not trusted.
287 Runtime fixed-up protocols get 3<<30.
288 */
289
290 static uint32_t fixed_up_method_list = 3;
291 static uint32_t uniqued_method_list = 1;
292 static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
293 static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;
294
// Called when the shared cache's precomputed fixups cannot be trusted.
// Moves the runtime's "fixed up" marker values away from the values the
// shared cache uses (see the comment block above), so method lists and
// protocols are re-fixed-up lazily by the runtime.
void
disableSharedCacheOptimizations(void)
{
    fixed_up_method_list = 2;
    // It is safe to set uniqued method lists to 0 as we'll never call it unless
    // the method list was already in need of being fixed up
    uniqued_method_list = 0;
    fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
    // It's safe to just set canonical protocol to 0 as we'll never call
    // clearIsCanonical() unless isCanonical() returned true, which can't happen
    // with a 0 mask
    canonical_protocol = 0;
}
308
// True if this method list's selectors have been uniqued.
bool method_list_t::isUniqued() const {
    return 0 != (flags() & uniqued_method_list);
}
312
// True if this method list has been fixed up by the runtime.
// Only the bottom two flag bits participate in the comparison;
// any flags in the top bits are ignored.
bool method_list_t::isFixedUp() const {
    uint32_t lowBits = flags() & 0x3;
    return lowBits == fixed_up_method_list;
}
317
// Mark this method list as fixed up by storing the current fixed-up
// marker into the low bits of entsizeAndFlags.
// Locking: runtimeLock must be held by the caller.
void method_list_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    entsizeAndFlags = entsize() | fixed_up_method_list;
}
323
// True if this protocol has been fixed up by the runtime.
bool protocol_t::isFixedUp() const {
    uint32_t marker = flags & PROTOCOL_FIXED_UP_MASK;
    return marker == fixed_up_protocol;
}
327
// Mark this protocol as fixed up, replacing only the fixed-up marker
// bits and leaving the rest of the flags untouched.
// Locking: runtimeLock must be held by the caller.
void protocol_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}
333
// True if this protocol is a canonical definition (shared-cache only;
// the mask is zeroed when shared cache optimizations are disabled).
bool protocol_t::isCanonical() const {
    return 0 != (flags & canonical_protocol);
}
337
// Clear this protocol's canonical bit.
// Locking: runtimeLock must be held by the caller.
void protocol_t::clearIsCanonical() {
    runtimeLock.assertLocked();
    ASSERT(isCanonical());
    flags = flags & ~canonical_protocol;
}
343
344
// Return the end iterator for the category-provided portion of this
// method array. Category method lists come first; the class's own base
// method list, when present, is always the last list in the array.
const method_list_t_authed_ptr<method_list_t> *method_array_t::endCategoryMethodLists(Class cls) const
{
    auto mlists = beginLists();
    auto mlistsEnd = endLists();

    if (mlists == mlistsEnd || !cls->data()->ro()->baseMethods())
    {
        // No methods, or no base methods.
        // Everything here is a category method.
        return mlistsEnd;
    }

    // Have base methods. Category methods are
    // everything except the last method list.
    return mlistsEnd - 1;
}
361
// A SEL is its name: reinterpret the selector pointer as a C string.
static const char *sel_cname(SEL sel)
{
    return reinterpret_cast<const char *>(sel);
}
366
367
// Byte size of a protocol_list_t including its trailing array of
// `count` protocol pointers.
static size_t protocol_list_size(const protocol_list_t *plist)
{
    size_t entries = plist->count * sizeof(protocol_t *);
    return sizeof(protocol_list_t) + entries;
}
372
373
// Free p only if it is a live malloc'd pointer (malloc_size() > 0);
// silently ignore nil and non-heap pointers.
static void try_free(const void *p)
{
    if (!p) return;
    if (malloc_size(p) == 0) return;
    free((void *)p);
}
378
379
// Signature of a callback invoked when a class is copied for a new
// subclass, so interested parties (e.g. the Swift runtime) can fix up
// the duplicated class data. See alloc_class_for_subclass().
using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
                                       Class _Nonnull newClass);
// Normally there's only one handler registered.
static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;

// Register an additional class-copy fixup handler.
// Locking: acquires runtimeLock.
void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
    (Class _Nonnull oldClass, Class _Nonnull newClass)) {
    mutex_locker_t lock(runtimeLock);

    classCopyFixupHandlers.append(newFixupHandler);
}
391
// Allocate memory for a new class that will subclass `supercls`, with
// `extraBytes` of additional storage requested by the caller.
// Plain (non-Swift) superclasses get a zeroed objc_class-sized chunk.
// Swift superclasses must have their entire memory layout duplicated
// (prefix and suffix around the objc_class portion), after which the
// objc data and Swift description belonging to the old class are erased.
static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
    if (!supercls || !supercls->isAnySwift()) {
        return _calloc_class(sizeof(objc_class) + extraBytes);
    }

    // Superclass is a Swift class. New subclass must duplicate its extra bits.

    // Allocate the new class, with space for super's prefix and suffix
    // and self's extraBytes.
    swift_class_t *swiftSupercls = (swift_class_t *)supercls;
    size_t superSize = swiftSupercls->classSize;
    void *superBits = swiftSupercls->baseAddress();
    void *bits = malloc(superSize + extraBytes);

    // Copy all of the superclass's data to the new class.
    memcpy(bits, superBits, superSize);

    // Erase the objc data and the Swift description in the new class.
    // The Class pointer itself lives classAddressOffset bytes into the
    // allocation.
    swift_class_t *swcls = (swift_class_t *)
        ((uint8_t *)bits + swiftSupercls->classAddressOffset);
    bzero(swcls, sizeof(objc_class));
    swcls->description = nil;

    // Give registered handlers (e.g. the Swift runtime) a chance to fix
    // up the copied class data.
    for (auto handler : classCopyFixupHandlers) {
        handler(supercls, (Class)swcls);
    }

    // Mark this class as Swift-enhanced.
    if (supercls->isSwiftStable()) {
        swcls->bits.setIsSwiftStable();
    }
    if (supercls->isSwiftLegacy()) {
        swcls->bits.setIsSwiftLegacy();
    }

    return (Class)swcls;
}
431
432
/***********************************************************************
* object_getIndexedIvars.
* Returns a pointer to the extra bytes allocated past the object's
* declared storage. Returns nil for nil and for tagged pointers,
* which have no storage of their own.
**********************************************************************/
void *object_getIndexedIvars(id obj)
{
    uint8_t *base = (uint8_t *)obj;

    if (!obj) return nil;
    if (obj->isTaggedPointer()) return nil;

    // Ordinary instance: extra bytes sit right after the aligned ivars.
    if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();

    Class cls = (Class)obj;
    // Plain class object: extra bytes follow the objc_class structure.
    if (!cls->isAnySwift()) return base + sizeof(objc_class);

    // Swift class: extra bytes follow the whole word-aligned Swift class
    // structure, which begins classAddressOffset bytes before `cls`.
    swift_class_t *swcls = (swift_class_t *)cls;
    return base - swcls->classAddressOffset + word_align(swcls->classSize);
}
451
452
/***********************************************************************
* make_ro_writeable
* Ensures rw->ro points at heap-allocated data that may be mutated,
* cloning the original read-only data on first use.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    runtimeLock.assertLocked();

    if (!(rw->flags & RW_COPIED_RO)) {
        // First mutation: duplicate the read-only data and remember
        // that we now own a writeable copy.
        rw->set_ro(rw->ro()->duplicate());
        rw->flags |= RW_COPIED_RO;
    }
    return const_cast<class_ro_t *>(rw->ro());
}
470
471
/***********************************************************************
* dataSegmentsContain
* Returns true if the given address lies within a data segment in any
* loaded image.
**********************************************************************/
NEVER_INLINE
static bool
dataSegmentsContain(Class cls)
{
    uint32_t index;
    if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
        // if the class is realized (hence has a class_rw_t),
        // memorize where we found the range. The cached index
        // ("witness") lets isKnownClass() skip this lookup next time.
        if (cls->isRealized()) {
            cls->data()->witness = (uint16_t)index;
        }
        return true;
    }
    return false;
}
492
493
/***********************************************************************
* isKnownClass
* Return true if the class is known to the runtime (located within the
* shared cache, within the data segment of a loaded image, or has been
* allocated with objc_allocateClassPair).
*
* The result of this operation is cached on the class in a "witness"
* value that is cheaply checked in the fastpath.
**********************************************************************/
ALWAYS_INLINE
static bool
isKnownClass(Class cls)
{
    // Fast path: the witness cached by dataSegmentsContain() still
    // identifies a segment range containing this class.
    if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
        return true;
    }
    // Slow path: dynamically allocated classes, then a full range search
    // (which re-caches the witness on success).
    auto &set = objc::allocatedClasses.get();
    return set.find(cls) != set.end() || dataSegmentsContain(cls);
}
513
514
/***********************************************************************
* addClassTableEntry
* Add a class to the table of all classes. If addMeta is true,
* automatically adds the metaclass of the class as well.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
addClassTableEntry(Class cls, bool addMeta = true)
{
    runtimeLock.assertLocked();

    // This class is allowed to be a known class via the shared cache or via
    // data segments, but it is not allowed to be in the dynamic table already.
    auto &set = objc::allocatedClasses.get();

    ASSERT(set.find(cls) == set.end());

    if (!isKnownClass(cls))
        set.insert(cls);
    if (addMeta)
        // Recurse once for the metaclass; addMeta=false stops there.
        addClassTableEntry(cls->ISA(), false);
}
537
538
/***********************************************************************
* checkIsKnownClass
* Checks the given class against the list of all known classes. Dies
* with a fatal error if the class is not known.
* Called on hot paths; ALWAYS_INLINE keeps the fast check inline.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
ALWAYS_INLINE
static void
checkIsKnownClass(Class cls)
{
    if (slowpath(!isKnownClass(cls))) {
        _objc_fatal("Attempt to use unknown class %p.", cls);
    }
}
553
/***********************************************************************
* classNSObject
* Returns class NSObject.
* Locking: none
**********************************************************************/
static Class classNSObject(void)
{
    extern objc_class OBJC_CLASS_$_NSObject;
    objc_class *nso = &OBJC_CLASS_$_NSObject;
    return (Class)nso;
}
564
// Returns the NSObject metaclass. Locking: none.
static Class metaclassNSObject(void)
{
    extern objc_class OBJC_METACLASS_$_NSObject;
    objc_class *nsometa = &OBJC_METACLASS_$_NSObject;
    return (Class)nsometa;
}
570
/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
__attribute__((cold, noinline))
static void
printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
{
    uint32_t c;
    bool isMeta = cls->isMetaClass();

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats_count; c++) {
        category_t *cat = cats_list[c].cat;

        method_list_t *mlist = cat->methodsForMeta(isMeta);
        if (!mlist) continue;

        for (const auto& meth : *mlist) {
            SEL s = sel_registerName(sel_cname(meth.name()));

            // Search for replaced methods in method lookup order.
            // Complain about the first duplicate only.

            // Look for method in earlier categories
            for (uint32_t c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats_list[c2].cat;

                const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
                if (!mlist2) continue;

                for (const auto& meth2 : *mlist2) {
                    SEL s2 = sel_registerName(sel_cname(meth2.name()));
                    if (s == s2) {
                        logReplacedMethod(cls->nameForLogging(), s,
                                          cls->isMetaClass(), cat->name,
                                          meth2.imp(false), meth.imp(false));
                        goto complained;
                    }
                }
            }

            // Look for method in cls
            for (const auto& meth2 : cls->data()->methods()) {
                SEL s2 = sel_registerName(sel_cname(meth2.name()));
                if (s == s2) {
                    logReplacedMethod(cls->nameForLogging(), s,
                                      cls->isMetaClass(), cat->name,
                                      meth2.imp(false), meth.imp(false));
                    goto complained;
                }
            }

        // goto target: continue with the next method in this category.
        complained:
            ;
        }
    }
}
632
633
/***********************************************************************
* unreasonableClassCount
* Provides an upper bound for any iteration of classes,
* to prevent spins when runtime metadata is corrupted.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static unsigned unreasonableClassCount()
{
    runtimeLock.assertLocked();

    // Use unsigned arithmetic throughout: a corrupt or enormous class
    // count must not trigger signed-overflow UB in the slack
    // multiplication below (unsigned wrap is well-defined).
    unsigned base = (unsigned)NXCountMapTable(gdb_objc_realized_classes) +
        (unsigned)getPreoptimizedClassUnreasonableCount();

    // Provide lots of slack here. Some iterations touch metaclasses too.
    // Some iterations backtrack (like realized class iteration).
    // We don't need an efficient bound, merely one that prevents spins.
    return (base + 1) * 16;
}
651
652
/***********************************************************************
* Class enumerators
* The passed in block returns `false` if subclasses can be skipped
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
// Depth-first, pre-order walk of the realized-class tree rooted at
// `top`, following firstSubclass / nextSiblingClass links and climbing
// back up via superclass. `count` is a shared iteration budget (see
// unreasonableClassCount) decremented as we go; hitting zero means the
// class list is corrupted and we die rather than spin.
static inline void
foreach_realized_class_and_subclass_2(Class top, unsigned &count,
                                      bool skip_metaclass,
                                      bool (^code)(Class) __attribute((noescape)))
{
    Class cls = top;

    runtimeLock.assertLocked();
    ASSERT(top);

    while (1) {
        if (--count == 0) {
            _objc_fatal("Memory corruption in class list.");
        }

        bool skip_subclasses;

        if (skip_metaclass && cls->isMetaClass()) {
            skip_subclasses = true;
        } else {
            skip_subclasses = !code(cls);
        }

        if (!skip_subclasses && cls->data()->firstSubclass) {
            cls = cls->data()->firstSubclass;
        } else {
            // No (wanted) subclasses: climb until a node has an
            // unvisited sibling, or we are back at the root.
            while (!cls->data()->nextSiblingClass && cls != top) {
                cls = cls->superclass;
                if (--count == 0) {
                    _objc_fatal("Memory corruption in class list.");
                }
            }
            if (cls == top) break;
            cls = cls->data()->nextSiblingClass;
        }
    }
}
695
// Enumerates a class and all of its realized subclasses.
static void
foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
{
    unsigned int remaining = unreasonableClassCount();
    foreach_realized_class_and_subclass_2(top, remaining, false, code);
}
704
// Enumerates all realized classes and metaclasses, walking each root
// class's sibling chain and descending into its subclass tree.
static void
foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int remaining = unreasonableClassCount();

    Class root = _firstRealizedClass;
    while (root != nil) {
        foreach_realized_class_and_subclass_2(root, remaining, false, code);
        root = root->data()->nextSiblingClass;
    }
}
718
// Enumerates all realized classes (ignoring metaclasses).
static void
foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int remaining = unreasonableClassCount();

    Class root = _firstRealizedClass;
    while (root != nil) {
        // skip_metaclass=true filters metaclasses out of the walk.
        foreach_realized_class_and_subclass_2(root, remaining, true, code);
        root = root->data()->nextSiblingClass;
    }
}
732
733
734 /***********************************************************************
735 * Method Scanners / Optimization tracking
736 * Implementation of scanning for various implementations of methods.
737 **********************************************************************/
738
739 namespace objc {
740
741 enum SelectorBundle {
742 AWZ,
743 RR,
744 Core,
745 };
746
747 namespace scanner {
748
749 // The current state of NSObject swizzling for every scanner
750 //
751 // It allows for cheap checks of global swizzles, and also lets
752 // things like IMP Swizzling before NSObject has been initialized
753 // to be remembered, as setInitialized() would miss these.
754 //
755 // Every pair of bits describes a SelectorBundle.
756 // even bits: is NSObject class swizzled for this bundle
757 // odd bits: is NSObject meta class swizzled for this bundle
758 static uintptr_t NSObjectSwizzledMask;
759
// Bit within NSObjectSwizzledMask for (bundle, isMeta):
// even bits track the NSObject class, odd bits its metaclass.
static ALWAYS_INLINE uintptr_t
swizzlingBit(SelectorBundle bundle, bool isMeta)
{
    unsigned shift = 2 * (unsigned)bundle + (isMeta ? 1 : 0);
    return uintptr_t(1) << shift;
}
765
// Log (for OBJC_PRINT_CUSTOM_* environment options) that `cls` was
// found to have custom implementations for the given selector bundle.
// `inherited` notes the custom-ness came from a superclass.
static void __attribute__((cold, noinline))
printCustom(Class cls, SelectorBundle bundle, bool inherited)
{
    static char const * const SelectorBundleName[] = {
        [AWZ] = "CUSTOM AWZ",
        [RR] = "CUSTOM RR",
        [Core] = "CUSTOM Core",
    };

    _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
                 cls->nameForLogging(),
                 cls->isMetaClass() ? " (meta)" : "",
                 inherited ? " (inherited)" : "");
}
780
781 enum class Scope { Instances, Classes, Both };
782
// CRTP mixin implementing the shared scanning machinery for one
// selector bundle (AWZ / RR / Core). `Traits` supplies the per-bundle
// custom-bit accessors and the method-list scan; `ShouldPrint` gates
// logging; `Domain` restricts scanning to instances, classes, or both.
template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
class Mixin {

    // work around compiler being broken with templates using Class/objc_class,
    // probably some weird confusion with Class being builtin
    ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
        return (objc_class *)cls;
    }

    // Mark `cls` and its realized subclasses as custom for this bundle.
    // Stops descending at subclasses that are not yet initialized (they
    // will be scanned by setInitialized()) or that are already custom.
    static void
    setCustomRecursively(Class cls, bool inherited = false)
    {
        foreach_realized_class_and_subclass(cls, [=](Class c){
            if (c != cls && !as_objc_class(c)->isInitialized()) {
                // Subclass not yet initialized. Wait for setInitialized() to do it
                return false;
            }
            if (Traits::isCustom(c)) {
                return false;
            }
            Traits::setCustom(c);
            if (ShouldPrint) {
                printCustom(cls, Bundle, inherited || c != cls);
            }
            return true;
        });
    }

    // Whether NSObject (class or metaclass side) has been recorded as
    // swizzled for this bundle.
    static bool
    isNSObjectSwizzled(bool isMeta)
    {
        return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
    }

    // Record that NSObject (class or metaclass side) was swizzled for
    // this bundle, and propagate the custom bit through the hierarchy
    // if NSObject is already initialized.
    static void
    setNSObjectSwizzled(Class NSOClass, bool isMeta)
    {
        NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
        if (as_objc_class(NSOClass)->isInitialized()) {
            setCustomRecursively(NSOClass);
        }
    }

    // Slow path used when the swizzled method's class is unknown:
    // check whether `meth` lives in NSObject's class or metaclass
    // method lists and mark the corresponding side swizzled if so.
    static void
    scanChangedMethodForUnknownClass(const method_t *meth)
    {
        Class cls;

        cls = classNSObject();
        if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, NO);
                    break;
                }
            }
        }

        cls = metaclassNSObject();
        if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, YES);
                    break;
                }
            }
        }
    }

    // Decide whether `cls` (class or metaclass) is custom for this
    // bundle and set the custom or default bit accordingly.
    static void
    scanAddedClassImpl(Class cls, bool isMeta)
    {
        Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
        bool setCustom = NO, inherited = NO;

        if (isNSObjectSwizzled(isMeta)) {
            setCustom = YES;
        } else if (cls == NSOClass) {
            // NSObject is default but we need to check categories
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
                                                methods.endCategoryMethodLists(cls));
        } else if (!isMeta && !as_objc_class(cls)->superclass) {
            // Custom Root class
            setCustom = YES;
        } else if (Traits::isCustom(as_objc_class(cls)->superclass)) {
            // Superclass is custom, therefore we are too.
            setCustom = YES;
            inherited = YES;
        } else {
            // Not NSObject.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginLists(),
                                                methods.endLists());
        }
        if (slowpath(setCustom)) {
            if (ShouldPrint) printCustom(cls, Bundle, inherited);
        } else {
            Traits::setDefault(cls);
        }
    }

public:
    // Scan a class that is about to be marked Initialized for particular
    // bundles of selectors, and mark the class and its children
    // accordingly.
    //
    // This also handles inheriting properties from its superclass.
    //
    // Caller: objc_class::setInitialized()
    static void
    scanInitializedClass(Class cls, Class metacls)
    {
        if (Domain != Scope::Classes) {
            scanAddedClassImpl(cls, false);
        }
        if (Domain != Scope::Instances) {
            scanAddedClassImpl(metacls, true);
        }
    }

    // Inherit various properties from the superclass when a class
    // is being added to the graph.
    //
    // Caller: addSubclass()
    static void
    scanAddedSubClass(Class subcls, Class supercls)
    {
        if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
            setCustomRecursively(subcls, true);
        }
    }

    // Scan Method lists for selectors that would override things
    // in a Bundle.
    //
    // This is used to detect when categories that override problematic
    // selectors are injected in a class after it has been initialized.
    //
    // Caller: prepareMethodLists()
    static void
    scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
    {
        if (slowpath(Traits::isCustom(cls))) {
            return;
        }
        if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
            setCustomRecursively(cls);
        }
    }

    // Handle IMP Swizzling (the IMP for an existing method being changed).
    //
    // In almost all cases, IMP swizzling does not affect custom bits.
    // Custom search will already find the method whether or not
    // it is swizzled, so it does not transition from non-custom to custom.
    //
    // The only cases where IMP swizzling can affect the custom bits is
    // if the swizzled method is one of the methods that is assumed to be
    // non-custom. These special cases are listed in setInitialized().
    // We look for such cases here.
    //
    // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
    static void
    scanChangedMethod(Class cls, const method_t *meth)
    {
        if (fastpath(!Traits::isInterestingSelector(meth->name()))) {
            return;
        }

        if (cls) {
            bool isMeta = as_objc_class(cls)->isMetaClass();
            if (isMeta && Domain != Scope::Instances) {
                if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
            if (!isMeta && Domain != Scope::Classes) {
                if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
        } else {
            // We're called from method_exchangeImplementations, only NSObject
            // class and metaclass may be problematic (exchanging the default
            // builtin IMP of an interesting selector, is a swizzling that,
            // may flip our scanned property. For other classes, the previous
            // value had already flipped the property).
            //
            // However, as we don't know the class, we need to scan all of
            // NSObject class and metaclass methods (this is SLOW).
            scanChangedMethodForUnknownClass(meth);
        }
    }
};
978
979 } // namespace scanner
980
// AWZ methods: +alloc / +allocWithZone:
// Scope::Classes: only class (metaclass-side) methods are interesting.
struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
    static bool isCustom(Class cls)  { return cls->hasCustomAWZ(); }
    static void setCustom(Class cls) { cls->setHasCustomAWZ(); }
    static void setDefault(Class cls){ cls->setHasDefaultAWZ(); }

    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(alloc) || sel == @selector(allocWithZone:);
    }

    template<typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL interesting[] = { @selector(alloc), @selector(allocWithZone:) };
        return method_lists_contains_any(mlists, end, interesting, 2);
    }
};
1001
// Retain/Release methods that are extremely rarely overridden
//
// retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
#if !SUPPORT_NONPOINTER_ISA
, scanner::Scope::Instances
#endif
> {
    static bool isCustom(Class cls)  { return cls->hasCustomRR(); }
    static void setCustom(Class cls) { cls->setHasCustomRR(); }
    static void setDefault(Class cls){ cls->setHasDefaultRR(); }

    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(retain) ||
               sel == @selector(release) ||
               sel == @selector(autorelease) ||
               sel == @selector(_tryRetain) ||
               sel == @selector(_isDeallocating) ||
               sel == @selector(retainCount) ||
               sel == @selector(allowsWeakReference) ||
               sel == @selector(retainWeakReference);
    }

    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL interesting[] = {
            @selector(retain),
            @selector(release),
            @selector(autorelease),
            @selector(_tryRetain),
            @selector(_isDeallocating),
            @selector(retainCount),
            @selector(allowsWeakReference),
            @selector(retainWeakReference),
        };
        return method_lists_contains_any(mlists, end, interesting, 8);
    }
};
1045
// Core NSObject methods that are extremely rarely overridden
//
// +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector
struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
    static bool isCustom(Class cls)  { return cls->hasCustomCore(); }
    static void setCustom(Class cls) { cls->setHasCustomCore(); }
    static void setDefault(Class cls){ cls->setHasDefaultCore(); }

    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(new) ||
               sel == @selector(self) ||
               sel == @selector(class) ||
               sel == @selector(isKindOfClass:) ||
               sel == @selector(respondsToSelector:);
    }

    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL interesting[] = {
            @selector(new),
            @selector(self),
            @selector(class),
            @selector(isKindOfClass:),
            @selector(respondsToSelector:),
        };
        return method_lists_contains_any(mlists, end, interesting, 5);
    }
};
1078
// A memory-efficient list of (category_t *, header_info *) pairs.
//
// A single entry is stored inline in the union; appending a second
// entry switches the storage to a heap-allocated array, and it stays
// an array for the remainder of the list's lifetime.
class category_list : nocopy_t {
    union {
        locstamped_category_t lc;
        struct {
            locstamped_category_t *array;
            // this aliases with locstamped_category_t::hi
            // which is an aliased pointer
            uint32_t is_array : 1;
            uint32_t count : 31;
            uint32_t size : 32;
        };
    } _u;

public:
    category_list() : _u{{nullptr, nullptr}} { }
    category_list(locstamped_category_t lc) : _u{{lc}} { }
    category_list(category_list &&other) : category_list() {
        std::swap(_u, other._u);
    }
    ~category_list()
    {
        if (_u.is_array) {
            free(_u.array);
        }
    }

    // Number of categories currently stored.
    uint32_t count() const
    {
        if (_u.is_array) return _u.count;
        return _u.lc.cat ? 1 : 0;
    }

    // Bytes required for an array of `size` entries.
    uint32_t arrayByteSize(uint32_t size) const
    {
        return sizeof(locstamped_category_t) * size;
    }

    // Pointer to the first entry, regardless of storage mode.
    const locstamped_category_t *array() const
    {
        return _u.is_array ? _u.array : &_u.lc;
    }

    // Append lc, promoting inline storage to an array on the second entry.
    void append(locstamped_category_t lc)
    {
        if (_u.is_array) {
            if (_u.count == _u.size) {
                // Have a typical malloc growth:
                // - size <= 8: grow by 2
                // - size <= 16: grow by 4
                // - size <= 32: grow by 8
                // ... etc
                _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
                _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
            }
            _u.array[_u.count++] = lc;
        } else if (_u.lc.cat == NULL) {
            // Empty list: store the single entry inline.
            _u.lc = lc;
        } else {
            // Second entry: switch to array storage.
            locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
            arr[0] = _u.lc;
            arr[1] = lc;

            _u.array = arr;
            _u.is_array = true;
            _u.count = 2;
            _u.size = 2;
        }
    }

    // Remove cat from the list if present, preserving the order of the
    // remaining entries.
    void erase(category_t *cat)
    {
        if (_u.is_array) {
            // uint32_t index: _u.count is an unsigned bitfield; avoid a
            // signed/unsigned comparison.
            for (uint32_t i = 0; i < _u.count; i++) {
                if (_u.array[i].cat == cat) {
                    // shift entries to preserve list order
                    memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
                    // BUGFIX: shrink the element count. Without this the
                    // stale tail entry stayed visible through array()/count()
                    // and the list never reported becoming shorter.
                    _u.count--;
                    return;
                }
            }
        } else if (_u.lc.cat == cat) {
            _u.lc.cat = NULL;
            _u.lc.hi = NULL;
        }
    }
};
1164
// Map of target class => categories that were discovered before the
// class was ready, held until the class is realized/attached.
// All operations require runtimeLock.
class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
{
public:
    // Record category lc as pending attachment to cls.
    void addForClass(locstamped_category_t lc, Class cls)
    {
        runtimeLock.assertLocked();

        if (slowpath(PrintConnecting)) {
            _objc_inform("CLASS: found category %c%s(%s)",
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), lc.cat->name);
        }

        // try_emplace creates a one-element list for a new key;
        // otherwise append to the existing list.
        auto result = get().try_emplace(cls, lc);
        if (!result.second) {
            result.first->second.append(lc);
        }
    }

    // Attach all categories pending under key `previously` to cls
    // (and/or its metaclass, per flags), then drop them from the map.
    void attachToClass(Class cls, Class previously, int flags)
    {
        runtimeLock.assertLocked();
        ASSERT((flags & ATTACH_CLASS) ||
               (flags & ATTACH_METACLASS) ||
               (flags & ATTACH_CLASS_AND_METACLASS));

        auto &map = get();
        auto it = map.find(previously);

        if (it != map.end()) {
            category_list &list = it->second;
            if (flags & ATTACH_CLASS_AND_METACLASS) {
                // Split the combined request into one pass per side.
                int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
                attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
                attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
            } else {
                attachCategories(cls, list.array(), list.count(), flags);
            }
            map.erase(it);
        }
    }

    // Forget a single pending category for cls (e.g. its image is
    // being unloaded). Drops the map entry if the list empties.
    void eraseCategoryForClass(category_t *cat, Class cls)
    {
        runtimeLock.assertLocked();

        auto &map = get();
        auto it = map.find(cls);
        if (it != map.end()) {
            category_list &list = it->second;
            list.erase(cat);
            if (list.count() == 0) {
                map.erase(it);
            }
        }
    }

    // Forget everything pending for cls (class is being detached).
    void eraseClass(Class cls)
    {
        runtimeLock.assertLocked();

        get().erase(cls);
    }
};
1229
1230 static UnattachedCategories unattachedCategories;
1231
1232 } // namespace objc
1233
// Whether cls comes from an image marked RO_FROM_BUNDLE.
static bool isBundleClass(Class cls)
{
    uint32_t roFlags = cls->data()->ro()->flags;
    return (roFlags & RO_FROM_BUNDLE) != 0;
}
1238
1239
// Unique the selectors of a method list (registering them with the
// selector table) and optionally sort the list by selector address,
// then mark it fixed up.
//
// mlist: the list to fix up; must not already be fixed up.
// bundleCopy: selectors originate from a bundle and may need copying
//             on registration.
// sort: whether to sort after uniquing.
//
// Locking: runtimeLock must be held by the caller; takes selLock.
static void
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
    runtimeLock.assertLocked();
    ASSERT(!mlist->isFixedUp());

    // fixme lock less in attachMethodLists ?
    // dyld3 may have already uniqued, but not sorted, the list
    if (!mlist->isUniqued()) {
        mutex_locker_t lock(selLock);

        // Unique selectors in list.
        for (auto& meth : *mlist) {
            const char *name = sel_cname(meth.name());
            meth.name() = sel_registerNameNoLock(name, bundleCopy);
        }
    }

    // Sort by selector address.
    // Don't try to sort small lists, as they're immutable.
    // Don't try to sort big lists of nonstandard size, as stable_sort
    // won't copy the entries properly.
    if (sort && !mlist->isSmallList() && mlist->entsize() == method_t::bigSize) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(&mlist->begin()->big(), &mlist->end()->big(), sorter);
    }

    // Mark method list as uniqued and sorted.
    // Can't mark small lists, since they're immutable.
    if (!mlist->isSmallList()) {
        mlist->setFixedUp();
    }
}
1273
1274
// Prepare method lists for attachment to cls: fix up each list
// (unique + sort selectors) and, if cls is already initialized,
// rescan for implementations that flip the AWZ/RR/Core custom flags.
//
// addedLists/addedCount: the lists about to be attached (prepended).
// baseMethods: these are the class's own base methods (not category).
// methodsFromBundle: selectors come from a bundle image.
//
// Locking: runtimeLock must be held by the caller.
static void
prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
                   bool baseMethods, bool methodsFromBundle)
{
    runtimeLock.assertLocked();

    if (addedCount == 0) return;

    // There exist RR/AWZ/Core special cases for some class's base methods.
    // But this code should never need to scan base methods for RR/AWZ/Core:
    // default RR/AWZ/Core cannot be set before setInitialized().
    // Therefore we need not handle any special cases here.
    if (baseMethods) {
        ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
    }

    // Add method lists to array.
    // Reallocate un-fixed method lists.
    // The new methods are PREPENDED to the method list array.

    for (int i = 0; i < addedCount; i++) {
        method_list_t *mlist = addedLists[i];
        ASSERT(mlist);

        // Fixup selectors if necessary
        if (!mlist->isFixedUp()) {
            fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
        }
    }

    // If the class is initialized, then scan for method implementations
    // tracked by the class's flags. If it's not initialized yet,
    // then objc_class::setInitialized() will take care of it.
    if (cls->isInitialized()) {
        objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
    }
}
1314
// Allocate and install a class_rw_ext_t for this class_rw_t, seeding
// it with the class's base method/property/protocol lists from ro.
//
// ro: the class's read-only data to seed from.
// deepCopy: if true, the base method list is duplicated (used by
//           objc_duplicateClass-style paths) instead of shared.
// Returns the newly installed extension.
//
// Locking: runtimeLock must be held by the caller.
class_rw_ext_t *
class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy)
{
    runtimeLock.assertLocked();

    auto rwe = objc::zalloc<class_rw_ext_t>();

    // Historical class version: 7 for metaclasses, 0 otherwise.
    rwe->version = (ro->flags & RO_META) ? 7 : 0;

    method_list_t *list = ro->baseMethods();
    if (list) {
        if (deepCopy) list = list->duplicate();
        rwe->methods.attachLists(&list, 1);
    }

    // See comments in objc_duplicateClass
    // property lists and protocol lists historically
    // have not been deep-copied
    //
    // This is probably wrong and ought to be fixed some day
    property_list_t *proplist = ro->baseProperties;
    if (proplist) {
        rwe->properties.attachLists(&proplist, 1);
    }

    protocol_list_t *protolist = ro->baseProtocols;
    if (protolist) {
        rwe->protocols.attachLists(&protolist, 1);
    }

    set_ro_or_rwe(rwe, ro);
    return rwe;
}
1348
// Attach method lists and properties and protocols from categories to a class.
// Assumes the categories in cats are all loaded and sorted by load order,
// oldest categories first.
//
// cls: the class (or metaclass, per ATTACH_METACLASS) to receive them.
// cats_list/cats_count: the categories, oldest first.
// flags: ATTACH_* bits controlling which side and cache flushing.
//
// Locking: runtimeLock is expected held (callers assert it).
static void
attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
                 int flags)
{
    if (slowpath(PrintReplacedMethods)) {
        printReplacements(cls, cats_list, cats_count);
    }
    if (slowpath(PrintConnecting)) {
        _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
                     cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
                     cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
    }

    /*
     * Only a few classes have more than 64 categories during launch.
     * This uses a little stack, and avoids malloc.
     *
     * Categories must be added in the proper order, which is back
     * to front. To do that with the chunking, we iterate cats_list
     * from front to back, build up the local buffers backwards,
     * and call attachLists on the chunks. attachLists prepends the
     * lists, so the final result is in the expected order.
     */
    constexpr uint32_t ATTACH_BUFSIZ = 64;
    method_list_t   *mlists[ATTACH_BUFSIZ];
    property_list_t *proplists[ATTACH_BUFSIZ];
    protocol_list_t *protolists[ATTACH_BUFSIZ];

    uint32_t mcount = 0;
    uint32_t propcount = 0;
    uint32_t protocount = 0;
    bool fromBundle = NO;
    bool isMeta = (flags & ATTACH_METACLASS);
    auto rwe = cls->data()->extAllocIfNeeded();

    for (uint32_t i = 0; i < cats_count; i++) {
        auto& entry = cats_list[i];

        method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
        if (mlist) {
            // Buffer full: flush the current chunk before adding more.
            if (mcount == ATTACH_BUFSIZ) {
                prepareMethodLists(cls, mlists, mcount, NO, fromBundle);
                rwe->methods.attachLists(mlists, mcount);
                mcount = 0;
            }
            // Fill the buffer back-to-front so the chunk keeps
            // newest-first order for attachLists' prepend.
            mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
            fromBundle |= entry.hi->isBundle();
        }

        property_list_t *proplist =
            entry.cat->propertiesForMeta(isMeta, entry.hi);
        if (proplist) {
            if (propcount == ATTACH_BUFSIZ) {
                rwe->properties.attachLists(proplists, propcount);
                propcount = 0;
            }
            proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
        }

        protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
        if (protolist) {
            if (protocount == ATTACH_BUFSIZ) {
                rwe->protocols.attachLists(protolists, protocount);
                protocount = 0;
            }
            protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
        }
    }

    // Flush whatever remains in the partially-filled buffers.
    if (mcount > 0) {
        prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle);
        rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
        // New methods on an already-live class invalidate cached IMPs.
        if (flags & ATTACH_EXISTING) flushCaches(cls);
    }

    rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);

    rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
}
1431
1432
1433 /***********************************************************************
1434 * methodizeClass
1435 * Fixes up cls's method list, protocol list, and property list.
1436 * Attaches any outstanding categories.
1437 * Locking: runtimeLock must be held by the caller
1438 **********************************************************************/
1439 static void methodizeClass(Class cls, Class previously)
1440 {
1441 runtimeLock.assertLocked();
1442
1443 bool isMeta = cls->isMetaClass();
1444 auto rw = cls->data();
1445 auto ro = rw->ro();
1446 auto rwe = rw->ext();
1447
1448 // Methodizing for the first time
1449 if (PrintConnecting) {
1450 _objc_inform("CLASS: methodizing class '%s' %s",
1451 cls->nameForLogging(), isMeta ? "(meta)" : "");
1452 }
1453
1454 // Install methods and properties that the class implements itself.
1455 method_list_t *list = ro->baseMethods();
1456 if (list) {
1457 if (list->isSmallList() && !_dyld_is_memory_immutable(list, list->byteSize()))
1458 _objc_fatal("CLASS: class '%s' %p small method list %p is not in immutable memory",
1459 cls->nameForLogging(), cls, list);
1460 prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls));
1461 if (rwe) rwe->methods.attachLists(&list, 1);
1462 }
1463
1464 property_list_t *proplist = ro->baseProperties;
1465 if (rwe && proplist) {
1466 rwe->properties.attachLists(&proplist, 1);
1467 }
1468
1469 protocol_list_t *protolist = ro->baseProtocols;
1470 if (rwe && protolist) {
1471 rwe->protocols.attachLists(&protolist, 1);
1472 }
1473
1474 // Root classes get bonus method implementations if they don't have
1475 // them already. These apply before category replacements.
1476 if (cls->isRootMetaclass()) {
1477 // root metaclass
1478 addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
1479 }
1480
1481 // Attach categories.
1482 if (previously) {
1483 if (isMeta) {
1484 objc::unattachedCategories.attachToClass(cls, previously,
1485 ATTACH_METACLASS);
1486 } else {
1487 // When a class relocates, categories with class methods
1488 // may be registered on the class itself rather than on
1489 // the metaclass. Tell attachToClass to look for those.
1490 objc::unattachedCategories.attachToClass(cls, previously,
1491 ATTACH_CLASS_AND_METACLASS);
1492 }
1493 }
1494 objc::unattachedCategories.attachToClass(cls, cls,
1495 isMeta ? ATTACH_METACLASS : ATTACH_CLASS);
1496
1497 #if DEBUG
1498 // Debug: sanity-check all SELs; log method list contents
1499 for (const auto& meth : rw->methods()) {
1500 if (PrintConnecting) {
1501 _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
1502 cls->nameForLogging(), sel_getName(meth.name()));
1503 }
1504 ASSERT(sel_registerName(sel_getName(meth.name())) == meth.name());
1505 }
1506 #endif
1507 }
1508
1509
1510 /***********************************************************************
1511 * nonMetaClasses
1512 * Returns the secondary metaclass => class map
1513 * Used for some cases of +initialize and +resolveClassMethod:.
1514 * This map does not contain all class and metaclass pairs. It only
1515 * contains metaclasses whose classes would be in the runtime-allocated
1516 * named-class table, but are not because some other class with the same name
1517 * is in that table.
1518 * Classes with no duplicates are not included.
1519 * Classes in the preoptimized named-class table are not included.
1520 * Classes whose duplicates are in the preoptimized table are not included.
1521 * Most code should use getMaybeUnrealizedNonMetaClass()
1522 * instead of reading this table.
1523 * Locking: runtimeLock must be read- or write-locked by the caller
1524 **********************************************************************/
1525 static NXMapTable *nonmeta_class_map = nil;
1526 static NXMapTable *nonMetaClasses(void)
1527 {
1528 runtimeLock.assertLocked();
1529
1530 if (nonmeta_class_map) return nonmeta_class_map;
1531
1532 // nonmeta_class_map is typically small
1533 INIT_ONCE_PTR(nonmeta_class_map,
1534 NXCreateMapTable(NXPtrValueMapPrototype, 32),
1535 NXFreeMapTable(v));
1536
1537 return nonmeta_class_map;
1538 }
1539
1540
1541 /***********************************************************************
1542 * addNonMetaClass
1543 * Adds metacls => cls to the secondary metaclass map
1544 * Locking: runtimeLock must be held by the caller
1545 **********************************************************************/
1546 static void addNonMetaClass(Class cls)
1547 {
1548 runtimeLock.assertLocked();
1549 void *old;
1550 old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
1551
1552 ASSERT(!cls->isMetaClassMaybeUnrealized());
1553 ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
1554 ASSERT(!old);
1555 }
1556
1557
// Removes cls's metaclass => class entry from the secondary map.
// Locking: runtimeLock must be held by the caller.
static void removeNonMetaClass(Class cls)
{
    runtimeLock.assertLocked();
    NXMapRemove(nonMetaClasses(), cls->ISA());
}
1563
1564
// Parse one decimal length field of a Swift v1 mangled name.
//
// On success: `field` points just past the digits (at the start of the
// named component), `length` is the decoded count, and `string` is
// advanced past both the digits and the component itself.
// Returns false for a leading zero, no digits, overflow, or a
// component that would run past `end`.
static bool scanMangledField(const char *&string, const char *end,
                             const char *&field, int& length)
{
    // Leading zero not allowed.
    if (*string == '0') return false;

    length = 0;
    field = string;
    while (field < end && isdigit(*field)) {
        int digit = *field++ - '0';
        // Accumulate length = length*10 + digit with overflow checks.
        if (__builtin_smul_overflow(length, 10, &length) ||
            __builtin_sadd_overflow(length, digit, &length))
            return false;
    }

    string = field + length;
    return length > 0 && string <= end;
}
1584
1585
1586 /***********************************************************************
1587 * copySwiftV1DemangledName
1588 * Returns the pretty form of the given Swift-v1-mangled class or protocol name.
1589 * Returns nil if the string doesn't look like a mangled Swift v1 name.
1590 * The result must be freed with free().
1591 **********************************************************************/
1592 static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
1593 {
1594 if (!string) return nil;
1595
1596 // Swift mangling prefix.
1597 if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
1598 string += 4;
1599
1600 const char *end = string + strlen(string);
1601
1602 // Module name.
1603 const char *prefix;
1604 int prefixLength;
1605 if (string[0] == 's') {
1606 // "s" is the Swift module.
1607 prefix = "Swift";
1608 prefixLength = 5;
1609 string += 1;
1610 } else {
1611 if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
1612 }
1613
1614 // Class or protocol name.
1615 const char *suffix;
1616 int suffixLength;
1617 if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
1618
1619 if (isProtocol) {
1620 // Remainder must be "_".
1621 if (strcmp(string, "_") != 0) return nil;
1622 } else {
1623 // Remainder must be empty.
1624 if (string != end) return nil;
1625 }
1626
1627 char *result;
1628 asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
1629 return result;
1630 }
1631
1632
1633 /***********************************************************************
1634 * copySwiftV1MangledName
1635 * Returns the Swift 1.0 mangled form of the given class or protocol name.
1636 * Returns nil if the string doesn't look like an unmangled Swift name.
1637 * The result must be freed with free().
1638 **********************************************************************/
1639 static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
1640 {
1641 if (!string) return nil;
1642
1643 size_t dotCount = 0;
1644 size_t dotIndex;
1645 const char *s;
1646 for (s = string; *s; s++) {
1647 if (*s == '.') {
1648 dotCount++;
1649 dotIndex = s - string;
1650 }
1651 }
1652 size_t stringLength = s - string;
1653
1654 if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) {
1655 return nil;
1656 }
1657
1658 const char *prefix = string;
1659 size_t prefixLength = dotIndex;
1660 const char *suffix = string + dotIndex + 1;
1661 size_t suffixLength = stringLength - (dotIndex + 1);
1662
1663 char *name;
1664
1665 if (prefixLength == 5 && memcmp(prefix, "Swift", 5) == 0) {
1666 asprintf(&name, "_Tt%cs%zu%.*s%s",
1667 isProtocol ? 'P' : 'C',
1668 suffixLength, (int)suffixLength, suffix,
1669 isProtocol ? "_" : "");
1670 } else {
1671 asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
1672 isProtocol ? 'P' : 'C',
1673 prefixLength, (int)prefixLength, prefix,
1674 suffixLength, (int)suffixLength, suffix,
1675 isProtocol ? "_" : "");
1676 }
1677 return name;
1678 }
1679
1680
1681 /***********************************************************************
1682 * getClassExceptSomeSwift
1683 * Looks up a class by name. The class MIGHT NOT be realized.
1684 * Demangled Swift names are recognized.
1685 * Classes known to the Swift runtime but not yet used are NOT recognized.
1686 * (such as subclasses of un-instantiated generics)
1687 * Use look_up_class() to find them as well.
1688 * Locking: runtimeLock must be read- or write-locked by the caller.
1689 **********************************************************************/
1690
1691 // This is a misnomer: gdb_objc_realized_classes is actually a list of
1692 // named classes not in the dyld shared cache, whether realized or not.
1693 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1694 uintptr_t objc_debug_realized_class_generation_count;
1695
// Look up a class by exact name: first the runtime-allocated named
// class table, then the dyld shared cache's preoptimized table.
// Returns nil if the name is unknown. May return an unrealized class.
// Locking: runtimeLock must be held by the caller.
static Class getClass_impl(const char *name)
{
    runtimeLock.assertLocked();

    // allocated in _read_images
    ASSERT(gdb_objc_realized_classes);

    // Try runtime-allocated table
    Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
    if (result) return result;

    // Try table from dyld shared cache.
    // Note we do this last to handle the case where we dlopen'ed a shared cache
    // dylib with duplicates of classes already present in the main executable.
    // In that case, we put the class from the main executable in
    // gdb_objc_realized_classes and want to check that before considering any
    // newly loaded shared cache binaries.
    return getPreoptimizedClass(name);
}
1715
// Look up a class by name, also trying the Swift v1 mangled form of
// the name. Returns nil if neither spelling is known.
// Locking: runtimeLock must be held by the caller.
static Class getClassExceptSomeSwift(const char *name)
{
    runtimeLock.assertLocked();

    // Fast path: the name exactly as given.
    Class result = getClass_impl(name);
    if (result) return result;

    // Slow path: the Swift v1 mangling of "Module.Name", if it has one.
    char *mangled = copySwiftV1MangledName(name);
    if (!mangled) return nil;

    result = getClass_impl(mangled);
    free(mangled);
    return result;
}
1733
1734
1735 /***********************************************************************
1736 * addNamedClass
1737 * Adds name => cls to the named non-meta class map.
1738 * Warns about duplicate class names and keeps the old mapping.
1739 * Locking: runtimeLock must be held by the caller
1740 **********************************************************************/
1741 static void addNamedClass(Class cls, const char *name, Class replacing = nil)
1742 {
1743 runtimeLock.assertLocked();
1744 Class old;
1745 if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
1746 inform_duplicate(name, old, cls);
1747
1748 // getMaybeUnrealizedNonMetaClass uses name lookups.
1749 // Classes not found by name lookup must be in the
1750 // secondary meta->nonmeta table.
1751 addNonMetaClass(cls);
1752 } else {
1753 NXMapInsert(gdb_objc_realized_classes, name, cls);
1754 }
1755 ASSERT(!(cls->data()->flags & RO_META));
1756
1757 // wrong: constructed classes are already realized when they get here
1758 // ASSERT(!cls->isRealized());
1759 }
1760
1761
1762 /***********************************************************************
1763 * removeNamedClass
1764 * Removes cls from the name => cls map.
1765 * Locking: runtimeLock must be held by the caller
1766 **********************************************************************/
1767 static void removeNamedClass(Class cls, const char *name)
1768 {
1769 runtimeLock.assertLocked();
1770 ASSERT(!(cls->data()->flags & RO_META));
1771 if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
1772 NXMapRemove(gdb_objc_realized_classes, name);
1773 } else {
1774 // cls has a name collision with another class - don't remove the other
1775 // but do remove cls from the secondary metaclass->class map.
1776 removeNonMetaClass(cls);
1777 }
1778 }
1779
1780
1781 /***********************************************************************
1782 * futureNamedClasses
1783 * Returns the classname => future class map for unrealized future classes.
1784 * Locking: runtimeLock must be held by the caller
1785 **********************************************************************/
1786 static NXMapTable *future_named_class_map = nil;
1787 static NXMapTable *futureNamedClasses()
1788 {
1789 runtimeLock.assertLocked();
1790
1791 if (future_named_class_map) return future_named_class_map;
1792
1793 // future_named_class_map is big enough for CF's classes and a few others
1794 future_named_class_map =
1795 NXCreateMapTable(NXStrValueMapPrototype, 32);
1796
1797 return future_named_class_map;
1798 }
1799
1800
// Whether any unrealized future classes are still registered.
static bool haveFutureNamedClasses() {
    if (!future_named_class_map) return false;
    return NXCountMapTable(future_named_class_map) != 0;
}
1804
1805
1806 /***********************************************************************
1807 * addFutureNamedClass
1808 * Installs cls as the class structure to use for the named class if it appears.
1809 * Locking: runtimeLock must be held by the caller
1810 **********************************************************************/
1811 static void addFutureNamedClass(const char *name, Class cls)
1812 {
1813 void *old;
1814
1815 runtimeLock.assertLocked();
1816
1817 if (PrintFuture) {
1818 _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
1819 }
1820
1821 class_rw_t *rw = objc::zalloc<class_rw_t>();
1822 class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
1823 ro->name = strdupIfMutable(name);
1824 rw->set_ro(ro);
1825 cls->setData(rw);
1826 cls->data()->flags = RO_FUTURE;
1827
1828 old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
1829 ASSERT(!old);
1830 }
1831
1832
1833 /***********************************************************************
1834 * popFutureNamedClass
1835 * Removes the named class from the unrealized future class list,
1836 * because it has been realized.
1837 * Returns nil if the name is not used by a future class.
1838 * Locking: runtimeLock must be held by the caller
1839 **********************************************************************/
1840 static Class popFutureNamedClass(const char *name)
1841 {
1842 runtimeLock.assertLocked();
1843
1844 Class cls = nil;
1845
1846 if (future_named_class_map) {
1847 cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
1848 if (cls && NXCountMapTable(future_named_class_map) == 0) {
1849 NXFreeMapTable(future_named_class_map);
1850 future_named_class_map = nil;
1851 }
1852 }
1853
1854 return cls;
1855 }
1856
1857
1858 /***********************************************************************
1859 * remappedClasses
1860 * Returns the oldClass => newClass map for realized future classes.
1861 * Returns the oldClass => nil map for ignored weak-linked classes.
1862 * Locking: runtimeLock must be read- or write-locked by the caller
1863 **********************************************************************/
1864 static objc::DenseMap<Class, Class> *remappedClasses(bool create)
1865 {
1866 static objc::LazyInitDenseMap<Class, Class> remapped_class_map;
1867
1868 runtimeLock.assertLocked();
1869
1870 // start big enough to hold CF's classes and a few others
1871 return remapped_class_map.get(create, 32);
1872 }
1873
1874
1875 /***********************************************************************
1876 * noClassesRemapped
1877 * Returns YES if no classes have been remapped
1878 * Locking: runtimeLock must be read- or write-locked by the caller
1879 **********************************************************************/
1880 static bool noClassesRemapped(void)
1881 {
1882 runtimeLock.assertLocked();
1883
1884 bool result = (remappedClasses(NO) == nil);
1885 #if DEBUG
1886 // Catch construction of an empty table, which defeats optimization.
1887 auto *map = remappedClasses(NO);
1888 if (map) ASSERT(map->size() > 0);
1889 #endif
1890 return result;
1891 }
1892
1893
1894 /***********************************************************************
1895 * addRemappedClass
1896 * newcls is a realized future class, replacing oldcls.
1897 * OR newcls is nil, replacing ignored weak-linked class oldcls.
1898 * Locking: runtimeLock must be write-locked by the caller
1899 **********************************************************************/
1900 static void addRemappedClass(Class oldcls, Class newcls)
1901 {
1902 runtimeLock.assertLocked();
1903
1904 if (PrintFuture) {
1905 _objc_inform("FUTURE: using %p instead of %p for %s",
1906 (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
1907 }
1908
1909 auto result = remappedClasses(YES)->insert({ oldcls, newcls });
1910 #if DEBUG
1911 if (!std::get<1>(result)) {
1912 // An existing mapping was overwritten. This is not allowed
1913 // unless it was to nil.
1914 auto iterator = std::get<0>(result);
1915 auto value = std::get<1>(*iterator);
1916 ASSERT(value == nil);
1917 }
1918 #else
1919 (void)result;
1920 #endif
1921 }
1922
1923
1924 /***********************************************************************
1925 * remapClass
1926 * Returns the live class pointer for cls, which may be pointing to
1927 * a class struct that has been reallocated.
1928 * Returns nil if cls is ignored because of weak linking.
1929 * Locking: runtimeLock must be read- or write-locked by the caller
1930 **********************************************************************/
static Class remapClass(Class cls)
{
    runtimeLock.assertLocked();

    if (!cls) return nil;

    // With no remap table, or no entry for cls, the pointer is already live.
    if (auto *map = remappedClasses(NO)) {
        auto found = map->find(cls);
        if (found != map->end()) {
            return std::get<1>(*found);
        }
    }
    return cls;
}
1946
// Overload for raw classref_t values read from an image's class list.
static Class remapClass(classref_t cls)
{
    return remapClass((Class)cls);
}
1951
// Public wrapper for remapClass() that acquires runtimeLock itself.
Class _class_remap(Class cls)
{
    mutex_locker_t lock(runtimeLock);
    return remapClass(cls);
}
1957
1958 /***********************************************************************
1959 * remapClassRef
1960 * Fix up a class ref, in case the class referenced has been reallocated
1961 * or is an ignored weak-linked class.
1962 * Locking: runtimeLock must be read- or write-locked by the caller
1963 **********************************************************************/
static void remapClassRef(Class *clsref)
{
    runtimeLock.assertLocked();

    // Rewrite the reference only if remapping actually changed it.
    Class remapped = remapClass(*clsref);
    if (remapped != *clsref) *clsref = remapped;
}
1971
1972
_Nullable Class
objc_loadClassref(_Nullable Class * _Nonnull clsref)
{
    // Treat the class ref as an atomic word so concurrent callers are safe.
    auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);

    // Low bit clear => the ref already holds a real (initialized) class.
    uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
    if (fastpath((cls & 1) == 0))
        return (Class)cls;

    // Low bit set => the ref points at a class stub. Mask off the tag bit,
    // run the stub's initializer to obtain the real class, then cache that
    // class back into the ref so later loads take the fast path.
    // NOTE(review): relaxed ordering appears to rely on the initializer
    // being idempotent if two threads race here — confirm before changing.
    auto stub = (stub_class_t *)(cls & ~1ULL);
    Class initialized = stub->initializer((Class)stub, nil);
    atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
    return initialized;
}
1987
1988
1989 /***********************************************************************
1990 * getMaybeUnrealizedNonMetaClass
1991 * Return the ordinary class for this class or metaclass.
1992 * `inst` is an instance of `cls` or a subclass thereof, or nil.
1993 * Non-nil inst is faster.
1994 * The result may be unrealized.
1995 * Used by +initialize.
1996 * Locking: runtimeLock must be read- or write-locked by the caller
1997 **********************************************************************/
static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
{
    // Statistics for the various lookup strategies, reported when
    // PrintInitializing is set.
    static int total, named, secondary, sharedcache, dyld3;
    runtimeLock.assertLocked();
    ASSERT(metacls->isRealized());

    total++;

    // return cls itself if it's already a non-meta class
    if (!metacls->isMetaClass()) return metacls;

    // metacls really is a metaclass
    // which means inst (if any) is a class

    // special case for root metaclass
    // where inst == inst->ISA() == metacls is possible
    if (metacls->ISA() == metacls) {
        // The root metaclass's superclass is the root (non-meta) class.
        Class cls = metacls->superclass;
        ASSERT(cls->isRealized());
        ASSERT(!cls->isMetaClass());
        ASSERT(cls->ISA() == metacls);
        if (cls->ISA() == metacls) return cls;
    }

    // use inst if available
    if (inst) {
        Class cls = remapClass((Class)inst);
        // cls may be a subclass - find the real class for metacls
        // fixme this probably stops working once Swift starts
        // reallocating classes if cls is unrealized.
        while (cls) {
            if (cls->ISA() == metacls) {
                ASSERT(!cls->isMetaClassMaybeUnrealized());
                return cls;
            }
            cls = cls->superclass;
        }
#if DEBUG
        _objc_fatal("cls is not an instance of metacls");
#else
        // release build: be forgiving and fall through to slow lookups
#endif
    }

    // try name lookup
    {
        Class cls = getClassExceptSomeSwift(metacls->mangledName());
        if (cls && cls->ISA() == metacls) {
            named++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful by-name metaclass lookups",
                             named, total, named*100.0/total);
            }
            return cls;
        }
    }

    // try secondary table
    {
        // nonMetaClasses() maps metaclasses to classes for classes whose
        // names are not in the named-class table.
        Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
        if (cls) {
            secondary++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful secondary metaclass lookups",
                             secondary, total, secondary*100.0/total);
            }

            ASSERT(cls->ISA() == metacls);
            return cls;
        }
    }

    // try the dyld closure table
    if (isPreoptimized())
    {
        // Try table from dyld closure first. It was built to ignore the dupes it
        // knows will come from the cache, so anything left in here was there when
        // we launched
        Class cls = nil;
        // Note, we have to pass the lambda directly here as otherwise we would try
        // message copy and autorelease.
        _dyld_for_each_objc_class(metacls->mangledName(),
                                  [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
            // Skip images which aren't loaded. This supports the case where dyld
            // might soft link an image from the main binary so its possibly not
            // loaded yet.
            if (!isLoaded)
                return;

            // Found a loaded image with this class name, so check if its the right one
            Class result = (Class)classPtr;
            if (result->ISA() == metacls) {
                cls = result;
                *stop = true;
            }
        });

        if (cls) {
            dyld3++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful dyld closure metaclass lookups",
                             dyld3, total, dyld3*100.0/total);
            }

            return cls;
        }
    }

    // try any duplicates in the dyld shared cache
    {
        Class cls = nil;

        int count;
        Class *classes = copyPreoptimizedClasses(metacls->mangledName(), &count);
        if (classes) {
            // Scan the duplicates for the one whose metaclass matches.
            for (int i = 0; i < count; i++) {
                if (classes[i]->ISA() == metacls) {
                    cls = classes[i];
                    break;
                }
            }
            free(classes);
        }

        if (cls) {
            sharedcache++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful shared cache metaclass lookups",
                             sharedcache, total, sharedcache*100.0/total);
            }

            return cls;
        }
    }

    // All lookup strategies failed: this is a fatal inconsistency.
    _objc_fatal("no class for metaclass %p", (void*)metacls);
}
2139
2140
2141 /***********************************************************************
2142 * class_initialize. Send the '+initialize' message on demand to any
2143 * uninitialized class. Force initialization of superclasses first.
2144 * inst is an instance of cls, or nil. Non-nil is better for performance.
2145 * Returns the class pointer. If the class was unrealized then
2146 * it may be reallocated.
2147 * Locking:
2148 * runtimeLock must be held by the caller
2149 * This function may drop the lock.
2150 * On exit the lock is re-acquired or dropped as requested by leaveLocked.
2151 **********************************************************************/
static Class initializeAndMaybeRelock(Class cls, id inst,
                                      mutex_t& lock, bool leaveLocked)
{
    lock.assertLocked();
    ASSERT(cls->isRealized());

    // Fast path: already initialized; just honor the lock contract.
    if (cls->isInitialized()) {
        if (!leaveLocked) lock.unlock();
        return cls;
    }

    // Find the non-meta class for cls, if it is not already one.
    // The +initialize message is sent to the non-meta class object.
    Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);

    // Realize the non-meta class if necessary.
    if (nonmeta->isRealized()) {
        // nonmeta is cls, which was already realized
        // OR nonmeta is distinct, but is already realized
        // - nothing else to do
        lock.unlock();
    } else {
        // This drops the lock during any Swift-side realization.
        nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
        // runtimeLock is now unlocked
        // fixme Swift can't relocate the class today,
        // but someday it will:
        cls = object_getClass(nonmeta);
    }

    // runtimeLock is now unlocked, for +initialize dispatch
    ASSERT(nonmeta->isRealized());
    initializeNonMetaClass(nonmeta);

    // Re-acquire only if the caller asked to end up locked.
    if (leaveLocked) runtimeLock.lock();
    return cls;
}
2188
2189 // Locking: acquires runtimeLock
// Locking: acquires runtimeLock; returns with it unlocked.
Class class_initialize(Class cls, id obj)
{
    runtimeLock.lock();
    return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
}
2195
2196 // Locking: caller must hold runtimeLock; this may drop and re-acquire it
// Locking: caller must hold runtimeLock; this may drop and re-acquire it,
// but always returns with the lock held.
static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
{
    return initializeAndMaybeRelock(cls, obj, lock, true);
}
2201
2202
2203 /***********************************************************************
2204 * addRootClass
2205 * Adds cls as a new realized root class.
2206 * Locking: runtimeLock must be held by the caller.
2207 **********************************************************************/
static void addRootClass(Class cls)
{
    runtimeLock.assertLocked();
    ASSERT(cls->isRealized());

    objc_debug_realized_class_generation_count++;

    // Push cls onto the head of the global realized-root-class list.
    class_rw_t *rw = cls->data();
    rw->nextSiblingClass = _firstRealizedClass;
    _firstRealizedClass = cls;
}
2219
static void removeRootClass(Class cls)
{
    runtimeLock.assertLocked();

    objc_debug_realized_class_generation_count++;

    // Walk the sibling chain until we reach the link that points at cls
    // (cls is expected to be present), then splice it out of the list.
    Class *link = &_firstRealizedClass;
    while (*link != cls) {
        link = &(*link)->data()->nextSiblingClass;
    }
    *link = cls->data()->nextSiblingClass;
}
2234
2235
2236 /***********************************************************************
2237 * addSubclass
2238 * Adds subcls as a subclass of supercls.
2239 * Locking: runtimeLock must be held by the caller.
2240 **********************************************************************/
static void addSubclass(Class supercls, Class subcls)
{
    runtimeLock.assertLocked();

    // Nothing to link if either end is missing.
    if (!supercls || !subcls) return;

    ASSERT(supercls->isRealized());
    ASSERT(subcls->isRealized());

    objc_debug_realized_class_generation_count++;

    // Push subcls onto the head of supercls's subclass list.
    subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
    supercls->data()->firstSubclass = subcls;

    // C++ construction/destruction requirements are inherited.
    if (supercls->hasCxxCtor()) subcls->setHasCxxCtor();
    if (supercls->hasCxxDtor()) subcls->setHasCxxDtor();

    // Let the method-override scanners see the new edge in the hierarchy.
    objc::AWZScanner::scanAddedSubClass(subcls, supercls);
    objc::RRScanner::scanAddedSubClass(subcls, supercls);
    objc::CoreScanner::scanAddedSubClass(subcls, supercls);

    // Special case: instancesRequireRawIsa does not propagate
    // from root class to root metaclass
    if (supercls->instancesRequireRawIsa() && supercls->superclass) {
        subcls->setInstancesRequireRawIsaRecursively(true);
    }
}
2273
2274
2275 /***********************************************************************
2276 * removeSubclass
2277 * Removes subcls as a subclass of supercls.
2278 * Locking: runtimeLock must be held by the caller.
2279 **********************************************************************/
static void removeSubclass(Class supercls, Class subcls)
{
    runtimeLock.assertLocked();
    ASSERT(supercls->isRealized());
    ASSERT(subcls->isRealized());
    ASSERT(subcls->superclass == supercls);

    objc_debug_realized_class_generation_count++;

    // Find the link in supercls's subclass chain that points at subcls,
    // then splice subcls out of the list.
    Class *link = &supercls->data()->firstSubclass;
    while (*link && *link != subcls) {
        link = &(*link)->data()->nextSiblingClass;
    }
    ASSERT(*link == subcls);
    *link = subcls->data()->nextSiblingClass;
}
2297
2298
2299
2300 /***********************************************************************
2301 * protocols
2302 * Returns the protocol name => protocol map for protocols.
 * Locking: runtimeLock must be read- or write-locked by the caller
2304 **********************************************************************/
static NXMapTable *protocols(void)
{
    // Lazily-created global name => protocol_t* table.
    static NXMapTable *protocol_map = nil;

    runtimeLock.assertLocked();

    // INIT_ONCE_PTR creates the map exactly once; the third argument
    // frees a redundantly-created table if another thread won the race.
    INIT_ONCE_PTR(protocol_map,
                  NXCreateMapTable(NXStrValueMapPrototype, 16),
                  NXFreeMapTable(v) );

    return protocol_map;
}
2317
2318
2319 /***********************************************************************
2320 * getProtocol
2321 * Looks up a protocol by name. Demangled Swift names are recognized.
2322 * Locking: runtimeLock must be read- or write-locked by the caller.
2323 **********************************************************************/
static NEVER_INLINE Protocol *getProtocol(const char *name)
{
    runtimeLock.assertLocked();

    // 1. Exact-name lookup in the runtime's protocol table.
    if (Protocol *proto = (Protocol *)NXMapGet(protocols(), name)) {
        return proto;
    }

    // 2. Retry under the Swift-mangled form of the name.
    if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
        Protocol *proto = (Protocol *)NXMapGet(protocols(), swName);
        free(swName);
        if (proto) return proto;
    }

    // 3. Fall back to the dyld3 closure / shared cache tables.
    return getPreoptimizedProtocol(name);
}
2342
2343
2344 /***********************************************************************
2345 * remapProtocol
2346 * Returns the live protocol pointer for proto, which may be pointing to
2347 * a protocol struct that has been reallocated.
2348 * Locking: runtimeLock must be read- or write-locked by the caller
2349 **********************************************************************/
static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
{
    runtimeLock.assertLocked();

    protocol_t *asProto = (protocol_t *)proto;

    // Protocols in shared cache images carry a canonical bit marking them
    // as the definition to use; no lookup needed in that case.
    if (asProto->isCanonical()) return asProto;

    // Otherwise resolve by mangled name; keep the original on a miss.
    protocol_t *canonical = (protocol_t *)getProtocol(asProto->mangledName);
    return canonical ? canonical : asProto;
}
2363
2364
2365 /***********************************************************************
2366 * remapProtocolRef
2367 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
2368 * Locking: runtimeLock must be read- or write-locked by the caller
2369 **********************************************************************/
// Count of protocol references that actually needed fixing at runtime.
static size_t UnfixedProtocolReferences;

static void remapProtocolRef(protocol_t **protoref)
{
    runtimeLock.assertLocked();

    // Rewrite *protoref to the canonical definition if it differs.
    protocol_t *canonical = remapProtocol((protocol_ref_t)*protoref);
    if (canonical != *protoref) {
        *protoref = canonical;
        UnfixedProtocolReferences++;
    }
}
2381
2382
2383 /***********************************************************************
2384 * moveIvars
2385 * Slides a class's ivars to accommodate the given superclass size.
2386 * Ivars are NOT compacted to compensate for a superclass that shrunk.
2387 * Locking: runtimeLock must be held by the caller.
2388 **********************************************************************/
static void moveIvars(class_ro_t *ro, uint32_t superSize)
{
    runtimeLock.assertLocked();

    ASSERT(superSize > ro->instanceStart);
    uint32_t slide = superSize - ro->instanceStart;

    if (ro->ivars) {
        // Find the strictest ivar alignment in this class.
        uint32_t maxAlignment = 1;
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            uint32_t alignment = ivar.alignment();
            if (alignment > maxAlignment) maxAlignment = alignment;
        }

        // Round the slide up so every ivar keeps that alignment.
        uint32_t alignMask = maxAlignment - 1;
        slide = (slide + alignMask) & ~alignMask;

        // Apply the slide to every ivar offset en masse.
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            uint32_t oldOffset = (uint32_t)*ivar.offset;
            uint32_t newOffset = oldOffset + slide;
            *ivar.offset = newOffset;

            if (PrintIvars) {
                _objc_inform("IVARS: offset %u -> %u for %s "
                             "(size %u, align %u)",
                             oldOffset, newOffset, ivar.name,
                             ivar.size, ivar.alignment());
            }
        }
    }

    // Grow instanceStart/instanceSize by the same slide.
    // (Cast-away writes match the original handling of these fields.)
    *(uint32_t *)&ro->instanceStart += slide;
    *(uint32_t *)&ro->instanceSize += slide;
}
2432
2433
// Reconcile this class's non-fragile ivar layout with its (possibly
// resized) superclass. May reallocate class_ro_t; `ro` is updated
// in place for the caller.
static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
{
    class_rw_t *rw = cls->data();

    ASSERT(supercls);
    ASSERT(!cls->isMetaClass());

    /* debug: print them all before sliding
    if (ro->ivars) {
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
                         ro->name, ivar.name,
                         *ivar.offset, ivar.size, ivar.alignment());
        }
    }
    */

    // Non-fragile ivars - reconcile this class with its superclass
    const class_ro_t *super_ro = supercls->data()->ro();

    if (DebugNonFragileIvars) {
        // Debugging: Force non-fragile ivars to slide.
        // Intended to find compiler, runtime, and program bugs.
        // If it fails with this and works without, you have a problem.

        // Operation: Reset everything to 0 + misalignment.
        // Then force the normal sliding logic to push everything back.

        // Exceptions: root classes, metaclasses, *NSCF* classes,
        // __CF* classes, NSConstantString, NSSimpleCString

        // (already know it's not root because supercls != nil)
        const char *clsname = cls->mangledName();
        if (!strstr(clsname, "NSCF")  &&
            0 != strncmp(clsname, "__CF", 4)  &&
            0 != strcmp(clsname, "NSConstantString")  &&
            0 != strcmp(clsname, "NSSimpleCString"))
        {
            uint32_t oldStart = ro->instanceStart;
            // Copy-on-write: get a writeable ro and refresh the caller's view.
            class_ro_t *ro_w = make_ro_writeable(rw);
            ro = rw->ro();

            // Find max ivar alignment in class.
            // default to word size to simplify ivar update
            uint32_t alignment = 1<<WORD_SHIFT;
            if (ro->ivars) {
                for (const auto& ivar : *ro->ivars) {
                    if (ivar.alignment() > alignment) {
                        alignment = ivar.alignment();
                    }
                }
            }
            uint32_t misalignment = ro->instanceStart % alignment;
            uint32_t delta = ro->instanceStart - misalignment;
            ro_w->instanceStart = misalignment;
            ro_w->instanceSize -= delta;

            if (PrintIvars) {
                _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
                             "to slide (instanceStart %zu -> %zu)",
                             cls->nameForLogging(), (size_t)oldStart,
                             (size_t)ro->instanceStart);
            }

            // Pull every ivar offset back by the same delta.
            if (ro->ivars) {
                for (const auto& ivar : *ro->ivars) {
                    if (!ivar.offset) continue;  // anonymous bitfield
                    *ivar.offset -= delta;
                }
            }
        }
    }

    if (ro->instanceStart >= super_ro->instanceSize) {
        // Superclass has not overgrown its space. We're done here.
        return;
    }
    // fixme can optimize for "class has no new ivars", etc

    // NOTE(review): this condition is always true after the early return
    // above; the check looks redundant — confirm before simplifying.
    if (ro->instanceStart < super_ro->instanceSize) {
        // Superclass has changed size. This class's ivars must move.
        // Also slide layout bits in parallel.
        // This code is incapable of compacting the subclass to
        // compensate for a superclass that shrunk, so don't do that.
        if (PrintIvars) {
            _objc_inform("IVARS: sliding ivars for class %s "
                         "(superclass was %u bytes, now %u)",
                         cls->nameForLogging(), ro->instanceStart,
                         super_ro->instanceSize);
        }
        class_ro_t *ro_w = make_ro_writeable(rw);
        ro = rw->ro();
        moveIvars(ro_w, super_ro->instanceSize);
        gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
    }
}
2532
2533
2534 /***********************************************************************
2535 * realizeClassWithoutSwift
2536 * Performs first-time initialization on class cls,
2537 * including allocating its read-write data.
2538 * Does not perform any Swift-side initialization.
2539 * Returns the real class structure for the class.
2540 * Locking: runtimeLock must be write-locked by the caller
2541 **********************************************************************/
static Class realizeClassWithoutSwift(Class cls, Class previously)
{
    runtimeLock.assertLocked();

    class_rw_t *rw;
    Class supercls;
    Class metacls;

    if (!cls) return nil;
    if (cls->isRealized()) return cls;
    ASSERT(cls == remapClass(cls));

    // fixme verify class is not in an un-dlopened part of the shared cache?

    // Before realization, cls->data() points at the read-only class_ro_t.
    auto ro = (const class_ro_t *)cls->data();
    auto isMeta = ro->flags & RO_META;
    if (ro->flags & RO_FUTURE) {
        // This was a future class. rw data is already allocated.
        rw = cls->data();
        ro = cls->data()->ro();
        ASSERT(!isMeta);
        cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
    } else {
        // Normal class. Allocate writeable class data.
        rw = objc::zalloc<class_rw_t>();
        rw->set_ro(ro);
        rw->flags = RW_REALIZED|RW_REALIZING|isMeta;
        cls->setData(rw);
    }

#if FAST_CACHE_META
    if (isMeta) cls->cache.setBit(FAST_CACHE_META);
#endif

    // Choose an index for this class.
    // Sets cls->instancesRequireRawIsa if no more indexes are available.
    cls->chooseClassArrayIndex();

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
                     cls->nameForLogging(), isMeta ? " (meta)" : "",
                     (void*)cls, ro, cls->classArrayIndex(),
                     cls->isSwiftStable() ? "(swift)" : "",
                     cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
    }

    // Realize superclass and metaclass, if they aren't already.
    // This needs to be done after RW_REALIZED is set above, for root classes.
    // This needs to be done after class index is chosen, for root metaclasses.
    // This assumes that none of those classes have Swift contents,
    // or that Swift's initializers have already been called.
    // fixme that assumption will be wrong if we add support
    // for ObjC subclasses of Swift classes.
    supercls = realizeClassWithoutSwift(remapClass(cls->superclass), nil);
    metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);

#if SUPPORT_NONPOINTER_ISA
    if (isMeta) {
        // Metaclasses do not need any features from non pointer ISA
        // This allows for a fast path for classes in objc_retain/objc_release.
        cls->setInstancesRequireRawIsa();
    } else {
        // Disable non-pointer isa for some classes and/or platforms.
        // Set instancesRequireRawIsa.
        bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
        bool rawIsaIsInherited = false;
        static bool hackedDispatch = false;

        if (DisableNonpointerIsa) {
            // Non-pointer isa disabled by environment or app SDK version
            instancesRequireRawIsa = true;
        }
        else if (!hackedDispatch  &&  0 == strcmp(ro->name, "OS_object"))
        {
            // hack for libdispatch et al - isa also acts as vtable pointer
            hackedDispatch = true;
            instancesRequireRawIsa = true;
        }
        else if (supercls  &&  supercls->superclass  &&
                 supercls->instancesRequireRawIsa())
        {
            // This is also propagated by addSubclass()
            // but nonpointer isa setup needs it earlier.
            // Special case: instancesRequireRawIsa does not propagate
            // from root class to root metaclass
            instancesRequireRawIsa = true;
            rawIsaIsInherited = true;
        }

        if (instancesRequireRawIsa) {
            cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
        }
    }
// SUPPORT_NONPOINTER_ISA
#endif

    // Update superclass and metaclass in case of remapping
    cls->superclass = supercls;
    cls->initClassIsa(metacls);

    // Reconcile instance variable offsets / layout.
    // This may reallocate class_ro_t, updating our ro variable.
    if (supercls  &&  !isMeta) reconcileInstanceVariables(cls, supercls, ro);

    // Set fastInstanceSize if it wasn't set already.
    cls->setInstanceSize(ro->instanceSize);

    // Copy some flags from ro to rw
    if (ro->flags & RO_HAS_CXX_STRUCTORS) {
        cls->setHasCxxDtor();
        if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
            cls->setHasCxxCtor();
        }
    }

    // Propagate the associated objects forbidden flag from ro or from
    // the superclass.
    if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) ||
        (supercls && supercls->forbidsAssociatedObjects()))
    {
        rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
    }

    // Connect this class to its superclass's subclass lists
    if (supercls) {
        addSubclass(supercls, cls);
    } else {
        addRootClass(cls);
    }

    // Attach categories
    methodizeClass(cls, previously);

    return cls;
}
2677
2678
2679 /***********************************************************************
2680 * _objc_realizeClassFromSwift
2681 * Called by Swift when it needs the ObjC part of a class to be realized.
2682 * There are four cases:
2683 * 1. cls != nil; previously == cls
2684 * Class cls is being realized in place
2685 * 2. cls != nil; previously == nil
2686 * Class cls is being constructed at runtime
2687 * 3. cls != nil; previously != cls
2688 * The class that was at previously has been reallocated to cls
2689 * 4. cls == nil, previously != nil
2690 * The class at previously is hereby disavowed
2691 *
2692 * Only variants #1 and #2 are supported today.
2693 *
2694 * Locking: acquires runtimeLock
2695 **********************************************************************/
Class _objc_realizeClassFromSwift(Class cls, void *previously)
{
    if (cls) {
        if (previously && previously != (void*)cls) {
            // #3: relocation
            // NOTE(review): the header comment above says only variants
            // #1 and #2 are supported, yet this path handles #3 — confirm
            // which statement is current.
            mutex_locker_t lock(runtimeLock);
            addRemappedClass((Class)previously, cls);
            addClassTableEntry(cls);
            addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
            return realizeClassWithoutSwift(cls, (Class)previously);
        } else {
            // #1 and #2: realization in place, or new class
            mutex_locker_t lock(runtimeLock);

            if (!previously) {
                // #2: new class
                cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
            }

            // #1 and #2: realization in place, or new class
            // We ignore the Swift metadata initializer callback.
            // We assume that's all handled since we're being called from Swift.
            return realizeClassWithoutSwift(cls, nil);
        }
    }
    else {
        // #4: disavowal
        // In the future this will mean remapping the old address to nil
        // and if necessary removing the old address from any other tables.
        _objc_fatal("Swift requested that class %p be ignored, "
                    "but libobjc does not support that.", previously);
    }
}
2729
2730 /***********************************************************************
2731 * realizeSwiftClass
2732 * Performs first-time initialization on class cls,
2733 * including allocating its read-write data,
2734 * and any Swift-side initialization.
2735 * Returns the real class structure for the class.
2736 * Locking: acquires runtimeLock indirectly
2737 **********************************************************************/
static Class realizeSwiftClass(Class cls)
{
    // Must be called without runtimeLock: the Swift initializer below may
    // call back into the runtime and take the lock itself.
    runtimeLock.assertUnlocked();

    // Some assumptions:
    // * Metaclasses never have a Swift initializer.
    // * Root classes never have a Swift initializer.
    //   (These two together avoid initialization order problems at the root.)
    // * Unrealized non-Swift classes have no Swift ancestry.
    // * Unrealized Swift classes with no initializer have no ancestry that
    //   does have the initializer.
    //   (These two together mean we don't need to scan superclasses here
    //   and we don't need to worry about Swift superclasses inside
    //   realizeClassWithoutSwift()).

    // fixme some of these assumptions will be wrong
    // if we add support for ObjC sublasses of Swift classes.

#if DEBUG
    // Sanity-check the preconditions under the lock in debug builds.
    runtimeLock.lock();
    ASSERT(remapClass(cls) == cls);
    ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
    ASSERT(!cls->isMetaClassMaybeUnrealized());
    ASSERT(cls->superclass);
    runtimeLock.unlock();
#endif

    // Look for a Swift metadata initialization function
    // installed on the class. If it is present we call it.
    // That function in turn initializes the Swift metadata,
    // prepares the "compiler-generated" ObjC metadata if not
    // already present, and calls _objc_realizeSwiftClass() to finish
    // our own initialization.

    if (auto init = cls->swiftMetadataInitializer()) {
        if (PrintConnecting) {
            _objc_inform("CLASS: calling Swift metadata initializer "
                         "for class '%s' (%p)", cls->nameForLogging(), cls);
        }

        Class newcls = init(cls, nil);

        // fixme someday Swift will need to relocate classes at this point,
        // but we don't accept that yet.
        if (cls != newcls) {
            mutex_locker_t lock(runtimeLock);
            addRemappedClass(cls, newcls);
        }

        return newcls;
    }
    else {
        // No Swift-side initialization callback.
        // Perform our own realization directly.
        mutex_locker_t lock(runtimeLock);
        return realizeClassWithoutSwift(cls, nil);
    }
}
2796
2797
2798 /***********************************************************************
2799 * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
2800 * Realize a class that might be a Swift class.
2801 * Returns the real class structure for the class.
2802 * Locking:
2803 * runtimeLock must be held on entry
2804 * runtimeLock may be dropped during execution
2805 * ...AndUnlock function leaves runtimeLock unlocked on exit
2806 * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
2807 * This complication avoids repeated lock transitions in some cases.
2808 **********************************************************************/
static Class
realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
{
    lock.assertLocked();

    if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
        // Non-Swift class. Realize it now with the lock still held.
        // fixme wrong in the future for objc subclasses of swift classes
        realizeClassWithoutSwift(cls, nil);
        if (!leaveLocked) lock.unlock();
    } else {
        // Swift class. We need to drop locks and call the Swift
        // runtime to initialize it.
        lock.unlock();
        cls = realizeSwiftClass(cls);
        ASSERT(cls->isRealized());  // callback must have provoked realization
        if (leaveLocked) lock.lock();
    }

    return cls;
}
2830
// Realize cls (possibly via the Swift runtime) and leave `lock` unlocked.
static Class
realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
{
    return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
}
2836
// Realize cls (possibly via the Swift runtime); `lock` may be dropped
// during execution but is re-acquired before returning.
static Class
realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
{
    return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
}
2842
2843
2844 /***********************************************************************
2845 * missingWeakSuperclass
2846 * Return YES if some superclass of cls was weak-linked and is missing.
2847 **********************************************************************/
static bool
missingWeakSuperclass(Class cls)
{
    // Walk up the (unrealized) superclass chain iteratively.
    for (;;) {
        ASSERT(!cls->isRealized());

        if (!cls->superclass) {
            // A nil superclass is legitimate only for root classes.
            return (!(cls->data()->flags & RO_ROOT));
        }

        // Non-nil superclass: keep climbing until we hit a missing
        // (weak-linked) link or a realized ancestor.
        Class supercls = remapClass(cls->superclass);
        ASSERT(cls != cls->superclass);
        ASSERT(cls != supercls);
        if (!supercls) return YES;
        if (supercls->isRealized()) return NO;
        cls = supercls;
    }
}
2866
2867
2868 /***********************************************************************
2869 * realizeAllClassesInImage
2870 * Non-lazily realizes all unrealized classes in the given image.
2871 * Locking: runtimeLock must be held by the caller.
2872 * Locking: this function may drop and re-acquire the lock.
2873 **********************************************************************/
2874 static void realizeAllClassesInImage(header_info *hi)
2875 {
2876 runtimeLock.assertLocked();
2877
2878 size_t count, i;
2879 classref_t const *classlist;
2880
// Fast exit if a previous call already realized this image's classes.
2881 if (hi->areAllClassesRealized()) return;
2882
2883 classlist = _getObjc2ClassList(hi, &count);
2884
2885 for (i = 0; i < count; i++) {
2886 Class cls = remapClass(classlist[i]);
// nil means the class was remapped away (e.g. ignored because of a
// missing weak-linked superclass — see readClass()).
2887 if (cls) {
// May drop and re-acquire runtimeLock for Swift classes.
2888 realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
2889 }
2890 }
2891
2892 hi->setAllClassesRealized(YES);
2893 }
2894
2895
2896 /***********************************************************************
2897 * realizeAllClasses
2898 * Non-lazily realizes all unrealized classes in all known images.
2899 * Locking: runtimeLock must be held by the caller.
2900 * Locking: this function may drop and re-acquire the lock.
2901 * Dropping the lock makes this function thread-unsafe with respect
2902 * to concurrent image unload, but the callers of this function
2903 * already ultimately do something that is also thread-unsafe with
2904 * respect to image unload (such as using the list of all classes).
2905 **********************************************************************/
2906 static void realizeAllClasses(void)
2907 {
2908 runtimeLock.assertLocked();
2909
2910 header_info *hi;
2911 for (hi = FirstHeader; hi; hi = hi->getNext()) {
2912 realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
2913 }
2914 }
2915
2916
2917 /***********************************************************************
2918 * _objc_allocateFutureClass
2919 * Allocate an unresolved future class for the given class name.
2920 * Returns any existing allocation if one was already made.
2921 * Assumes the named class doesn't exist yet.
2922 * Locking: acquires runtimeLock
2923 **********************************************************************/
2924 Class _objc_allocateFutureClass(const char *name)
2925 {
2926 mutex_locker_t lock(runtimeLock);
2927
2928 Class cls;
2929 NXMapTable *map = futureNamedClasses();
2930
2931 if ((cls = (Class)NXMapGet(map, name))) {
2932 // Already have a future class for this name.
2933 return cls;
2934 }
2935
// No existing future class: allocate a zero-filled objc_class and
// register it under `name`. readClass() will later splice the real
// class data into this allocation when the class is loaded.
2936 cls = _calloc_class(sizeof(objc_class));
2937 addFutureNamedClass(name, cls);
2938
2939 return cls;
2940 }
2941
2942
2943 /***********************************************************************
2944 * objc_getFutureClass. Return the id of the named class.
2945 * If the class does not exist, return an uninitialized class
2946 * structure that will be used for the class when and if it
2947 * does get loaded.
2948 * Not thread safe.
2949 **********************************************************************/
2950 Class objc_getFutureClass(const char *name)
2951 {
2952 Class cls;
2953
// First check for an existing class (or an already-allocated future
// class) with this name.
2954 // YES unconnected, NO class handler
2955 // (unconnected is OK because it will someday be the real class)
2956 cls = look_up_class(name, YES, NO);
2957 if (cls) {
2958 if (PrintFuture) {
2959 _objc_inform("FUTURE: found %p already in use for %s",
2960 (void*)cls, name);
2961 }
2962
2963 return cls;
2964 }
2965
2966 // No class or future class with that name yet. Make one.
2967 // fixme not thread-safe with respect to
2968 // simultaneous library load or getFutureClass.
2969 return _objc_allocateFutureClass(name);
2970 }
2971
2972
2973 BOOL _class_isFutureClass(Class cls)
2974 {
2975 return cls && cls->isFuture();
2976 }
2977
2978
2979 /***********************************************************************
2980 * _objc_flush_caches
2981 * Flushes all caches.
2982 * (Historical behavior: flush caches for cls, its metaclass,
2983 * and subclasses thereof. Nil flushes all classes.)
2984 * Locking: acquires runtimeLock
2985 **********************************************************************/
2986 static void flushCaches(Class cls)
2987 {
2988 runtimeLock.assertLocked();
// In configurations with a separate cache lock, take it for the
// duration of the erase; otherwise runtimeLock alone covers the caches.
2989 #if CONFIG_USE_CACHE_LOCK
2990 mutex_locker_t lock(cacheUpdateLock);
2991 #endif
2992
2993 if (cls) {
// cls != nil: erase the method cache of cls and of every realized
// subclass. (Metaclasses are covered by the caller passing the isa.)
2994 foreach_realized_class_and_subclass(cls, [](Class c){
2995 cache_erase_nolock(c);
// Returning true continues the enumeration over remaining classes.
2996 return true;
2997 });
2998 }
2999 else {
// cls == nil: historical "flush everything" behavior — erase the
// cache of every realized class and metaclass.
3000 foreach_realized_class_and_metaclass([](Class c){
3001 cache_erase_nolock(c);
3002 return true;
3003 });
3004 }
3005 }
3006
3007
3008 void _objc_flush_caches(Class cls)
3009 {
// Inner scope limits how long runtimeLock is held: cache_collect below
// takes its own lock(s) after this block releases runtimeLock.
3010 {
3011 mutex_locker_t lock(runtimeLock);
3012 flushCaches(cls);
// Also flush the metaclass side, unless cls is its own isa root.
3013 if (cls && cls->superclass && cls != cls->getIsa()) {
3014 flushCaches(cls->getIsa());
3015 } else {
3016 // cls is a root class or root metaclass. Its metaclass is itself
3017 // or a subclass so the metaclass caches were already flushed.
3018 }
3019 }
3020
3021 if (!cls) {
3022 // collectALot if cls==nil
// Garbage-collect old cache memory. The lock taken here depends on
// the cache-lock configuration, mirroring flushCaches above.
3023 #if CONFIG_USE_CACHE_LOCK
3024 mutex_locker_t lock(cacheUpdateLock);
3025 #else
3026 mutex_locker_t lock(runtimeLock);
3027 #endif
3028 cache_collect(true);
3029 }
3030 }
3031
3032
3033 /***********************************************************************
3034 * map_images
3035 * Process the given images which are being mapped in by dyld.
3036 * Calls ABI-agnostic code after taking ABI-specific locks.
3037 *
3038 * Locking: write-locks runtimeLock
3039 **********************************************************************/
3040 void
3041 map_images(unsigned count, const char * const paths[],
3042 const struct mach_header * const mhdrs[])
3043 {
// dyld callback entry point: take runtimeLock here, then hand off to
// the ABI-agnostic implementation which assumes the lock is held.
3044 mutex_locker_t lock(runtimeLock);
3045 return map_images_nolock(count, paths, mhdrs);
3046 }
3047
3048
3049 static void load_categories_nolock(header_info *hi) {
3050 bool hasClassProperties = hi->info()->hasCategoryClassProperties();
3051
3052 size_t count;
3053 auto processCatlist = [&](category_t * const *catlist) {
3054 for (unsigned i = 0; i < count; i++) {
3055 category_t *cat = catlist[i];
3056 Class cls = remapClass(cat->cls);
3057 locstamped_category_t lc{cat, hi};
3058
3059 if (!cls) {
3060 // Category's target class is missing (probably weak-linked).
3061 // Ignore the category.
3062 if (PrintConnecting) {
3063 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
3064 "missing weak-linked target class",
3065 cat->name, cat);
3066 }
3067 continue;
3068 }
3069
3070 // Process this category.
3071 if (cls->isStubClass()) {
3072 // Stub classes are never realized. Stub classes
3073 // don't know their metaclass until they're
3074 // initialized, so we have to add categories with
3075 // class methods or properties to the stub itself.
3076 // methodizeClass() will find them and add them to
3077 // the metaclass as appropriate.
3078 if (cat->instanceMethods ||
3079 cat->protocols ||
3080 cat->instanceProperties ||
3081 cat->classMethods ||
3082 cat->protocols ||
3083 (hasClassProperties && cat->_classProperties))
3084 {
3085 objc::unattachedCategories.addForClass(lc, cls);
3086 }
3087 } else {
3088 // First, register the category with its target class.
3089 // Then, rebuild the class's method lists (etc) if
3090 // the class is realized.
3091 if (cat->instanceMethods || cat->protocols
3092 || cat->instanceProperties)
3093 {
3094 if (cls->isRealized()) {
3095 attachCategories(cls, &lc, 1, ATTACH_EXISTING);
3096 } else {
3097 objc::unattachedCategories.addForClass(lc, cls);
3098 }
3099 }
3100
3101 if (cat->classMethods || cat->protocols
3102 || (hasClassProperties && cat->_classProperties))
3103 {
3104 if (cls->ISA()->isRealized()) {
3105 attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
3106 } else {
3107 objc::unattachedCategories.addForClass(lc, cls->ISA());
3108 }
3109 }
3110 }
3111 }
3112 };
3113
3114 processCatlist(hi->catlist(&count));
3115 processCatlist(hi->catlist2(&count));
3116 }
3117
3118 static void loadAllCategories() {
3119 mutex_locker_t lock(runtimeLock);
3120
3121 for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
3122 load_categories_nolock(hi);
3123 }
3124 }
3125
3126 /***********************************************************************
3127 * load_images
3128 * Process +load in the given images which are being mapped in by dyld.
3129 *
3130 * Locking: write-locks runtimeLock and loadMethodLock
3131 **********************************************************************/
3132 extern bool hasLoadMethods(const headerType *mhdr);
3133 extern void prepare_load_methods(const headerType *mhdr);
3134
3135 void
3136 load_images(const char *path __unused, const struct mach_header *mh)
3137 {
// One-time deferred category attachment: categories present at startup
// are only processed after dyld's notify-register callback has
// completed (see the rdar://53119145 note in _read_images).
3138 if (!didInitialAttachCategories && didCallDyldNotifyRegister) {
3139 didInitialAttachCategories = true;
3140 loadAllCategories();
3141 }
3142
3143 // Return without taking locks if there are no +load methods here.
3144 if (!hasLoadMethods((const headerType *)mh)) return;
3145
// Lock order: loadMethodLock first, then runtimeLock (matches unmap_image).
3146 recursive_mutex_locker_t lock(loadMethodLock);
3147
3148 // Discover load methods
3149 {
3150 mutex_locker_t lock2(runtimeLock);
3151 prepare_load_methods((const headerType *)mh);
3152 }
3153
3154 // Call +load methods (without runtimeLock - re-entrant)
3155 call_load_methods();
3156 }
3157
3158
3159 /***********************************************************************
3160 * unmap_image
3161 * Process the given image which is about to be unmapped by dyld.
3162 *
3163 * Locking: write-locks runtimeLock and loadMethodLock
3164 **********************************************************************/
3165 void
3166 unmap_image(const char *path __unused, const struct mach_header *mh)
3167 {
// Lock order: loadMethodLock before runtimeLock, same order as
// load_images, to avoid deadlock between the two dyld callbacks.
3168 recursive_mutex_locker_t lock(loadMethodLock);
3169 mutex_locker_t lock2(runtimeLock);
3170 unmap_image_nolock(mh);
3171 }
3172
3173
3174 /***********************************************************************
3175 * mustReadClasses
3176 * Preflight check in advance of readClass() from an image.
3177 **********************************************************************/
// Returns YES if readClass() must be run for the classes of image `hi`,
// NO if the image's preoptimized data makes that unnecessary.
// `hasDyldRoots` is YES when some shared cache image was overridden,
// which invalidates preoptimization assumptions.
3178 bool mustReadClasses(header_info *hi, bool hasDyldRoots)
3179 {
3180 const char *reason;
3181
3182 // If the image is not preoptimized then we must read classes.
3183 if (!hi->hasPreoptimizedClasses()) {
3184 reason = nil; // Don't log this one because it is noisy.
3185 goto readthem;
3186 }
3187
3188 // If iOS simulator then we must read classes.
3189 #if TARGET_OS_SIMULATOR
3190 reason = "the image is for iOS simulator";
3191 goto readthem;
3192 #endif
3193
3194 ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache
3195
3196 // If the image may have missing weak superclasses then we must read classes
3197 if (!noMissingWeakSuperclasses() || hasDyldRoots) {
3198 reason = "the image may contain classes with missing weak superclasses";
3199 goto readthem;
3200 }
3201
3202 // If there are unresolved future classes then we must read classes.
3203 if (haveFutureNamedClasses()) {
3204 reason = "there are unresolved future classes pending";
3205 goto readthem;
3206 }
3207
3208 // readClass() rewrites bits in backward-deploying Swift stable ABI code.
3209 // The assumption here is that there are no such classes
3210 // in the dyld shared cache.
3211 #if DEBUG
3212 {
3213 size_t count;
3214 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3215 for (size_t i = 0; i < count; i++) {
3216 Class cls = remapClass(classlist[i]);
3217 ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
3218 }
3219 }
3220 #endif
3221
3222 // readClass() does not need to do anything.
3223 return NO;
3224
3225 readthem:
3226 if (PrintPreopt && reason) {
3227 _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
3228 "because %s", hi->fname(), reason);
3229 }
3230 return YES;
3231 }
3232
3233
3234 /***********************************************************************
3235 * readClass
3236 * Read a class and metaclass as written by a compiler.
3237 * Returns the new class pointer. This could be:
3238 * - cls
3239 * - nil (cls has a missing weak-linked superclass)
3240 * - something else (space for this class was reserved by a future class)
3241 *
3242 * Note that all work performed by this function is preflighted by
3243 * mustReadClasses(). Do not change this function without updating that one.
3244 *
3245 * Locking: runtimeLock acquired by map_images or objc_readClassPair
3246 **********************************************************************/
3247 Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
3248 {
3249 const char *mangledName = cls->mangledName();
3250
3251 if (missingWeakSuperclass(cls)) {
3252 // No superclass (probably weak-linked).
3253 // Disavow any knowledge of this subclass.
3254 if (PrintConnecting) {
3255 _objc_inform("CLASS: IGNORING class '%s' with "
3256 "missing weak-linked superclass",
3257 cls->nameForLogging());
3258 }
// Record the class as remapped-to-nil so later remapClass() calls
// treat it as absent.
3259 addRemappedClass(cls, nil);
3260 cls->superclass = nil;
3261 return nil;
3262 }
3263
3264 cls->fixupBackwardDeployingStableSwift();
3265
3266 Class replacing = nil;
3267 if (Class newCls = popFutureNamedClass(mangledName)) {
3268 // This name was previously allocated as a future class.
3269 // Copy objc_class to future class's struct.
3270 // Preserve future's rw data block.
3271
3272 if (newCls->isAnySwift()) {
3273 _objc_fatal("Can't complete future class request for '%s' "
3274 "because the real class is too big.",
3275 cls->nameForLogging());
3276 }
3277
// Splice sequence (order matters):
// 1. save the future class's rw and its placeholder ro,
// 2. overwrite the future allocation with the compiler's class,
// 3. after the memcpy, newCls->data() is the compiler-written ro
//    (copied from cls); install it as rw's ro,
// 4. restore rw as newCls's data, then free the placeholder ro.
3278 class_rw_t *rw = newCls->data();
3279 const class_ro_t *old_ro = rw->ro();
3280 memcpy(newCls, cls, sizeof(objc_class));
3281 rw->set_ro((class_ro_t *)newCls->data());
3282 newCls->setData(rw);
3283 freeIfMutable((char *)old_ro->name);
3284 free((void *)old_ro);
3285
3286 addRemappedClass(cls, newCls);
3287
3288 replacing = cls;
3289 cls = newCls;
3290 }
3291
3292 if (headerIsPreoptimized && !replacing) {
3293 // class list built in shared cache
3294 // fixme strict assert doesn't work because of duplicates
3295 // ASSERT(cls == getClass(name));
3296 ASSERT(getClassExceptSomeSwift(mangledName));
3297 } else {
3298 addNamedClass(cls, mangledName, replacing);
3299 addClassTableEntry(cls);
3300 }
3301
3302 // for future reference: shared cache never contains MH_BUNDLEs
3303 if (headerIsBundle) {
3304 cls->data()->flags |= RO_FROM_BUNDLE;
3305 cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
3306 }
3307
3308 return cls;
3309 }
3310
3311
3312 /***********************************************************************
3313 * readProtocol
3314 * Read a protocol as written by a compiler.
3315 **********************************************************************/
// Install `newproto` (from an image's __objc_protolist) into
// `protocol_map`, resolving duplicates against previously-registered and
// shared-cache definitions. `protocol_class` is the Protocol metaclass
// used to initialize the isa of in-place or reallocated protocols.
3316 static void
3317 readProtocol(protocol_t *newproto, Class protocol_class,
3318 NXMapTable *protocol_map,
3319 bool headerIsPreoptimized, bool headerIsBundle)
3320 {
3321 // This is not enough to make protocols in unloaded bundles safe,
3322 // but it does prevent crashes when looking up unrelated protocols.
3323 auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;
3324
3325 protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);
3326
3327 if (oldproto) {
3328 if (oldproto != newproto) {
3329 // Some other definition already won.
3330 if (PrintProtocols) {
3331 _objc_inform("PROTOCOLS: protocol at %p is %s "
3332 "(duplicate of %p)",
3333 newproto, oldproto->nameForLogging(), oldproto);
3334 }
3335
3336 // If we are a shared cache binary then we have a definition of this
3337 // protocol, but if another one was chosen then we need to clear our
3338 // isCanonical bit so that no-one trusts it.
3339 // Note, if getProtocol returned a shared cache protocol then the
3340 // canonical definition is already in the shared cache and we don't
3341 // need to do anything.
3342 if (headerIsPreoptimized && !oldproto->isCanonical()) {
3343 // Note newproto is an entry in our __objc_protolist section which
3344 // for shared cache binaries points to the original protocol in
3345 // that binary, not the shared cache uniqued one.
3346 auto cacheproto = (protocol_t *)
3347 getSharedCachePreoptimizedProtocol(newproto->mangledName);
3348 if (cacheproto && cacheproto->isCanonical())
3349 cacheproto->clearIsCanonical();
3350 }
3351 }
3352 }
3353 else if (headerIsPreoptimized) {
3354 // Shared cache initialized the protocol object itself,
3355 // but in order to allow out-of-cache replacement we need
3356 // to add it to the protocol table now.
3357
3358 protocol_t *cacheproto = (protocol_t *)
3359 getPreoptimizedProtocol(newproto->mangledName);
3360 protocol_t *installedproto;
3361 if (cacheproto && cacheproto != newproto) {
3362 // Another definition in the shared cache wins (because
3363 // everything in the cache was fixed up to point to it).
3364 installedproto = cacheproto;
3365 }
3366 else {
3367 // This definition wins.
3368 installedproto = newproto;
3369 }
3370
3371 ASSERT(installedproto->getIsa() == protocol_class);
3372 ASSERT(installedproto->size >= sizeof(protocol_t));
3373 insertFn(protocol_map, installedproto->mangledName,
3374 installedproto);
3375
3376 if (PrintProtocols) {
3377 _objc_inform("PROTOCOLS: protocol at %p is %s",
3378 installedproto, installedproto->nameForLogging());
3379 if (newproto != installedproto) {
3380 _objc_inform("PROTOCOLS: protocol at %p is %s "
3381 "(duplicate of %p)",
3382 newproto, installedproto->nameForLogging(),
3383 installedproto);
3384 }
3385 }
3386 }
3387 else if (newproto->size >= sizeof(protocol_t)) {
3388 // New protocol from an un-preoptimized image
3389 // with sufficient storage. Fix it up in place.
3390 // fixme duplicate protocols from unloadable bundle
3391 newproto->initIsa(protocol_class); // fixme pinned
3392 insertFn(protocol_map, newproto->mangledName, newproto);
3393 if (PrintProtocols) {
3394 _objc_inform("PROTOCOLS: protocol at %p is %s",
3395 newproto, newproto->nameForLogging());
3396 }
3397 }
3398 else {
3399 // New protocol from an un-preoptimized image
3400 // with insufficient storage. Reallocate it.
3401 // fixme duplicate protocols from unloadable bundle
// Allocate at least sizeof(protocol_t); copy only the bytes the
// compiler emitted, the rest stays zero from calloc.
3402 size_t size = max(sizeof(protocol_t), (size_t)newproto->size);
3403 protocol_t *installedproto = (protocol_t *)calloc(size, 1);
3404 memcpy(installedproto, newproto, newproto->size);
3405 installedproto->size = (typeof(installedproto->size))size;
3406
3407 installedproto->initIsa(protocol_class); // fixme pinned
3408 insertFn(protocol_map, installedproto->mangledName, installedproto);
3409 if (PrintProtocols) {
3410 _objc_inform("PROTOCOLS: protocol at %p is %s ",
3411 installedproto, installedproto->nameForLogging());
3412 _objc_inform("PROTOCOLS: protocol at %p is %s "
3413 "(reallocated to %p)",
3414 newproto, installedproto->nameForLogging(),
3415 installedproto);
3416 }
3417 }
3418 }
3419
3420 /***********************************************************************
3421 * _read_images
3422 * Perform initial processing of the headers in the linked
3423 * list beginning with headerList.
3424 *
3425 * Called by: map_images_nolock
3426 *
3427 * Locking: runtimeLock acquired by map_images
3428 **********************************************************************/
3429 void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
3430 {
3431 header_info *hi;
3432 uint32_t hIndex;
3433 size_t count;
3434 size_t i;
3435 Class *resolvedFutureClasses = nil;
3436 size_t resolvedFutureClassCount = 0;
// doneOnce is static: the block guarded by it runs only on the first
// call (i.e. at process launch).
3437 static bool doneOnce;
3438 bool launchTime = NO;
3439 TimeLogger ts(PrintImageTimes);
3440
3441 runtimeLock.assertLocked();
3442
// Iterates hList[0..hCount-1], binding each header to `hi`.
3443 #define EACH_HEADER \
3444 hIndex = 0; \
3445 hIndex < hCount && (hi = hList[hIndex]); \
3446 hIndex++
3447
3448 if (!doneOnce) {
3449 doneOnce = YES;
3450 launchTime = YES;
3451
3452 #if SUPPORT_NONPOINTER_ISA
3453 // Disable non-pointer isa under some conditions.
3454
3455 # if SUPPORT_INDEXED_ISA
3456 // Disable nonpointer isa if any image contains old Swift code
3457 for (EACH_HEADER) {
3458 if (hi->info()->containsSwift() &&
3459 hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
3460 {
3461 DisableNonpointerIsa = true;
3462 if (PrintRawIsa) {
3463 _objc_inform("RAW ISA: disabling non-pointer isa because "
3464 "the app or a framework contains Swift code "
3465 "older than Swift 3.0");
3466 }
3467 break;
3468 }
3469 }
3470 # endif
3471
3472 # if TARGET_OS_OSX
3473 // Disable non-pointer isa if the app is too old
3474 // (linked before OS X 10.11)
3475 if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_11) {
3476 DisableNonpointerIsa = true;
3477 if (PrintRawIsa) {
3478 _objc_inform("RAW ISA: disabling non-pointer isa because "
3479 "the app is too old (SDK version " SDK_FORMAT ")",
3480 FORMAT_SDK(dyld_get_program_sdk_version()));
3481 }
3482 }
3483
3484 // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
3485 // New apps that load old extensions may need this.
3486 for (EACH_HEADER) {
3487 if (hi->mhdr()->filetype != MH_EXECUTE) continue;
3488 unsigned long size;
3489 if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
3490 DisableNonpointerIsa = true;
3491 if (PrintRawIsa) {
3492 _objc_inform("RAW ISA: disabling non-pointer isa because "
3493 "the app has a __DATA,__objc_rawisa section");
3494 }
3495 }
3496 break; // assume only one MH_EXECUTE image
3497 }
3498 # endif
3499
3500 #endif
3501
3502 if (DisableTaggedPointers) {
3503 disableTaggedPointers();
3504 }
3505
3506 initializeTaggedPointerObfuscator();
3507
3508 if (PrintConnecting) {
3509 _objc_inform("CLASS: found %d classes during launch", totalClasses);
3510 }
3511
3512 // namedClasses
3513 // Preoptimized classes don't go in this table.
3514 // 4/3 is NXMapTable's load factor
3515 int namedClassesSize =
3516 (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
3517 gdb_objc_realized_classes =
3518 NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
3519
3520 ts.log("IMAGE TIMES: first time tasks");
3521 }
3522
3523 // Fix up @selector references
// Replace each image's selector refs with the canonical registered SEL.
3524 static size_t UnfixedSelectors;
3525 {
3526 mutex_locker_t lock(selLock);
3527 for (EACH_HEADER) {
3528 if (hi->hasPreoptimizedSelectors()) continue;
3529
3530 bool isBundle = hi->isBundle();
3531 SEL *sels = _getObjc2SelectorRefs(hi, &count);
3532 UnfixedSelectors += count;
3533 for (i = 0; i < count; i++) {
3534 const char *name = sel_cname(sels[i]);
3535 SEL sel = sel_registerNameNoLock(name, isBundle);
3536 if (sels[i] != sel) {
3537 sels[i] = sel;
3538 }
3539 }
3540 }
3541 }
3542
3543 ts.log("IMAGE TIMES: fix up selector references");
3544
3545 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
3546 bool hasDyldRoots = dyld_shared_cache_some_image_overridden();
3547
3548 for (EACH_HEADER) {
3549 if (! mustReadClasses(hi, hasDyldRoots)) {
3550 // Image is sufficiently optimized that we need not call readClass()
3551 continue;
3552 }
3553
3554 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3555
3556 bool headerIsBundle = hi->isBundle();
3557 bool headerIsPreoptimized = hi->hasPreoptimizedClasses();
3558
3559 for (i = 0; i < count; i++) {
3560 Class cls = (Class)classlist[i];
3561 Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
3562
3563 if (newCls != cls && newCls) {
3564 // Class was moved but not deleted. Currently this occurs
3565 // only when the new class resolved a future class.
3566 // Non-lazily realize the class below.
3567 resolvedFutureClasses = (Class *)
3568 realloc(resolvedFutureClasses,
3569 (resolvedFutureClassCount+1) * sizeof(Class));
3570 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
3571 }
3572 }
3573 }
3574
3575 ts.log("IMAGE TIMES: discover classes");
3576
3577 // Fix up remapped classes
3578 // Class list and nonlazy class list remain unremapped.
3579 // Class refs and super refs are remapped for message dispatching.
3580
3581 if (!noClassesRemapped()) {
3582 for (EACH_HEADER) {
3583 Class *classrefs = _getObjc2ClassRefs(hi, &count);
3584 for (i = 0; i < count; i++) {
3585 remapClassRef(&classrefs[i]);
3586 }
3587 // fixme why doesn't test future1 catch the absence of this?
3588 classrefs = _getObjc2SuperRefs(hi, &count);
3589 for (i = 0; i < count; i++) {
3590 remapClassRef(&classrefs[i]);
3591 }
3592 }
3593 }
3594
3595 ts.log("IMAGE TIMES: remap classes");
3596
3597 #if SUPPORT_FIXUP
3598 // Fix up old objc_msgSend_fixup call sites
3599 for (EACH_HEADER) {
3600 message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
3601 if (count == 0) continue;
3602
3603 if (PrintVtables) {
3604 _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
3605 "call sites in %s", count, hi->fname());
3606 }
3607 for (i = 0; i < count; i++) {
3608 fixupMessageRef(refs+i);
3609 }
3610 }
3611
3612 ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
3613 #endif
3614
3615
3616 // Discover protocols. Fix up protocol refs.
3617 for (EACH_HEADER) {
3618 extern objc_class OBJC_CLASS_$_Protocol;
3619 Class cls = (Class)&OBJC_CLASS_$_Protocol;
3620 ASSERT(cls);
3621 NXMapTable *protocol_map = protocols();
3622 bool isPreoptimized = hi->hasPreoptimizedProtocols();
3623
3624 // Skip reading protocols if this is an image from the shared cache
3625 // and we support roots
3626 // Note, after launch we do need to walk the protocol as the protocol
3627 // in the shared cache is marked with isCanonical() and that may not
3628 // be true if some non-shared cache binary was chosen as the canonical
3629 // definition
3630 if (launchTime && isPreoptimized) {
3631 if (PrintProtocols) {
3632 _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
3633 hi->fname());
3634 }
3635 continue;
3636 }
3637
3638 bool isBundle = hi->isBundle();
3639
3640 protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
3641 for (i = 0; i < count; i++) {
3642 readProtocol(protolist[i], cls, protocol_map,
3643 isPreoptimized, isBundle);
3644 }
3645 }
3646
3647 ts.log("IMAGE TIMES: discover protocols");
3648
3649 // Fix up @protocol references
3650 // Preoptimized images may have the right
3651 // answer already but we don't know for sure.
3652 for (EACH_HEADER) {
3653 // At launch time, we know preoptimized image refs are pointing at the
3654 // shared cache definition of a protocol. We can skip the check on
3655 // launch, but have to visit @protocol refs for shared cache images
3656 // loaded later.
3657 if (launchTime && hi->isPreoptimized())
3658 continue;
3659 protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
3660 for (i = 0; i < count; i++) {
3661 remapProtocolRef(&protolist[i]);
3662 }
3663 }
3664
3665 ts.log("IMAGE TIMES: fix up @protocol references");
3666
3667 // Discover categories. Only do this after the initial category
3668 // attachment has been done. For categories present at startup,
3669 // discovery is deferred until the first load_images call after
3670 // the call to _dyld_objc_notify_register completes. rdar://problem/53119145
3671 if (didInitialAttachCategories) {
3672 for (EACH_HEADER) {
3673 load_categories_nolock(hi);
3674 }
3675 }
3676
3677 ts.log("IMAGE TIMES: discover categories");
3678
3679 // Category discovery MUST BE Late to avoid potential races
3680 // when other threads call the new category code before
3681 // this thread finishes its fixups.
3682
3683 // +load handled by prepare_load_methods()
3684
3685 // Realize non-lazy classes (for +load methods and static instances)
3686 for (EACH_HEADER) {
3687 classref_t const *classlist = hi->nlclslist(&count);
3688 for (i = 0; i < count; i++) {
3689 Class cls = remapClass(classlist[i]);
3690 if (!cls) continue;
3691
3692 addClassTableEntry(cls);
3693
3694 if (cls->isSwiftStable()) {
3695 if (cls->swiftMetadataInitializer()) {
3696 _objc_fatal("Swift class %s with a metadata initializer "
3697 "is not allowed to be non-lazy",
3698 cls->nameForLogging());
3699 }
3700 // fixme also disallow relocatable classes
3701 // We can't disallow all Swift classes because of
3702 // classes like Swift.__EmptyArrayStorage
3703 }
3704 realizeClassWithoutSwift(cls, nil);
3705 }
3706 }
3707
3708 ts.log("IMAGE TIMES: realize non-lazy classes");
3709
3710 // Realize newly-resolved future classes, in case CF manipulates them
3711 if (resolvedFutureClasses) {
3712 for (i = 0; i < resolvedFutureClassCount; i++) {
3713 Class cls = resolvedFutureClasses[i];
3714 if (cls->isSwiftStable()) {
3715 _objc_fatal("Swift class is not allowed to be future");
3716 }
3717 realizeClassWithoutSwift(cls, nil);
3718 cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
3719 }
3720 free(resolvedFutureClasses);
3721 }
3722
3723 ts.log("IMAGE TIMES: realize future classes");
3724
3725 if (DebugNonFragileIvars) {
3726 realizeAllClasses();
3727 }
3728
3729
3730 // Print preoptimization statistics
3731 if (PrintPreopt) {
3732 static unsigned int PreoptTotalMethodLists;
3733 static unsigned int PreoptOptimizedMethodLists;
3734 static unsigned int PreoptTotalClasses;
3735 static unsigned int PreoptOptimizedClasses;
3736
3737 for (EACH_HEADER) {
3738 if (hi->hasPreoptimizedSelectors()) {
3739 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
3740 "in %s", hi->fname());
3741 }
3742 else if (hi->info()->optimizedByDyld()) {
3743 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
3744 "in %s", hi->fname());
3745 }
3746
3747 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3748 for (i = 0; i < count; i++) {
3749 Class cls = remapClass(classlist[i]);
3750 if (!cls) continue;
3751
3752 PreoptTotalClasses++;
3753 if (hi->hasPreoptimizedClasses()) {
3754 PreoptOptimizedClasses++;
3755 }
3756
3757 const method_list_t *mlist;
3758 if ((mlist = ((class_ro_t *)cls->data())->baseMethods())) {
3759 PreoptTotalMethodLists++;
3760 if (mlist->isFixedUp()) {
3761 PreoptOptimizedMethodLists++;
3762 }
3763 }
3764 if ((mlist=((class_ro_t *)cls->ISA()->data())->baseMethods())) {
3765 PreoptTotalMethodLists++;
3766 if (mlist->isFixedUp()) {
3767 PreoptOptimizedMethodLists++;
3768 }
3769 }
3770 }
3771 }
3772
3773 _objc_inform("PREOPTIMIZATION: %zu selector references not "
3774 "pre-optimized", UnfixedSelectors);
3775 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
3776 PreoptOptimizedMethodLists, PreoptTotalMethodLists,
3777 PreoptTotalMethodLists
3778 ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
3779 : 0.0);
3780 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
3781 PreoptOptimizedClasses, PreoptTotalClasses,
3782 PreoptTotalClasses
3783 ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
3784 : 0.0);
// NOTE(review): UnfixedProtocolReferences is declared and maintained
// elsewhere in this file; it is not updated within this function.
3785 _objc_inform("PREOPTIMIZATION: %zu protocol references not "
3786 "pre-optimized", UnfixedProtocolReferences);
3787 }
3788
3789 #undef EACH_HEADER
3790 }
3791
3792
3793 /***********************************************************************
3794 * prepare_load_methods
3795 * Schedule +load for classes in this image, any un-+load-ed
3796 * superclasses in other images, and any categories in this image.
3797 **********************************************************************/
3798 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3799 // cls must already be connected.
3800 static void schedule_class_load(Class cls)
3801 {
3802 if (!cls) return;
3803 ASSERT(cls->isRealized()); // _read_images should realize
3804
// RW_LOADED marks classes already added to the loadable list; this also
// terminates the recursion up a shared superclass chain.
3805 if (cls->data()->flags & RW_LOADED) return;
3806
3807 // Ensure superclass-first ordering
3808 schedule_class_load(cls->superclass);
3809
3810 add_class_to_loadable_list(cls);
3811 cls->setInfo(RW_LOADED);
3812 }
3813
3814 // Quick scan for +load methods that doesn't take a lock.
3815 bool hasLoadMethods(const headerType *mhdr)
3816 {
3817 size_t count;
3818 if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true;
3819 if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true;
3820 return false;
3821 }
3822
3823 void prepare_load_methods(const headerType *mhdr)
3824 {
3825 size_t count, i;
3826
3827 runtimeLock.assertLocked();
3828
// Schedule +load for every non-lazy class (superclass-first ordering is
// handled inside schedule_class_load).
3829 classref_t const *classlist =
3830 _getObjc2NonlazyClassList(mhdr, &count);
3831 for (i = 0; i < count; i++) {
3832 schedule_class_load(remapClass(classlist[i]));
3833 }
3834
// Schedule +load for every non-lazy category, realizing the target
// class first so the category's +load can run against a live class.
3835 category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count);
3836 for (i = 0; i < count; i++) {
3837 category_t *cat = categorylist[i];
3838 Class cls = remapClass(cat->cls);
3839 if (!cls) continue; // category for ignored weak-linked class
3840 if (cls->isSwiftStable()) {
3841 _objc_fatal("Swift class extensions and categories on Swift "
3842 "classes are not allowed to have +load methods");
3843 }
3844 realizeClassWithoutSwift(cls, nil);
3845 ASSERT(cls->ISA()->isRealized());
3846 add_category_to_loadable_list(cat);
3847 }
3848 }
3849
3850
3851 /***********************************************************************
3852 * _unload_image
3853 * Only handles MH_BUNDLE for now.
3854 * Locking: write-lock and loadMethodLock acquired by unmap_image
3855 **********************************************************************/
void _unload_image(header_info *hi)
{
    size_t count, i;

    // Caller (unmap_image) must hold both locks.
    loadMethodLock.assertLocked();
    runtimeLock.assertLocked();

    // Unload unattached categories and categories waiting for +load.

    // Ignore __objc_catlist2. We don't support unloading Swift
    // and we never will.
    category_t * const *catlist = hi->catlist(&count);
    for (i = 0; i < count; i++) {
        category_t *cat = catlist[i];
        Class cls = remapClass(cat->cls);
        if (!cls) continue;  // category for ignored weak-linked class

        // fixme for MH_DYLIB cat's class may have been unloaded already

        // unattached list
        objc::unattachedCategories.eraseCategoryForClass(cat, cls);

        // +load queue
        remove_category_from_loadable_list(cat);
    }

    // Unload classes.

    // Gather classes from both __DATA,__objc_clslist
    // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
    // only, and we need to unload that class if we unload an arclite image.
    // The DenseSet deduplicates classes that appear in both lists.

    objc::DenseSet<Class> classes{};
    classref_t const *classlist;

    classlist = _getObjc2ClassList(hi, &count);
    for (i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) classes.insert(cls);
    }

    classlist = hi->nlclslist(&count);
    for (i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) classes.insert(cls);
    }

    // First detach classes from each other. Then free each class.
    // This avoids bugs where this loop unloads a subclass before its
    // superclass.

    for (Class cls: classes) {
        remove_class_from_loadable_list(cls);
        detach_class(cls->ISA(), YES);  // metaclass first
        detach_class(cls, NO);
    }
    for (Class cls: classes) {
        free_class(cls->ISA());
        free_class(cls);
    }

    // XXX FIXME -- Clean up protocols:
    // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time

    // fixme DebugUnload
}
3921
3922
3923 /***********************************************************************
3924 * method_getDescription
3925 * Returns a pointer to this method's objc_method_description.
3926 * Locking: none
3927 **********************************************************************/
3928 struct objc_method_description *
3929 method_getDescription(Method m)
3930 {
3931 if (!m) return nil;
3932 return m->getDescription();
3933 }
3934
3935
3936 IMP
3937 method_getImplementation(Method m)
3938 {
3939 return m ? m->imp(true) : nil;
3940 }
3941
3942 IMPAndSEL _method_getImplementationAndName(Method m)
3943 {
3944 return { m->imp(true), m->name() };
3945 }
3946
3947
3948 /***********************************************************************
3949 * method_getName
3950 * Returns this method's selector.
3951 * The method must not be nil.
3952 * The method must already have been fixed-up.
3953 * Locking: none
3954 **********************************************************************/
3955 SEL
3956 method_getName(Method m)
3957 {
3958 if (!m) return nil;
3959
3960 ASSERT(m->name() == sel_registerName(sel_getName(m->name())));
3961 return m->name();
3962 }
3963
3964
3965 /***********************************************************************
3966 * method_getTypeEncoding
3967 * Returns this method's old-style type encoding string.
3968 * The method must not be nil.
3969 * Locking: none
3970 **********************************************************************/
3971 const char *
3972 method_getTypeEncoding(Method m)
3973 {
3974 if (!m) return nil;
3975 return m->types();
3976 }
3977
3978
3979 /***********************************************************************
3980 * method_setImplementation
3981 * Sets this method's implementation to imp.
3982 * The previous implementation is returned.
3983 **********************************************************************/
3984 static IMP
3985 _method_setImplementation(Class cls, method_t *m, IMP imp)
3986 {
3987 runtimeLock.assertLocked();
3988
3989 if (!m) return nil;
3990 if (!imp) return nil;
3991
3992 IMP old = m->imp(false);
3993 m->setImp(imp);
3994
3995 // Cache updates are slow if cls is nil (i.e. unknown)
3996 // RR/AWZ updates are slow if cls is nil (i.e. unknown)
3997 // fixme build list of classes whose Methods are known externally?
3998
3999 flushCaches(cls);
4000
4001 adjustCustomFlagsForMethodChange(cls, m);
4002
4003 return old;
4004 }
4005
4006 IMP
4007 method_setImplementation(Method m, IMP imp)
4008 {
4009 // Don't know the class - will be slow if RR/AWZ are affected
4010 // fixme build list of classes whose Methods are known externally?
4011 mutex_locker_t lock(runtimeLock);
4012 return _method_setImplementation(Nil, m, imp);
4013 }
4014
4015
4016 void method_exchangeImplementations(Method m1, Method m2)
4017 {
4018 if (!m1 || !m2) return;
4019
4020 mutex_locker_t lock(runtimeLock);
4021
4022 IMP m1_imp = m1->imp(false);
4023 m1->setImp(m2->imp(false));
4024 m2->setImp(m1_imp);
4025
4026
4027 // RR/AWZ updates are slow because class is unknown
4028 // Cache updates are slow because class is unknown
4029 // fixme build list of classes whose Methods are known externally?
4030
4031 flushCaches(nil);
4032
4033 adjustCustomFlagsForMethodChange(nil, m1);
4034 adjustCustomFlagsForMethodChange(nil, m2);
4035 }
4036
4037
4038 /***********************************************************************
4039 * ivar_getOffset
4040 * fixme
4041 * Locking: none
4042 **********************************************************************/
4043 ptrdiff_t
4044 ivar_getOffset(Ivar ivar)
4045 {
4046 if (!ivar) return 0;
4047 return *ivar->offset;
4048 }
4049
4050
4051 /***********************************************************************
4052 * ivar_getName
4053 * fixme
4054 * Locking: none
4055 **********************************************************************/
4056 const char *
4057 ivar_getName(Ivar ivar)
4058 {
4059 if (!ivar) return nil;
4060 return ivar->name;
4061 }
4062
4063
4064 /***********************************************************************
4065 * ivar_getTypeEncoding
4066 * fixme
4067 * Locking: none
4068 **********************************************************************/
4069 const char *
4070 ivar_getTypeEncoding(Ivar ivar)
4071 {
4072 if (!ivar) return nil;
4073 return ivar->type;
4074 }
4075
4076
4077
4078 const char *property_getName(objc_property_t prop)
4079 {
4080 return prop->name;
4081 }
4082
4083 const char *property_getAttributes(objc_property_t prop)
4084 {
4085 return prop->attributes;
4086 }
4087
4088 objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
4089 unsigned int *outCount)
4090 {
4091 if (!prop) {
4092 if (outCount) *outCount = 0;
4093 return nil;
4094 }
4095
4096 mutex_locker_t lock(runtimeLock);
4097 return copyPropertyAttributeList(prop->attributes,outCount);
4098 }
4099
4100 char * property_copyAttributeValue(objc_property_t prop, const char *name)
4101 {
4102 if (!prop || !name || *name == '\0') return nil;
4103
4104 mutex_locker_t lock(runtimeLock);
4105 return copyPropertyAttributeValue(prop->attributes, name);
4106 }
4107
4108
4109 /***********************************************************************
4110 * getExtendedTypesIndexesForMethod
4111 * Returns:
4112 * a is the count of methods in all method lists before m's method list
4113 * b is the index of m in m's method list
4114 * a+b is the index of m's extended types in the extended types array
4115 **********************************************************************/
4116 static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
4117 {
4118 a = 0;
4119
4120 if (proto->instanceMethods) {
4121 if (isRequiredMethod && isInstanceMethod) {
4122 b = proto->instanceMethods->indexOfMethod(m);
4123 return;
4124 }
4125 a += proto->instanceMethods->count;
4126 }
4127
4128 if (proto->classMethods) {
4129 if (isRequiredMethod && !isInstanceMethod) {
4130 b = proto->classMethods->indexOfMethod(m);
4131 return;
4132 }
4133 a += proto->classMethods->count;
4134 }
4135
4136 if (proto->optionalInstanceMethods) {
4137 if (!isRequiredMethod && isInstanceMethod) {
4138 b = proto->optionalInstanceMethods->indexOfMethod(m);
4139 return;
4140 }
4141 a += proto->optionalInstanceMethods->count;
4142 }
4143
4144 if (proto->optionalClassMethods) {
4145 if (!isRequiredMethod && !isInstanceMethod) {
4146 b = proto->optionalClassMethods->indexOfMethod(m);
4147 return;
4148 }
4149 a += proto->optionalClassMethods->count;
4150 }
4151 }
4152
4153
4154 /***********************************************************************
4155 * getExtendedTypesIndexForMethod
4156 * Returns the index of m's extended types in proto's extended types array.
4157 **********************************************************************/
4158 static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
4159 {
4160 uint32_t a;
4161 uint32_t b;
4162 getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
4163 isInstanceMethod, a, b);
4164 return a + b;
4165 }
4166
4167
4168 /***********************************************************************
4169 * fixupProtocolMethodList
4170 * Fixes up a single method list in a protocol.
4171 **********************************************************************/
static void
fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
                        bool required, bool instance)
{
    runtimeLock.assertLocked();

    if (!mlist) return;
    if (mlist->isFixedUp()) return;

    // If extended method types exist, fixupMethodList() must NOT sort:
    // the extended-types array is parallel to the method list and has to
    // be permuted in lockstep, which is done below.
    const char **extTypes = proto->extendedMethodTypes();
    fixupMethodList(mlist, true/*always copy for simplicity*/,
                    !extTypes/*sort if no extended method types*/);

    if (extTypes && !mlist->isSmallList()) {
        // Sort method list and extended method types together.
        // fixupMethodList() can't do this.
        // fixme COW stomp
        uint32_t count = mlist->count;
        // prefix = index of this list's first entry in the flat
        // extended-types array (sum of all preceding lists' counts).
        uint32_t prefix;
        uint32_t junk;
        getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
                                         required, instance, prefix, junk);
        // O(n^2) in-place sort by selector address; each method swap is
        // mirrored on the parallel extTypes entries.
        for (uint32_t i = 0; i < count; i++) {
            for (uint32_t j = i+1; j < count; j++) {
                auto& mi = mlist->get(i).big();
                auto& mj = mlist->get(j).big();
                if (mi.name > mj.name) {
                    std::swap(mi, mj);
                    std::swap(extTypes[prefix+i], extTypes[prefix+j]);
                }
            }
        }
    }
}
4206
4207
4208 /***********************************************************************
4209 * fixupProtocol
4210 * Fixes up all of a protocol's method lists.
4211 **********************************************************************/
4212 static void
4213 fixupProtocol(protocol_t *proto)
4214 {
4215 runtimeLock.assertLocked();
4216
4217 if (proto->protocols) {
4218 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4219 protocol_t *sub = remapProtocol(proto->protocols->list[i]);
4220 if (!sub->isFixedUp()) fixupProtocol(sub);
4221 }
4222 }
4223
4224 fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
4225 fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
4226 fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
4227 fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);
4228
4229 // fixme memory barrier so we can check this with no lock
4230 proto->setFixedUp();
4231 }
4232
4233
4234 /***********************************************************************
4235 * fixupProtocolIfNeeded
4236 * Fixes up all of a protocol's method lists if they aren't fixed up already.
4237 * Locking: write-locks runtimeLock.
4238 **********************************************************************/
4239 static void
4240 fixupProtocolIfNeeded(protocol_t *proto)
4241 {
4242 runtimeLock.assertUnlocked();
4243 ASSERT(proto);
4244
4245 if (!proto->isFixedUp()) {
4246 mutex_locker_t lock(runtimeLock);
4247 fixupProtocol(proto);
4248 }
4249 }
4250
4251
4252 static method_list_t *
4253 getProtocolMethodList(protocol_t *proto, bool required, bool instance)
4254 {
4255 method_list_t **mlistp = nil;
4256 if (required) {
4257 if (instance) {
4258 mlistp = &proto->instanceMethods;
4259 } else {
4260 mlistp = &proto->classMethods;
4261 }
4262 } else {
4263 if (instance) {
4264 mlistp = &proto->optionalInstanceMethods;
4265 } else {
4266 mlistp = &proto->optionalClassMethods;
4267 }
4268 }
4269
4270 return *mlistp;
4271 }
4272
4273
4274 /***********************************************************************
4275 * protocol_getMethod_nolock
4276 * Locking: runtimeLock must be held by the caller
4277 **********************************************************************/
4278 static method_t *
4279 protocol_getMethod_nolock(protocol_t *proto, SEL sel,
4280 bool isRequiredMethod, bool isInstanceMethod,
4281 bool recursive)
4282 {
4283 runtimeLock.assertLocked();
4284
4285 if (!proto || !sel) return nil;
4286
4287 ASSERT(proto->isFixedUp());
4288
4289 method_list_t *mlist =
4290 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4291 if (mlist) {
4292 method_t *m = search_method_list(mlist, sel);
4293 if (m) return m;
4294 }
4295
4296 if (recursive && proto->protocols) {
4297 method_t *m;
4298 for (uint32_t i = 0; i < proto->protocols->count; i++) {
4299 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
4300 m = protocol_getMethod_nolock(realProto, sel,
4301 isRequiredMethod, isInstanceMethod,
4302 true);
4303 if (m) return m;
4304 }
4305 }
4306
4307 return nil;
4308 }
4309
4310
4311 /***********************************************************************
4312 * protocol_getMethod
4313 * fixme
4314 * Locking: acquires runtimeLock
4315 **********************************************************************/
4316 Method
4317 protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
4318 {
4319 if (!proto) return nil;
4320 fixupProtocolIfNeeded(proto);
4321
4322 mutex_locker_t lock(runtimeLock);
4323 return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
4324 isInstanceMethod, recursive);
4325 }
4326
4327
4328 /***********************************************************************
4329 * protocol_getMethodTypeEncoding_nolock
4330 * Return the @encode string for the requested protocol method.
4331 * Returns nil if the compiler did not emit any extended @encode data.
4332 * Locking: runtimeLock must be held by the caller
4333 **********************************************************************/
const char *
protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
                                      bool isRequiredMethod,
                                      bool isInstanceMethod)
{
    runtimeLock.assertLocked();

    if (!proto) return nil;
    // Compiler emitted no extended @encode data for this protocol.
    if (!proto->extendedMethodTypes()) return nil;

    ASSERT(proto->isFixedUp());

    // Non-recursive lookup here on purpose: each protocol indexes its OWN
    // extended-types array, so recursion is handled explicitly below.
    method_t *m =
        protocol_getMethod_nolock(proto, sel,
                                  isRequiredMethod, isInstanceMethod, false);
    if (m) {
        uint32_t i = getExtendedTypesIndexForMethod(proto, m,
                                                    isRequiredMethod,
                                                    isInstanceMethod);
        return proto->extendedMethodTypes()[i];
    }

    // No method with that name. Search incorporated protocols.
    if (proto->protocols) {
        for (uintptr_t i = 0; i < proto->protocols->count; i++) {
            const char *enc =
                protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
            if (enc) return enc;
        }
    }

    return nil;
}
4367
4368 /***********************************************************************
4369 * _protocol_getMethodTypeEncoding
4370 * Return the @encode string for the requested protocol method.
4371 * Returns nil if the compiler did not emit any extended @encode data.
4372 * Locking: acquires runtimeLock
4373 **********************************************************************/
4374 const char *
4375 _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
4376 BOOL isRequiredMethod, BOOL isInstanceMethod)
4377 {
4378 protocol_t *proto = newprotocol(proto_gen);
4379
4380 if (!proto) return nil;
4381 fixupProtocolIfNeeded(proto);
4382
4383 mutex_locker_t lock(runtimeLock);
4384 return protocol_getMethodTypeEncoding_nolock(proto, sel,
4385 isRequiredMethod,
4386 isInstanceMethod);
4387 }
4388
4389
4390 /***********************************************************************
4391 * protocol_t::demangledName
4392 * Returns the (Swift-demangled) name of the given protocol.
4393 * Locking: none
4394 **********************************************************************/
const char *
protocol_t::demangledName()
{
    ASSERT(hasDemangledNameField());

    if (! _demangledName) {
        // Try to compute a Swift v1 demangling; de may be nil, in which
        // case the mangled name itself is published. The CAS installs the
        // value exactly once so all threads agree on one pointer; a loser
        // of the race frees its own copy and uses the winner's.
        char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
                                               (void**)&_demangledName))
        {
            if (de) free(de);
        }
    }
    return _demangledName;
}
4410
4411 /***********************************************************************
4412 * protocol_getName
4413 * Returns the (Swift-demangled) name of the given protocol.
4414 * Locking: runtimeLock must not be held by the caller
4415 **********************************************************************/
4416 const char *
4417 protocol_getName(Protocol *proto)
4418 {
4419 if (!proto) return "nil";
4420 else return newprotocol(proto)->demangledName();
4421 }
4422
4423
/***********************************************************************
* protocol_getMethodDescription
* Returns the description of a named method.
* Locking: runtimeLock must not be held by the caller
**********************************************************************/
4429 struct objc_method_description
4430 protocol_getMethodDescription(Protocol *p, SEL aSel,
4431 BOOL isRequiredMethod, BOOL isInstanceMethod)
4432 {
4433 Method m =
4434 protocol_getMethod(newprotocol(p), aSel,
4435 isRequiredMethod, isInstanceMethod, true);
4436 // method_getDescription is inefficient for small methods. Don't bother
4437 // trying to use it, just make our own.
4438 if (m) return (struct objc_method_description){m->name(), (char *)m->types()};
4439 else return (struct objc_method_description){nil, nil};
4440 }
4441
4442
4443 /***********************************************************************
4444 * protocol_conformsToProtocol_nolock
4445 * Returns YES if self conforms to other.
4446 * Locking: runtimeLock must be held by the caller.
4447 **********************************************************************/
4448 static bool
4449 protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
4450 {
4451 runtimeLock.assertLocked();
4452
4453 if (!self || !other) {
4454 return NO;
4455 }
4456
4457 // protocols need not be fixed up
4458
4459 if (0 == strcmp(self->mangledName, other->mangledName)) {
4460 return YES;
4461 }
4462
4463 if (self->protocols) {
4464 uintptr_t i;
4465 for (i = 0; i < self->protocols->count; i++) {
4466 protocol_t *proto = remapProtocol(self->protocols->list[i]);
4467 if (other == proto) {
4468 return YES;
4469 }
4470 if (0 == strcmp(other->mangledName, proto->mangledName)) {
4471 return YES;
4472 }
4473 if (protocol_conformsToProtocol_nolock(proto, other)) {
4474 return YES;
4475 }
4476 }
4477 }
4478
4479 return NO;
4480 }
4481
4482
4483 /***********************************************************************
4484 * protocol_conformsToProtocol
4485 * Returns YES if self conforms to other.
4486 * Locking: acquires runtimeLock
4487 **********************************************************************/
4488 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
4489 {
4490 mutex_locker_t lock(runtimeLock);
4491 return protocol_conformsToProtocol_nolock(newprotocol(self),
4492 newprotocol(other));
4493 }
4494
4495
4496 /***********************************************************************
4497 * protocol_isEqual
4498 * Return YES if two protocols are equal (i.e. conform to each other)
4499 * Locking: acquires runtimeLock
4500 **********************************************************************/
4501 BOOL protocol_isEqual(Protocol *self, Protocol *other)
4502 {
4503 if (self == other) return YES;
4504 if (!self || !other) return NO;
4505
4506 if (!protocol_conformsToProtocol(self, other)) return NO;
4507 if (!protocol_conformsToProtocol(other, self)) return NO;
4508
4509 return YES;
4510 }
4511
4512
4513 /***********************************************************************
4514 * protocol_copyMethodDescriptionList
4515 * Returns descriptions of a protocol's methods.
4516 * Locking: acquires runtimeLock
4517 **********************************************************************/
4518 struct objc_method_description *
4519 protocol_copyMethodDescriptionList(Protocol *p,
4520 BOOL isRequiredMethod,BOOL isInstanceMethod,
4521 unsigned int *outCount)
4522 {
4523 protocol_t *proto = newprotocol(p);
4524 struct objc_method_description *result = nil;
4525 unsigned int count = 0;
4526
4527 if (!proto) {
4528 if (outCount) *outCount = 0;
4529 return nil;
4530 }
4531
4532 fixupProtocolIfNeeded(proto);
4533
4534 mutex_locker_t lock(runtimeLock);
4535
4536 method_list_t *mlist =
4537 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4538
4539 if (mlist) {
4540 result = (struct objc_method_description *)
4541 calloc(mlist->count + 1, sizeof(struct objc_method_description));
4542 for (const auto& meth : *mlist) {
4543 result[count].name = meth.name();
4544 result[count].types = (char *)meth.types();
4545 count++;
4546 }
4547 }
4548
4549 if (outCount) *outCount = count;
4550 return result;
4551 }
4552
4553
4554 /***********************************************************************
4555 * protocol_getProperty
4556 * fixme
4557 * Locking: runtimeLock must be held by the caller
4558 **********************************************************************/
4559 static property_t *
4560 protocol_getProperty_nolock(protocol_t *proto, const char *name,
4561 bool isRequiredProperty, bool isInstanceProperty)
4562 {
4563 runtimeLock.assertLocked();
4564
4565 if (!isRequiredProperty) {
4566 // Only required properties are currently supported.
4567 return nil;
4568 }
4569
4570 property_list_t *plist = isInstanceProperty ?
4571 proto->instanceProperties : proto->classProperties();
4572 if (plist) {
4573 for (auto& prop : *plist) {
4574 if (0 == strcmp(name, prop.name)) {
4575 return &prop;
4576 }
4577 }
4578 }
4579
4580 if (proto->protocols) {
4581 uintptr_t i;
4582 for (i = 0; i < proto->protocols->count; i++) {
4583 protocol_t *p = remapProtocol(proto->protocols->list[i]);
4584 property_t *prop =
4585 protocol_getProperty_nolock(p, name,
4586 isRequiredProperty,
4587 isInstanceProperty);
4588 if (prop) return prop;
4589 }
4590 }
4591
4592 return nil;
4593 }
4594
4595 objc_property_t protocol_getProperty(Protocol *p, const char *name,
4596 BOOL isRequiredProperty, BOOL isInstanceProperty)
4597 {
4598 if (!p || !name) return nil;
4599
4600 mutex_locker_t lock(runtimeLock);
4601 return (objc_property_t)
4602 protocol_getProperty_nolock(newprotocol(p), name,
4603 isRequiredProperty, isInstanceProperty);
4604 }
4605
4606
4607 /***********************************************************************
4608 * protocol_copyPropertyList
4609 * protocol_copyPropertyList2
4610 * fixme
4611 * Locking: acquires runtimeLock
4612 **********************************************************************/
4613 static property_t **
4614 copyPropertyList(property_list_t *plist, unsigned int *outCount)
4615 {
4616 property_t **result = nil;
4617 unsigned int count = 0;
4618
4619 if (plist) {
4620 count = plist->count;
4621 }
4622
4623 if (count > 0) {
4624 result = (property_t **)malloc((count+1) * sizeof(property_t *));
4625
4626 count = 0;
4627 for (auto& prop : *plist) {
4628 result[count++] = &prop;
4629 }
4630 result[count] = nil;
4631 }
4632
4633 if (outCount) *outCount = count;
4634 return result;
4635 }
4636
4637 objc_property_t *
4638 protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
4639 BOOL isRequiredProperty, BOOL isInstanceProperty)
4640 {
4641 if (!proto || !isRequiredProperty) {
4642 // Optional properties are not currently supported.
4643 if (outCount) *outCount = 0;
4644 return nil;
4645 }
4646
4647 mutex_locker_t lock(runtimeLock);
4648
4649 property_list_t *plist = isInstanceProperty
4650 ? newprotocol(proto)->instanceProperties
4651 : newprotocol(proto)->classProperties();
4652 return (objc_property_t *)copyPropertyList(plist, outCount);
4653 }
4654
4655 objc_property_t *
4656 protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
4657 {
4658 return protocol_copyPropertyList2(proto, outCount,
4659 YES/*required*/, YES/*instance*/);
4660 }
4661
4662
4663 /***********************************************************************
4664 * protocol_copyProtocolList
4665 * Copies this protocol's incorporated protocols.
4666 * Does not copy those protocol's incorporated protocols in turn.
4667 * Locking: acquires runtimeLock
4668 **********************************************************************/
4669 Protocol * __unsafe_unretained *
4670 protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
4671 {
4672 unsigned int count = 0;
4673 Protocol **result = nil;
4674 protocol_t *proto = newprotocol(p);
4675
4676 if (!proto) {
4677 if (outCount) *outCount = 0;
4678 return nil;
4679 }
4680
4681 mutex_locker_t lock(runtimeLock);
4682
4683 if (proto->protocols) {
4684 count = (unsigned int)proto->protocols->count;
4685 }
4686 if (count > 0) {
4687 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
4688
4689 unsigned int i;
4690 for (i = 0; i < count; i++) {
4691 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
4692 }
4693 result[i] = nil;
4694 }
4695
4696 if (outCount) *outCount = count;
4697 return result;
4698 }
4699
4700
4701 /***********************************************************************
4702 * objc_allocateProtocol
4703 * Creates a new protocol. The protocol may not be used until
4704 * objc_registerProtocol() is called.
4705 * Returns nil if a protocol with the same name already exists.
4706 * Locking: acquires runtimeLock
4707 **********************************************************************/
4708 Protocol *
4709 objc_allocateProtocol(const char *name)
4710 {
4711 mutex_locker_t lock(runtimeLock);
4712
4713 if (getProtocol(name)) {
4714 return nil;
4715 }
4716
4717 protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);
4718
4719 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4720 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4721 result->initProtocolIsa(cls);
4722 result->size = sizeof(protocol_t);
4723 // fixme mangle the name if it looks swift-y?
4724 result->mangledName = strdupIfMutable(name);
4725
4726 // fixme reserve name without installing
4727
4728 return (Protocol *)result;
4729 }
4730
4731
4732 /***********************************************************************
4733 * objc_registerProtocol
4734 * Registers a newly-constructed protocol. The protocol is now
4735 * ready for use and immutable.
4736 * Locking: acquires runtimeLock
4737 **********************************************************************/
4738 void objc_registerProtocol(Protocol *proto_gen)
4739 {
4740 protocol_t *proto = newprotocol(proto_gen);
4741
4742 mutex_locker_t lock(runtimeLock);
4743
4744 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4745 Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4746 extern objc_class OBJC_CLASS_$_Protocol;
4747 Class cls = (Class)&OBJC_CLASS_$_Protocol;
4748
4749 if (proto->ISA() == cls) {
4750 _objc_inform("objc_registerProtocol: protocol '%s' was already "
4751 "registered!", proto->nameForLogging());
4752 return;
4753 }
4754 if (proto->ISA() != oldcls) {
4755 _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
4756 "with objc_allocateProtocol!", proto->nameForLogging());
4757 return;
4758 }
4759
4760 // NOT initProtocolIsa(). The protocol object may already
4761 // have been retained and we must preserve that count.
4762 proto->changeIsa(cls);
4763
4764 // Don't add this protocol if we already have it.
4765 // Should we warn on duplicates?
4766 if (getProtocol(proto->mangledName) == nil) {
4767 NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
4768 }
4769 }
4770
4771
4772 /***********************************************************************
4773 * protocol_addProtocol
4774 * Adds an incorporated protocol to another protocol.
4775 * No method enforcement is performed.
4776 * `proto` must be under construction. `addition` must not.
4777 * Locking: acquires runtimeLock
4778 **********************************************************************/
4779 void
4780 protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
4781 {
4782 protocol_t *proto = newprotocol(proto_gen);
4783 protocol_t *addition = newprotocol(addition_gen);
4784
4785 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4786 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4787
4788 if (!proto_gen) return;
4789 if (!addition_gen) return;
4790
4791 mutex_locker_t lock(runtimeLock);
4792
4793 if (proto->ISA() != cls) {
4794 _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
4795 "under construction!", proto->nameForLogging());
4796 return;
4797 }
4798 if (addition->ISA() == cls) {
4799 _objc_inform("protocol_addProtocol: added protocol '%s' is still "
4800 "under construction!", addition->nameForLogging());
4801 return;
4802 }
4803
4804 protocol_list_t *protolist = proto->protocols;
4805 if (!protolist) {
4806 protolist = (protocol_list_t *)
4807 calloc(1, sizeof(protocol_list_t)
4808 + sizeof(protolist->list[0]));
4809 } else {
4810 protolist = (protocol_list_t *)
4811 realloc(protolist, protocol_list_size(protolist)
4812 + sizeof(protolist->list[0]));
4813 }
4814
4815 protolist->list[protolist->count++] = (protocol_ref_t)addition;
4816 proto->protocols = protolist;
4817 }
4818
4819
4820 /***********************************************************************
4821 * protocol_addMethodDescription
4822 * Adds a method to a protocol. The protocol must be under construction.
4823 * Locking: acquires runtimeLock
4824 **********************************************************************/
// Append one big-format method entry (name/types, imp == nil) to list,
// allocating the list on first use. Caller holds runtimeLock.
static void
protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
{
    if (!list) {
        // First method: allocate a one-entry list; calloc zeroes count.
        list = (method_list_t *)calloc(method_list_t::byteSize(sizeof(struct method_t::big), 1), 1);
        list->entsizeAndFlags = sizeof(struct method_t::big);
        list->setFixedUp();
    } else {
        // Grow by exactly one entry.
        // NOTE(review): calloc/realloc results are unchecked; on OOM the
        // next lines dereference nil — confirm whether the runtime's
        // abort-on-OOM policy is intended here.
        size_t size = list->byteSize() + list->entsize();
        list = (method_list_t *)realloc(list, size);
    }

    auto &meth = list->get(list->count++).big();
    meth.name = name;
    meth.types = types ? strdupIfMutable(types) : "";
    meth.imp = nil;
}
4842
// Adds a method description to a protocol that is still under
// construction (isa == __IncompleteProtocol). No-op with a warning
// otherwise. Locking: acquires runtimeLock.
void
protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
                              BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    protocol_t *proto = newprotocol(proto_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;

    mutex_locker_t lock(runtimeLock);

    if (proto->ISA() != cls) {
        _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    // Select the destination list from the required/instance flags,
    // then append through the shared helper.
    method_list_t **dest;
    if (isInstanceMethod) {
        dest = isRequiredMethod ? &proto->instanceMethods
                                : &proto->optionalInstanceMethods;
    } else {
        dest = isRequiredMethod ? &proto->classMethods
                                : &proto->optionalClassMethods;
    }
    protocol_addMethod_nolock(*dest, name, types);
}
4872
4873
4874 /***********************************************************************
4875 * protocol_addProperty
4876 * Adds a property to a protocol. The protocol must be under construction.
4877 * Locking: acquires runtimeLock
4878 **********************************************************************/
// Appends one property to `plist`, growing (or creating) the malloc'd
// list in place; `plist` is a reference so the caller's field is updated
// on reallocation.
// NOTE(review): calloc/realloc results are used unchecked — on OOM the
// old list leaks and the write below crashes; confirm crash-on-OOM is
// the intended policy.
// Locking: caller must hold runtimeLock.
static void
protocol_addProperty_nolock(property_list_t *&plist, const char *name,
                            const objc_property_attribute_t *attrs,
                            unsigned int count)
{
    if (!plist) {
        // First property: allocate a one-entry list.
        plist = (property_list_t *)calloc(property_list_t::byteSize(sizeof(property_t), 1), 1);
        plist->entsizeAndFlags = sizeof(property_t);
        plist->count = 1;
    } else {
        // Bump count first so byteSize() reflects the grown list.
        plist->count++;
        plist = (property_list_t *)realloc(plist, plist->byteSize());
    }

    property_t& prop = plist->get(plist->count - 1);
    prop.name = strdupIfMutable(name);
    prop.attributes = copyPropertyAttributeString(attrs, count);
}
4897
void
protocol_addProperty(Protocol *proto_gen, const char *name,
                     const objc_property_attribute_t *attrs,
                     unsigned int count,
                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    protocol_t *proto = newprotocol(proto_gen);

    // Only protocols still under construction (isa == __IncompleteProtocol)
    // may be modified.
    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto) return;
    if (!name) return;

    mutex_locker_t lock(runtimeLock);

    if (proto->ISA() != cls) {
        _objc_inform("protocol_addProperty: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    if (isRequiredProperty && isInstanceProperty) {
        protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
    }
    else if (isRequiredProperty && !isInstanceProperty) {
        protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
    }
    // Optional properties are silently dropped — the disabled cases below
    // reference protocol_t fields that apparently don't exist; TODO confirm
    // protocol_t has no storage for optional properties.
    //else if (!isRequiredProperty && isInstanceProperty) {
    //    protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
    //}
    //else /* !isRequiredProperty && !isInstanceProperty) */ {
    //    protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
    //}
}
4933
// Walks every realized class once. If `buffer` is non-nil, fills it with
// up to bufferLen classes. Returns the total number of realized classes,
// which may exceed bufferLen.
static int
objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
{
    int total = 0;
    int written = 0;

    foreach_realized_class([&](Class cls) {
        total++;
        if (buffer && written < bufferLen) {
            buffer[written++] = cls;
        }
        return true;
    });

    return total;
}
4957
// Returns a malloc'd, nil-terminated array of all realized classes, or
// nil if there are none. Caller frees. Caller must hold runtimeLock so
// the class set cannot change between the two passes.
static Class *
objc_copyRealizedClassList_nolock(unsigned int *outCount)
{
    // First pass: count realized classes.
    unsigned int total = 0;
    foreach_realized_class([&total](Class cls) {
        total++;
        return true;
    });

    // Second pass: copy them into a nil-terminated heap array.
    Class *list = nil;
    if (total > 0) {
        unsigned int idx = 0;
        list = (Class *)malloc((1+total) * sizeof(Class));
        foreach_realized_class([&](Class cls) {
            list[idx++] = cls;
            return true;
        });
        list[idx] = nil;
    }

    if (outCount) *outCount = total;
    return list;
}
4983
// Copies up to `len` occupied (sel, imp) pairs from the class's method
// cache into `buffer`. Caller must hold the appropriate cache lock.
static void
class_getImpCache_nolock(Class cls, cache_t &cache, objc_imp_cache_entry *buffer, int len)
{
    bucket_t *buckets = cache.buckets();
    uintptr_t capacity = cache.capacity();
    int out = 0;

    for (uintptr_t i = 0; i < capacity && out < len; i++) {
        // Empty buckets have a nil sel; skip them.
        if (!buckets[i].sel()) continue;
        buffer[out].imp = buckets[i].imp(cls);
        buffer[out].sel = buckets[i].sel();
        out++;
    }
}
5001
5002 /***********************************************************************
5003 * objc_getClassList
5004 * Returns pointers to all classes.
5005 * This requires all classes be realized, which is regretfully non-lazy.
5006 * Locking: acquires runtimeLock
5007 **********************************************************************/
// Fills `buffer` with up to bufferLen classes and returns the total
// class count. Forces realization of every class first (regretfully
// non-lazy) so the realized-class walk sees them all.
int
objc_getClassList(Class *buffer, int bufferLen)
{
    mutex_locker_t lock(runtimeLock);

    realizeAllClasses();

    int total = objc_getRealizedClassList_nolock(buffer, bufferLen);
    return total;
}
5017
/***********************************************************************
* objc_copyRealizedClassList
* Returns pointers to realized classes only; does not realize any new
* classes.
*
* outCount may be nil. *outCount is the number of classes returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: acquires runtimeLock
**********************************************************************/
Class *
objc_copyRealizedClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    // Unlike objc_copyClassList(), this does not force realization;
    // it returns only the classes that are already realized.
    return objc_copyRealizedClassList_nolock(outCount);
}
5034
5035
5036 /***********************************************************************
5037 * objc_copyClassList
5038 * Returns pointers to all classes.
5039 * This requires all classes be realized, which is regretfully non-lazy.
5040 *
5041 * outCount may be nil. *outCount is the number of classes returned.
5042 * If the returned array is not nil, it is nil-terminated and must be
5043 * freed with free().
5044 * Locking: write-locks runtimeLock
5045 **********************************************************************/
// Returns a malloc'd, nil-terminated array of every class. Realizes all
// classes first (regretfully non-lazy). Caller frees.
Class *
objc_copyClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    realizeAllClasses();

    Class *result = objc_copyRealizedClassList_nolock(outCount);
    return result;
}
5055
/***********************************************************************
* class_copyImpCache
* Returns the current content of the Class IMP Cache
*
* outCount may be nil. *outCount is the number of entries returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: acquires cacheUpdateLock or runtimeLock, depending on
* CONFIG_USE_CACHE_LOCK
**********************************************************************/
objc_imp_cache_entry *
class_copyImpCache(Class cls, int *outCount)
{
    objc_imp_cache_entry *buffer = nullptr;

    // The lock guarding cache contents is configuration-dependent.
#if CONFIG_USE_CACHE_LOCK
    mutex_locker_t lock(cacheUpdateLock);
#else
    mutex_locker_t lock(runtimeLock);
#endif

    cache_t &cache = cls->cache;
    int count = (int)cache.occupied();

    if (count) {
        // 1+count entries: calloc zero-fills, so the extra slot is the
        // nil terminator promised to callers.
        // NOTE(review): calloc result is used unchecked.
        buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
        class_getImpCache_nolock(cls, cache, buffer, count);
    }

    if (outCount) *outCount = count;
    return buffer;
}
5087
5088
/***********************************************************************
* objc_copyProtocolList
* Returns pointers to all protocols.
* Locking: acquires runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
objc_copyProtocolList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    NXMapTable *protocol_map = protocols();

    // Find all the protocols from the pre-optimized images. These protocols
    // won't be in the protocol map.
    objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
    {
        header_info *hi;
        for (hi = FirstHeader; hi; hi = hi->getNext()) {
            if (!hi->hasPreoptimizedProtocols())
                continue;

            size_t count, i;
            const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
            for (i = 0; i < count; i++) {
                const protocol_t* protocol = protolist[i];

                // Skip protocols we have in the run time map. These likely
                // correspond to protocols added dynamically which have the same
                // name as a protocol found later in a dlopen'ed shared cache image.
                if (NXMapGet(protocol_map, protocol->mangledName) != nil)
                    continue;

                // The protocols in the shared cache protolist point to their
                // original on-disk object, not the optimized one. We can use the name
                // to find the optimized one.
                Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
                preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
            }
        }
    }

    // Deduplication above guarantees the two sources are disjoint, so the
    // total is just the sum of both counts.
    unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();
    if (count == 0) {
        if (outCount) *outCount = 0;
        return nil;
    }

    // count+1: room for the nil terminator. NOTE(review): malloc unchecked.
    Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));

    // Copy the dynamically-registered protocols first.
    unsigned int i = 0;
    Protocol *proto;
    const char *name;
    NXMapState state = NXInitMapState(protocol_map);
    while (NXNextMapState(protocol_map, &state,
                          (const void **)&name, (const void **)&proto))
    {
        result[i++] = proto;
    }

    // Add any protocols found in the pre-optimized table
    for (auto it : preoptimizedProtocols) {
        result[i++] = it.second;
    }

    result[i++] = nil;  // nil terminator
    ASSERT(i == count+1);

    if (outCount) *outCount = count;
    return result;
}
5159
5160
5161 /***********************************************************************
5162 * objc_getProtocol
5163 * Get a protocol by name, or return nil
5164 * Locking: read-locks runtimeLock
5165 **********************************************************************/
// Looks up a protocol by name under the runtime lock; nil if unknown.
Protocol *objc_getProtocol(const char *name)
{
    mutex_locker_t lock(runtimeLock);
    Protocol *result = getProtocol(name);
    return result;
}
5171
5172
5173 /***********************************************************************
5174 * class_copyMethodList
5175 * fixme
5176 * Locking: read-locks runtimeLock
5177 **********************************************************************/
// Returns a malloc'd, nil-terminated array of the class's own methods
// (no superclass methods), or nil if there are none. Caller frees.
Method *
class_copyMethodList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);
    const auto methods = cls->data()->methods();

    ASSERT(cls->isRealized());

    Method *result = nil;
    unsigned int written = 0;
    unsigned int total = methods.count();

    if (total > 0) {
        result = (Method *)malloc((total + 1) * sizeof(Method));
        for (auto& meth : methods) {
            result[written++] = &meth;
        }
        result[written] = nil;
    }

    if (outCount) *outCount = written;
    return result;
}
5209
5210
5211 /***********************************************************************
5212 * class_copyIvarList
5213 * fixme
5214 * Locking: read-locks runtimeLock
5215 **********************************************************************/
// Returns a malloc'd, nil-terminated array of the class's own ivars, or
// nil if it declares none. Caller frees.
Ivar *
class_copyIvarList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    ASSERT(cls->isRealized());

    Ivar *result = nil;
    unsigned int written = 0;
    const ivar_list_t *ivars = cls->data()->ro()->ivars;

    if (ivars && ivars->count) {
        result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
        for (auto& ivar : *ivars) {
            // Anonymous bitfields have no offset slot and are omitted.
            if (!ivar.offset) continue;
            result[written++] = &ivar;
        }
        result[written] = nil;
    }

    if (outCount) *outCount = written;
    return result;
}
5245
5246
5247 /***********************************************************************
5248 * class_copyPropertyList. Returns a heap block containing the
5249 * properties declared in the class, or nil if the class
5250 * declares no properties. Caller must free the block.
5251 * Does not copy any superclass's properties.
5252 * Locking: read-locks runtimeLock
5253 **********************************************************************/
// Returns a malloc'd, nil-terminated array of the class's own declared
// properties (no superclass properties), or nil if there are none.
// Caller frees.
objc_property_t *
class_copyPropertyList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    auto const props = cls->data()->properties();
    unsigned int total = props.count();

    property_t **result = nil;
    unsigned int written = 0;
    if (total > 0) {
        result = (property_t **)malloc((total + 1) * sizeof(property_t *));
        for (auto& prop : props) {
            result[written++] = &prop;
        }
        result[written] = nil;
    }

    if (outCount) *outCount = written;
    return (objc_property_t *)result;
}
5285
5286
5287 /***********************************************************************
5288 * objc_class::getLoadMethod
5289 * fixme
5290 * Called only from add_class_to_loadable_list.
5291 * Locking: runtimeLock must be read- or write-locked by the caller.
5292 **********************************************************************/
// Returns this class's +load implementation, or nil if it has none.
// +load lives on the metaclass's compiler-emitted (base) method list.
// Locking: caller holds runtimeLock.
IMP
objc_class::getLoadMethod()
{
    runtimeLock.assertLocked();

    ASSERT(isRealized());
    ASSERT(ISA()->isRealized());
    ASSERT(!isMetaClass());
    ASSERT(ISA()->isMetaClass());

    if (const method_list_t *mlist = ISA()->data()->ro()->baseMethods()) {
        for (const auto& meth : *mlist) {
            if (0 == strcmp(sel_cname(meth.name()), "load")) {
                return meth.imp(false);
            }
        }
    }

    return nil;
}
5317
5318
5319 /***********************************************************************
5320 * _category_getName
5321 * Returns a category's name.
5322 * Locking: none
5323 **********************************************************************/
const char *
_category_getName(Category cat)
{
    // Name is stored directly in the category's static metadata.
    return cat->name;
}
5329
5330
5331 /***********************************************************************
5332 * _category_getClassName
5333 * Returns a category's class's name
5334 * Called only from add_category_to_loadable_list and
5335 * remove_category_from_loadable_list for logging purposes.
5336 * Locking: runtimeLock must be read- or write-locked by the caller
5337 **********************************************************************/
const char *
_category_getClassName(Category cat)
{
    runtimeLock.assertLocked();
    // The stored class reference may need remapping to the live class
    // object before asking for a loggable name.
    return remapClass(cat->cls)->nameForLogging();
}
5344
5345
5346 /***********************************************************************
5347 * _category_getClass
5348 * Returns a category's class
5349 * Called only by call_category_loads.
5350 * Locking: read-locks runtimeLock
5351 **********************************************************************/
// Returns the (remapped) class a category is attached to.
Class
_category_getClass(Category cat)
{
    mutex_locker_t lock(runtimeLock);
    Class cls = remapClass(cat->cls);
    ASSERT(cls->isRealized());  // ok for call_category_loads' usage
    return cls;
}
5360
5361
5362 /***********************************************************************
5363 * _category_getLoadMethod
5364 * fixme
5365 * Called only from add_category_to_loadable_list
5366 * Locking: runtimeLock must be read- or write-locked by the caller
5367 **********************************************************************/
// Returns a category's +load implementation, or nil if it has none.
// Category +load is stored in the category's class-method list.
// Locking: caller holds runtimeLock.
IMP
_category_getLoadMethod(Category cat)
{
    runtimeLock.assertLocked();

    if (const method_list_t *mlist = cat->classMethods) {
        for (const auto& meth : *mlist) {
            if (0 == strcmp(sel_cname(meth.name()), "load")) {
                return meth.imp(false);
            }
        }
    }

    return nil;
}
5387
5388
5389 /***********************************************************************
5390 * category_t::propertiesForMeta
5391 * Return a category's instance or class properties.
5392 * hi is the image containing the category.
5393 **********************************************************************/
// Returns a category's instance or class properties.
// `hi` is the image containing the category; class properties exist only
// when that image was built with category-class-property support.
property_list_t *
category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
{
    if (!isMeta) {
        return instanceProperties;
    }
    if (hi->info()->hasCategoryClassProperties()) {
        return _classProperties;
    }
    return nil;
}
5401
5402
5403 /***********************************************************************
5404 * class_copyProtocolList
5405 * fixme
5406 * Locking: read-locks runtimeLock
5407 **********************************************************************/
// Returns a malloc'd, nil-terminated array of the protocols this class
// itself conforms to (no superclass protocols), or nil. Caller frees.
Protocol * __unsafe_unretained *
class_copyProtocolList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);
    const auto protocols = cls->data()->protocols();

    checkIsKnownClass(cls);

    ASSERT(cls->isRealized());

    Protocol **result = nil;
    unsigned int written = 0;
    unsigned int total = protocols.count();

    if (total > 0) {
        result = (Protocol **)malloc((total+1) * sizeof(Protocol *));
        for (const auto& proto : protocols) {
            // Stored refs may need remapping to the runtime protocol object.
            result[written++] = (Protocol *)remapProtocol(proto);
        }
        result[written] = nil;
    }

    if (outCount) *outCount = written;
    return result;
}
5441
5442
5443 /***********************************************************************
5444 * objc_copyImageNames
5445 * Copies names of loaded images with ObjC contents.
5446 *
5447 * Locking: acquires runtimeLock
5448 **********************************************************************/
const char **objc_copyImageNames(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    // First pass: count loaded headers so the array can be sized.
    int HeaderCount = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        HeaderCount++;
    }

#if TARGET_OS_WIN32
    const TCHAR **names = (const TCHAR **)
        malloc((HeaderCount+1) * sizeof(TCHAR *));
#else
    const char **names = (const char **)
        malloc((HeaderCount+1) * sizeof(char *));
#endif

    // Second pass: collect each image's name. Images without a name are
    // skipped, so count may end up smaller than HeaderCount.
    unsigned int count = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        if (hi->moduleName) {
            names[count++] = hi->moduleName;
        }
#else
        const char *fname = hi->fname();
        if (fname) {
            names[count++] = fname;
        }
#endif
    }
    names[count] = nil;  // nil terminator

    if (count == 0) {
        // Return nil instead of empty list if there are no images
        free((void *)names);
        names = nil;
    }

    if (outCount) *outCount = count;
    return names;
}
5490
5491
5492 /***********************************************************************
5493 * copyClassNamesForImage_nolock
5494 * Copies class names from the given image.
5495 * Missing weak-import classes are omitted.
5496 * Swift class names are demangled.
5497 *
5498 * Locking: runtimeLock must be held by the caller
5499 **********************************************************************/
// Returns a malloc'd, nil-terminated array of the (demangled) class
// names in one image. Weak-import classes that failed to load are
// remapped to nil and omitted. Caller frees the array (not the names).
// Locking: caller must hold runtimeLock.
const char **
copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
{
    runtimeLock.assertLocked();
    ASSERT(hi);

    size_t total;
    classref_t const *classlist = _getObjc2ClassList(hi, &total);
    const char **names = (const char **)
        malloc((total+1) * sizeof(const char *));

    // Compact as we go: missing classes are simply not written.
    size_t kept = 0;
    for (size_t i = 0; i < total; i++) {
        Class cls = remapClass(classlist[i]);
        if (!cls) continue;  // ignored weak-linked class
        names[kept++] = cls->demangledName(/* needs lock */false);
    }
    names[kept] = nil;

    if (outCount) *outCount = (unsigned int)kept;
    return names;
}
5526
5527
5528
5529 /***********************************************************************
5530 * objc_copyClassNamesForImage
5531 * Copies class names from the named image.
5532 * The image name must be identical to dladdr's dli_fname value.
5533 * Missing weak-import classes are omitted.
5534 * Swift class names are demangled.
5535 *
5536 * Locking: acquires runtimeLock
5537 **********************************************************************/
const char **
objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
{
    if (!image) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image. The name must match dladdr's dli_fname exactly.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
#else
        if (0 == strcmp(image, hi->fname())) break;
#endif
    }

    if (!hi) {
        // Unknown image: report zero classes.
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}
5565
5566
5567 /***********************************************************************
5568 * objc_copyClassNamesForImageHeader
5569 * Copies class names from the given image.
5570 * Missing weak-import classes are omitted.
5571 * Swift class names are demangled.
5572 *
5573 * Locking: acquires runtimeLock
5574 **********************************************************************/
// Like objc_copyClassNamesForImage but keyed by mach_header pointer
// instead of file name. Returns nil for an unknown header.
const char **
objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
{
    if (!mh) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Locate the header_info whose mach header matches.
    header_info *hi = FirstHeader;
    while (hi != nil && hi->mhdr() != (const headerType *)mh) {
        hi = hi->getNext();
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}
5598
5599
5600 /***********************************************************************
5601 * saveTemporaryString
5602 * Save a string in a thread-local FIFO buffer.
5603 * This is suitable for temporary strings generated for logging purposes.
5604 **********************************************************************/
// Saves `str` in this thread's fixed-size FIFO of printable names:
// free the oldest entry, shift the rest down, append the new one.
// Suitable for temporary strings generated for logging.
static void
saveTemporaryString(char *str)
{
    _objc_pthread_data *data = _objc_fetch_pthread_data(true);
    free(data->printableNames[0]);  // free(NULL) is a harmless no-op

    int last = countof(data->printableNames) - 1;
    int i = 0;
    while (i < last) {
        data->printableNames[i] = data->printableNames[i+1];
        i++;
    }
    data->printableNames[last] = str;
}
5620
5621
5622 /***********************************************************************
5623 * objc_class::nameForLogging
5624 * Returns the class's name, suitable for display.
5625 * The returned memory is TEMPORARY. Print it or copy it immediately.
5626 * Locking: none
5627 **********************************************************************/
const char *
objc_class::nameForLogging()
{
    // Handle the easy case directly.
    if (isRealized() || isFuture()) {
        if (!isAnySwift()) {
            // Non-Swift names are stored un-mangled in ro data.
            return data()->ro()->name;
        }
        auto rwe = data()->ext();
        if (rwe && rwe->demangledName) {
            return rwe->demangledName;
        }
    }

    // Slow path: demangle (or copy) the name into a TEMPORARY string
    // held in a small per-thread FIFO (see saveTemporaryString); the
    // caller must print or copy it immediately.
    char *result;

    const char *name = mangledName();
    char *de = copySwiftV1DemangledName(name);
    if (de) result = de;
    else result = strdup(name);

    saveTemporaryString(result);
    return result;
}
5652
5653
5654 /***********************************************************************
5655 * objc_class::demangledName
5656 * If realize=false, the class must already be realized or future.
5657 * Locking: runtimeLock may or may not be held by the caller.
5658 **********************************************************************/
// Guards DemangleCache below.
mutex_t DemangleCacheLock;
// Demangled names for classes that were not yet realized when asked;
// kept in a side table so repeated calls don't leak.
static objc::DenseSet<const char *> *DemangleCache;
const char *
objc_class::demangledName(bool needsLock)
{
    if (!needsLock) {
        runtimeLock.assertLocked();
    }

    // Return previously demangled name if available.
    if (isRealized() || isFuture()) {
        // Swift metaclasses don't have the is-Swift bit.
        // We can't take this shortcut for them.
        if (!isMetaClass() && !isAnySwift()) {
            return data()->ro()->name;
        }
        auto rwe = data()->ext();
        if (rwe && rwe->demangledName) {
            return rwe->demangledName;
        }
    }

    // Try demangling the mangled name.
    const char *mangled = mangledName();
    char *de = copySwiftV1DemangledName(mangled);
    class_rw_ext_t *rwe;

    if (isRealized() || isFuture()) {
        if (needsLock) {
            mutex_locker_t lock(runtimeLock);
            rwe = data()->extAllocIfNeeded();
        } else {
            rwe = data()->extAllocIfNeeded();
        }
        // Class is already realized or future.
        // Save demangling result in rw data.
        // We may not own runtimeLock so use an atomic operation instead.
        // If another thread won the CAS race, discard our copy and use theirs.
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
                                               (void**)&rwe->demangledName))
        {
            if (de) free(de);
        }
        return rwe->demangledName;
    }

    // Class is not yet realized.
    if (!de) {
        // Name is not mangled. Return it without caching.
        return mangled;
    }

    // Class is not yet realized and name is mangled.
    // Allocate the name but don't save it in the class.
    // Save the name in a side cache instead to prevent leaks.
    // When the class is actually realized we may allocate a second
    // copy of the name, but we don't care.
    // (Previously we would try to realize the class now and save the
    // name there, but realization is more complicated for Swift classes.)

    // Only objc_copyClassNamesForImage() should get here.
    // fixme lldb's calls to class_getName() can also get here when
    // interrogating the dyld shared cache. (rdar://27258517)
    // fixme ASSERT(realize);

    const char *cached;
    {
        mutex_locker_t lock(DemangleCacheLock);
        if (!DemangleCache) {
            DemangleCache = new objc::DenseSet<const char *>{};
        }
        // insert() hands back the existing entry if one is already present.
        cached = *DemangleCache->insert(de).first;
    }
    if (cached != de) free(de);
    return cached;
}
5734
5735
5736 /***********************************************************************
5737 * class_getName
5738 * fixme
5739 * Locking: may acquire DemangleCacheLock
5740 **********************************************************************/
const char *class_getName(Class cls)
{
    if (!cls) return "nil";
    // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
    // ASSERT(cls->isRealized() || cls->isFuture());
    // Returns the Swift-demangled name where applicable.
    return cls->demangledName(/* needs lock */true);
}
5748
5749 /***********************************************************************
5750 * objc_debug_class_getNameRaw
5751 * fixme
5752 * Locking: none
5753 **********************************************************************/
const char *objc_debug_class_getNameRaw(Class cls)
{
    if (!cls) return "nil";
    // Raw (possibly Swift-mangled) name: no demangling, no allocation.
    return cls->mangledName();
}
5759
5760
5761 /***********************************************************************
5762 * class_getVersion
5763 * fixme
5764 * Locking: none
5765 **********************************************************************/
// Returns the class's version. An explicitly set version lives in the
// rw extension; without one, metaclasses report 7 and classes report 0.
int
class_getVersion(Class cls)
{
    if (!cls) return 0;
    ASSERT(cls->isRealized());

    if (auto rwe = cls->data()->ext()) {
        return rwe->version;
    }
    return cls->isMetaClass() ? 7 : 0;
}
5777
5778
5779 /***********************************************************************
5780 * class_setVersion
5781 * fixme
5782 * Locking: none
5783 **********************************************************************/
void
class_setVersion(Class cls, int version)
{
    if (!cls) return;
    ASSERT(cls->isRealized());
    // Unlocked fast-path check; the allocation path re-enters under
    // runtimeLock. extAllocIfNeeded presumably handles a concurrent
    // allocation without duplicating — TODO confirm.
    auto rwe = cls->data()->ext();
    if (!rwe) {
        mutex_locker_t lock(runtimeLock);
        rwe = cls->data()->extAllocIfNeeded();
    }

    rwe->version = version;
}
5797
5798 /***********************************************************************
5799 * search_method_list_inline
5800 **********************************************************************/
// Binary search of a sorted (fixed-up) method list by selector address.
// When duplicates exist, returns the FIRST matching entry (see the
// rewind loop below). Returns nil if the selector is absent.
ALWAYS_INLINE static method_t *
findMethodInSortedMethodList(SEL key, const method_list_t *list)
{
    ASSERT(list);

    auto first = list->begin();
    auto base = first;
    decltype(first) probe;

    uintptr_t keyValue = (uintptr_t)key;
    uint32_t count;

    // Search [base, base+count): halve `count` each round; on "greater",
    // move base past the probe and discard one extra element (the probe).
    for (count = list->count; count != 0; count >>= 1) {
        probe = base + (count >> 1);

        uintptr_t probeValue = (uintptr_t)probe->name();

        if (keyValue == probeValue) {
            // `probe` is a match.
            // Rewind looking for the *first* occurrence of this value.
            // This is required for correct category overrides.
            while (probe > first && keyValue == (uintptr_t)(probe - 1)->name()) {
                probe--;
            }
            return &*probe;
        }

        if (keyValue > probeValue) {
            base = probe + 1;
            count--;
        }
    }

    return nil;
}
5836
// Finds `sel` in a single method list: binary search when the list is
// fixed-up with the expected entry size, linear scan otherwise.
// Returns nil if not found.
ALWAYS_INLINE static method_t *
search_method_list_inline(const method_list_t *mlist, SEL sel)
{
    int methodListIsFixedUp = mlist->isFixedUp();
    int methodListHasExpectedSize = mlist->isExpectedSize();

    if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
        return findMethodInSortedMethodList(sel, mlist);
    } else {
        // Linear search of unsorted method list
        for (auto& meth : *mlist) {
            if (meth.name() == sel) return &meth;
        }
    }

#if DEBUG
    // sanity-check negative results
    if (mlist->isFixedUp()) {
        for (auto& meth : *mlist) {
            if (meth.name() == sel) {
                _objc_fatal("linear search worked when binary search did not");
            }
        }
    }
#endif

    return nil;
}
5865
// Out-of-line wrapper for cold callers; hot paths call
// search_method_list_inline directly.
NEVER_INLINE static method_t *
search_method_list(const method_list_t *mlist, SEL sel)
{
    return search_method_list_inline(mlist, sel);
}
5871
5872 /***********************************************************************
5873 * method_lists_contains_any
5874 **********************************************************************/
// Returns true if any method list in [mlists, end) contains any selector
// in sels[0..selcount). Sorted fixed-up lists use binary search per
// selector; other lists are scanned linearly.
template<typename T>
static NEVER_INLINE bool
method_lists_contains_any(T *mlists, T *end,
                          SEL sels[], size_t selcount)
{
    while (mlists < end) {
        const method_list_t *mlist = *mlists++;
        int methodListIsFixedUp = mlist->isFixedUp();
        int methodListHasExpectedSize = mlist->entsize() == sizeof(struct method_t::big);

        if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
            for (size_t i = 0; i < selcount; i++) {
                if (findMethodInSortedMethodList(sels[i], mlist)) {
                    return true;
                }
            }
        } else {
            for (auto& meth : *mlist) {
                for (size_t i = 0; i < selcount; i++) {
                    if (meth.name() == sels[i]) {
                        return true;
                    }
                }
            }
        }
    }
    return false;
}
5903
5904 /***********************************************************************
5905 * getMethodNoSuper_nolock
5906 * fixme
5907 * Locking: runtimeLock must be read- or write-locked by the caller
5908 **********************************************************************/
/***********************************************************************
* getMethodNoSuper_nolock
* Look up sel in cls's own method lists only (no superclass search).
* Returns the method_t, or nil if cls does not implement sel.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static method_t *
getMethodNoSuper_nolock(Class cls, SEL sel)
{
    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    // fixme nil cls?
    // fixme nil sel?

    auto const methods = cls->data()->methods();
    auto it = methods.beginLists();
    auto const stop = methods.endLists();
    while (it != stop) {
        // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
        // caller of search_method_list; inlining it keeps this function
        // frame-less and eliminates any store from this codepath.
        method_t *found = search_method_list_inline(*it, sel);
        if (found) return found;
        ++it;
    }

    return nil;
}
5934
5935
5936 /***********************************************************************
5937 * getMethod_nolock
5938 * fixme
5939 * Locking: runtimeLock must be read- or write-locked by the caller
5940 **********************************************************************/
/***********************************************************************
* getMethod_nolock
* Look up sel in cls and, failing that, in each superclass in turn.
* Returns the first matching method_t, or nil if none is found.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static method_t *
getMethod_nolock(Class cls, SEL sel)
{
    runtimeLock.assertLocked();

    // fixme nil cls?
    // fixme nil sel?

    ASSERT(cls->isRealized());

    for ( ; cls; cls = cls->superclass) {
        method_t *found = getMethodNoSuper_nolock(cls, sel);
        if (found) return found;
    }

    return nil;
}
5959
5960
5961 /***********************************************************************
5962 * _class_getMethod
5963 * fixme
5964 * Locking: read-locks runtimeLock
5965 **********************************************************************/
/***********************************************************************
* _class_getMethod
* Locked wrapper around getMethod_nolock.
* Locking: read-locks runtimeLock
**********************************************************************/
static Method _class_getMethod(Class cls, SEL sel)
{
    mutex_locker_t lock(runtimeLock);
    Method result = getMethod_nolock(cls, sel);
    return result;
}
5971
5972
5973 /***********************************************************************
5974 * class_getInstanceMethod. Return the instance method for the
5975 * specified class and selector.
5976 **********************************************************************/
Method class_getInstanceMethod(Class cls, SEL sel)
{
    // Returns the Method (not just the IMP) for sel on cls, or nil.
    if (!cls || !sel) return nil;

    // This deliberately avoids +initialize because it historically did so.

    // This implementation is a bit weird because it's the only place that
    // wants a Method instead of an IMP.

#warning fixme build and search caches

    // Search method lists, try method resolver, etc.
    // Called for its side effects: with LOOKUP_RESOLVER the resolver may
    // add the method, so the Method lookup below can see it. The returned
    // IMP is deliberately discarded.
    lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);

#warning fixme build and search caches

    return _class_getMethod(cls, sel);
}
5995
5996
5997 /***********************************************************************
5998 * resolveClassMethod
5999 * Call +resolveClassMethod, looking for a method to be added to class cls.
6000 * cls should be a metaclass.
6001 * Does not check if the method already exists.
6002 **********************************************************************/
static void resolveClassMethod(id inst, SEL sel, Class cls)
{
    // cls is expected to be a metaclass (asserted below); the resolver
    // message itself is sent to the corresponding non-meta class.
    runtimeLock.assertUnlocked();
    ASSERT(cls->isRealized());
    ASSERT(cls->isMetaClass());

    if (!lookUpImpOrNil(inst, @selector(resolveClassMethod:), cls)) {
        // Resolver not implemented.
        return;
    }

    // Hold the lock only while mapping metaclass -> class; it must be
    // released before messaging the class below.
    Class nonmeta;
    {
        mutex_locker_t lock(runtimeLock);
        nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
        // +initialize path should have realized nonmeta already
        if (!nonmeta->isRealized()) {
            _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
                        nonmeta->nameForLogging(), nonmeta);
        }
    }
    // Cast objc_msgSend to the resolver's exact signature before calling.
    BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
    bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);

    // Cache the result (good or bad) so the resolver doesn't fire next time.
    // +resolveClassMethod adds to self->ISA() a.k.a. cls
    IMP imp = lookUpImpOrNil(inst, sel, cls);

    if (resolved && PrintResolving) {
        if (imp) {
            _objc_inform("RESOLVE: method %c[%s %s] "
                         "dynamically resolved to %p",
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel), imp);
        }
        else {
            // Method resolver didn't add anything?
            _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
                         ", but no new implementation of %c[%s %s] was found",
                         cls->nameForLogging(), sel_getName(sel),
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel));
        }
    }
}
6048
6049
6050 /***********************************************************************
6051 * resolveInstanceMethod
6052 * Call +resolveInstanceMethod, looking for a method to be added to class cls.
6053 * cls may be a metaclass or a non-meta class.
6054 * Does not check if the method already exists.
6055 **********************************************************************/
static void resolveInstanceMethod(id inst, SEL sel, Class cls)
{
    // cls may be a metaclass or a non-meta class; the resolver is looked
    // up on cls's metaclass (cls->ISA()) and sent to cls itself.
    runtimeLock.assertUnlocked();
    ASSERT(cls->isRealized());
    SEL resolve_sel = @selector(resolveInstanceMethod:);

    if (!lookUpImpOrNil(cls, resolve_sel, cls->ISA())) {
        // Resolver not implemented.
        return;
    }

    // Cast objc_msgSend to the resolver's exact signature before calling.
    BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
    bool resolved = msg(cls, resolve_sel, sel);

    // Cache the result (good or bad) so the resolver doesn't fire next time.
    // +resolveInstanceMethod adds to self a.k.a. cls
    IMP imp = lookUpImpOrNil(inst, sel, cls);

    if (resolved && PrintResolving) {
        if (imp) {
            _objc_inform("RESOLVE: method %c[%s %s] "
                         "dynamically resolved to %p",
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel), imp);
        }
        else {
            // Method resolver didn't add anything?
            _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
                         ", but no new implementation of %c[%s %s] was found",
                         cls->nameForLogging(), sel_getName(sel),
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel));
        }
    }
}
6091
6092
6093 /***********************************************************************
6094 * resolveMethod_locked
6095 * Call +resolveClassMethod or +resolveInstanceMethod.
6096 *
6097 * Called with the runtimeLock held to avoid pressure in the caller
 * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller
6099 **********************************************************************/
static NEVER_INLINE IMP
resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
{
    // Entered with runtimeLock held; the lock is dropped before any
    // resolver message is sent so arbitrary user code never runs under it.
    runtimeLock.assertLocked();
    ASSERT(cls->isRealized());

    runtimeLock.unlock();

    if (! cls->isMetaClass()) {
        // try [cls resolveInstanceMethod:sel]
        resolveInstanceMethod(inst, sel, cls);
    }
    else {
        // try [nonMetaClass resolveClassMethod:sel]
        // and [cls resolveInstanceMethod:sel]
        resolveClassMethod(inst, sel, cls);
        if (!lookUpImpOrNil(inst, sel, cls)) {
            resolveInstanceMethod(inst, sel, cls);
        }
    }

    // chances are that calling the resolver have populated the cache
    // so attempt using it
    return lookUpImpOrForward(inst, sel, cls, behavior | LOOKUP_CACHE);
}
6125
6126
6127 /***********************************************************************
6128 * log_and_fill_cache
6129 * Log this method call. If the logger permits it, fill the method cache.
 * cls is the class whose cache should be filled.
6131 * implementer is the class that owns the implementation in question.
6132 **********************************************************************/
static void
log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
{
    // Fill cls's method cache with (sel -> imp). When message logging is
    // compiled in and enabled, the logger may veto the cache fill.
#if SUPPORT_MESSAGE_LOGGING
    if (slowpath(objcMsgLogEnabled && implementer)) {
        bool cacheIt = logMessageSend(implementer->isMetaClass(),
                                      cls->nameForLogging(),
                                      implementer->nameForLogging(),
                                      sel);
        if (!cacheIt) return;
    }
#endif
    cache_fill(cls, sel, imp, receiver);
}
6147
6148
6149 /***********************************************************************
6150 * lookUpImpOrForward.
6151 * The standard IMP lookup.
6152 * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
6153 * Without LOOKUP_CACHE: skips optimistic unlocked lookup (but uses cache elsewhere)
6154 * Most callers should use LOOKUP_INITIALIZE and LOOKUP_CACHE
6155 * inst is an instance of cls or a subclass thereof, or nil if none is known.
6156 * If cls is an un-initialized metaclass then a non-nil inst is faster.
6157 * May return _objc_msgForward_impcache. IMPs destined for external use
6158 * must be converted to _objc_msgForward or _objc_msgForward_stret.
6159 * If you don't want forwarding at all, use LOOKUP_NIL.
6160 **********************************************************************/
IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
{
    const IMP forward_imp = (IMP)_objc_msgForward_impcache;
    IMP imp = nil;
    Class curClass;

    runtimeLock.assertUnlocked();

    // Optimistic cache lookup
    if (fastpath(behavior & LOOKUP_CACHE)) {
        imp = cache_getImp(cls, sel);
        if (imp) goto done_nolock;
    }

    // runtimeLock is held during isRealized and isInitialized checking
    // to prevent races against concurrent realization.

    // runtimeLock is held during method search to make
    // method-lookup + cache-fill atomic with respect to method addition.
    // Otherwise, a category could be added but ignored indefinitely because
    // the cache was re-filled with the old value after the cache flush on
    // behalf of the category.

    runtimeLock.lock();

    // We don't want people to be able to craft a binary blob that looks like
    // a class but really isn't one and do a CFI attack.
    //
    // To make these harder we want to make sure this is a class that was
    // either built into the binary or legitimately registered through
    // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
    checkIsKnownClass(cls);

    if (slowpath(!cls->isRealized())) {
        cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
        // runtimeLock may have been dropped but is now locked again
    }

    if (slowpath((behavior & LOOKUP_INITIALIZE) && !cls->isInitialized())) {
        cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
        // runtimeLock may have been dropped but is now locked again

        // If sel == initialize, class_initialize will send +initialize and
        // then the messenger will send +initialize again after this
        // procedure finishes. Of course, if this is not being called
        // from the messenger then it won't happen. 2778172
    }

    runtimeLock.assertLocked();
    curClass = cls;

    // The code used to lookup the class's cache again right after
    // we take the lock but for the vast majority of the cases
    // evidence shows this is a miss most of the time, hence a time loss.
    //
    // The only codepath calling into this without having performed some
    // kind of cache lookup is class_getInstanceMethod().

    // Walk the superclass chain: check each class's method lists, then
    // its cache, until an IMP or a forward:: marker is found.
    for (unsigned attempts = unreasonableClassCount();;) {
        // curClass method list.
        Method meth = getMethodNoSuper_nolock(curClass, sel);
        if (meth) {
            imp = meth->imp(false);
            goto done;
        }

        if (slowpath((curClass = curClass->superclass) == nil)) {
            // No implementation found, and method resolver didn't help.
            // Use forwarding.
            imp = forward_imp;
            break;
        }

        // Halt if there is a cycle in the superclass chain.
        if (slowpath(--attempts == 0)) {
            _objc_fatal("Memory corruption in class list.");
        }

        // Superclass cache.
        imp = cache_getImp(curClass, sel);
        if (slowpath(imp == forward_imp)) {
            // Found a forward:: entry in a superclass.
            // Stop searching, but don't cache yet; call method
            // resolver for this class first.
            break;
        }
        if (fastpath(imp)) {
            // Found the method in a superclass. Cache it in this class.
            goto done;
        }
    }

    // No implementation found. Try method resolver once.

    if (slowpath(behavior & LOOKUP_RESOLVER)) {
        // Clear the flag so the resolver runs at most once per lookup;
        // resolveMethod_locked drops the lock and retries the lookup.
        behavior ^= LOOKUP_RESOLVER;
        return resolveMethod_locked(inst, sel, cls, behavior);
    }

 done:
    log_and_fill_cache(cls, imp, sel, inst, curClass);
    runtimeLock.unlock();
 done_nolock:
    if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
        return nil;
    }
    return imp;
}
6269
6270 /***********************************************************************
6271 * lookupMethodInClassAndLoadCache.
6272 * Like lookUpImpOrForward, but does not search superclasses.
6273 * Caches and returns objc_msgForward if the method is not found in the class.
6274 **********************************************************************/
/***********************************************************************
* lookupMethodInClassAndLoadCache.
* Like lookUpImpOrForward, but does not search superclasses.
* Caches and returns objc_msgForward if the method is not found in the class.
**********************************************************************/
IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
{
    // fixme this is incomplete - no resolver, +initialize -
    // but it's only used for .cxx_construct/destruct so we don't care
    ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);

    // Search cache first.
    IMP cached = cache_getImp(cls, sel);
    if (cached) return cached;

    // Cache miss. Search method list.
    mutex_locker_t lock(runtimeLock);

    Method meth = getMethodNoSuper_nolock(cls, sel);
    if (!meth) {
        // Miss in method list. Cache objc_msgForward.
        cache_fill(cls, sel, _objc_msgForward_impcache, nil);
        return _objc_msgForward_impcache;
    }

    // Hit in method list. Cache it.
    IMP found = meth->imp(false);
    cache_fill(cls, sel, found, nil);
    return found;
}
6304
6305
6306 /***********************************************************************
6307 * class_getProperty
6308 * fixme
6309 * Locking: read-locks runtimeLock
6310 **********************************************************************/
/***********************************************************************
* class_getProperty
* Find a property by name on cls or any of its superclasses.
* Locking: read-locks runtimeLock
**********************************************************************/
objc_property_t class_getProperty(Class cls, const char *name)
{
    if (!cls || !name) return nil;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    ASSERT(cls->isRealized());

    // Search this class first, then each superclass in turn.
    while (cls) {
        for (auto& prop : cls->data()->properties()) {
            if (strcmp(name, prop.name) == 0) {
                return (objc_property_t)&prop;
            }
        }
        cls = cls->superclass;
    }

    return nil;
}
6331
6332
6333 /***********************************************************************
6334 * Locking: fixme
6335 **********************************************************************/
6336
// Look the class up again by its mangled name; used by debuggers.
Class gdb_class_getClass(Class cls)
{
    const char *className = cls->mangledName();
    if (!className || *className == '\0') return Nil;
    return look_up_class(className, NO, NO);
}
6344
// Debugger helper: resolve an object's class via its isa.
Class gdb_object_getClass(id obj)
{
    return obj ? gdb_class_getClass(obj->getIsa()) : nil;
}
6350
6351
6352 /***********************************************************************
6353 * Locking: write-locks runtimeLock
6354 **********************************************************************/
void
objc_class::setInitialized()
{
    // Mark this (non-meta) class as +initialize-complete: run the custom
    // RR/AWZ/Core scanners, then flip RW_INITIALIZING -> RW_INITIALIZED.
    Class metacls;
    Class cls;

    ASSERT(!isMetaClass());

    cls = (Class)this;
    metacls = cls->ISA();

    mutex_locker_t lock(runtimeLock);

    // Special cases:
    // - NSObject AWZ class methods are default.
    // - NSObject RR class and instance methods are default.
    // - NSObject Core class and instance methods are default.
    // adjustCustomFlagsForMethodChange() also knows these special cases.
    // attachMethodLists() also knows these special cases.

    objc::AWZScanner::scanInitializedClass(cls, metacls);
    objc::RRScanner::scanInitializedClass(cls, metacls);
    objc::CoreScanner::scanInitializedClass(cls, metacls);

    // Update the +initialize flags.
    // Do this last.
    metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
}
6383
6384
// Log that this class's instances require raw (non-packed) isa pointers.
// Only valid when PrintRawIsa logging is enabled and the flag is set.
void
objc_class::printInstancesRequireRawIsa(bool inherited)
{
    ASSERT(PrintRawIsa);
    ASSERT(instancesRequireRawIsa());
    const char *metaSuffix = isMetaClass() ? " (meta)" : "";
    const char *inheritedSuffix = inherited ? " (inherited)" : "";
    _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
                 metaSuffix, inheritedSuffix);
}
6394
6395 /***********************************************************************
6396 * Mark this class and all of its subclasses as requiring raw isa pointers
6397 **********************************************************************/
void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
{
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    // Already marked: the whole subtree was handled on a previous call.
    if (instancesRequireRawIsa()) return;

    foreach_realized_class_and_subclass(cls, [=](Class c){
        if (c->instancesRequireRawIsa()) {
            // Returning false prunes this subtree: its subclasses were
            // already marked when c itself was marked.
            return false;
        }

        c->setInstancesRequireRawIsa();

        // Subclasses (c != cls) inherit the requirement from cls.
        if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
        return true;
    });
}
6416
6417
6418 /***********************************************************************
6419 * Choose a class index.
6420 * Set instancesRequireRawIsa if no more class indexes are available.
6421 **********************************************************************/
void objc_class::chooseClassArrayIndex()
{
#if SUPPORT_INDEXED_ISA
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
        // No more indexes available.
        // Fall back to raw isa pointers for this class and its subclasses.
        ASSERT(cls->classArrayIndex() == 0);
        cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
        return;
    }

    unsigned index = objc_indexed_classes_count++;
    if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
    classForIndex(index) = cls;
    cls->setClassArrayIndex(index);
#endif
}
6441
6442
6443 /***********************************************************************
6444 * Update custom RR and AWZ when a method changes its IMP
6445 **********************************************************************/
static void
adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
{
    // Re-run all three custom-method scanners, mirroring the set used in
    // objc_class::setInitialized().
    objc::AWZScanner::scanChangedMethod(cls, meth);
    objc::RRScanner::scanChangedMethod(cls, meth);
    objc::CoreScanner::scanChangedMethod(cls, meth);
}
6453
6454
6455 /***********************************************************************
6456 * class_getIvarLayout
6457 * Called by the garbage collector.
6458 * The class must be nil or already realized.
6459 * Locking: none
6460 **********************************************************************/
/***********************************************************************
* class_getIvarLayout
* Called by the garbage collector.
* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
class_getIvarLayout(Class cls)
{
    return cls ? cls->data()->ro()->ivarLayout : nil;
}
6467
6468
6469 /***********************************************************************
6470 * class_getWeakIvarLayout
6471 * Called by the garbage collector.
6472 * The class must be nil or already realized.
6473 * Locking: none
6474 **********************************************************************/
/***********************************************************************
* class_getWeakIvarLayout
* Called by the garbage collector.
* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
class_getWeakIvarLayout(Class cls)
{
    return cls ? cls->data()->ro()->weakIvarLayout : nil;
}
6481
6482
6483 /***********************************************************************
6484 * class_setIvarLayout
6485 * Changes the class's ivar layout.
6486 * nil layout means no unscanned ivars
6487 * The class must be under construction.
6488 * fixme: sanity-check layout vs instance size?
6489 * fixme: sanity-check layout vs superclass?
6490 * Locking: acquires runtimeLock
6491 **********************************************************************/
void
class_setIvarLayout(Class cls, const uint8_t *layout)
{
    // Replace the strong-ivar layout of an in-construction class.
    // nil layout means no unscanned ivars.
    if (!cls) return;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent object_setIvar)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set ivar layout for already-registered "
                     "class '%s'", cls->nameForLogging());
        return;
    }

    // Get a writeable copy of the (normally read-only) class data,
    // then swap in a duplicate of the new layout.
    class_ro_t *ro_w = make_ro_writeable(cls->data());

    try_free(ro_w->ivarLayout);
    ro_w->ivarLayout = ustrdupMaybeNil(layout);
}
6515
6516
6517 /***********************************************************************
6518 * class_setWeakIvarLayout
6519 * Changes the class's weak ivar layout.
6520 * nil layout means no weak ivars
6521 * The class must be under construction.
6522 * fixme: sanity-check layout vs instance size?
6523 * fixme: sanity-check layout vs superclass?
6524 * Locking: acquires runtimeLock
6525 **********************************************************************/
void
class_setWeakIvarLayout(Class cls, const uint8_t *layout)
{
    // Replace the weak-ivar layout of an in-construction class.
    // nil layout means no weak ivars. Mirrors class_setIvarLayout.
    if (!cls) return;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent object_setIvar)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set weak ivar layout for already-registered "
                     "class '%s'", cls->nameForLogging());
        return;
    }

    // Get a writeable copy of the (normally read-only) class data,
    // then swap in a duplicate of the new layout.
    class_ro_t *ro_w = make_ro_writeable(cls->data());

    try_free(ro_w->weakIvarLayout);
    ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
}
6549
6550
6551 /***********************************************************************
6552 * getIvar
6553 * Look up an ivar by name.
6554 * Locking: runtimeLock must be read- or write-locked by the caller.
6555 **********************************************************************/
/***********************************************************************
* getIvar
* Look up an ivar by name in cls's own ivar list (no superclass search).
* Returns nil when the class has no ivar list or no ivar matches.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
static ivar_t *getIvar(Class cls, const char *name)
{
    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    const ivar_list_t *ivars = cls->data()->ro()->ivars;
    if (!ivars) return nil;

    for (auto& ivar : *ivars) {
        if (!ivar.offset) continue; // anonymous bitfield

        // ivar.name may be nil for anonymous bitfields etc.
        if (ivar.name && strcmp(name, ivar.name) == 0) {
            return &ivar;
        }
    }

    return nil;
}
6575
6576
6577 /***********************************************************************
6578 * _class_getClassForIvar
6579 * Given a class and an ivar that is in it or one of its superclasses,
6580 * find the actual class that defined the ivar.
6581 **********************************************************************/
/***********************************************************************
* _class_getClassForIvar
* Given a class and an ivar that is in it or one of its superclasses,
* find the actual class that defined the ivar.
**********************************************************************/
Class _class_getClassForIvar(Class cls, Ivar ivar)
{
    mutex_locker_t lock(runtimeLock);

    // Walk the superclass chain until some class's own ivar list
    // contains the ivar.
    while (cls) {
        auto ivars = cls->data()->ro()->ivars;
        if (ivars && ivars->containsIvar(ivar)) {
            return cls;
        }
        cls = cls->superclass;
    }

    return nil;
}
6596
6597
6598 /***********************************************************************
6599 * _class_getVariable
6600 * fixme
6601 * Locking: read-locks runtimeLock
6602 **********************************************************************/
/***********************************************************************
* _class_getVariable
* Look up an ivar by name in cls or any of its superclasses.
* Locking: read-locks runtimeLock
**********************************************************************/
Ivar
_class_getVariable(Class cls, const char *name)
{
    mutex_locker_t lock(runtimeLock);

    while (cls) {
        if (ivar_t *found = getIvar(cls, name)) {
            return found;
        }
        cls = cls->superclass;
    }

    return nil;
}
6617
6618
6619 /***********************************************************************
6620 * class_conformsToProtocol
6621 * fixme
6622 * Locking: read-locks runtimeLock
6623 **********************************************************************/
BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
{
    // Check cls's own protocol list (including inherited protocols of
    // each listed protocol); does NOT walk cls's superclass chain here.
    protocol_t *proto = newprotocol(proto_gen);

    if (!cls) return NO;
    if (!proto_gen) return NO;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    ASSERT(cls->isRealized());

    for (const auto& proto_ref : cls->data()->protocols()) {
        // Protocol references may need remapping to their canonical form.
        protocol_t *p = remapProtocol(proto_ref);
        if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
            return YES;
        }
    }

    return NO;
}
6646
6647
6648 /**********************************************************************
6649 * addMethod
6650 * fixme
6651 * Locking: runtimeLock must be held by the caller
6652 **********************************************************************/
static IMP
addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
{
    // Add or replace a single method on cls.
    // Returns the previously-installed IMP when the selector already
    // existed and replace was requested (or refused); nil when a brand
    // new method was attached.
    IMP result = nil;

    runtimeLock.assertLocked();

    checkIsKnownClass(cls);

    ASSERT(types);
    ASSERT(cls->isRealized());

    method_t *m;
    if ((m = getMethodNoSuper_nolock(cls, name))) {
        // already exists
        if (!replace) {
            result = m->imp(false);
        } else {
            result = _method_setImplementation(cls, m, imp);
        }
    } else {
        auto rwe = cls->data()->extAllocIfNeeded();

        // fixme optimize
        // Build a one-entry, already-fixed-up method list and attach it.
        method_list_t *newlist;
        newlist = (method_list_t *)calloc(method_list_t::byteSize(method_t::bigSize, 1), 1);
        newlist->entsizeAndFlags =
            (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
        newlist->count = 1;
        auto &first = newlist->begin()->big();
        first.name = name;
        first.types = strdupIfMutable(types);
        first.imp = imp;

        // attachLists consumes ownership of newlist; flush the method
        // cache so the new method becomes visible.
        prepareMethodLists(cls, &newlist, 1, NO, NO);
        rwe->methods.attachLists(&newlist, 1);
        flushCaches(cls);

        result = nil;
    }

    return result;
}
6696
6697 /**********************************************************************
6698 * addMethods
6699 * Add the given methods to a class in bulk.
6700 * Returns the selectors which could not be added, when replace == NO and a
6701 * method already exists. The returned selectors are NULL terminated and must be
6702 * freed by the caller. They are NULL if no failures occurred.
6703 * Locking: runtimeLock must be held by the caller
6704 **********************************************************************/
static SEL *
addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
           uint32_t count, bool replace, uint32_t *outFailedCount)
{
    runtimeLock.assertLocked();

    ASSERT(names);
    ASSERT(imps);
    ASSERT(types);
    ASSERT(cls->isRealized());

    // Build one already-fixed-up method list sized for the worst case
    // (all `count` methods are new); `newlist->count` tracks how many
    // entries are actually used.
    method_list_t *newlist;
    size_t newlistSize = method_list_t::byteSize(sizeof(struct method_t::big), count);
    newlist = (method_list_t *)calloc(newlistSize, 1);
    newlist->entsizeAndFlags =
        (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
    newlist->count = 0;

    // Selectors that could not be added (replace == NO and they already
    // exist); allocated lazily on first failure.
    SEL *failedNames = nil;
    uint32_t failedCount = 0;

    for (uint32_t i = 0; i < count; i++) {
        method_t *m;
        if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
            // already exists
            if (!replace) {
                // report failure
                if (failedNames == nil) {
                    // allocate an extra entry for a trailing NULL in case
                    // every method fails
                    failedNames = (SEL *)calloc(sizeof(*failedNames),
                                                count + 1);
                }
                failedNames[failedCount] = m->name();
                failedCount++;
            } else {
                _method_setImplementation(cls, m, imps[i]);
            }
        } else {
            // Append a new entry at the current end of the list.
            auto &newmethod = newlist->end()->big();
            newmethod.name = names[i];
            newmethod.types = strdupIfMutable(types[i]);
            newmethod.imp = imps[i];
            newlist->count++;
        }
    }

    if (newlist->count > 0) {
        auto rwe = cls->data()->extAllocIfNeeded();

        // fixme resize newlist because it may have been over-allocated above.
        // Note that realloc() alone doesn't work due to ptrauth.

        // The fixed_up flag promises a sorted list, so sort before attaching.
        method_t::SortBySELAddress sorter;
        std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter);
        
        prepareMethodLists(cls, &newlist, 1, NO, NO);
        rwe->methods.attachLists(&newlist, 1);
        flushCaches(cls);
    } else {
        // Attaching the method list to the class consumes it. If we don't
        // do that, we have to free the memory ourselves.
        free(newlist);
    }

    if (outFailedCount) *outFailedCount = failedCount;

    return failedNames;
}
6774
6775
// Add a method to cls without replacing an existing implementation.
// Returns YES when the method was added, NO when the selector already
// had an implementation (or cls is nil).
BOOL
class_addMethod(Class cls, SEL name, IMP imp, const char *types)
{
    if (!cls) return NO;

    mutex_locker_t lock(runtimeLock);
    // addMethod returns the preexisting IMP (non-nil) when the selector
    // already existed, so success means it returned nil.
    IMP preexisting = addMethod(cls, name, imp, types ? types : "", NO);
    return preexisting ? NO : YES;
}
6784
6785
// Add or replace a method on cls. Returns the previous IMP for the
// selector, or nil if the method was newly added (or cls is nil).
IMP
class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
{
    if (!cls) return nil;

    mutex_locker_t lock(runtimeLock);
    const char *safeTypes = types ? types : "";
    return addMethod(cls, name, imp, safeTypes, YES);
}
6794
6795
SEL *
class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
                     const char **types, uint32_t count,
                     uint32_t *outFailedCount)
{
    // Bulk class_addMethod. Returns a caller-freed, NULL-terminated array
    // of the selectors that could not be added, or NULL if all succeeded.
    if (!cls) {
        // With no class, every addition fails: report them all.
        if (outFailedCount) *outFailedCount = count;
        return (SEL *)memdup(names, count * sizeof(*names));
    }

    mutex_locker_t lock(runtimeLock);
    return addMethods(cls, names, imps, types, count, NO, outFailedCount);
}
6809
void
class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
                         const char **types, uint32_t count)
{
    // Bulk class_replaceMethod. Replacement cannot fail per-selector,
    // so no failure list is returned.
    if (!cls) return;

    mutex_locker_t lock(runtimeLock);
    addMethods(cls, names, imps, types, count, YES, nil);
}
6819
6820
6821 /***********************************************************************
6822 * class_addIvar
6823 * Adds an ivar to a class.
6824 * Locking: acquires runtimeLock
6825 **********************************************************************/
BOOL
class_addIvar(Class cls, const char *name, size_t size,
              uint8_t alignment, const char *type)
{
    // Append an ivar to an in-construction class and grow its instance
    // size. alignment is log2 of the byte alignment.
    if (!cls) return NO;

    if (!type) type = "";
    if (name && 0 == strcmp(name, "")) name = nil;  // "" means anonymous

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    // No class variables
    if (cls->isMetaClass()) {
        return NO;
    }

    // Can only add ivars to in-construction classes.
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        return NO;
    }

    // Check for existing ivar with this name, unless it's anonymous.
    // Check for too-big ivar.
    // fixme check for superclass ivar too?
    if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
        return NO;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // fixme allocate less memory here
    
    // Grow (or create) the ivar list with room for one more entry.
    ivar_list_t *oldlist, *newlist;
    if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) {
        size_t oldsize = oldlist->byteSize();
        newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
        memcpy(newlist, oldlist, oldsize);
        free(oldlist);
    } else {
        newlist = (ivar_list_t *)calloc(ivar_list_t::byteSize(sizeof(ivar_t), 1), 1);
        newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
    }

    // Round the current instance size up to the requested alignment;
    // that rounded value is the new ivar's offset.
    uint32_t offset = cls->unalignedInstanceSize();
    uint32_t alignMask = (1<<alignment)-1;
    offset = (offset + alignMask) & ~alignMask;

    ivar_t& ivar = newlist->get(newlist->count++);
#if __x86_64__
    // Deliberately over-allocate the ivar offset variable.
    // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
    ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
#else
    ivar.offset = (int32_t *)malloc(sizeof(int32_t));
#endif
    *ivar.offset = offset;
    ivar.name = name ? strdupIfMutable(name) : nil;
    ivar.type = strdupIfMutable(type);
    ivar.alignment_raw = alignment;
    ivar.size = (uint32_t)size;

    ro_w->ivars = newlist;
    cls->setInstanceSize((uint32_t)(offset + size));

    // Ivar layout updated in registerClass.

    return YES;
}
6897
6898
6899 /***********************************************************************
6900 * class_addProtocol
6901 * Adds a protocol to a class.
6902 * Locking: acquires runtimeLock
6903 **********************************************************************/
BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
{
    // Attaches protocol_gen to cls's protocol list.
    // Returns NO if cls is nil or already conforms to the protocol.
    protocol_t *protocol = newprotocol(protocol_gen);

    if (!cls) return NO;
    // NOTE(review): conformance is checked before runtimeLock is taken;
    // confirm that a concurrent add of the same protocol is acceptable.
    if (class_conformsToProtocol(cls, protocol_gen)) return NO;

    mutex_locker_t lock(runtimeLock);
    auto rwe = cls->data()->extAllocIfNeeded();

    ASSERT(cls->isRealized());

    // fixme optimize
    // Build a one-element protocol list and splice it into rwe.
    protocol_list_t *protolist = (protocol_list_t *)
        malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
    protolist->count = 1;
    protolist->list[0] = (protocol_ref_t)protocol;

    rwe->protocols.attachLists(&protolist, 1);

    // fixme metaclass?

    return YES;
}
6928
6929
6930 /***********************************************************************
6931 * class_addProperty
6932 * Adds a property to a class.
6933 * Locking: acquires runtimeLock
6934 **********************************************************************/
static bool
_class_addProperty(Class cls, const char *name,
                   const objc_property_attribute_t *attrs, unsigned int count,
                   bool replace)
{
    // Shared implementation for class_addProperty (replace == false)
    // and class_replaceProperty (replace == true).
    if (!cls) return NO;
    if (!name) return NO;

    // NOTE(review): the lookup happens before runtimeLock is acquired;
    // confirm callers tolerate a concurrent add of the same property.
    property_t *prop = class_getProperty(cls, name);
    if (prop  &&  !replace) {
        // already exists, refuse to replace
        return NO;
    } 
    else if (prop) {
        // replace existing: swap the attribute string in place
        mutex_locker_t lock(runtimeLock);
        try_free(prop->attributes);
        prop->attributes = copyPropertyAttributeString(attrs, count);
        return YES;
    }
    else {
        mutex_locker_t lock(runtimeLock);
        auto rwe = cls->data()->extAllocIfNeeded();

        ASSERT(cls->isRealized());

        // Build a one-element property list and splice it into rwe.
        property_list_t *proplist = (property_list_t *)
            malloc(property_list_t::byteSize(sizeof(property_t), 1));
        proplist->count = 1;
        proplist->entsizeAndFlags = sizeof(property_t);
        proplist->begin()->name = strdupIfMutable(name);
        proplist->begin()->attributes = copyPropertyAttributeString(attrs, count);

        rwe->properties.attachLists(&proplist, 1);

        return YES;
    }
}
6973
6974 BOOL
6975 class_addProperty(Class cls, const char *name,
6976 const objc_property_attribute_t *attrs, unsigned int n)
6977 {
6978 return _class_addProperty(cls, name, attrs, n, NO);
6979 }
6980
6981 void
6982 class_replaceProperty(Class cls, const char *name,
6983 const objc_property_attribute_t *attrs, unsigned int n)
6984 {
6985 _class_addProperty(cls, name, attrs, n, YES);
6986 }
6987
6988
6989 /***********************************************************************
6990 * look_up_class
6991 * Look up a class by name, and realize it.
6992 * Locking: acquires runtimeLock
6993 **********************************************************************/
// Default terminal entry for the getClass hook chain: never resolves
// any name. outClass must be non-nil (unconditionally written).
static BOOL empty_getClass(const char *name, Class *outClass)
{
    *outClass = nil;
    return NO;
}
6999
// Chain of getClass hooks. look_up_class consults this chain (e.g. so
// Swift can resolve class names the runtime's tables don't know about);
// empty_getClass terminates the chain.
static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};

// Installs newValue at the head of the hook chain; the previous head is
// returned through outOldValue so the new hook can delegate to it.
void objc_setHook_getClass(objc_hook_getClass newValue,
                           objc_hook_getClass *outOldValue)
{
    GetClassHook.set(newValue, outOldValue);
}
7007
7008 Class
7009 look_up_class(const char *name,
7010 bool includeUnconnected __attribute__((unused)),
7011 bool includeClassHandler __attribute__((unused)))
7012 {
7013 if (!name) return nil;
7014
7015 Class result;
7016 bool unrealized;
7017 {
7018 runtimeLock.lock();
7019 result = getClassExceptSomeSwift(name);
7020 unrealized = result && !result->isRealized();
7021 if (unrealized) {
7022 result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
7023 // runtimeLock is now unlocked
7024 } else {
7025 runtimeLock.unlock();
7026 }
7027 }
7028
7029 if (!result) {
7030 // Ask Swift about its un-instantiated classes.
7031
7032 // We use thread-local storage to prevent infinite recursion
7033 // if the hook function provokes another lookup of the same name
7034 // (for example, if the hook calls objc_allocateClassPair)
7035
7036 auto *tls = _objc_fetch_pthread_data(true);
7037
7038 // Stop if this thread is already looking up this name.
7039 for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
7040 if (0 == strcmp(name, tls->classNameLookups[i])) {
7041 return nil;
7042 }
7043 }
7044
7045 // Save this lookup in tls.
7046 if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
7047 tls->classNameLookupsAllocated =
7048 (tls->classNameLookupsAllocated * 2 ?: 1);
7049 size_t size = tls->classNameLookupsAllocated *
7050 sizeof(tls->classNameLookups[0]);
7051 tls->classNameLookups = (const char **)
7052 realloc(tls->classNameLookups, size);
7053 }
7054 tls->classNameLookups[tls->classNameLookupsUsed++] = name;
7055
7056 // Call the hook.
7057 Class swiftcls = nil;
7058 if (GetClassHook.get()(name, &swiftcls)) {
7059 ASSERT(swiftcls->isRealized());
7060 result = swiftcls;
7061 }
7062
7063 // Erase the name from tls.
7064 unsigned slot = --tls->classNameLookupsUsed;
7065 ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
7066 ASSERT(name == tls->classNameLookups[slot]);
7067 tls->classNameLookups[slot] = nil;
7068 }
7069
7070 return result;
7071 }
7072
7073
7074 /***********************************************************************
7075 * objc_duplicateClass
7076 * fixme
7077 * Locking: acquires runtimeLock
7078 **********************************************************************/
Class
objc_duplicateClass(Class original, const char *name,
                    size_t extraBytes)
{
    // Creates and registers a realized copy of `original` under a new
    // name. The duplicate shares original's metaclass and superclass.
    // Methods are deep-copied; properties and protocols are shared
    // (see the fixme below). Must not be given a metaclass.
    Class duplicate;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(original);

    auto orig_rw  = original->data();
    auto orig_rwe = orig_rw->ext();
    auto orig_ro  = orig_rw->ro();

    ASSERT(original->isRealized());
    ASSERT(!original->isMetaClass());

    duplicate = alloc_class_for_subclass(original, extraBytes);

    // Same metaclass and superclass as the original.
    duplicate->initClassIsa(original->ISA());
    duplicate->superclass = original->superclass;

    duplicate->cache.initializeToEmpty();

    // Fresh rw data; RW_REALIZING stays set until construction is done.
    class_rw_t *rw = objc::zalloc<class_rw_t>();
    rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING);
    rw->firstSubclass = nil;
    rw->nextSiblingClass = nil;

    duplicate->bits = original->bits;
    duplicate->setData(rw);

    // Copy the ro and rename it (cast away const for the one-time write).
    auto ro = orig_ro->duplicate();
    *(char **)&ro->name = strdupIfMutable(name);
    rw->set_ro(ro);

    if (orig_rwe) {
        auto rwe = rw->extAllocIfNeeded();
        rwe->version = orig_rwe->version;
        orig_rwe->methods.duplicateInto(rwe->methods);

        // fixme dies when categories are added to the base
        rwe->properties = orig_rwe->properties;
        rwe->protocols = orig_rwe->protocols;
    } else if (ro->baseMethods()) {
        // if we have base methods, we need to make a deep copy
        // which requires a class_rw_ext_t to be allocated
        rw->deepCopy(ro);
    }

    duplicate->chooseClassArrayIndex();

    // Link into the class hierarchy's subclass lists.
    if (duplicate->superclass) {
        addSubclass(duplicate->superclass, duplicate);
        // duplicate->isa == original->isa so don't addSubclass() for it
    } else {
        addRootClass(duplicate);
    }

    // Don't methodize class - construction above is correct

    addNamedClass(duplicate, ro->name);
    addClassTableEntry(duplicate, /*addMeta=*/false);

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p", 
                     name, original->nameForLogging(), (void*)duplicate, ro);
    }

    duplicate->clearInfo(RW_REALIZING);

    return duplicate;
}
7152
7153 /***********************************************************************
7154 * objc_initializeClassPair
7155 * Locking: runtimeLock must be write-locked by the caller
7156 **********************************************************************/
7157
// &UnsetLayout is the default ivar layout during class construction
static const uint8_t UnsetLayout = 0;

// Builds a fresh class/metaclass pair in caller-provided storage.
// Allocates and wires rw/ro data, instance sizes, isa and superclass
// links, and registers the pair in the class tables. Both classes are
// left in the RW_CONSTRUCTING state for objc_registerClassPair.
static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
{
    runtimeLock.assertLocked();

    class_ro_t *cls_ro_w, *meta_ro_w;
    class_rw_t *cls_rw_w, *meta_rw_w;

    // Zeroed rw/ro structures for both halves of the pair.
    cls_rw_w   = objc::zalloc<class_rw_t>();
    meta_rw_w  = objc::zalloc<class_rw_t>();
    cls_ro_w   = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
    meta_ro_w  = (class_ro_t *)calloc(sizeof(class_ro_t), 1);

    cls->setData(cls_rw_w);
    cls_rw_w->set_ro(cls_ro_w);
    meta->setData(meta_rw_w);
    meta_rw_w->set_ro(meta_ro_w);

    // Set basic info

    cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
    meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META;

    cls_ro_w->flags = 0;
    meta_ro_w->flags = RO_META;
    if (superclass) {
        // Inherit selected flags and start instances after the
        // superclass's ivars.
        uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
        cls_rw_w->flags |= superclass->data()->flags & flagsToCopy;
        cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
        meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
        cls->setInstanceSize(cls_ro_w->instanceStart);
        meta->setInstanceSize(meta_ro_w->instanceStart);
    } else {
        // Root class: instances hold just an isa to begin with.
        cls_ro_w->flags |= RO_ROOT;
        meta_ro_w->flags |= RO_ROOT;
        cls_ro_w->instanceStart = 0;
        meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
        cls->setInstanceSize((uint32_t)sizeof(id));  // just an isa
        meta->setInstanceSize(meta_ro_w->instanceStart);
    }

    cls_ro_w->name = strdupIfMutable(name);
    meta_ro_w->name = strdupIfMutable(name);

    cls_ro_w->ivarLayout = &UnsetLayout;
    cls_ro_w->weakIvarLayout = &UnsetLayout;

    meta->chooseClassArrayIndex();
    cls->chooseClassArrayIndex();

    // This absolutely needs to be done before addSubclass
    // as initializeToEmpty() clobbers the FAST_CACHE bits
    cls->cache.initializeToEmpty();
    meta->cache.initializeToEmpty();

#if FAST_CACHE_META
    meta->cache.setBit(FAST_CACHE_META);
#endif
    meta->setInstancesRequireRawIsa();

    // Connect to superclasses and metaclasses
    cls->initClassIsa(meta);

    if (superclass) {
        meta->initClassIsa(superclass->ISA()->ISA());
        cls->superclass = superclass;
        meta->superclass = superclass->ISA();
        addSubclass(superclass, cls);
        addSubclass(superclass->ISA(), meta);
    } else {
        // Root: metaclass is its own metaclass; its superclass is cls.
        meta->initClassIsa(meta);
        cls->superclass = Nil;
        meta->superclass = cls;
        addRootClass(cls);
        addSubclass(cls, meta);
    }

    addClassTableEntry(cls);
}
7239
7240
7241 /***********************************************************************
7242 * verifySuperclass
7243 * Sanity-check the superclass provided to
7244 * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
7245 **********************************************************************/
7246 bool
7247 verifySuperclass(Class superclass, bool rootOK)
7248 {
7249 if (!superclass) {
7250 // Superclass does not exist.
7251 // If subclass may be a root class, this is OK.
7252 // If subclass must not be a root class, this is bad.
7253 return rootOK;
7254 }
7255
7256 // Superclass must be realized.
7257 if (! superclass->isRealized()) return false;
7258
7259 // Superclass must not be under construction.
7260 if (superclass->data()->flags & RW_CONSTRUCTING) return false;
7261
7262 return true;
7263 }
7264
7265
7266 /***********************************************************************
7267 * objc_initializeClassPair
7268 **********************************************************************/
// Initializes caller-allocated cls/meta storage as a new class pair.
// Returns cls on success, nil if the name is taken or the superclass
// is unsuitable.
Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
{
    // Fail if the class name is in use.
    // (Checked first without runtimeLock: look_up_class may realize
    // classes and manages the lock itself.)
    if (look_up_class(name, NO, NO)) return nil;

    mutex_locker_t lock(runtimeLock);

    // Fail if the class name is in use.
    // Fail if the superclass isn't kosher.
    if (getClassExceptSomeSwift(name)  ||
        !verifySuperclass(superclass, true/*rootOK*/))
    {
        return nil;
    }

    objc_initializeClassPair_internal(superclass, name, cls, meta);

    return cls;
}
7288
7289
7290 /***********************************************************************
7291 * objc_allocateClassPair
7292 * fixme
7293 * Locking: acquires runtimeLock
7294 **********************************************************************/
// Allocates and initializes a new class/metaclass pair with extraBytes
// of additional indexed storage. Returns the class, or nil if the name
// is taken or the superclass is unsuitable. Complete the pair with
// objc_registerClassPair.
Class objc_allocateClassPair(Class superclass, const char *name,
                             size_t extraBytes)
{
    Class cls, meta;

    // Fail if the class name is in use.
    // (Checked first without runtimeLock: look_up_class may realize
    // classes and manages the lock itself.)
    if (look_up_class(name, NO, NO)) return nil;

    mutex_locker_t lock(runtimeLock);

    // Fail if the class name is in use.
    // Fail if the superclass isn't kosher.
    if (getClassExceptSomeSwift(name)  ||
        !verifySuperclass(superclass, true/*rootOK*/))
    {
        return nil;
    }

    // Allocate new classes.
    cls  = alloc_class_for_subclass(superclass, extraBytes);
    meta = alloc_class_for_subclass(superclass, extraBytes);

    // fixme mangle the name if it looks swift-y?
    objc_initializeClassPair_internal(superclass, name, cls, meta);

    return cls;
}
7322
7323
7324 /***********************************************************************
7325 * objc_registerClassPair
7326 * fixme
7327 * Locking: acquires runtimeLock
7328 **********************************************************************/
// Completes a class pair begun by objc_allocateClassPair: flips
// CONSTRUCTING -> CONSTRUCTED on both the class and its metaclass and
// publishes the class in the named-class table. Logs and returns
// without effect if the pair is already registered or was not created
// by objc_allocateClassPair.
void objc_registerClassPair(Class cls)
{
    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    if ((cls->data()->flags & RW_CONSTRUCTED)  ||
        (cls->ISA()->data()->flags & RW_CONSTRUCTED))
    {
        _objc_inform("objc_registerClassPair: class '%s' was already "
                     "registered!", cls->data()->ro()->name);
        return;
    }

    if (!(cls->data()->flags & RW_CONSTRUCTING)  ||
        !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
    {
        _objc_inform("objc_registerClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!",
                     cls->data()->ro()->name);
        return;
    }

    // Clear "under construction" bit, set "done constructing" bit
    cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
    cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);

    // Add to named class table.
    addNamedClass(cls, cls->data()->ro()->name);
}
7359
7360
7361 /***********************************************************************
7362 * objc_readClassPair()
7363 * Read a class and metaclass as written by a compiler.
7364 * Assumes the class and metaclass are not referenced by other things
7365 * that might need to be fixed up (such as categories and subclasses).
7366 * Does not call +load.
7367 * Returns the class pointer, or nil.
7368 *
7369 * Locking: runtimeLock acquired by map_images
7370 **********************************************************************/
// Reads a compiler-emitted class/metaclass pair (`bits`) into the
// runtime, then realizes it. Returns the class, or nil if the
// superclass fails verification. See the header comment above for the
// full contract.
Class objc_readClassPair(Class bits, const struct objc_image_info *info)
{
    mutex_locker_t lock(runtimeLock);

    // No info bits are significant yet.
    (void)info;

    // Fail if the superclass isn't kosher.
    // NOTE(review): flags are read through data() on a not-yet-realized
    // class, so this sees the compiler-emitted (ro) flags — confirm
    // RO_ROOT is the intended bit here.
    bool rootOK = bits->data()->flags & RO_ROOT;
    if (!verifySuperclass(bits->superclass, rootOK)){
        return nil;
    }

    // Duplicate classes are allowed, just like they are for image loading.
    // readClass will complain about the duplicate.

    Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
    if (cls != bits) {
        // This function isn't allowed to remap anything.
        _objc_fatal("objc_readClassPair for class %s changed %p to %p", 
                    cls->nameForLogging(), bits, cls);
    }

    // The only client of this function is old Swift.
    // Stable Swift won't use it.
    // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
    cls = realizeClassWithoutSwift(cls, nil);

    return cls;
}
7401
7402
7403 /***********************************************************************
7404 * detach_class
7405 * Disconnect a class from other data structures.
7406 * Exception: does not remove the class from the +load list
7407 * Call this before free_class.
7408 * Locking: runtimeLock must be held by the caller.
7409 **********************************************************************/
// Disconnects cls from the runtime's data structures: pending
// categories, the superclass's subclass list (or root-class list), the
// named-class table (classes only, not metaclasses), and the
// allocated-classes set. Does NOT free memory — call free_class next.
static void detach_class(Class cls, bool isMeta)
{
    runtimeLock.assertLocked();

    // categories not yet attached to this class
    objc::unattachedCategories.eraseClass(cls);

    // superclass's subclass list
    if (cls->isRealized()) {
        Class supercls = cls->superclass;
        if (supercls) {
            removeSubclass(supercls, cls);
        } else {
            removeRootClass(cls);
        }
    }

    // class tables and +load queue
    if (!isMeta) {
        removeNamedClass(cls, cls->mangledName());
    }
    objc::allocatedClasses.get().erase(cls);
}
7433
7434
7435 /***********************************************************************
7436 * free_class
7437 * Frees a class's data structures.
7438 * Call this after detach_class.
7439 * Locking: runtimeLock must be held by the caller
7440 **********************************************************************/
// Frees a detached class's heap-allocated metadata: method type
// strings, ivar offset/name/type storage, property strings, the
// rw/rwe/ro structures, and the class object itself. Unrealized
// classes own none of this, so they are skipped.
static void free_class(Class cls)
{
    runtimeLock.assertLocked();

    if (! cls->isRealized()) return;

    auto rw = cls->data();
    auto rwe = rw->ext();
    auto ro = rw->ro();

    cache_delete(cls);

    if (rwe) {
        // Free each method's type string before freeing the list itself.
        for (auto& meth : rwe->methods) {
            try_free(meth.types());
        }
        rwe->methods.tryFree();
    }

    const ivar_list_t *ivars = ro->ivars;
    if (ivars) {
        for (auto& ivar : *ivars) {
            try_free(ivar.offset);
            try_free(ivar.name);
            try_free(ivar.type);
        }
        try_free(ivars);
    }

    if (rwe) {
        for (auto& prop : rwe->properties) {
            try_free(prop.name);
            try_free(prop.attributes);
        }
        rwe->properties.tryFree();

        rwe->protocols.tryFree();
    }

    // Free the metadata containers last, innermost first.
    try_free(ro->ivarLayout);
    try_free(ro->weakIvarLayout);
    try_free(ro->name);
    try_free(ro);
    objc::zfree(rwe);  // rwe may be nil here; zfree is assumed to accept it
    objc::zfree(rw);
    try_free(cls);
}
7488
7489
// Destroys a class pair created by objc_allocateClassPair (registered
// or not): detaches and frees both the class and its metaclass.
// Logs and refuses for classes not created that way, or if given a
// metaclass; only warns about live subclasses.
void objc_disposeClassPair(Class cls)
{
    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING))  ||
        !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING))) 
    {
        // class not allocated with objc_allocateClassPair
        // disposing still-unregistered class is OK!
        _objc_inform("objc_disposeClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!", 
                     cls->data()->ro()->name);
        return;
    }

    if (cls->isMetaClass()) {
        _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
                     "not a class!", cls->data()->ro()->name);
        return;
    }

    // Shouldn't have any live subclasses.
    if (cls->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro()->name,
                     cls->data()->firstSubclass->nameForLogging());
    }
    if (cls->ISA()->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro()->name,
                     cls->ISA()->data()->firstSubclass->nameForLogging());
    }

    // don't remove_class_from_loadable_list() 
    // - it's not there and we don't have the lock
    detach_class(cls->ISA(), YES);
    detach_class(cls, NO);
    free_class(cls->ISA());
    free_class(cls);
}
7532
7533
7534 /***********************************************************************
7535 * objc_constructInstance
7536 * Creates an instance of `cls` at the location pointed to by `bytes`.
7537 * `bytes` must point to at least class_getInstanceSize(cls) bytes of
7538 * well-aligned zero-filled memory.
7539 * The new object's isa is set. Any C++ constructors are called.
7540 * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
7541 * nil, or if C++ constructors fail.
7542 * Note: class_createInstance() and class_createInstances() preflight this.
7543 **********************************************************************/
7544 id
7545 objc_constructInstance(Class cls, void *bytes)
7546 {
7547 if (!cls || !bytes) return nil;
7548
7549 id obj = (id)bytes;
7550
7551 // Read class's info bits all at once for performance
7552 bool hasCxxCtor = cls->hasCxxCtor();
7553 bool hasCxxDtor = cls->hasCxxDtor();
7554 bool fast = cls->canAllocNonpointer();
7555
7556 if (fast) {
7557 obj->initInstanceIsa(cls, hasCxxDtor);
7558 } else {
7559 obj->initIsa(cls);
7560 }
7561
7562 if (hasCxxCtor) {
7563 return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE);
7564 } else {
7565 return obj;
7566 }
7567 }
7568
7569
7570 /***********************************************************************
7571 * class_createInstance
7572 * fixme
7573 * Locking: none
7574 *
7575 * Note: this function has been carefully written so that the fastpath
7576 * takes no branch.
7577 **********************************************************************/
// Core instance allocator. Allocates instanceSize(extraBytes) bytes
// from `zone` (or the default heap when zone is nil), sets the isa,
// and runs C++ constructors unless cxxConstruct is false.
// On allocation failure: calls the bad-alloc handler if
// OBJECT_CONSTRUCT_CALL_BADALLOC is set, else returns nil.
// Written so the fastpath takes no branch (see header comment above);
// keep the flag reads and fastpath/slowpath hints as they are.
static ALWAYS_INLINE id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
                              int construct_flags = OBJECT_CONSTRUCT_NONE,
                              bool cxxConstruct = true,
                              size_t *outAllocatedSize = nil)
{
    ASSERT(cls->isRealized());

    // Read class's info bits all at once for performance
    bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
    bool hasCxxDtor = cls->hasCxxDtor();
    bool fast = cls->canAllocNonpointer();
    size_t size;

    size = cls->instanceSize(extraBytes);
    if (outAllocatedSize) *outAllocatedSize = size;

    id obj;
    if (zone) {
        obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
    } else {
        obj = (id)calloc(1, size);
    }
    if (slowpath(!obj)) {
        if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
            return _objc_callBadAllocHandler(cls);
        }
        return nil;
    }

    if (!zone  &&  fast) {
        obj->initInstanceIsa(cls, hasCxxDtor);
    } else {
        // Use raw pointer isa on the assumption that they might be
        // doing something weird with the zone or RR.
        obj->initIsa(cls);
    }

    if (fastpath(!hasCxxCtor)) {
        return obj;
    }

    // On C++ constructor failure, the object is freed for us.
    construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
    return object_cxxConstructFromClass(obj, cls, construct_flags);
}
7623
7624 id
7625 class_createInstance(Class cls, size_t extraBytes)
7626 {
7627 if (!cls) return nil;
7628 return _class_createInstanceFromZone(cls, extraBytes, nil);
7629 }
7630
// +alloc/+allocWithZone: slow path. The zone argument is ignored under
// __OBJC2__; on allocation failure the bad-alloc handler is invoked
// (OBJECT_CONSTRUCT_CALL_BADALLOC) instead of returning nil.
NEVER_INLINE
id
_objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
{
    // allocWithZone under __OBJC2__ ignores the zone parameter
    return _class_createInstanceFromZone(cls, 0, nil,
                                         OBJECT_CONSTRUCT_CALL_BADALLOC);
}
7639
7640 /***********************************************************************
7641 * class_createInstances
7642 * fixme
7643 * Locking: none
7644 **********************************************************************/
7645 #if SUPPORT_NONPOINTER_ISA
7646 #warning fixme optimize class_createInstances
7647 #endif
// Batch allocation into `results` from the default zone (nil).
// Presumably returns the number of instances actually created — see
// _class_createInstancesFromZone for the exact contract.
unsigned
class_createInstances(Class cls, size_t extraBytes,
                      id *results, unsigned num_requested)
{
    return _class_createInstancesFromZone(cls, extraBytes, nil,
                                          results, num_requested);
}
7655
7656 /***********************************************************************
7657 * object_copyFromZone
7658 * fixme
7659 * Locking: none
7660 **********************************************************************/
// Copies oldObj into a new allocation from `zone` plus extraBytes.
// Tagged pointers are returned unchanged (nothing to copy). The new
// object's isa is set by the allocator; all remaining bytes are copied
// verbatim, then fixupCopiedIvars adjusts ivars that need it.
// C++ construction is skipped (cxxConstruct == false) since bytes are
// copied from the original instead.
static id
_object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    if (!oldObj) return nil;
    if (oldObj->isTaggedPointer()) return oldObj;

    // fixme this doesn't handle C++ ivars correctly (#4619414)

    Class cls = oldObj->ISA();
    size_t size;
    id obj = _class_createInstanceFromZone(cls, extraBytes, zone,
                                           OBJECT_CONSTRUCT_NONE, false, &size);
    if (!obj) return nil;

    // Copy everything except the isa, which was already set above.
    uint8_t *copyDst = (uint8_t *)obj + sizeof(Class);
    uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class);
    size_t copySize = size - sizeof(Class);
    memmove(copyDst, copySrc, copySize);

    fixupCopiedIvars(obj, oldObj);

    return obj;
}
7685
7686
7687 /***********************************************************************
7688 * object_copy
7689 * fixme
7690 * Locking: none
7691 **********************************************************************/
7692 id
7693 object_copy(id oldObj, size_t extraBytes)
7694 {
7695 return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
7696 }
7697
7698
7699 #if SUPPORT_ZONES
7700
7701 /***********************************************************************
7702 * class_createInstanceFromZone
7703 * fixme
7704 * Locking: none
7705 **********************************************************************/
7706 id
7707 class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
7708 {
7709 if (!cls) return nil;
7710 return _class_createInstanceFromZone(cls, extraBytes, zone);
7711 }
7712
7713 /***********************************************************************
7714 * object_copyFromZone
7715 * fixme
7716 * Locking: none
7717 **********************************************************************/
// Zone-aware public copy entry point; thin wrapper over
// _object_copyFromZone.
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    return _object_copyFromZone(oldObj, extraBytes, zone);
}
7723
7724 #endif
7725
7726
7727 /***********************************************************************
7728 * objc_destructInstance
7729 * Destroys an instance without freeing memory.
7730 * Calls C++ destructors.
7731 * Calls ARC ivar cleanup.
7732 * Removes associative references.
7733 * Returns `obj`. Does nothing if `obj` is nil.
7734 **********************************************************************/
7735 void *objc_destructInstance(id obj)
7736 {
7737 if (obj) {
7738 // Read all of the flags at once for performance.
7739 bool cxx = obj->hasCxxDtor();
7740 bool assoc = obj->hasAssociatedObjects();
7741
7742 // This order is important.
7743 if (cxx) object_cxxDestruct(obj);
7744 if (assoc) _object_remove_assocations(obj);
7745 obj->clearDeallocating();
7746 }
7747
7748 return obj;
7749 }
7750
7751
7752 /***********************************************************************
7753 * object_dispose
7754 * fixme
7755 * Locking: none
7756 **********************************************************************/
7757 id
7758 object_dispose(id obj)
7759 {
7760 if (!obj) return nil;
7761
7762 objc_destructInstance(obj);
7763 free(obj);
7764
7765 return nil;
7766 }
7767
7768
7769 /***********************************************************************
7770 * _objc_getFreedObjectClass
7771 * fixme
7772 * Locking: none
7773 **********************************************************************/
// Legacy compatibility stub: the "freed object" canary class no longer
// exists under the new runtime, so this always returns nil.
Class _objc_getFreedObjectClass (void)
{
    return nil;
}
7778
7779
7780
7781 /***********************************************************************
7782 * Tagged pointer objects.
7783 *
7784 * Tagged pointer objects store the class and the object value in the
7785 * object pointer; the "pointer" does not actually point to anything.
7786 *
7787 * Tagged pointer objects currently use this representation:
7788 * (LSB)
7789 * 1 bit set if tagged, clear if ordinary object pointer
7790 * 3 bits tag index
7791 * 60 bits payload
7792 * (MSB)
7793 * The tag index defines the object's class.
7794 * The payload format is defined by the object's class.
7795 *
7796 * If the tag index is 0b111, the tagged pointer object uses an
7797 * "extended" representation, allowing more classes but with smaller payloads:
7798 * (LSB)
7799 * 1 bit set if tagged, clear if ordinary object pointer
7800 * 3 bits 0b111
7801 * 8 bits extended tag index
7802 * 52 bits payload
7803 * (MSB)
7804 *
7805 * Some architectures reverse the MSB and LSB in these representations.
7806 *
7807 * This representation is subject to change. Representation-agnostic SPI is:
7808 * objc-internal.h for class implementers.
7809 * objc-gdb.h for debuggers.
7810 **********************************************************************/
#if !SUPPORT_TAGGED_POINTERS

// Tagged pointers unsupported on this configuration: the debugger
// variables are still exported (all zero/nil) and the init/disable
// entry points become no-ops.

// These variables are always provided for debuggers.
uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned  objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
unsigned  objc_debug_taggedpointer_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_payload_rshift = 0;
Class objc_debug_taggedpointer_classes[1] = { nil };

uintptr_t objc_debug_taggedpointer_ext_mask = 0;
unsigned  objc_debug_taggedpointer_ext_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = 0;
Class objc_debug_taggedpointer_ext_classes[1] = { nil };

static void
disableTaggedPointers() { }

static void
initializeTaggedPointerObfuscator(void) { }
7834
#else

// The "slot" used in the class table and given to the debugger 
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.

// Debugger-visible mirrors of the tagged-pointer layout constants;
// the obfuscator is filled in with randomness at startup (see
// initializeTaggedPointerObfuscator below this chunk).
uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned  objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
unsigned  objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_classes is defined in objc-msg-*.s

uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
unsigned  objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s
7855
// Zeroes all debugger-visible tagged-pointer layout variables so
// debuggers treat every pointer as an ordinary object pointer.
// (The obfuscator and the class arrays are left untouched.)
static void
disableTaggedPointers()
{
    objc_debug_taggedpointer_mask = 0;
    objc_debug_taggedpointer_slot_shift = 0;
    objc_debug_taggedpointer_slot_mask = 0;
    objc_debug_taggedpointer_payload_lshift = 0;
    objc_debug_taggedpointer_payload_rshift = 0;

    objc_debug_taggedpointer_ext_mask = 0;
    objc_debug_taggedpointer_ext_slot_shift = 0;
    objc_debug_taggedpointer_ext_slot_mask = 0;
    objc_debug_taggedpointer_ext_payload_lshift = 0;
    objc_debug_taggedpointer_ext_payload_rshift = 0;
}
7871
7872
// Returns a pointer to the class's storage in the tagged class arrays.
// Assumes the tag is a valid basic tag.
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
    // XOR the tag with the corresponding bits of the obfuscator so
    // that the table layout is not predictable to an attacker.
    uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                >> _OBJC_TAG_INDEX_SHIFT)
                               & _OBJC_TAG_INDEX_MASK);
    uintptr_t obfuscatedTag = tag ^ tagObfuscator;
    // Array index in objc_tag_classes includes the tagged bit itself
#if SUPPORT_MSB_TAGGED_POINTERS
    // Tag bit is the MSB: it lands above the 3 tag-index bits (0x8).
    return &objc_tag_classes[0x8 | obfuscatedTag];
#else
    // Tag bit is the LSB: shift the index up and set the low bit.
    return &objc_tag_classes[(obfuscatedTag << 1) | 1];
#endif
}
7889
7890
// Returns a pointer to the class's storage in the tagged class arrays,
// or nil if the tag is out of range.
static Class *
classSlotForTagIndex(objc_tag_index_t tag)
{
    bool isBasicTag = (tag >= OBJC_TAG_First60BitPayload &&
                       tag <= OBJC_TAG_Last60BitPayload);
    if (isBasicTag) {
        return classSlotForBasicTagIndex(tag);
    }

    bool isExtendedTag = (tag >= OBJC_TAG_First52BitPayload &&
                          tag <= OBJC_TAG_Last52BitPayload);
    if (!isExtendedTag) {
        return nil;
    }

    // Extended tags index a separate table; the index is scrambled
    // with the relevant bits of the obfuscator.
    uintptr_t scramble = ((objc_debug_taggedpointer_obfuscator
                           >> _OBJC_TAG_EXT_INDEX_SHIFT)
                          & _OBJC_TAG_EXT_INDEX_MASK);
    int extIndex = tag - OBJC_TAG_First52BitPayload;
    return &objc_tag_ext_classes[extIndex ^ scramble];
}
7910
7911 /***********************************************************************
7912 * initializeTaggedPointerObfuscator
7913 * Initialize objc_debug_taggedpointer_obfuscator with randomness.
7914 *
7915 * The tagged pointer obfuscator is intended to make it more difficult
7916 * for an attacker to construct a particular object as a tagged pointer,
7917 * in the presence of a buffer overflow or other write control over some
7918 * memory. The obfuscator is XORed with the tagged pointers when setting
7919 * or retrieving payload values. They are filled with randomness on first
7920 * use.
7921 **********************************************************************/
7922 static void
7923 initializeTaggedPointerObfuscator(void)
7924 {
7925 if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) ||
7926 // Set the obfuscator to zero for apps linked against older SDKs,
7927 // in case they're relying on the tagged pointer representation.
7928 DisableTaggedPointerObfuscation) {
7929 objc_debug_taggedpointer_obfuscator = 0;
7930 } else {
7931 // Pull random data into the variable, then shift away all non-payload bits.
7932 arc4random_buf(&objc_debug_taggedpointer_obfuscator,
7933 sizeof(objc_debug_taggedpointer_obfuscator));
7934 objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;
7935 }
7936 }
7937
7938
7939 /***********************************************************************
7940 * _objc_registerTaggedPointerClass
7941 * Set the class to use for the given tagged pointer index.
7942 * Aborts if the tag is out of range, or if the tag is already
7943 * used by some other class.
7944 **********************************************************************/
7945 void
7946 _objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
7947 {
7948 if (objc_debug_taggedpointer_mask == 0) {
7949 _objc_fatal("tagged pointers are disabled");
7950 }
7951
7952 Class *slot = classSlotForTagIndex(tag);
7953 if (!slot) {
7954 _objc_fatal("tag index %u is invalid", (unsigned int)tag);
7955 }
7956
7957 Class oldCls = *slot;
7958
7959 if (cls && oldCls && cls != oldCls) {
7960 _objc_fatal("tag index %u used for two different classes "
7961 "(was %p %s, now %p %s)", tag,
7962 oldCls, oldCls->nameForLogging(),
7963 cls, cls->nameForLogging());
7964 }
7965
7966 *slot = cls;
7967
7968 // Store a placeholder class in the basic tag slot that is
7969 // reserved for the extended tag space, if it isn't set already.
7970 // Do this lazily when the first extended tag is registered so
7971 // that old debuggers characterize bogus pointers correctly more often.
7972 if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
7973 Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
7974 if (*extSlot == nil) {
7975 extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7976 *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7977 }
7978 }
7979 }
7980
7981
7982 /***********************************************************************
7983 * _objc_getClassForTag
7984 * Returns the class that is using the given tagged pointer tag.
7985 * Returns nil if no class is using that tag or the tag is out of range.
7986 **********************************************************************/
7987 Class
7988 _objc_getClassForTag(objc_tag_index_t tag)
7989 {
7990 Class *slot = classSlotForTagIndex(tag);
7991 if (slot) return *slot;
7992 else return nil;
7993 }
7994
7995 #endif
7996
7997
7998 #if SUPPORT_FIXUP
7999
8000 OBJC_EXTERN void objc_msgSend_fixup(void);
8001 OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
8002 OBJC_EXTERN void objc_msgSend_stret_fixup(void);
8003 OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
8004 #if defined(__i386__) || defined(__x86_64__)
8005 OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
8006 #endif
8007 #if defined(__x86_64__)
8008 OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
8009 #endif
8010
8011 OBJC_EXTERN void objc_msgSend_fixedup(void);
8012 OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
8013 OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
8014 OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
8015 #if defined(__i386__) || defined(__x86_64__)
8016 OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
8017 #endif
8018 #if defined(__x86_64__)
8019 OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
8020 #endif
8021
8022 /***********************************************************************
8023 * fixupMessageRef
8024 * Repairs an old vtable dispatch call site.
8025 * vtable dispatch itself is not supported.
8026 **********************************************************************/
8027 static void
8028 fixupMessageRef(message_ref_t *msg)
8029 {
8030 msg->sel = sel_registerName((const char *)msg->sel);
8031
8032 if (msg->imp == &objc_msgSend_fixup) {
8033 if (msg->sel == @selector(alloc)) {
8034 msg->imp = (IMP)&objc_alloc;
8035 } else if (msg->sel == @selector(allocWithZone:)) {
8036 msg->imp = (IMP)&objc_allocWithZone;
8037 } else if (msg->sel == @selector(retain)) {
8038 msg->imp = (IMP)&objc_retain;
8039 } else if (msg->sel == @selector(release)) {
8040 msg->imp = (IMP)&objc_release;
8041 } else if (msg->sel == @selector(autorelease)) {
8042 msg->imp = (IMP)&objc_autorelease;
8043 } else {
8044 msg->imp = &objc_msgSend_fixedup;
8045 }
8046 }
8047 else if (msg->imp == &objc_msgSendSuper2_fixup) {
8048 msg->imp = &objc_msgSendSuper2_fixedup;
8049 }
8050 else if (msg->imp == &objc_msgSend_stret_fixup) {
8051 msg->imp = &objc_msgSend_stret_fixedup;
8052 }
8053 else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
8054 msg->imp = &objc_msgSendSuper2_stret_fixedup;
8055 }
8056 #if defined(__i386__) || defined(__x86_64__)
8057 else if (msg->imp == &objc_msgSend_fpret_fixup) {
8058 msg->imp = &objc_msgSend_fpret_fixedup;
8059 }
8060 #endif
8061 #if defined(__x86_64__)
8062 else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
8063 msg->imp = &objc_msgSend_fp2ret_fixedup;
8064 }
8065 #endif
8066 }
8067
8068 // SUPPORT_FIXUP
8069 #endif
8070
8071
// ProKit SPI
// Rewires cls under newSuper and returns the previous superclass.
// Caller must hold runtimeLock; both classes must already be realized.
static Class setSuperclass(Class cls, Class newSuper)
{
    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    ASSERT(newSuper->isRealized());

    Class previousSuper = cls->superclass;

    // Detach cls and its metaclass from the old superclass's
    // subclass lists before rewiring.
    removeSubclass(previousSuper, cls);
    removeSubclass(previousSuper->ISA(), cls->ISA());

    // Point both the class and its metaclass at the new hierarchy.
    cls->superclass = newSuper;
    cls->ISA()->superclass = newSuper->ISA();
    addSubclass(newSuper, cls);
    addSubclass(newSuper->ISA(), cls->ISA());

    // Flush subclass's method caches.
    flushCaches(cls);
    flushCaches(cls->ISA());

    return previousSuper;
}
8097
8098
// Public wrapper: takes the runtime lock, then delegates to
// setSuperclass(). Returns the previous superclass.
Class class_setSuperclass(Class cls, Class newSuper)
{
    mutex_locker_t lock(runtimeLock);
    Class previousSuper = setSuperclass(cls, newSuper);
    return previousSuper;
}
8104
// One-time runtime bootstrap: initialize the global side tables used
// during image loading, before any classes are realized.
void runtime_init(void)
{
    objc::unattachedCategories.init(32);    // pre-sized to 32 entries
    objc::allocatedClasses.init();
}
8110
8111 // __OBJC2__
8112 #endif