// runtime/objc-runtime-new.mm — Apple objc4-818.2
1 /*
2 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.m
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "DenseMapExtras.h"
32 #include "objc-private.h"
33 #include "objc-runtime-new.h"
34 #include "objc-file.h"
35 #include "objc-zalloc.h"
36 #include <Block.h>
37 #include <objc/message.h>
38 #include <mach/shared_region.h>
39
// Treat an opaque Protocol* as the runtime's internal protocol_t.
#define newprotocol(p) ((protocol_t *)p)

// Forward declarations for functions defined later in this file.
static void disableTaggedPointers();
static void detach_class(Class cls, bool isMeta);
static void free_class(Class cls);
static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
template<typename T> static bool method_lists_contains_any(T *mlists, T *end,
        SEL sels[], size_t selcount);
static void flushCaches(Class cls, const char *func, bool (^predicate)(Class c));
static void initializeTaggedPointerObfuscator(void);
#if SUPPORT_FIXUP
static void fixupMessageRef(message_ref_t *msg);
#endif
static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);

// A category paired with the header (image) it was loaded from.
struct locstamped_category_t {
    category_t *cat;
    struct header_info *hi;
};
// Flag bits for attachCategories().
enum {
    ATTACH_CLASS = 1 << 0,
    ATTACH_METACLASS = 1 << 1,
    ATTACH_CLASS_AND_METACLASS = 1 << 2,
    ATTACH_EXISTING = 1 << 3,
};
static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);
69
70
/***********************************************************************
* Lock management
**********************************************************************/
// Global runtime locks. runtimeLock guards the class tables and most
// metadata mutation in this file (see the assertLocked() calls below).
mutex_t runtimeLock;
mutex_t selLock;
#if CONFIG_USE_CACHE_LOCK
mutex_t cacheUpdateLock;
#endif
recursive_mutex_t loadMethodLock;

/***********************************************************************
* Class structure decoding
**********************************************************************/

// Mask that extracts the class_rw_t data pointer from a class's bits.
const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;


/***********************************************************************
* Non-pointer isa decoding
**********************************************************************/
#if SUPPORT_INDEXED_ISA

// Indexed non-pointer isa.

// These are used to mask the ISA and see if it's got an index or not.
const uintptr_t objc_debug_indexed_isa_magic_mask = ISA_INDEX_MAGIC_MASK;
const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);

// Then these are used to extract the index from the ISA.
const uintptr_t objc_debug_indexed_isa_index_mask = ISA_INDEX_MASK;
const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;

// Also export the masks as linker absolute symbols — readable without
// dereferencing any data (presumably for debuggers/tools; confirm).
asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
    "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
    "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
    "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
    "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));


// And then we can use that index to get the class from this array. Note
// the size is provided so that clients can ensure the index they get is in
// bounds and not read off the end of the array.
// Defined in the objc-msg-*.s files
// const Class objc_indexed_classes[]

// When we don't have enough bits to store a class*, we can instead store an
// index in to this array. Classes are added here when they are realized.
// Note, an index of 0 is illegal.
uintptr_t objc_indexed_classes_count = 0;

// SUPPORT_INDEXED_ISA
#else
// not SUPPORT_INDEXED_ISA

// These variables exist but are all set to 0 so that they are ignored.
const uintptr_t objc_debug_indexed_isa_magic_mask = 0;
const uintptr_t objc_debug_indexed_isa_magic_value = 0;
const uintptr_t objc_debug_indexed_isa_index_mask = 0;
const uintptr_t objc_debug_indexed_isa_index_shift = 0;
Class objc_indexed_classes[1] = { nil };
uintptr_t objc_indexed_classes_count = 0;

// not SUPPORT_INDEXED_ISA
#endif


#if SUPPORT_PACKED_ISA

// Packed non-pointer isa.

asm("\n .globl _objc_absolute_packed_isa_class_mask" \
    "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));

// a better definition is
//   (uintptr_t)ptrauth_strip((void *)ISA_MASK, ISA_SIGNING_KEY)
// however we know that PAC uses bits outside of MACH_VM_MAX_ADDRESS
// so approximate the definition here to be constant

// coveringMask(n): the smallest all-low-ones mask m with (n & m) == n.
template <typename T>
static constexpr T coveringMask(T n) {
    for (T mask = 0; mask != ~T{0}; mask = (mask << 1) | 1) {
        if ((n & mask) == n) return mask;
    }
    return ~T{0};
}
const uintptr_t objc_debug_isa_class_mask = ISA_MASK & coveringMask(MACH_VM_MAX_ADDRESS - 1);

const uintptr_t objc_debug_isa_magic_mask = ISA_MAGIC_MASK;
const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);

// die if virtual address space bound goes up
STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0 ||
              ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);

// SUPPORT_PACKED_ISA
#else
// not SUPPORT_PACKED_ISA

// These variables exist but enforce pointer alignment only.
const uintptr_t objc_debug_isa_class_mask = (~WORD_MASK);
const uintptr_t objc_debug_isa_magic_mask = WORD_MASK;
const uintptr_t objc_debug_isa_magic_value = 0;

// not SUPPORT_PACKED_ISA
#endif


/***********************************************************************
* Swift marker bits
**********************************************************************/
const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;


/***********************************************************************
* allocatedClasses
* A table of all classes (and metaclasses) which have been allocated
* with objc_allocateClassPair.
**********************************************************************/
namespace objc {
static ExplicitInitDenseSet<Class> allocatedClasses;
}

/***********************************************************************
* _firstRealizedClass
* The root of all realized classes
**********************************************************************/
static Class _firstRealizedClass = nil;

/***********************************************************************
* didInitialAttachCategories
* Whether the initial attachment of categories present at startup has
* been done.
**********************************************************************/
static bool didInitialAttachCategories = false;

/***********************************************************************
* didCallDyldNotifyRegister
* Whether the call to _dyld_objc_notify_register has completed.
**********************************************************************/
bool didCallDyldNotifyRegister = false;


/***********************************************************************
* smallMethodIMPMap
* The map from small method pointers to replacement IMPs.
*
* Locking: runtimeLock must be held when accessing this map.
**********************************************************************/
namespace objc {
    static objc::LazyInitDenseMap<const method_t *, IMP> smallMethodIMPMap;
}
236
// Look up the replacement IMP for small method m in smallMethodIMPMap.
// Returns nullptr when the map has not been built or has no entry.
// Locking: runtimeLock must be held by the caller.
static IMP method_t_remappedImp_nolock(const method_t *m) {
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(false);
    if (map) {
        auto it = map->find(m);
        if (it != map->end())
            return it->second;
    }
    return nullptr;
}
247
// Returns the replacement IMP recorded for this small method, or
// nullptr. Takes runtimeLock itself when needsLock is true; otherwise
// the caller must already hold it.
IMP method_t::remappedImp(bool needsLock) const {
    ASSERT(isSmall());
    if (!needsLock)
        return method_t_remappedImp_nolock(this);
    mutex_locker_t guard(runtimeLock);
    return method_t_remappedImp_nolock(this);
}
257
// Records imp as the replacement implementation for this small method.
// Locking: runtimeLock must be held by the caller.
void method_t::remapImp(IMP imp) {
    ASSERT(isSmall());
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(true);  // create map on demand
    (*map)[this] = imp;
}
264
// Returns an objc_method_description for a small method. Descriptions
// are built lazily, cached in a function-local global map, and never
// freed. Takes runtimeLock.
objc_method_description *method_t::getSmallDescription() const {
    static objc::LazyInitDenseMap<const method_t *, objc_method_description *> map;

    mutex_locker_t guard(runtimeLock);

    auto &ptr = (*map.get(true))[this];
    if (!ptr) {
        ptr = (objc_method_description *)malloc(sizeof *ptr);
        ptr->name = name();
        ptr->types = (char *)types();
    }
    return ptr;
}
278
/*
  Low two bits of mlist->entsize is used as the fixed-up marker.
    Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
    (Protocol method lists are not sorted because of their extra parallel data)
    Runtime fixed-up method lists get 3.

  High two bits of protocol->flags is used as the fixed-up marker.
  PREOPTIMIZED VERSION:
    Protocols from shared cache are 1<<30.
    Runtime fixed-up protocols get 1<<30.
  UN-PREOPTIMIZED VERSION:
    Protocols from shared cache are 1<<30.
    Shared cache's fixups are not trusted.
    Runtime fixed-up protocols get 3<<30.
*/

static const uint32_t fixed_up_method_list = 3;
static const uint32_t uniqued_method_list = 1;
// The protocol values are mutable: disableSharedCacheOptimizations()
// below changes them when the shared cache's fixups are not trusted.
static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;
299
// Stop trusting the shared cache's precomputed protocol fixups and
// canonical-protocol markers (see the flag scheme comment above).
void
disableSharedCacheOptimizations(void)
{
    fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
    // It's safe to just set canonical protocol to 0 as we'll never call
    // clearIsCanonical() unless isCanonical() returned true, which can't happen
    // with a 0 mask
    canonical_protocol = 0;
}
309
// True if this list's selectors have been uniqued (low entsize flag bit;
// see the flag scheme comment above).
bool method_list_t::isUniqued() const {
    return (flags() & uniqued_method_list) != 0;
}
313
// True if this list is fully fixed up (uniqued and sorted, value 3).
bool method_list_t::isFixedUp() const {
    // Ignore any flags in the top bits, just look at the bottom two.
    return (flags() & 0x3) == fixed_up_method_list;
}
318
// Marks this list fixed up. Locking: runtimeLock must be held.
void method_list_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    // Preserve the entry size, install the fixed-up marker in the flag bits.
    entsizeAndFlags = entsize() | fixed_up_method_list;
}
324
// True if this protocol has been fixed up. Compares against the global
// fixed_up_protocol, which differs when shared cache fixups are distrusted.
bool protocol_t::isFixedUp() const {
    return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
}
328
// Marks this protocol fixed up. Locking: runtimeLock must be held.
void protocol_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}
334
// True if this is the canonical definition of the protocol.
// canonical_protocol is 0 after disableSharedCacheOptimizations(),
// making this always false in that configuration.
bool protocol_t::isCanonical() const {
    return (flags & canonical_protocol) != 0;
}
338
// Clears the canonical marker. Locking: runtimeLock must be held.
void protocol_t::clearIsCanonical() {
    runtimeLock.assertLocked();
    ASSERT(isCanonical());
    flags = flags & ~canonical_protocol;
}
344
345
// Returns the end iterator for method lists added by categories.
// The class's own base method list, when present, is always the last
// list; everything before it came from categories.
const method_list_t_authed_ptr<method_list_t> *method_array_t::endCategoryMethodLists(Class cls) const
{
    auto mlists = beginLists();
    auto mlistsEnd = endLists();

    if (mlists == mlistsEnd || !cls->data()->ro()->baseMethods())
    {
        // No methods, or no base methods.
        // Everything here is a category method.
        return mlistsEnd;
    }

    // Have base methods. Category methods are
    // everything except the last method list.
    return mlistsEnd - 1;
}
362
// Reinterpret a SEL's bits as its C-string name. Valid here because the
// selectors handled below are raw name pointers not yet registered.
static const char *sel_cname(SEL sel)
{
    return (const char *)(void *)sel;
}
367
368
// Byte size of a protocol_list_t: fixed header plus the trailing
// array of plist->count protocol_t pointers.
static size_t protocol_list_size(const protocol_list_t *plist)
{
    return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
}
373
374
// Frees p only when it is a live malloc-zone pointer; nil and pointers
// into non-malloc memory (e.g. read-only data) are silently ignored.
static void try_free(const void *p)
{
    if (!p) return;
    if (malloc_size(p) == 0) return;  // not a heap block we own
    free((void *)p);
}
379
380
// Callback type invoked when a class is copied (see alloc_class_for_subclass).
using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
                                       Class _Nonnull newClass);
// Normally there's only one handler registered.
static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;

// Registers a class-copy fixup handler. Takes runtimeLock. Handlers
// are never removed.
void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
    (Class _Nonnull oldClass, Class _Nonnull newClass)) {
    mutex_locker_t lock(runtimeLock);

    classCopyFixupHandlers.append(newFixupHandler);
}
392
/***********************************************************************
* alloc_class_for_subclass
* Allocates storage for a new class with supercls as its superclass.
* Plain ObjC superclass (or nil): zero-filled objc_class + extraBytes.
* Swift superclass: the entire superclass object (prefix and suffix
* included) is copied so the subclass inherits Swift's extra data,
* then the ObjC portion and Swift description are zeroed.
**********************************************************************/
static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
    if (!supercls || !supercls->isAnySwift()) {
        return _calloc_class(sizeof(objc_class) + extraBytes);
    }

    // Superclass is a Swift class. New subclass must duplicate its extra bits.

    // Allocate the new class, with space for super's prefix and suffix
    // and self's extraBytes.
    swift_class_t *swiftSupercls = (swift_class_t *)supercls;
    size_t superSize = swiftSupercls->classSize;
    void *superBits = swiftSupercls->baseAddress();
    // NOTE(review): malloc result is not checked before the memcpy below.
    void *bits = malloc(superSize + extraBytes);

    // Copy all of the superclass's data to the new class.
    memcpy(bits, superBits, superSize);

    // Erase the objc data and the Swift description in the new class.
    swift_class_t *swcls = (swift_class_t *)
        ((uint8_t *)bits + swiftSupercls->classAddressOffset);
    bzero(swcls, sizeof(objc_class));
    swcls->description = nil;

    // Let registered fixup handlers patch the copied class.
    for (auto handler : classCopyFixupHandlers) {
        handler(supercls, (Class)swcls);
    }

    // Mark this class as Swift-enhanced.
    if (supercls->isSwiftStable()) {
        swcls->bits.setIsSwiftStable();
    }
    if (supercls->isSwiftLegacy()) {
        swcls->bits.setIsSwiftLegacy();
    }

    return (Class)swcls;
}
432
433
/***********************************************************************
* object_getIndexedIvars.
* Returns a pointer to the extra bytes allocated beyond an object's
* declared storage (nil for tagged pointers and nil).
**********************************************************************/
void *object_getIndexedIvars(id obj)
{
    uint8_t *base = (uint8_t *)obj;

    // Tagged pointers and nil have no backing storage to index into.
    if (obj->isTaggedPointerOrNil()) return nil;

    // Ordinary instance: extra bytes start after the aligned ivars.
    if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();

    Class cls = (Class)obj;
    if (!cls->isAnySwift()) return base + sizeof(objc_class);

    // Swift class object: extra bytes follow the whole Swift class,
    // whose base is classAddressOffset bytes before the Class pointer.
    swift_class_t *swcls = (swift_class_t *)cls;
    return base - swcls->classAddressOffset + word_align(swcls->classSize);
}
451
452
/***********************************************************************
* make_ro_writeable
* Reallocates rw->ro if necessary to make it writeable.
* The RW_COPIED_RO flag records that ro is now a heap copy, so the
* duplication only ever happens once per class.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    runtimeLock.assertLocked();

    if (rw->flags & RW_COPIED_RO) {
        // already writeable, do nothing
    } else {
        rw->set_ro(rw->ro()->duplicate());
        rw->flags |= RW_COPIED_RO;
    }
    return const_cast<class_ro_t *>(rw->ro());
}
470
471
/***********************************************************************
* dataSegmentsContain
* Returns true if the given address lies within a data segment in any
* loaded image.
**********************************************************************/
NEVER_INLINE
static bool
dataSegmentsContain(Class cls)
{
    uint32_t index;
    if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
        // if the class is realized (hence has a class_rw_t),
        // memorize where we found the range so isKnownClass()'s
        // fastpath can skip the search next time
        if (cls->isRealized()) {
            cls->data()->witness = (uint16_t)index;
        }
        return true;
    }
    return false;
}
492
493
/***********************************************************************
* isKnownClass
* Return true if the class is known to the runtime (located within the
* shared cache, within the data segment of a loaded image, or has been
* allocated with obj_allocateClassPair).
*
* The result of this operation is cached on the class in a "witness"
* value that is cheaply checked in the fastpath.
**********************************************************************/
ALWAYS_INLINE
static bool
isKnownClass(Class cls)
{
    // Fastpath: the cached witness index still matches a data segment.
    if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
        return true;
    }
    // Slow path: dynamically allocated classes, then a full segment search
    // (which re-caches the witness on success).
    auto &set = objc::allocatedClasses.get();
    return set.find(cls) != set.end() || dataSegmentsContain(cls);
}
513
514
/***********************************************************************
* addClassTableEntry
* Add a class to the table of all classes. If addMeta is true,
* automatically adds the metaclass of the class as well.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
addClassTableEntry(Class cls, bool addMeta = true)
{
    runtimeLock.assertLocked();

    // This class is allowed to be a known class via the shared cache or via
    // data segments, but it is not allowed to be in the dynamic table already.
    auto &set = objc::allocatedClasses.get();

    ASSERT(set.find(cls) == set.end());

    if (!isKnownClass(cls))
        set.insert(cls);
    if (addMeta)
        // Recurse once for the metaclass (with addMeta=false to stop there).
        addClassTableEntry(cls->ISA(), false);
}
537
538
/***********************************************************************
* checkIsKnownClass
* Checks the given class against the list of all known classes. Dies
* with a fatal error if the class is not known.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
ALWAYS_INLINE
static void
checkIsKnownClass(Class cls)
{
    if (slowpath(!isKnownClass(cls))) {
        _objc_fatal("Attempt to use unknown class %p.", cls);
    }
}
553
/***********************************************************************
* classNSObject
* Returns class NSObject (statically linked into the runtime).
* Locking: none
**********************************************************************/
static Class classNSObject(void)
{
    extern objc_class OBJC_CLASS_$_NSObject;
    return (Class)&OBJC_CLASS_$_NSObject;
}
564
// Returns the NSObject metaclass. Locking: none.
static Class metaclassNSObject(void)
{
    extern objc_class OBJC_METACLASS_$_NSObject;
    return (Class)&OBJC_METACLASS_$_NSObject;
}
570
/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
__attribute__((cold, noinline))
static void
printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
{
    uint32_t c;
    bool isMeta = cls->isMetaClass();

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats_count; c++) {
        category_t *cat = cats_list[c].cat;

        method_list_t *mlist = cat->methodsForMeta(isMeta);
        if (!mlist) continue;

        for (const auto& meth : *mlist) {
            SEL s = sel_registerName(sel_cname(meth.name()));

            // Search for replaced methods in method lookup order.
            // Complain about the first duplicate only.

            // Look for method in earlier categories
            for (uint32_t c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats_list[c2].cat;

                const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
                if (!mlist2) continue;

                for (const auto& meth2 : *mlist2) {
                    SEL s2 = sel_registerName(sel_cname(meth2.name()));
                    if (s == s2) {
                        logReplacedMethod(cls->nameForLogging(), s,
                                          cls->isMetaClass(), cat->name,
                                          meth2.imp(false), meth.imp(false));
                        goto complained;
                    }
                }
            }

            // Look for method in cls
            for (const auto& meth2 : cls->data()->methods()) {
                SEL s2 = sel_registerName(sel_cname(meth2.name()));
                if (s == s2) {
                    logReplacedMethod(cls->nameForLogging(), s,
                                      cls->isMetaClass(), cat->name,
                                      meth2.imp(false), meth.imp(false));
                    goto complained;
                }
            }

        // goto target: only the first override per selector is reported.
        complained:
            ;
        }
    }
}
632
633
/***********************************************************************
* unreasonableClassCount
* Provides an upper bound for any iteration of classes,
* to prevent spins when runtime metadata is corrupted.
**********************************************************************/
static unsigned unreasonableClassCount()
{
    runtimeLock.assertLocked();

    int base = NXCountMapTable(gdb_objc_realized_classes) +
        getPreoptimizedClassUnreasonableCount();

    // Provide lots of slack here. Some iterations touch metaclasses too.
    // Some iterations backtrack (like realized class iteration).
    // We don't need an efficient bound, merely one that prevents spins.
    return (base + 1) * 16;
}
651
652
/***********************************************************************
* Class enumerators
* The passed in block returns `false` if subclasses can be skipped
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static inline void
foreach_realized_class_and_subclass_2(Class top, unsigned &count,
                                      bool skip_metaclass,
                                      bool (^code)(Class) __attribute((noescape)))
{
    Class cls = top;

    runtimeLock.assertLocked();
    ASSERT(top);

    // Depth-first traversal of the tree rooted at top, following the
    // firstSubclass/nextSiblingClass links. `count` is decremented on
    // every step so a cyclic (corrupted) class list dies instead of
    // spinning forever.
    while (1) {
        if (--count == 0) {
            _objc_fatal("Memory corruption in class list.");
        }

        bool skip_subclasses;

        if (skip_metaclass && cls->isMetaClass()) {
            skip_subclasses = true;
        } else {
            skip_subclasses = !code(cls);
        }

        if (!skip_subclasses && cls->data()->firstSubclass) {
            cls = cls->data()->firstSubclass;
        } else {
            // No (wanted) subclasses: backtrack to the next sibling,
            // walking up through superclasses as needed.
            while (!cls->data()->nextSiblingClass && cls != top) {
                cls = cls->getSuperclass();
                if (--count == 0) {
                    _objc_fatal("Memory corruption in class list.");
                }
            }
            if (cls == top) break;
            cls = cls->data()->nextSiblingClass;
        }
    }
}
695
// Enumerates a class and all of its realized subclasses.
// Locking: runtimeLock must be held by the caller.
static void
foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    foreach_realized_class_and_subclass_2(top, count, false, code);
}
704
// Enumerates all realized classes and metaclasses, starting from each
// root in the _firstRealizedClass sibling chain. The corruption bound
// `count` is shared across all roots.
// Locking: runtimeLock must be held by the caller.
static void
foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, false, code);
    }
}
718
// Enumerates all realized classes (ignoring metaclasses): same walk as
// above but with skip_metaclass set.
// Locking: runtimeLock must be held by the caller.
static void
foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, true, code);
    }
}
732
733
734 /***********************************************************************
735 * Method Scanners / Optimization tracking
736 * Implementation of scanning for various implementations of methods.
737 **********************************************************************/
738
739 namespace objc {
740
// The selector bundles tracked by the scanners below
// (e.g. AWZ = +alloc/+allocWithZone:, see AWZScanner).
enum SelectorBundle {
    AWZ,
    RR,
    Core,
};
746
747 namespace scanner {
748
// The current state of NSObject swizzling for every scanner
//
// It allows for cheap checks of global swizzles, and also lets
// things like IMP Swizzling before NSObject has been initialized
// to be remembered, as setInitialized() would miss these.
//
// Every pair of bits describes a SelectorBundle.
// even bits: is NSObject class swizzled for this bundle
// odd bits: is NSObject meta class swizzled for this bundle
// (See swizzlingBit() for the exact bit layout.)
static uintptr_t NSObjectSwizzledMask;
759
// Bit in NSObjectSwizzledMask for (bundle, isMeta):
// even bits = class side, odd bits = metaclass side.
static ALWAYS_INLINE uintptr_t
swizzlingBit(SelectorBundle bundle, bool isMeta)
{
    unsigned shift = 2 * (unsigned)bundle + (isMeta ? 1 : 0);
    return uintptr_t(1) << shift;
}
765
// Logs that cls has (or inherited) a custom implementation for the
// given selector bundle.
static void __attribute__((cold, noinline))
printCustom(Class cls, SelectorBundle bundle, bool inherited)
{
    static char const * const SelectorBundleName[] = {
        [AWZ] = "CUSTOM AWZ",
        [RR] = "CUSTOM RR",
        [Core] = "CUSTOM Core",
    };

    _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
                 cls->nameForLogging(),
                 cls->isMetaClass() ? " (meta)" : "",
                 inherited ? " (inherited)" : "");
}

// Which side(s) of a class a scanner applies to.
enum class Scope { Instances, Classes, Both };
782
// CRTP mixin implementing the shared scanning machinery. Traits supplies
// isCustom/setCustom/setDefault/isInterestingSelector/scanMethodLists;
// Bundle selects the NSObjectSwizzledMask bits; ShouldPrint enables
// logging; Domain restricts scanning to instances, classes, or both.
template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
class Mixin {

    // work around compiler being broken with templates using Class/objc_class,
    // probably some weird confusion with Class being builtin
    ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
        return (objc_class *)cls;
    }

    // Mark cls and all of its realized subclasses as custom, stopping
    // at subtrees that are uninitialized or already custom.
    static void
    setCustomRecursively(Class cls, bool inherited = false)
    {
        foreach_realized_class_and_subclass(cls, [=](Class c){
            if (c != cls && !as_objc_class(c)->isInitialized()) {
                // Subclass not yet initialized. Wait for setInitialized() to do it
                return false;
            }
            if (Traits::isCustom(c)) {
                return false;
            }
            Traits::setCustom(c);
            if (ShouldPrint) {
                // NOTE(review): logs the root `cls` even when the class
                // being marked is the subclass `c` — confirm intended.
                printCustom(cls, Bundle, inherited || c != cls);
            }
            return true;
        });
    }

    static bool
    isNSObjectSwizzled(bool isMeta)
    {
        return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
    }

    // Record that NSObject (class or metaclass side) was swizzled for
    // this bundle, and propagate to subclasses if already initialized.
    static void
    setNSObjectSwizzled(Class NSOClass, bool isMeta)
    {
        NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
        if (as_objc_class(NSOClass)->isInitialized()) {
            setCustomRecursively(NSOClass);
        }
    }

    // Slow path for scanChangedMethod() when the owning class is not
    // known: check whether meth belongs to NSObject's class or
    // metaclass method lists.
    static void
    scanChangedMethodForUnknownClass(const method_t *meth)
    {
        Class cls;

        cls = classNSObject();
        if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, NO);
                    break;
                }
            }
        }

        cls = metaclassNSObject();
        if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, YES);
                    break;
                }
            }
        }
    }

    // Decide custom/default for a class being added or initialized.
    static void
    scanAddedClassImpl(Class cls, bool isMeta)
    {
        bool setCustom = NO, inherited = NO;

        if (isNSObjectSwizzled(isMeta)) {
            setCustom = YES;
        } else if (Traits::knownClassHasDefaultImpl(cls, isMeta)) {
            // This class is known to have the default implementations,
            // but we need to check categories.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
                                                methods.endCategoryMethodLists(cls));
        } else if (!isMeta && !as_objc_class(cls)->getSuperclass()) {
            // Custom Root class
            setCustom = YES;
        } else if (Traits::isCustom(as_objc_class(cls)->getSuperclass())) {
            // Superclass is custom, therefore we are too.
            setCustom = YES;
            inherited = YES;
        } else {
            // Not NSObject.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginLists(),
                                                methods.endLists());
        }
        if (slowpath(setCustom)) {
            if (ShouldPrint) printCustom(cls, Bundle, inherited);
        } else {
            Traits::setDefault(cls);
        }
    }

public:
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        // Typically only NSObject has default implementations.
        // Allow this to be extended by overriding (to allow
        // SwiftObject, for example).
        Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
        return cls == NSOClass;
    }

    // Scan a class that is about to be marked Initialized for particular
    // bundles of selectors, and mark the class and its children
    // accordingly.
    //
    // This also handles inheriting properties from its superclass.
    //
    // Caller: objc_class::setInitialized()
    static void
    scanInitializedClass(Class cls, Class metacls)
    {
        if (Domain != Scope::Classes) {
            scanAddedClassImpl(cls, false);
        }
        if (Domain != Scope::Instances) {
            scanAddedClassImpl(metacls, true);
        }
    }

    // Inherit various properties from the superclass when a class
    // is being added to the graph.
    //
    // Caller: addSubclass()
    static void
    scanAddedSubClass(Class subcls, Class supercls)
    {
        if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
            setCustomRecursively(subcls, true);
        }
    }

    // Scan Method lists for selectors that would override things
    // in a Bundle.
    //
    // This is used to detect when categories override problematic selectors
    // are injected in a class after it has been initialized.
    //
    // Caller: prepareMethodLists()
    static void
    scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
    {
        if (slowpath(Traits::isCustom(cls))) {
            return;
        }
        if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
            setCustomRecursively(cls);
        }
    }

    // Handle IMP Swizzling (the IMP for an existing method being changed).
    //
    // In almost all cases, IMP swizzling does not affect custom bits.
    // Custom search will already find the method whether or not
    // it is swizzled, so it does not transition from non-custom to custom.
    //
    // The only cases where IMP swizzling can affect the custom bits is
    // if the swizzled method is one of the methods that is assumed to be
    // non-custom. These special cases are listed in setInitialized().
    // We look for such cases here.
    //
    // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
    static void
    scanChangedMethod(Class cls, const method_t *meth)
    {
        if (fastpath(!Traits::isInterestingSelector(meth->name()))) {
            return;
        }

        if (cls) {
            bool isMeta = as_objc_class(cls)->isMetaClass();
            if (isMeta && Domain != Scope::Instances) {
                if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
            if (!isMeta && Domain != Scope::Classes) {
                if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
        } else {
            // We're called from method_exchangeImplementations, only NSObject
            // class and metaclass may be problematic (exchanging the default
            // builtin IMP of an interesting selector, is a swizzling that,
            // may flip our scanned property. For other classes, the previous
            // value had already flipped the property).
            //
            // However, as we don't know the class, we need to scan all of
            // NSObject class and metaclass methods (this is SLOW).
            scanChangedMethodForUnknownClass(meth);
        }
    }
};
986
987 } // namespace scanner
988
// AWZ methods: +alloc / +allocWithZone:
//
// Tracks, class-side only (scanner::Scope::Classes), whether a class
// overrides +alloc or +allocWithZone:.
struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
    static bool isCustom(Class cls) {
        return cls->hasCustomAWZ();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomAWZ();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultAWZ();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(alloc) || sel == @selector(allocWithZone:);
    }
    // Returns true if any method list in [mlists, end) implements
    // alloc or allocWithZone:.
    template<typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
        return method_lists_contains_any(mlists, end, sels, 2);
    }
};
1009
// Retain/Release methods that are extremely rarely overridden
//
// retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
// Without nonpointer isa, only instance-side RR methods are tracked.
#if !SUPPORT_NONPOINTER_ISA
, scanner::Scope::Instances
#endif
> {
    static bool isCustom(Class cls) {
        return cls->hasCustomRR();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomRR();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultRR();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(retain) ||
               sel == @selector(release) ||
               sel == @selector(autorelease) ||
               sel == @selector(_tryRetain) ||
               sel == @selector(_isDeallocating) ||
               sel == @selector(retainCount) ||
               sel == @selector(allowsWeakReference) ||
               sel == @selector(retainWeakReference);
    }
    // Returns true if any method list in [mlists, end) implements
    // one of the tracked retain/release-family selectors.
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[8] = {
            @selector(retain),
            @selector(release),
            @selector(autorelease),
            @selector(_tryRetain),
            @selector(_isDeallocating),
            @selector(retainCount),
            @selector(allowsWeakReference),
            @selector(retainWeakReference),
        };
        return method_lists_contains_any(mlists, end, sels, 8);
    }
};
1053
// Core NSObject methods that are extremely rarely overridden
//
// +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector
struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
    // In addition to the base scanner's known classes, treat the Swift
    // root class _TtCs12_SwiftObject (and its metaclass) as having the
    // default core implementations.
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        if (scanner::Mixin<CoreScanner, Core, PrintCustomCore>::knownClassHasDefaultImpl(cls, isMeta))
            return true;
        if ((cls->isRootClass() || cls->isRootMetaclass())
            && strcmp(cls->mangledName(), "_TtCs12_SwiftObject") == 0)
            return true;

        return false;
    }

    static bool isCustom(Class cls) {
        return cls->hasCustomCore();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomCore();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultCore();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(new) ||
               sel == @selector(self) ||
               sel == @selector(class) ||
               sel == @selector(isKindOfClass:) ||
               sel == @selector(respondsToSelector:);
    }
    // Returns true if any method list in [mlists, end) implements
    // one of the tracked core selectors.
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[5] = {
            @selector(new),
            @selector(self),
            @selector(class),
            @selector(isKindOfClass:),
            @selector(respondsToSelector:)
        };
        return method_lists_contains_any(mlists, end, sels, 5);
    }
};
1096
1097 class category_list : nocopy_t {
1098 union {
1099 locstamped_category_t lc;
1100 struct {
1101 locstamped_category_t *array;
1102 // this aliases with locstamped_category_t::hi
1103 // which is an aliased pointer
1104 uint32_t is_array : 1;
1105 uint32_t count : 31;
1106 uint32_t size : 32;
1107 };
1108 } _u;
1109
1110 public:
1111 category_list() : _u{{nullptr, nullptr}} { }
1112 category_list(locstamped_category_t lc) : _u{{lc}} { }
1113 category_list(category_list &&other) : category_list() {
1114 std::swap(_u, other._u);
1115 }
1116 ~category_list()
1117 {
1118 if (_u.is_array) {
1119 free(_u.array);
1120 }
1121 }
1122
1123 uint32_t count() const
1124 {
1125 if (_u.is_array) return _u.count;
1126 return _u.lc.cat ? 1 : 0;
1127 }
1128
1129 uint32_t arrayByteSize(uint32_t size) const
1130 {
1131 return sizeof(locstamped_category_t) * size;
1132 }
1133
1134 const locstamped_category_t *array() const
1135 {
1136 return _u.is_array ? _u.array : &_u.lc;
1137 }
1138
1139 void append(locstamped_category_t lc)
1140 {
1141 if (_u.is_array) {
1142 if (_u.count == _u.size) {
1143 // Have a typical malloc growth:
1144 // - size <= 8: grow by 2
1145 // - size <= 16: grow by 4
1146 // - size <= 32: grow by 8
1147 // ... etc
1148 _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
1149 _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
1150 }
1151 _u.array[_u.count++] = lc;
1152 } else if (_u.lc.cat == NULL) {
1153 _u.lc = lc;
1154 } else {
1155 locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
1156 arr[0] = _u.lc;
1157 arr[1] = lc;
1158
1159 _u.array = arr;
1160 _u.is_array = true;
1161 _u.count = 2;
1162 _u.size = 2;
1163 }
1164 }
1165
1166 void erase(category_t *cat)
1167 {
1168 if (_u.is_array) {
1169 for (int i = 0; i < _u.count; i++) {
1170 if (_u.array[i].cat == cat) {
1171 // shift entries to preserve list order
1172 memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
1173 return;
1174 }
1175 }
1176 } else if (_u.lc.cat == cat) {
1177 _u.lc.cat = NULL;
1178 _u.lc.hi = NULL;
1179 }
1180 }
1181 };
1182
1183 class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
1184 {
1185 public:
1186 void addForClass(locstamped_category_t lc, Class cls)
1187 {
1188 runtimeLock.assertLocked();
1189
1190 if (slowpath(PrintConnecting)) {
1191 _objc_inform("CLASS: found category %c%s(%s)",
1192 cls->isMetaClassMaybeUnrealized() ? '+' : '-',
1193 cls->nameForLogging(), lc.cat->name);
1194 }
1195
1196 auto result = get().try_emplace(cls, lc);
1197 if (!result.second) {
1198 result.first->second.append(lc);
1199 }
1200 }
1201
1202 void attachToClass(Class cls, Class previously, int flags)
1203 {
1204 runtimeLock.assertLocked();
1205 ASSERT((flags & ATTACH_CLASS) ||
1206 (flags & ATTACH_METACLASS) ||
1207 (flags & ATTACH_CLASS_AND_METACLASS));
1208
1209 auto &map = get();
1210 auto it = map.find(previously);
1211
1212 if (it != map.end()) {
1213 category_list &list = it->second;
1214 if (flags & ATTACH_CLASS_AND_METACLASS) {
1215 int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
1216 attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
1217 attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
1218 } else {
1219 attachCategories(cls, list.array(), list.count(), flags);
1220 }
1221 map.erase(it);
1222 }
1223 }
1224
1225 void eraseCategoryForClass(category_t *cat, Class cls)
1226 {
1227 runtimeLock.assertLocked();
1228
1229 auto &map = get();
1230 auto it = map.find(cls);
1231 if (it != map.end()) {
1232 category_list &list = it->second;
1233 list.erase(cat);
1234 if (list.count() == 0) {
1235 map.erase(it);
1236 }
1237 }
1238 }
1239
1240 void eraseClass(Class cls)
1241 {
1242 runtimeLock.assertLocked();
1243
1244 get().erase(cls);
1245 }
1246 };
1247
1248 static UnattachedCategories unattachedCategories;
1249
1250 } // namespace objc
1251
// Returns true if cls was compiled into an image loaded as a bundle
// (RO_FROM_BUNDLE set in its class_ro_t flags).
static bool isBundleClass(Class cls)
{
    return cls->data()->ro()->flags & RO_FROM_BUNDLE;
}
1256
1257
// Unique (register) every selector in mlist and optionally sort the
// list by selector address.
//
// bundleCopy is forwarded to sel_registerNameNoLock when uniquing
// selector names (selector string handling for bundle images).
// sort requests sorting by selector address; small (immutable) lists
// and lists with a nonstandard entry size are never sorted.
// The list is marked fixed-up afterwards unless it is a small list.
static void
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
    runtimeLock.assertLocked();
    ASSERT(!mlist->isFixedUp());

    // fixme lock less in attachMethodLists ?
    // dyld3 may have already uniqued, but not sorted, the list
    if (!mlist->isUniqued()) {
        mutex_locker_t lock(selLock);

        // Unique selectors in list.
        for (auto& meth : *mlist) {
            const char *name = sel_cname(meth.name());
            meth.setName(sel_registerNameNoLock(name, bundleCopy));
        }
    }

    // Sort by selector address.
    // Don't try to sort small lists, as they're immutable.
    // Don't try to sort big lists of nonstandard size, as stable_sort
    // won't copy the entries properly.
    if (sort && !mlist->isSmallList() && mlist->entsize() == method_t::bigSize) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(&mlist->begin()->big(), &mlist->end()->big(), sorter);
    }

    // Mark method list as uniqued and sorted.
    // Can't mark small lists, since they're immutable.
    if (!mlist->isSmallList()) {
        mlist->setFixedUp();
    }
}
1291
1292
// Prepare method lists that are about to be attached to cls:
// fix them up (unique + sort) and update the class's method-tracking
// state (preopt caches, RR/AWZ/Core custom bits).
//
// baseMethods:       the lists are the class's own ro base methods
// methodsFromBundle: the lists come from an image loaded as a bundle
// why:               reason string forwarded to the preopt-cache
//                    invalidation paths
static void
prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
                   bool baseMethods, bool methodsFromBundle, const char *why)
{
    runtimeLock.assertLocked();

    if (addedCount == 0) return;

    // There exist RR/AWZ/Core special cases for some class's base methods.
    // But this code should never need to scan base methods for RR/AWZ/Core:
    // default RR/AWZ/Core cannot be set before setInitialized().
    // Therefore we need not handle any special cases here.
    if (baseMethods) {
        ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
    } else if (cls->cache.isConstantOptimizedCache()) {
        // Adding methods invalidates the shared-cache constant cache
        // for this class and its subclasses.
        cls->setDisallowPreoptCachesRecursively(why);
    } else if (cls->allowsPreoptInlinedSels()) {
#if CONFIG_USE_PREOPT_CACHES
        // If any added method matches a selector the shared cache
        // inlined, inlined sels can no longer be trusted in this subtree.
        SEL *sels = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_START];
        SEL *sels_end = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_END];
        if (method_lists_contains_any(addedLists, addedLists + addedCount, sels, sels_end - sels)) {
            cls->setDisallowPreoptInlinedSelsRecursively(why);
        }
#endif
    }

    // Add method lists to array.
    // Reallocate un-fixed method lists.
    // The new methods are PREPENDED to the method list array.

    for (int i = 0; i < addedCount; i++) {
        method_list_t *mlist = addedLists[i];
        ASSERT(mlist);

        // Fixup selectors if necessary
        if (!mlist->isFixedUp()) {
            fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
        }
    }

    // If the class is initialized, then scan for method implementations
    // tracked by the class's flags. If it's not initialized yet,
    // then objc_class::setInitialized() will take care of it.
    if (cls->isInitialized()) {
        objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
    }
}
1342
// Allocate and install a class_rw_ext_t for this class, seeding it
// with the class's base method/property/protocol lists from ro.
// deepCopy: also duplicate the base method list (used by class
// duplication; see comment below about property/protocol lists).
class_rw_ext_t *
class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy)
{
    runtimeLock.assertLocked();

    auto rwe = objc::zalloc<class_rw_ext_t>();

    // version is 7 for metaclasses, 0 for classes.
    rwe->version = (ro->flags & RO_META) ? 7 : 0;

    method_list_t *list = ro->baseMethods();
    if (list) {
        if (deepCopy) list = list->duplicate();
        rwe->methods.attachLists(&list, 1);
    }

    // See comments in objc_duplicateClass
    // property lists and protocol lists historically
    // have not been deep-copied
    //
    // This is probably wrong and ought to be fixed some day
    property_list_t *proplist = ro->baseProperties;
    if (proplist) {
        rwe->properties.attachLists(&proplist, 1);
    }

    protocol_list_t *protolist = ro->baseProtocols;
    if (protolist) {
        rwe->protocols.attachLists(&protolist, 1);
    }

    set_ro_or_rwe(rwe, ro);
    return rwe;
}
1376
// Attach method lists and properties and protocols from categories to a class.
// Assumes the categories in cats are all loaded and sorted by load order,
// oldest categories first.
static void
attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
                 int flags)
{
    if (slowpath(PrintReplacedMethods)) {
        printReplacements(cls, cats_list, cats_count);
    }
    if (slowpath(PrintConnecting)) {
        _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
                     cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
                     cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
    }

    /*
     * Only a few classes have more than 64 categories during launch.
     * This uses a little stack, and avoids malloc.
     *
     * Categories must be added in the proper order, which is back
     * to front. To do that with the chunking, we iterate cats_list
     * from front to back, build up the local buffers backwards,
     * and call attachLists on the chunks. attachLists prepends the
     * lists, so the final result is in the expected order.
     */
    constexpr uint32_t ATTACH_BUFSIZ = 64;
    method_list_t *mlists[ATTACH_BUFSIZ];
    property_list_t *proplists[ATTACH_BUFSIZ];
    protocol_list_t *protolists[ATTACH_BUFSIZ];

    uint32_t mcount = 0;
    uint32_t propcount = 0;
    uint32_t protocount = 0;
    bool fromBundle = NO;
    bool isMeta = (flags & ATTACH_METACLASS);
    auto rwe = cls->data()->extAllocIfNeeded();

    for (uint32_t i = 0; i < cats_count; i++) {
        auto& entry = cats_list[i];

        method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
        if (mlist) {
            // Flush a full buffer before adding more.
            if (mcount == ATTACH_BUFSIZ) {
                prepareMethodLists(cls, mlists, mcount, NO, fromBundle, __func__);
                rwe->methods.attachLists(mlists, mcount);
                mcount = 0;
            }
            // Fill the buffer from the back (see ordering comment above).
            mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
            fromBundle |= entry.hi->isBundle();
        }

        property_list_t *proplist =
            entry.cat->propertiesForMeta(isMeta, entry.hi);
        if (proplist) {
            if (propcount == ATTACH_BUFSIZ) {
                rwe->properties.attachLists(proplists, propcount);
                propcount = 0;
            }
            proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
        }

        protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
        if (protolist) {
            if (protocount == ATTACH_BUFSIZ) {
                rwe->protocols.attachLists(protolists, protocount);
                protocount = 0;
            }
            protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
        }
    }

    // Attach whatever remains in the (back-filled) buffers.
    if (mcount > 0) {
        prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount,
                           NO, fromBundle, __func__);
        rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
        if (flags & ATTACH_EXISTING) {
            flushCaches(cls, __func__, [](Class c){
                // constant caches have been dealt with in prepareMethodLists
                // if the class still is constant here, it's fine to keep
                return !c->cache.isConstantOptimizedCache();
            });
        }
    }

    rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);

    rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
}
1466
1467
/***********************************************************************
* methodizeClass
* Fixes up cls's method list, protocol list, and property list.
* Attaches any outstanding categories.
* previously: the class's old location, if it was relocated; pending
*             categories registered under that old class are attached.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void methodizeClass(Class cls, Class previously)
{
    runtimeLock.assertLocked();

    bool isMeta = cls->isMetaClass();
    auto rw = cls->data();
    auto ro = rw->ro();
    auto rwe = rw->ext();

    // Methodizing for the first time
    if (PrintConnecting) {
        _objc_inform("CLASS: methodizing class '%s' %s",
                     cls->nameForLogging(), isMeta ? "(meta)" : "");
    }

    // Install methods and properties that the class implements itself.
    // rwe is only present when the class already has extended rw data;
    // otherwise the ro base lists are used as-is.
    method_list_t *list = ro->baseMethods();
    if (list) {
        prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls), nullptr);
        if (rwe) rwe->methods.attachLists(&list, 1);
    }

    property_list_t *proplist = ro->baseProperties;
    if (rwe && proplist) {
        rwe->properties.attachLists(&proplist, 1);
    }

    protocol_list_t *protolist = ro->baseProtocols;
    if (rwe && protolist) {
        rwe->protocols.attachLists(&protolist, 1);
    }

    // Root classes get bonus method implementations if they don't have
    // them already. These apply before category replacements.
    if (cls->isRootMetaclass()) {
        // root metaclass
        addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
    }

    // Attach categories.
    if (previously) {
        if (isMeta) {
            objc::unattachedCategories.attachToClass(cls, previously,
                                                     ATTACH_METACLASS);
        } else {
            // When a class relocates, categories with class methods
            // may be registered on the class itself rather than on
            // the metaclass. Tell attachToClass to look for those.
            objc::unattachedCategories.attachToClass(cls, previously,
                                                     ATTACH_CLASS_AND_METACLASS);
        }
    }
    objc::unattachedCategories.attachToClass(cls, cls,
                                             isMeta ? ATTACH_METACLASS : ATTACH_CLASS);

#if DEBUG
    // Debug: sanity-check all SELs; log method list contents
    for (const auto& meth : rw->methods()) {
        if (PrintConnecting) {
            _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
                         cls->nameForLogging(), sel_getName(meth.name()));
        }
        ASSERT(sel_registerName(sel_getName(meth.name())) == meth.name());
    }
#endif
}
1540
1541
/***********************************************************************
* nonMetaClasses
* Returns the secondary metaclass => class map
* Used for some cases of +initialize and +resolveClassMethod:.
* This map does not contain all class and metaclass pairs. It only
* contains metaclasses whose classes would be in the runtime-allocated
* named-class table, but are not because some other class with the same name
* is in that table.
* Classes with no duplicates are not included.
* Classes in the preoptimized named-class table are not included.
* Classes whose duplicates are in the preoptimized table are not included.
* Most code should use getMaybeUnrealizedNonMetaClass()
* instead of reading this table.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXMapTable *nonmeta_class_map = nil;
static NXMapTable *nonMetaClasses(void)
{
    runtimeLock.assertLocked();

    if (nonmeta_class_map) return nonmeta_class_map;

    // nonmeta_class_map is typically small
    // Lazily create the table; INIT_ONCE_PTR publishes one winner and
    // disposes of a losing table via the NXFreeMapTable(v) expression.
    INIT_ONCE_PTR(nonmeta_class_map,
                  NXCreateMapTable(NXPtrValueMapPrototype, 32),
                  NXFreeMapTable(v));

    return nonmeta_class_map;
}
1571
1572
/***********************************************************************
* addNonMetaClass
* Adds metacls => cls to the secondary metaclass map
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNonMetaClass(Class cls)
{
    runtimeLock.assertLocked();
    void *old;
    // Map the class's metaclass (its ISA) to the class itself.
    old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);

    // cls must be a non-meta class, its ISA must be a metaclass, and
    // the metaclass must not already have an entry in the map.
    ASSERT(!cls->isMetaClassMaybeUnrealized());
    ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
    ASSERT(!old);
}
1588
1589
// Removes cls's metaclass => class entry from the secondary
// metaclass map (inverse of addNonMetaClass).
// Locking: runtimeLock must be held by the caller.
static void removeNonMetaClass(Class cls)
{
    runtimeLock.assertLocked();
    NXMapRemove(nonMetaClasses(), cls->ISA());
}
1595
1596
// Scan one length-prefixed field ("<decimal length><payload>") of a
// Swift v1 mangled name.
//
// On success: `field` points at the payload, `length` holds its length,
// `string` is advanced past the payload, and true is returned.
// Fails (returns false) on a leading zero, an absent length, integer
// overflow, or a payload that would run past `end`.
static bool scanMangledField(const char *&string, const char *end,
                             const char *&field, int& length)
{
    // A leading zero is not a valid length prefix.
    if (*string == '0') return false;

    length = 0;
    for (field = string; field < end && isdigit(*field); field++) {
        // Accumulate the decimal length, rejecting signed overflow.
        if (__builtin_smul_overflow(length, 10, &length)) return false;
        if (__builtin_sadd_overflow(length, *field - '0', &length)) return false;
    }

    // `field` now points just past the digits, i.e. at the payload.
    string = field + length;
    return length > 0 && string <= end;
}
1616
1617
1618 /***********************************************************************
1619 * copySwiftV1DemangledName
1620 * Returns the pretty form of the given Swift-v1-mangled class or protocol name.
1621 * Returns nil if the string doesn't look like a mangled Swift v1 name.
1622 * The result must be freed with free().
1623 **********************************************************************/
1624 static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
1625 {
1626 if (!string) return nil;
1627
1628 // Swift mangling prefix.
1629 if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
1630 string += 4;
1631
1632 const char *end = string + strlen(string);
1633
1634 // Module name.
1635 const char *prefix;
1636 int prefixLength;
1637 if (string[0] == 's') {
1638 // "s" is the Swift module.
1639 prefix = "Swift";
1640 prefixLength = 5;
1641 string += 1;
1642 } else {
1643 if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
1644 }
1645
1646 // Class or protocol name.
1647 const char *suffix;
1648 int suffixLength;
1649 if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
1650
1651 if (isProtocol) {
1652 // Remainder must be "_".
1653 if (strcmp(string, "_") != 0) return nil;
1654 } else {
1655 // Remainder must be empty.
1656 if (string != end) return nil;
1657 }
1658
1659 char *result;
1660 asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
1661 return result;
1662 }
1663
1664
/***********************************************************************
* copySwiftV1MangledName
* Returns the Swift 1.0 mangled form of the given class or protocol name.
* Returns nil if the string doesn't look like an unmangled Swift name.
* The result must be freed with free().
**********************************************************************/
static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
{
    if (!string) return nullptr;

    // Locate the single '.' separating module name from entity name.
    size_t dotCount = 0;
    size_t dotIndex = 0;
    const char *cursor = string;
    while (*cursor) {
        if (*cursor == '.') {
            dotCount++;
            dotIndex = cursor - string;
        }
        cursor++;
    }
    size_t stringLength = cursor - string;

    // Require exactly one dot with non-empty text on both sides.
    if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength - 1) {
        return nullptr;
    }

    const char *moduleName = string;
    size_t moduleLength = dotIndex;
    const char *entityName = string + dotIndex + 1;
    size_t entityLength = stringLength - (dotIndex + 1);

    char *name;
    if (moduleLength == 5 && memcmp(moduleName, "Swift", 5) == 0) {
        // The Swift module itself is mangled with the "s" shorthand.
        asprintf(&name, "_Tt%cs%zu%.*s%s",
                 isProtocol ? 'P' : 'C',
                 entityLength, (int)entityLength, entityName,
                 isProtocol ? "_" : "");
    } else {
        asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
                 isProtocol ? 'P' : 'C',
                 moduleLength, (int)moduleLength, moduleName,
                 entityLength, (int)entityLength, entityName,
                 isProtocol ? "_" : "");
    }
    return name;
}
1711
1712
1713 /***********************************************************************
1714 * getClassExceptSomeSwift
1715 * Looks up a class by name. The class MIGHT NOT be realized.
1716 * Demangled Swift names are recognized.
1717 * Classes known to the Swift runtime but not yet used are NOT recognized.
1718 * (such as subclasses of un-instantiated generics)
1719 * Use look_up_class() to find them as well.
1720 * Locking: runtimeLock must be read- or write-locked by the caller.
1721 **********************************************************************/
1722
1723 // This is a misnomer: gdb_objc_realized_classes is actually a list of
1724 // named classes not in the dyld shared cache, whether realized or not.
1725 // This list excludes lazily named classes, which have to be looked up
1726 // using a getClass hook.
1727 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1728 uintptr_t objc_debug_realized_class_generation_count;
1729
// Look a class up by exact name: first in the runtime-allocated table,
// then in the dyld shared cache tables. Returns nil if not found.
// Locking: runtimeLock must be held by the caller.
static Class getClass_impl(const char *name)
{
    runtimeLock.assertLocked();

    // allocated in _read_images
    ASSERT(gdb_objc_realized_classes);

    // Try runtime-allocated table
    Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
    if (result) return result;

    // Try table from dyld shared cache.
    // Note we do this last to handle the case where we dlopen'ed a shared cache
    // dylib with duplicates of classes already present in the main executable.
    // In that case, we put the class from the main executable in
    // gdb_objc_realized_classes and want to check that before considering any
    // newly loaded shared cache binaries.
    return getPreoptimizedClass(name);
}
1749
1750 static Class getClassExceptSomeSwift(const char *name)
1751 {
1752 runtimeLock.assertLocked();
1753
1754 // Try name as-is
1755 Class result = getClass_impl(name);
1756 if (result) return result;
1757
1758 // Try Swift-mangled equivalent of the given name.
1759 if (char *swName = copySwiftV1MangledName(name)) {
1760 result = getClass_impl(swName);
1761 free(swName);
1762 return result;
1763 }
1764
1765 return nil;
1766 }
1767
1768
/***********************************************************************
* addNamedClass
* Adds name => cls to the named non-meta class map.
* Warns about duplicate class names and keeps the old mapping.
* replacing: a class that cls is known to replace; a name collision
*            with it is not treated as a duplicate.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNamedClass(Class cls, const char *name, Class replacing = nil)
{
    runtimeLock.assertLocked();
    Class old;
    if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
        inform_duplicate(name, old, cls);

        // getMaybeUnrealizedNonMetaClass uses name lookups.
        // Classes not found by name lookup must be in the
        // secondary meta->nonmeta table.
        addNonMetaClass(cls);
    } else {
        NXMapInsert(gdb_objc_realized_classes, name, cls);
    }
    // Only non-meta classes may be registered by name.
    ASSERT(!(cls->data()->flags & RO_META));

    // wrong: constructed classes are already realized when they get here
    // ASSERT(!cls->isRealized());
}
1794
1795
/***********************************************************************
* removeNamedClass
* Removes cls from the name => cls map.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeNamedClass(Class cls, const char *name)
{
    runtimeLock.assertLocked();
    ASSERT(!(cls->data()->flags & RO_META));
    if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
        NXMapRemove(gdb_objc_realized_classes, name);
    } else {
        // cls has a name collision with another class - don't remove the other
        // but do remove cls from the secondary metaclass->class map.
        removeNonMetaClass(cls);
    }
}
1813
1814
/***********************************************************************
* futureNamedClasses
* Returns the classname => future class map for unrealized future classes.
* The table is created lazily on first use.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static NXMapTable *future_named_class_map = nil;
static NXMapTable *futureNamedClasses()
{
    runtimeLock.assertLocked();

    if (future_named_class_map) return future_named_class_map;

    // future_named_class_map is big enough for CF's classes and a few others
    future_named_class_map =
        NXCreateMapTable(NXStrValueMapPrototype, 32);

    return future_named_class_map;
}
1833
1834
// Returns true if any unrealized future classes are still registered.
static bool haveFutureNamedClasses() {
    return future_named_class_map && NXCountMapTable(future_named_class_map);
}
1838
1839
/***********************************************************************
* addFutureNamedClass
* Installs cls as the class structure to use for the named class if it appears.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addFutureNamedClass(const char *name, Class cls)
{
    void *old;

    runtimeLock.assertLocked();

    if (PrintFuture) {
        _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
    }

    // Give the future class minimal rw/ro data: just a name and the
    // RO_FUTURE flag, until it is realized for real.
    class_rw_t *rw = objc::zalloc<class_rw_t>();
    class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
    ro->name.store(strdupIfMutable(name), std::memory_order_relaxed);
    rw->set_ro(ro);
    cls->setData(rw);
    cls->data()->flags = RO_FUTURE;

    // Duplicate reservations for the same name are not allowed.
    old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
    ASSERT(!old);
}
1865
1866
1867 /***********************************************************************
1868 * popFutureNamedClass
1869 * Removes the named class from the unrealized future class list,
1870 * because it has been realized.
1871 * Returns nil if the name is not used by a future class.
1872 * Locking: runtimeLock must be held by the caller
1873 **********************************************************************/
1874 static Class popFutureNamedClass(const char *name)
1875 {
1876 runtimeLock.assertLocked();
1877
1878 Class cls = nil;
1879
1880 if (future_named_class_map) {
1881 cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
1882 if (cls && NXCountMapTable(future_named_class_map) == 0) {
1883 NXFreeMapTable(future_named_class_map);
1884 future_named_class_map = nil;
1885 }
1886 }
1887
1888 return cls;
1889 }
1890
1891
/***********************************************************************
* remappedClasses
* Returns the oldClass => newClass map for realized future classes.
* Returns the oldClass => nil map for ignored weak-linked classes.
* create: build the map on first use if it doesn't exist yet.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static objc::DenseMap<Class, Class> *remappedClasses(bool create)
{
    static objc::LazyInitDenseMap<Class, Class> remapped_class_map;

    runtimeLock.assertLocked();

    // start big enough to hold CF's classes and a few others
    return remapped_class_map.get(create, 32);
}
1907
1908
/***********************************************************************
* noClassesRemapped
* Returns YES if no classes have been remapped
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static bool noClassesRemapped(void)
{
    runtimeLock.assertLocked();

    // The map is only created when the first remapping is added,
    // so "no map" means "nothing remapped".
    bool result = (remappedClasses(NO) == nil);
#if DEBUG
    // Catch construction of an empty table, which defeats optimization.
    auto *map = remappedClasses(NO);
    if (map) ASSERT(map->size() > 0);
#endif
    return result;
}
1926
1927
1928 /***********************************************************************
1929 * addRemappedClass
1930 * newcls is a realized future class, replacing oldcls.
1931 * OR newcls is nil, replacing ignored weak-linked class oldcls.
1932 * Locking: runtimeLock must be write-locked by the caller
1933 **********************************************************************/
1934 static void addRemappedClass(Class oldcls, Class newcls)
1935 {
1936 runtimeLock.assertLocked();
1937
1938 if (PrintFuture) {
1939 _objc_inform("FUTURE: using %p instead of %p for %s",
1940 (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
1941 }
1942
1943 auto result = remappedClasses(YES)->insert({ oldcls, newcls });
1944 #if DEBUG
1945 if (!std::get<1>(result)) {
1946 // An existing mapping was overwritten. This is not allowed
1947 // unless it was to nil.
1948 auto iterator = std::get<0>(result);
1949 auto value = std::get<1>(*iterator);
1950 ASSERT(value == nil);
1951 }
1952 #else
1953 (void)result;
1954 #endif
1955 }
1956
1957
1958 /***********************************************************************
1959 * remapClass
1960 * Returns the live class pointer for cls, which may be pointing to
1961 * a class struct that has been reallocated.
1962 * Returns nil if cls is ignored because of weak linking.
1963 * Locking: runtimeLock must be read- or write-locked by the caller
1964 **********************************************************************/
1965 static Class remapClass(Class cls)
1966 {
1967 runtimeLock.assertLocked();
1968
1969 if (!cls) return nil;
1970
1971 auto *map = remappedClasses(NO);
1972 if (!map)
1973 return cls;
1974
1975 auto iterator = map->find(cls);
1976 if (iterator == map->end())
1977 return cls;
1978 return std::get<1>(*iterator);
1979 }
1980
1981 static Class remapClass(classref_t cls)
1982 {
1983 return remapClass((Class)cls);
1984 }
1985
1986 Class _class_remap(Class cls)
1987 {
1988 mutex_locker_t lock(runtimeLock);
1989 return remapClass(cls);
1990 }
1991
1992 /***********************************************************************
1993 * remapClassRef
1994 * Fix up a class ref, in case the class referenced has been reallocated
1995 * or is an ignored weak-linked class.
1996 * Locking: runtimeLock must be read- or write-locked by the caller
1997 **********************************************************************/
1998 static void remapClassRef(Class *clsref)
1999 {
2000 runtimeLock.assertLocked();
2001
2002 Class newcls = remapClass(*clsref);
2003 if (*clsref != newcls) *clsref = newcls;
2004 }
2005
2006
// Resolve a compiler-emitted class reference that may point at a lazy
// class stub. A set low bit in the stored value marks a stub; its
// initializer is run once and the real class is cached back into the slot.
_Nullable Class
objc_loadClassref(_Nullable Class * _Nonnull clsref)
{
    auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);

    uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
    // Fast path: low bit clear means the slot already holds a real Class.
    if (fastpath((cls & 1) == 0))
        return (Class)cls;

    // Slow path: strip the tag bit to recover the stub, run its
    // initializer, and publish the initialized class for future loads.
    auto stub = (stub_class_t *)(cls & ~1ULL);
    Class initialized = stub->initializer((Class)stub, nil);
    atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
    return initialized;
}
2021
2022
2023 /***********************************************************************
2024 * getMaybeUnrealizedNonMetaClass
2025 * Return the ordinary class for this class or metaclass.
2026 * `inst` is an instance of `cls` or a subclass thereof, or nil.
2027 * Non-nil inst is faster.
2028 * The result may be unrealized.
2029 * Used by +initialize.
2030 * Locking: runtimeLock must be read- or write-locked by the caller
2031 **********************************************************************/
static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
{
    // Hit counters for the successive lookup strategies below,
    // reported when PrintInitializing is set.
    static int total, named, secondary, sharedcache, dyld3;
    runtimeLock.assertLocked();
    ASSERT(metacls->isRealized());

    total++;

    // return cls itself if it's already a non-meta class
    if (!metacls->isMetaClass()) return metacls;

    // metacls really is a metaclass
    // which means inst (if any) is a class

    // special case for root metaclass
    // where inst == inst->ISA() == metacls is possible
    if (metacls->ISA() == metacls) {
        Class cls = metacls->getSuperclass();
        ASSERT(cls->isRealized());
        ASSERT(!cls->isMetaClass());
        ASSERT(cls->ISA() == metacls);
        if (cls->ISA() == metacls) return cls;
    }

    // use inst if available
    if (inst) {
        Class cls = remapClass((Class)inst);
        // cls may be a subclass - find the real class for metacls
        // fixme this probably stops working once Swift starts
        // reallocating classes if cls is unrealized.
        while (cls) {
            if (cls->ISA() == metacls) {
                ASSERT(!cls->isMetaClassMaybeUnrealized());
                return cls;
            }
            cls = cls->getSuperclass();
        }
#if DEBUG
        _objc_fatal("cls is not an instance of metacls");
#else
        // release build: be forgiving and fall through to slow lookups
#endif
    }

    // See if the metaclass has a pointer to its nonmetaclass.
    if (Class cls = metacls->bits.safe_ro()->getNonMetaclass())
        return cls;

    // try name lookup
    {
        Class cls = getClassExceptSomeSwift(metacls->mangledName());
        if (cls && cls->ISA() == metacls) {
            named++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful by-name metaclass lookups",
                             named, total, named*100.0/total);
            }
            return cls;
        }
    }

    // try secondary table
    {
        Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
        if (cls) {
            secondary++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful secondary metaclass lookups",
                             secondary, total, secondary*100.0/total);
            }

            ASSERT(cls->ISA() == metacls);
            return cls;
        }
    }

    // try the dyld closure table
    if (isPreoptimized())
    {
        // Try table from dyld closure first. It was built to ignore the dupes it
        // knows will come from the cache, so anything left in here was there when
        // we launched
        Class cls = nil;
        // Note, we have to pass the lambda directly here as otherwise we would try
        // message copy and autorelease.
        _dyld_for_each_objc_class(metacls->mangledName(),
                                  [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
            // Skip images which aren't loaded. This supports the case where dyld
            // might soft link an image from the main binary so its possibly not
            // loaded yet.
            if (!isLoaded)
                return;

            // Found a loaded image with this class name, so check if its the right one
            Class result = (Class)classPtr;
            if (result->ISA() == metacls) {
                cls = result;
                *stop = true;
            }
        });

        if (cls) {
            dyld3++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful dyld closure metaclass lookups",
                             dyld3, total, dyld3*100.0/total);
            }

            return cls;
        }
    }

    // try any duplicates in the dyld shared cache
    {
        Class cls = nil;

        int count;
        Class *classes = copyPreoptimizedClasses(metacls->mangledName(), &count);
        if (classes) {
            for (int i = 0; i < count; i++) {
                if (classes[i]->ISA() == metacls) {
                    cls = classes[i];
                    break;
                }
            }
            free(classes);
        }

        if (cls) {
            sharedcache++;
            if (PrintInitializing) {
                _objc_inform("INITIALIZE: %d/%d (%g%%) "
                             "successful shared cache metaclass lookups",
                             sharedcache, total, sharedcache*100.0/total);
            }

            return cls;
        }
    }

    // All strategies exhausted: the runtime state is inconsistent.
    _objc_fatal("no class for metaclass %p", (void*)metacls);
}
2177
2178
2179 /***********************************************************************
2180 * class_initialize. Send the '+initialize' message on demand to any
2181 * uninitialized class. Force initialization of superclasses first.
2182 * inst is an instance of cls, or nil. Non-nil is better for performance.
2183 * Returns the class pointer. If the class was unrealized then
2184 * it may be reallocated.
2185 * Locking:
2186 * runtimeLock must be held by the caller
2187 * This function may drop the lock.
2188 * On exit the lock is re-acquired or dropped as requested by leaveLocked.
2189 **********************************************************************/
static Class initializeAndMaybeRelock(Class cls, id inst,
                                      mutex_t& lock, bool leaveLocked)
{
    lock.assertLocked();
    ASSERT(cls->isRealized());

    // Fast path: nothing to do for an already-initialized class.
    if (cls->isInitialized()) {
        if (!leaveLocked) lock.unlock();
        return cls;
    }

    // Find the non-meta class for cls, if it is not already one.
    // The +initialize message is sent to the non-meta class object.
    Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);

    // Realize the non-meta class if necessary.
    if (nonmeta->isRealized()) {
        // nonmeta is cls, which was already realized
        // OR nonmeta is distinct, but is already realized
        // - nothing else to do
        lock.unlock();
    } else {
        nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
        // runtimeLock is now unlocked
        // fixme Swift can't relocate the class today,
        // but someday it will:
        cls = object_getClass(nonmeta);
    }

    // runtimeLock is now unlocked, for +initialize dispatch
    ASSERT(nonmeta->isRealized());
    initializeNonMetaClass(nonmeta);

    // NOTE(review): this re-acquires runtimeLock specifically rather
    // than `lock`; it appears to assume callers always pass runtimeLock
    // as `lock` — confirm before passing any other mutex.
    if (leaveLocked) runtimeLock.lock();
    return cls;
}
2226
2227 // Locking: acquires runtimeLock
2228 Class class_initialize(Class cls, id obj)
2229 {
2230 runtimeLock.lock();
2231 return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
2232 }
2233
2234 // Locking: caller must hold runtimeLock; this may drop and re-acquire it
2235 static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
2236 {
2237 return initializeAndMaybeRelock(cls, obj, lock, true);
2238 }
2239
2240
2241 /***********************************************************************
2242 * addRootClass
2243 * Adds cls as a new realized root class.
2244 * Locking: runtimeLock must be held by the caller.
2245 **********************************************************************/
2246 static void addRootClass(Class cls)
2247 {
2248 runtimeLock.assertLocked();
2249
2250 ASSERT(cls->isRealized());
2251
2252 objc_debug_realized_class_generation_count++;
2253
2254 cls->data()->nextSiblingClass = _firstRealizedClass;
2255 _firstRealizedClass = cls;
2256 }
2257
2258 static void removeRootClass(Class cls)
2259 {
2260 runtimeLock.assertLocked();
2261
2262 objc_debug_realized_class_generation_count++;
2263
2264 Class *classp;
2265 for (classp = &_firstRealizedClass;
2266 *classp != cls;
2267 classp = &(*classp)->data()->nextSiblingClass)
2268 { }
2269
2270 *classp = (*classp)->data()->nextSiblingClass;
2271 }
2272
2273
2274 /***********************************************************************
2275 * addSubclass
2276 * Adds subcls as a subclass of supercls.
2277 * Locking: runtimeLock must be held by the caller.
2278 **********************************************************************/
static void addSubclass(Class supercls, Class subcls)
{
    runtimeLock.assertLocked();

    if (supercls && subcls) {
        ASSERT(supercls->isRealized());
        ASSERT(subcls->isRealized());

        objc_debug_realized_class_generation_count++;

        // Push subcls onto the front of supercls's subclass list.
        subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
        supercls->data()->firstSubclass = subcls;

        // C++ constructor/destructor needs are inherited downward.
        if (supercls->hasCxxCtor()) {
            subcls->setHasCxxCtor();
        }

        if (supercls->hasCxxDtor()) {
            subcls->setHasCxxDtor();
        }

        // Let the flag scanners propagate custom-method state
        // (AWZ / retain-release / core) to the new subclass.
        objc::AWZScanner::scanAddedSubClass(subcls, supercls);
        objc::RRScanner::scanAddedSubClass(subcls, supercls);
        objc::CoreScanner::scanAddedSubClass(subcls, supercls);

        // Preopt-cache restrictions flow downward too; disallowing
        // caches entirely subsumes disallowing inlined selectors.
        if (!supercls->allowsPreoptCaches()) {
            subcls->setDisallowPreoptCachesRecursively(__func__);
        } else if (!supercls->allowsPreoptInlinedSels()) {
            subcls->setDisallowPreoptInlinedSelsRecursively(__func__);
        }

        // Special case: instancesRequireRawIsa does not propagate
        // from root class to root metaclass
        if (supercls->instancesRequireRawIsa() && supercls->getSuperclass()) {
            subcls->setInstancesRequireRawIsaRecursively(true);
        }
    }
}
2317
2318
2319 /***********************************************************************
2320 * removeSubclass
2321 * Removes subcls as a subclass of supercls.
2322 * Locking: runtimeLock must be held by the caller.
2323 **********************************************************************/
2324 static void removeSubclass(Class supercls, Class subcls)
2325 {
2326 runtimeLock.assertLocked();
2327 ASSERT(supercls->isRealized());
2328 ASSERT(subcls->isRealized());
2329 ASSERT(subcls->getSuperclass() == supercls);
2330
2331 objc_debug_realized_class_generation_count++;
2332
2333 Class *cp;
2334 for (cp = &supercls->data()->firstSubclass;
2335 *cp && *cp != subcls;
2336 cp = &(*cp)->data()->nextSiblingClass)
2337 ;
2338 ASSERT(*cp == subcls);
2339 *cp = subcls->data()->nextSiblingClass;
2340 }
2341
2342
2343
2344 /***********************************************************************
2345 * protocols
2346 * Returns the protocol name => protocol map for protocols.
2347 * Locking: runtimeLock must read- or write-locked by the caller
2348 **********************************************************************/
static NXMapTable *protocols(void)
{
    // Lazily-created global protocol name => protocol map.
    static NXMapTable *protocol_map = nil;

    runtimeLock.assertLocked();

    // INIT_ONCE_PTR installs the freshly-created table into protocol_map
    // exactly once; the third argument frees a losing duplicate (`v`).
    INIT_ONCE_PTR(protocol_map,
                  NXCreateMapTable(NXStrValueMapPrototype, 16),
                  NXFreeMapTable(v) );

    return protocol_map;
}
2361
2362
2363 /***********************************************************************
2364 * getProtocol
2365 * Looks up a protocol by name. Demangled Swift names are recognized.
2366 * Locking: runtimeLock must be read- or write-locked by the caller.
2367 **********************************************************************/
static NEVER_INLINE Protocol *getProtocol(const char *name)
{
    runtimeLock.assertLocked();

    // Try name as-is.
    Protocol *result = (Protocol *)NXMapGet(protocols(), name);
    if (result) return result;

    // Try table from dyld3 closure and dyld shared cache
    result = getPreoptimizedProtocol(name);
    if (result) return result;

    // Try Swift-mangled equivalent of the given name.
    if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
        result = (Protocol *)NXMapGet(protocols(), swName);

        // Try table from dyld3 closure and dyld shared cache
        if (!result)
            result = getPreoptimizedProtocol(swName);

        free(swName);
        // Note: returns here even when both mangled-name lookups failed
        // (result == nil); no further fallbacks are attempted.
        return result;
    }

    return nullptr;
}
2394
2395
2396 /***********************************************************************
2397 * remapProtocol
2398 * Returns the live protocol pointer for proto, which may be pointing to
2399 * a protocol struct that has been reallocated.
2400 * Locking: runtimeLock must be read- or write-locked by the caller
2401 **********************************************************************/
2402 static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
2403 {
2404 runtimeLock.assertLocked();
2405
2406 // Protocols in shared cache images have a canonical bit to mark that they
2407 // are the definition we should use
2408 if (((protocol_t *)proto)->isCanonical())
2409 return (protocol_t *)proto;
2410
2411 protocol_t *newproto = (protocol_t *)
2412 getProtocol(((protocol_t *)proto)->mangledName);
2413 return newproto ? newproto : (protocol_t *)proto;
2414 }
2415
2416
2417 /***********************************************************************
2418 * remapProtocolRef
2419 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
2420 * Locking: runtimeLock must be read- or write-locked by the caller
2421 **********************************************************************/
2422 static size_t UnfixedProtocolReferences;
2423 static void remapProtocolRef(protocol_t **protoref)
2424 {
2425 runtimeLock.assertLocked();
2426
2427 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
2428 if (*protoref != newproto) {
2429 *protoref = newproto;
2430 UnfixedProtocolReferences++;
2431 }
2432 }
2433
2434
2435 /***********************************************************************
2436 * moveIvars
2437 * Slides a class's ivars to accommodate the given superclass size.
2438 * Ivars are NOT compacted to compensate for a superclass that shrunk.
2439 * Locking: runtimeLock must be held by the caller.
2440 **********************************************************************/
static void moveIvars(class_ro_t *ro, uint32_t superSize)
{
    runtimeLock.assertLocked();

    uint32_t diff;

    // Caller guarantees the superclass actually grew past our start.
    ASSERT(superSize > ro->instanceStart);
    diff = superSize - ro->instanceStart;

    if (ro->ivars) {
        // Find maximum alignment in this class's ivars
        uint32_t maxAlignment = 1;
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            uint32_t alignment = ivar.alignment();
            if (alignment > maxAlignment) maxAlignment = alignment;
        }

        // Compute a slide value that preserves that alignment
        uint32_t alignMask = maxAlignment - 1;
        diff = (diff + alignMask) & ~alignMask;

        // Slide all of this class's ivars en masse
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            uint32_t oldOffset = (uint32_t)*ivar.offset;
            uint32_t newOffset = oldOffset + diff;
            *ivar.offset = newOffset;

            if (PrintIvars) {
                _objc_inform("IVARS: offset %u -> %u for %s "
                             "(size %u, align %u)",
                             oldOffset, newOffset, ivar.name,
                             ivar.size, ivar.alignment());
            }
        }
    }

    // The casts sidestep the fields' declared qualifiers; callers pass
    // a writeable copy (see reconcileInstanceVariables, which passes
    // the result of make_ro_writeable), so the write is safe there.
    *(uint32_t *)&ro->instanceStart += diff;
    *(uint32_t *)&ro->instanceSize += diff;
}
2484
2485
// Reconcile this class's ivar layout with its (possibly resized)
// superclass, sliding ivars forward if the superclass grew.
// `ro` is passed by reference and is re-pointed at the writeable copy
// if one has to be made.
static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
{
    class_rw_t *rw = cls->data();

    ASSERT(supercls);
    ASSERT(!cls->isMetaClass());

    /* debug: print them all before sliding
    if (ro->ivars) {
        for (const auto& ivar : *ro->ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield

            _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
                         ro->name, ivar.name,
                         *ivar.offset, ivar.size, ivar.alignment());
        }
    }
    */

    // Non-fragile ivars - reconcile this class with its superclass
    const class_ro_t *super_ro = supercls->data()->ro();

    if (DebugNonFragileIvars) {
        // Debugging: Force non-fragile ivars to slide.
        // Intended to find compiler, runtime, and program bugs.
        // If it fails with this and works without, you have a problem.

        // Operation: Reset everything to 0 + misalignment.
        // Then force the normal sliding logic to push everything back.

        // Exceptions: root classes, metaclasses, *NSCF* classes,
        // __CF* classes, NSConstantString, NSSimpleCString

        // (already know it's not root because supercls != nil)
        const char *clsname = cls->mangledName();
        if (!strstr(clsname, "NSCF")  &&
            0 != strncmp(clsname, "__CF", 4)  &&
            0 != strcmp(clsname, "NSConstantString")  &&
            0 != strcmp(clsname, "NSSimpleCString"))
        {
            uint32_t oldStart = ro->instanceStart;
            // Switch to a writeable ro copy; re-read ro through rw so
            // the caller's reference tracks the copy from here on.
            class_ro_t *ro_w = make_ro_writeable(rw);
            ro = rw->ro();

            // Find max ivar alignment in class.
            // default to word size to simplify ivar update
            uint32_t alignment = 1<<WORD_SHIFT;
            if (ro->ivars) {
                for (const auto& ivar : *ro->ivars) {
                    if (ivar.alignment() > alignment) {
                        alignment = ivar.alignment();
                    }
                }
            }
            uint32_t misalignment = ro->instanceStart % alignment;
            uint32_t delta = ro->instanceStart - misalignment;
            ro_w->instanceStart = misalignment;
            ro_w->instanceSize -= delta;

            if (PrintIvars) {
                _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
                             "to slide (instanceStart %zu -> %zu)",
                             cls->nameForLogging(), (size_t)oldStart,
                             (size_t)ro->instanceStart);
            }

            if (ro->ivars) {
                for (const auto& ivar : *ro->ivars) {
                    if (!ivar.offset) continue;  // anonymous bitfield
                    *ivar.offset -= delta;
                }
            }
        }
    }

    if (ro->instanceStart >= super_ro->instanceSize) {
        // Superclass has not overgrown its space. We're done here.
        return;
    }
    // fixme can optimize for "class has no new ivars", etc

    // (This condition is always true after the early return above;
    // kept as written for clarity.)
    if (ro->instanceStart < super_ro->instanceSize) {
        // Superclass has changed size. This class's ivars must move.
        // Also slide layout bits in parallel.
        // This code is incapable of compacting the subclass to
        // compensate for a superclass that shrunk, so don't do that.
        if (PrintIvars) {
            _objc_inform("IVARS: sliding ivars for class %s "
                         "(superclass was %u bytes, now %u)",
                         cls->nameForLogging(), ro->instanceStart,
                         super_ro->instanceSize);
        }
        class_ro_t *ro_w = make_ro_writeable(rw);
        ro = rw->ro();
        moveIvars(ro_w, super_ro->instanceSize);
        gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->getName());
    }
}
2584
// Sanity-check a class that claims to be realized. On macOS, verify that
// its class_rw_t pointer is a malloc block of exactly the expected size;
// a mismatch indicates a corrupted or stale data pointer.
static void validateAlreadyRealizedClass(Class cls) {
    ASSERT(cls->isRealized());
#if TARGET_OS_OSX
    class_rw_t *rw = cls->data();
    size_t rwSize = malloc_size(rw);

    // Note: this check will need some adjustment if class_rw_t's
    // size changes to not match the malloc bucket.
    if (rwSize != sizeof(class_rw_t))
        _objc_fatal("realized class %p has corrupt data pointer %p", cls, rw);
#endif
}
2597
2598 /***********************************************************************
2599 * realizeClassWithoutSwift
2600 * Performs first-time initialization on class cls,
2601 * including allocating its read-write data.
2602 * Does not perform any Swift-side initialization.
2603 * Returns the real class structure for the class.
2604 * Locking: runtimeLock must be write-locked by the caller
2605 **********************************************************************/
static Class realizeClassWithoutSwift(Class cls, Class previously)
{
    runtimeLock.assertLocked();

    class_rw_t *rw;
    Class supercls;
    Class metacls;

    if (!cls) return nil;
    if (cls->isRealized()) {
        validateAlreadyRealizedClass(cls);
        return cls;
    }
    ASSERT(cls == remapClass(cls));

    // fixme verify class is not in an un-dlopened part of the shared cache?

    // Before realization, cls->data() points at the read-only class_ro_t.
    auto ro = (const class_ro_t *)cls->data();
    auto isMeta = ro->flags & RO_META;
    if (ro->flags & RO_FUTURE) {
        // This was a future class. rw data is already allocated.
        rw = cls->data();
        ro = cls->data()->ro();
        ASSERT(!isMeta);
        cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
    } else {
        // Normal class. Allocate writeable class data.
        rw = objc::zalloc<class_rw_t>();
        rw->set_ro(ro);
        rw->flags = RW_REALIZED|RW_REALIZING|isMeta;
        cls->setData(rw);
    }

    cls->cache.initializeToEmptyOrPreoptimizedInDisguise();

#if FAST_CACHE_META
    if (isMeta) cls->cache.setBit(FAST_CACHE_META);
#endif

    // Choose an index for this class.
    // Sets cls->instancesRequireRawIsa if no more indexes are available.
    cls->chooseClassArrayIndex();

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
                     cls->nameForLogging(), isMeta ? " (meta)" : "",
                     (void*)cls, ro, cls->classArrayIndex(),
                     cls->isSwiftStable() ? "(swift)" : "",
                     cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
    }

    // Realize superclass and metaclass, if they aren't already.
    // This needs to be done after RW_REALIZED is set above, for root classes.
    // This needs to be done after class index is chosen, for root metaclasses.
    // This assumes that none of those classes have Swift contents,
    // or that Swift's initializers have already been called.
    // fixme that assumption will be wrong if we add support
    // for ObjC subclasses of Swift classes.
    supercls = realizeClassWithoutSwift(remapClass(cls->getSuperclass()), nil);
    metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);

#if SUPPORT_NONPOINTER_ISA
    if (isMeta) {
        // Metaclasses do not need any features from non pointer ISA
        // This allows for a fast path for classes in objc_retain/objc_release.
        cls->setInstancesRequireRawIsa();
    } else {
        // Disable non-pointer isa for some classes and/or platforms.
        // Set instancesRequireRawIsa.
        bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
        bool rawIsaIsInherited = false;
        static bool hackedDispatch = false;

        if (DisableNonpointerIsa) {
            // Non-pointer isa disabled by environment or app SDK version
            instancesRequireRawIsa = true;
        }
        else if (!hackedDispatch  &&  0 == strcmp(ro->getName(), "OS_object"))
        {
            // hack for libdispatch et al - isa also acts as vtable pointer
            hackedDispatch = true;
            instancesRequireRawIsa = true;
        }
        else if (supercls  &&  supercls->getSuperclass()  &&
                 supercls->instancesRequireRawIsa())
        {
            // This is also propagated by addSubclass()
            // but nonpointer isa setup needs it earlier.
            // Special case: instancesRequireRawIsa does not propagate
            // from root class to root metaclass
            instancesRequireRawIsa = true;
            rawIsaIsInherited = true;
        }

        if (instancesRequireRawIsa) {
            cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
        }
    }
// SUPPORT_NONPOINTER_ISA
#endif

    // Update superclass and metaclass in case of remapping
    cls->setSuperclass(supercls);
    cls->initClassIsa(metacls);

    // Reconcile instance variable offsets / layout.
    // This may reallocate class_ro_t, updating our ro variable.
    if (supercls  &&  !isMeta) reconcileInstanceVariables(cls, supercls, ro);

    // Set fastInstanceSize if it wasn't set already.
    cls->setInstanceSize(ro->instanceSize);

    // Copy some flags from ro to rw
    if (ro->flags & RO_HAS_CXX_STRUCTORS) {
        cls->setHasCxxDtor();
        if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
            cls->setHasCxxCtor();
        }
    }

    // Propagate the associated objects forbidden flag from ro or from
    // the superclass.
    if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS)  ||
        (supercls  &&  supercls->forbidsAssociatedObjects()))
    {
        rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
    }

    // Connect this class to its superclass's subclass lists
    if (supercls) {
        addSubclass(supercls, cls);
    } else {
        addRootClass(cls);
    }

    // Attach categories
    methodizeClass(cls, previously);

    return cls;
}
2746
2747
2748 /***********************************************************************
2749 * _objc_realizeClassFromSwift
2750 * Called by Swift when it needs the ObjC part of a class to be realized.
2751 * There are four cases:
2752 * 1. cls != nil; previously == cls
2753 * Class cls is being realized in place
2754 * 2. cls != nil; previously == nil
2755 * Class cls is being constructed at runtime
2756 * 3. cls != nil; previously != cls
2757 * The class that was at previously has been reallocated to cls
2758 * 4. cls == nil, previously != nil
2759 * The class at previously is hereby disavowed
2760 *
2761 * Only variants #1 and #2 are supported today.
2762 *
2763 * Locking: acquires runtimeLock
2764 **********************************************************************/
Class _objc_realizeClassFromSwift(Class cls, void *previously)
{
    if (cls) {
        if (previously && previously != (void*)cls) {
            // #3: relocation
            // Record the old->new mapping and register the class under
            // its name before realizing at the new address.
            mutex_locker_t lock(runtimeLock);
            addRemappedClass((Class)previously, cls);
            addClassTableEntry(cls);
            addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
            return realizeClassWithoutSwift(cls, (Class)previously);
        } else {
            // #1 and #2: realization in place, or new class
            mutex_locker_t lock(runtimeLock);

            if (!previously) {
                // #2: new class
                cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
            }

            // #1 and #2: realization in place, or new class
            // We ignore the Swift metadata initializer callback.
            // We assume that's all handled since we're being called from Swift.
            return realizeClassWithoutSwift(cls, nil);
        }
    }
    else {
        // #4: disavowal
        // In the future this will mean remapping the old address to nil
        // and if necessary removing the old address from any other tables.
        _objc_fatal("Swift requested that class %p be ignored, "
                    "but libobjc does not support that.", previously);
    }
}
2798
2799 /***********************************************************************
2800 * realizeSwiftClass
2801 * Performs first-time initialization on class cls,
2802 * including allocating its read-write data,
2803 * and any Swift-side initialization.
2804 * Returns the real class structure for the class.
2805 * Locking: acquires runtimeLock indirectly
2806 **********************************************************************/
static Class realizeSwiftClass(Class cls)
{
    // Called without the lock: the Swift initializer below may re-enter
    // the runtime and take runtimeLock itself.
    runtimeLock.assertUnlocked();

    // Some assumptions:
    // * Metaclasses never have a Swift initializer.
    // * Root classes never have a Swift initializer.
    //   (These two together avoid initialization order problems at the root.)
    // * Unrealized non-Swift classes have no Swift ancestry.
    // * Unrealized Swift classes with no initializer have no ancestry that
    //   does have the initializer.
    //   (These two together mean we don't need to scan superclasses here
    //   and we don't need to worry about Swift superclasses inside
    //   realizeClassWithoutSwift()).

    // fixme some of these assumptions will be wrong
    // if we add support for ObjC subclasses of Swift classes.

#if DEBUG
    // Debug-only invariant checks; take the lock briefly to read state.
    runtimeLock.lock();
    ASSERT(remapClass(cls) == cls);
    ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
    ASSERT(!cls->isMetaClassMaybeUnrealized());
    ASSERT(cls->getSuperclass());
    runtimeLock.unlock();
#endif

    // Look for a Swift metadata initialization function
    // installed on the class. If it is present we call it.
    // That function in turn initializes the Swift metadata,
    // prepares the "compiler-generated" ObjC metadata if not
    // already present, and calls _objc_realizeSwiftClass() to finish
    // our own initialization.

    if (auto init = cls->swiftMetadataInitializer()) {
        if (PrintConnecting) {
            _objc_inform("CLASS: calling Swift metadata initializer "
                         "for class '%s' (%p)", cls->nameForLogging(), cls);
        }

        Class newcls = init(cls, nil);

        // fixme someday Swift will need to relocate classes at this point,
        // but we don't accept that yet.
        if (cls != newcls) {
            mutex_locker_t lock(runtimeLock);
            addRemappedClass(cls, newcls);
        }

        return newcls;
    }
    else {
        // No Swift-side initialization callback.
        // Perform our own realization directly.
        mutex_locker_t lock(runtimeLock);
        return realizeClassWithoutSwift(cls, nil);
    }
}
2865
2866
2867 /***********************************************************************
2868 * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
2869 * Realize a class that might be a Swift class.
2870 * Returns the real class structure for the class.
2871 * Locking:
2872 * runtimeLock must be held on entry
2873 * runtimeLock may be dropped during execution
2874 * ...AndUnlock function leaves runtimeLock unlocked on exit
2875 * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
2876 * This complication avoids repeated lock transitions in some cases.
2877 **********************************************************************/
2878 static Class
2879 realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
2880 {
2881 lock.assertLocked();
2882
2883 if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
2884 // Non-Swift class. Realize it now with the lock still held.
2885 // fixme wrong in the future for objc subclasses of swift classes
2886 realizeClassWithoutSwift(cls, nil);
2887 if (!leaveLocked) lock.unlock();
2888 } else {
2889 // Swift class. We need to drop locks and call the Swift
2890 // runtime to initialize it.
2891 lock.unlock();
2892 cls = realizeSwiftClass(cls);
2893 ASSERT(cls->isRealized()); // callback must have provoked realization
2894 if (leaveLocked) lock.lock();
2895 }
2896
2897 return cls;
2898 }
2899
2900 static Class
2901 realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
2902 {
2903 return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
2904 }
2905
2906 static Class
2907 realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
2908 {
2909 return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
2910 }
2911
2912
2913 /***********************************************************************
2914 * missingWeakSuperclass
2915 * Return YES if some superclass of cls was weak-linked and is missing.
2916 **********************************************************************/
2917 static bool
2918 missingWeakSuperclass(Class cls)
2919 {
2920 ASSERT(!cls->isRealized());
2921
2922 if (!cls->getSuperclass()) {
2923 // superclass nil. This is normal for root classes only.
2924 return (!(cls->data()->flags & RO_ROOT));
2925 } else {
2926 // superclass not nil. Check if a higher superclass is missing.
2927 Class supercls = remapClass(cls->getSuperclass());
2928 ASSERT(cls != cls->getSuperclass());
2929 ASSERT(cls != supercls);
2930 if (!supercls) return YES;
2931 if (supercls->isRealized()) return NO;
2932 return missingWeakSuperclass(supercls);
2933 }
2934 }
2935
2936
2937 /***********************************************************************
2938 * realizeAllClassesInImage
2939 * Non-lazily realizes all unrealized classes in the given image.
2940 * Locking: runtimeLock must be held by the caller.
2941 * Locking: this function may drop and re-acquire the lock.
2942 **********************************************************************/
2943 static void realizeAllClassesInImage(header_info *hi)
2944 {
2945 runtimeLock.assertLocked();
2946
2947 size_t count, i;
2948 classref_t const *classlist;
2949
2950 if (hi->areAllClassesRealized()) return;
2951
2952 classlist = _getObjc2ClassList(hi, &count);
2953
2954 for (i = 0; i < count; i++) {
2955 Class cls = remapClass(classlist[i]);
2956 if (cls) {
2957 realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
2958 }
2959 }
2960
2961 hi->setAllClassesRealized(YES);
2962 }
2963
2964
2965 /***********************************************************************
2966 * realizeAllClasses
2967 * Non-lazily realizes all unrealized classes in all known images.
2968 * Locking: runtimeLock must be held by the caller.
2969 * Locking: this function may drop and re-acquire the lock.
2970 * Dropping the lock makes this function thread-unsafe with respect
2971 * to concurrent image unload, but the callers of this function
2972 * already ultimately do something that is also thread-unsafe with
2973 * respect to image unload (such as using the list of all classes).
2974 **********************************************************************/
2975 static void realizeAllClasses(void)
2976 {
2977 runtimeLock.assertLocked();
2978
2979 header_info *hi;
2980 for (hi = FirstHeader; hi; hi = hi->getNext()) {
2981 realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
2982 }
2983 }
2984
2985
2986 /***********************************************************************
2987 * _objc_allocateFutureClass
2988 * Allocate an unresolved future class for the given class name.
2989 * Returns any existing allocation if one was already made.
2990 * Assumes the named class doesn't exist yet.
2991 * Locking: acquires runtimeLock
2992 **********************************************************************/
2993 Class _objc_allocateFutureClass(const char *name)
2994 {
2995 mutex_locker_t lock(runtimeLock);
2996
2997 Class cls;
2998 NXMapTable *map = futureNamedClasses();
2999
3000 if ((cls = (Class)NXMapGet(map, name))) {
3001 // Already have a future class for this name.
3002 return cls;
3003 }
3004
3005 cls = _calloc_class(sizeof(objc_class));
3006 addFutureNamedClass(name, cls);
3007
3008 return cls;
3009 }
3010
3011
3012 /***********************************************************************
3013 * objc_getFutureClass. Return the id of the named class.
3014 * If the class does not exist, return an uninitialized class
3015 * structure that will be used for the class when and if it
3016 * does get loaded.
3017 * Not thread safe.
3018 **********************************************************************/
3019 Class objc_getFutureClass(const char *name)
3020 {
3021 Class cls;
3022
3023 // YES unconnected, NO class handler
3024 // (unconnected is OK because it will someday be the real class)
3025 cls = look_up_class(name, YES, NO);
3026 if (cls) {
3027 if (PrintFuture) {
3028 _objc_inform("FUTURE: found %p already in use for %s",
3029 (void*)cls, name);
3030 }
3031
3032 return cls;
3033 }
3034
3035 // No class or future class with that name yet. Make one.
3036 // fixme not thread-safe with respect to
3037 // simultaneous library load or getFutureClass.
3038 return _objc_allocateFutureClass(name);
3039 }
3040
3041
3042 BOOL _class_isFutureClass(Class cls)
3043 {
3044 return cls && cls->isFuture();
3045 }
3046
3047 BOOL _class_isSwift(Class _Nullable cls)
3048 {
3049 return cls && cls->isSwiftStable();
3050 }
3051
3052 /***********************************************************************
3053 * _objc_flush_caches
3054 * Flushes all caches.
3055 * (Historical behavior: flush caches for cls, its metaclass,
3056 * and subclasses thereof. Nil flushes all classes.)
3057 * Locking: acquires runtimeLock
3058 **********************************************************************/
3059 static void flushCaches(Class cls, const char *func, bool (^predicate)(Class))
3060 {
3061 runtimeLock.assertLocked();
3062 #if CONFIG_USE_CACHE_LOCK
3063 mutex_locker_t lock(cacheUpdateLock);
3064 #endif
3065
3066 const auto handler = ^(Class c) {
3067 if (predicate(c)) {
3068 c->cache.eraseNolock(func);
3069 }
3070
3071 return true;
3072 };
3073
3074 if (cls) {
3075 foreach_realized_class_and_subclass(cls, handler);
3076 } else {
3077 foreach_realized_class_and_metaclass(handler);
3078 }
3079 }
3080
3081
void _objc_flush_caches(Class cls)
{
    {
        mutex_locker_t lock(runtimeLock);
        // Flush cls and its subclasses (or every class and metaclass when
        // cls is nil). Constant optimized caches are excluded by the
        // predicate since eraseNolock cannot rewrite them.
        flushCaches(cls, __func__, [](Class c){
            return !c->cache.isConstantOptimizedCache();
        });
        if (cls && !cls->isMetaClass() && !cls->isRootClass()) {
            // Plain non-root class: also flush its metaclass side.
            flushCaches(cls->ISA(), __func__, [](Class c){
                return !c->cache.isConstantOptimizedCache();
            });
        } else {
            // cls is a root class or root metaclass. Its metaclass is itself
            // or a subclass so the metaclass caches were already flushed.
        }
    }
    // runtimeLock is released here before the garbage collection below
    // re-acquires the appropriate lock.

    if (!cls) {
        // collectALot if cls==nil
#if CONFIG_USE_CACHE_LOCK
        mutex_locker_t lock(cacheUpdateLock);
#else
        mutex_locker_t lock(runtimeLock);
#endif
        cache_t::collectNolock(true);
    }
}
3109
3110
3111 /***********************************************************************
3112 * map_images
3113 * Process the given images which are being mapped in by dyld.
3114 * Calls ABI-agnostic code after taking ABI-specific locks.
3115 *
3116 * Locking: write-locks runtimeLock
3117 **********************************************************************/
3118 void
3119 map_images(unsigned count, const char * const paths[],
3120 const struct mach_header * const mhdrs[])
3121 {
3122 mutex_locker_t lock(runtimeLock);
3123 return map_images_nolock(count, paths, mhdrs);
3124 }
3125
3126
3127 static void load_categories_nolock(header_info *hi) {
3128 bool hasClassProperties = hi->info()->hasCategoryClassProperties();
3129
3130 size_t count;
3131 auto processCatlist = [&](category_t * const *catlist) {
3132 for (unsigned i = 0; i < count; i++) {
3133 category_t *cat = catlist[i];
3134 Class cls = remapClass(cat->cls);
3135 locstamped_category_t lc{cat, hi};
3136
3137 if (!cls) {
3138 // Category's target class is missing (probably weak-linked).
3139 // Ignore the category.
3140 if (PrintConnecting) {
3141 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
3142 "missing weak-linked target class",
3143 cat->name, cat);
3144 }
3145 continue;
3146 }
3147
3148 // Process this category.
3149 if (cls->isStubClass()) {
3150 // Stub classes are never realized. Stub classes
3151 // don't know their metaclass until they're
3152 // initialized, so we have to add categories with
3153 // class methods or properties to the stub itself.
3154 // methodizeClass() will find them and add them to
3155 // the metaclass as appropriate.
3156 if (cat->instanceMethods ||
3157 cat->protocols ||
3158 cat->instanceProperties ||
3159 cat->classMethods ||
3160 cat->protocols ||
3161 (hasClassProperties && cat->_classProperties))
3162 {
3163 objc::unattachedCategories.addForClass(lc, cls);
3164 }
3165 } else {
3166 // First, register the category with its target class.
3167 // Then, rebuild the class's method lists (etc) if
3168 // the class is realized.
3169 if (cat->instanceMethods || cat->protocols
3170 || cat->instanceProperties)
3171 {
3172 if (cls->isRealized()) {
3173 attachCategories(cls, &lc, 1, ATTACH_EXISTING);
3174 } else {
3175 objc::unattachedCategories.addForClass(lc, cls);
3176 }
3177 }
3178
3179 if (cat->classMethods || cat->protocols
3180 || (hasClassProperties && cat->_classProperties))
3181 {
3182 if (cls->ISA()->isRealized()) {
3183 attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
3184 } else {
3185 objc::unattachedCategories.addForClass(lc, cls->ISA());
3186 }
3187 }
3188 }
3189 }
3190 };
3191
3192 processCatlist(hi->catlist(&count));
3193 processCatlist(hi->catlist2(&count));
3194 }
3195
3196 static void loadAllCategories() {
3197 mutex_locker_t lock(runtimeLock);
3198
3199 for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
3200 load_categories_nolock(hi);
3201 }
3202 }
3203
3204 /***********************************************************************
3205 * load_images
3206 * Process +load in the given images which are being mapped in by dyld.
3207 *
3208 * Locking: write-locks runtimeLock and loadMethodLock
3209 **********************************************************************/
3210 extern bool hasLoadMethods(const headerType *mhdr);
3211 extern void prepare_load_methods(const headerType *mhdr);
3212
void
load_images(const char *path __unused, const struct mach_header *mh)
{
    // One-time: attach categories that were present at startup. This is
    // deferred until after dyld's notify registration has completed.
    if (!didInitialAttachCategories && didCallDyldNotifyRegister) {
        didInitialAttachCategories = true;
        loadAllCategories();
    }

    // Return without taking locks if there are no +load methods here.
    if (!hasLoadMethods((const headerType *)mh)) return;

    // loadMethodLock is recursive because +load may itself trigger
    // further image loads. Taken before runtimeLock (same order as
    // unmap_image).
    recursive_mutex_locker_t lock(loadMethodLock);

    // Discover load methods
    {
        mutex_locker_t lock2(runtimeLock);
        prepare_load_methods((const headerType *)mh);
    }

    // Call +load methods (without runtimeLock - re-entrant)
    call_load_methods();
}
3235
3236
3237 /***********************************************************************
3238 * unmap_image
3239 * Process the given image which is about to be unmapped by dyld.
3240 *
3241 * Locking: write-locks runtimeLock and loadMethodLock
3242 **********************************************************************/
void
unmap_image(const char *path __unused, const struct mach_header *mh)
{
    // Lock order: loadMethodLock before runtimeLock, matching
    // load_images() to avoid deadlock between the two paths.
    recursive_mutex_locker_t lock(loadMethodLock);
    mutex_locker_t lock2(runtimeLock);
    unmap_image_nolock(mh);
}
3250
3251
3252 /***********************************************************************
3253 * mustReadClasses
3254 * Preflight check in advance of readClass() from an image.
3255 **********************************************************************/
// Decide whether readClass() must run for the classes of image hi.
// Returns NO only when the image is preoptimized and none of the
// conditions that readClass() handles can apply.
// hasDyldRoots: true if any shared cache image has been overridden.
bool mustReadClasses(header_info *hi, bool hasDyldRoots)
{
    const char *reason;

    // If the image is not preoptimized then we must read classes.
    if (!hi->hasPreoptimizedClasses()) {
        reason = nil; // Don't log this one because it is noisy.
        goto readthem;
    }

    // If iOS simulator then we must read classes.
#if TARGET_OS_SIMULATOR
    reason = "the image is for iOS simulator";
    goto readthem;
#endif

    ASSERT(!hi->isBundle());  // no MH_BUNDLE in shared cache

    // If the image may have missing weak superclasses then we must read classes
    if (!noMissingWeakSuperclasses() || hasDyldRoots) {
        reason = "the image may contain classes with missing weak superclasses";
        goto readthem;
    }

    // If there are unresolved future classes then we must read classes.
    if (haveFutureNamedClasses()) {
        reason = "there are unresolved future classes pending";
        goto readthem;
    }

    // readClass() rewrites bits in backward-deploying Swift stable ABI code.
    // The assumption here is that there are no such classes
    // in the dyld shared cache.
#if DEBUG
    {
        // Verify the assumption above for every class in the image.
        size_t count;
        classref_t const *classlist = _getObjc2ClassList(hi, &count);
        for (size_t i = 0; i < count; i++) {
            Class cls = remapClass(classlist[i]);
            ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
        }
    }
#endif

    // readClass() does not need to do anything.
    return NO;

  readthem:
    if (PrintPreopt && reason) {
        _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
                     "because %s", hi->fname(), reason);
    }
    return YES;
}
3310
3311
3312 /***********************************************************************
3313 * readClass
3314 * Read a class and metaclass as written by a compiler.
3315 * Returns the new class pointer. This could be:
3316 * - cls
3317 * - nil (cls has a missing weak-linked superclass)
3318 * - something else (space for this class was reserved by a future class)
3319 *
3320 * Note that all work performed by this function is preflighted by
3321 * mustReadClasses(). Do not change this function without updating that one.
3322 *
3323 * Locking: runtimeLock acquired by map_images or objc_readClassPair
3324 **********************************************************************/
Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
{
    // May be nullptr for Swift generic classes that generate their
    // names lazily (handled below).
    const char *mangledName = cls->nonlazyMangledName();

    if (missingWeakSuperclass(cls)) {
        // No superclass (probably weak-linked).
        // Disavow any knowledge of this subclass.
        if (PrintConnecting) {
            _objc_inform("CLASS: IGNORING class '%s' with "
                         "missing weak-linked superclass",
                         cls->nameForLogging());
        }
        addRemappedClass(cls, nil);
        cls->setSuperclass(nil);
        return nil;
    }

    cls->fixupBackwardDeployingStableSwift();

    Class replacing = nil;
    if (mangledName != nullptr) {
        if (Class newCls = popFutureNamedClass(mangledName)) {
            // This name was previously allocated as a future class.
            // Copy objc_class to future class's struct.
            // Preserve future's rw data block.

            if (newCls->isAnySwift()) {
                _objc_fatal("Can't complete future class request for '%s' "
                            "because the real class is too big.",
                            cls->nameForLogging());
            }

            class_rw_t *rw = newCls->data();
            const class_ro_t *old_ro = rw->ro();
            memcpy(newCls, cls, sizeof(objc_class));

            // Manually set address-discriminated ptrauthed fields
            // so that newCls gets the correct signatures.
            newCls->setSuperclass(cls->getSuperclass());
            newCls->initIsa(cls->getIsa());

            // After the memcpy, newCls->data() is the compiler-written ro
            // copied from cls; install it into the preserved rw, then
            // restore rw as newCls's data. The future's old ro is freed.
            rw->set_ro((class_ro_t *)newCls->data());
            newCls->setData(rw);
            freeIfMutable((char *)old_ro->getName());
            free((void *)old_ro);

            addRemappedClass(cls, newCls);

            replacing = cls;
            cls = newCls;
        }
    }

    if (headerIsPreoptimized && !replacing) {
        // class list built in shared cache
        // fixme strict assert doesn't work because of duplicates
        // ASSERT(cls == getClass(name));
        ASSERT(mangledName == nullptr || getClassExceptSomeSwift(mangledName));
    } else {
        if (mangledName) { //some Swift generic classes can lazily generate their names
            addNamedClass(cls, mangledName, replacing);
        } else {
            // Lazily-named class: its metaclass must carry a pointer back
            // to the nonmetaclass so the name can be generated later.
            Class meta = cls->ISA();
            const class_ro_t *metaRO = meta->bits.safe_ro();
            ASSERT(metaRO->getNonMetaclass() && "Metaclass with lazy name must have a pointer to the corresponding nonmetaclass.");
            ASSERT(metaRO->getNonMetaclass() == cls && "Metaclass nonmetaclass pointer must equal the original class.");
        }
        addClassTableEntry(cls);
    }

    // for future reference: shared cache never contains MH_BUNDLEs
    if (headerIsBundle) {
        cls->data()->flags |= RO_FROM_BUNDLE;
        cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
    }

    return cls;
}
3403
3404
3405 /***********************************************************************
3406 * readProtocol
3407 * Read a protocol as written by a compiler.
3408 **********************************************************************/
// Install one compiler-emitted protocol into the runtime's protocol table.
// newproto: the protocol structure from some image's __objc_protolist.
// protocol_class: the Protocol class, set as isa on in-place fixups.
// protocol_map: the global name -> protocol_t table.
static void
readProtocol(protocol_t *newproto, Class protocol_class,
             NXMapTable *protocol_map,
             bool headerIsPreoptimized, bool headerIsBundle)
{
    // This is not enough to make protocols in unloaded bundles safe,
    // but it does prevent crashes when looking up unrelated protocols.
    auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;

    protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);

    if (oldproto) {
        if (oldproto != newproto) {
            // Some other definition already won.
            if (PrintProtocols) {
                _objc_inform("PROTOCOLS: protocol at %p is %s "
                             "(duplicate of %p)",
                             newproto, oldproto->nameForLogging(), oldproto);
            }

            // If we are a shared cache binary then we have a definition of this
            // protocol, but if another one was chosen then we need to clear our
            // isCanonical bit so that no-one trusts it.
            // Note, if getProtocol returned a shared cache protocol then the
            // canonical definition is already in the shared cache and we don't
            // need to do anything.
            if (headerIsPreoptimized && !oldproto->isCanonical()) {
                // Note newproto is an entry in our __objc_protolist section which
                // for shared cache binaries points to the original protocol in
                // that binary, not the shared cache uniqued one.
                auto cacheproto = (protocol_t *)
                    getSharedCachePreoptimizedProtocol(newproto->mangledName);
                if (cacheproto && cacheproto->isCanonical())
                    cacheproto->clearIsCanonical();
            }
        }
    }
    else if (headerIsPreoptimized) {
        // Shared cache initialized the protocol object itself,
        // but in order to allow out-of-cache replacement we need
        // to add it to the protocol table now.

        protocol_t *cacheproto = (protocol_t *)
            getPreoptimizedProtocol(newproto->mangledName);
        protocol_t *installedproto;
        if (cacheproto && cacheproto != newproto) {
            // Another definition in the shared cache wins (because
            // everything in the cache was fixed up to point to it).
            installedproto = cacheproto;
        }
        else {
            // This definition wins.
            installedproto = newproto;
        }

        ASSERT(installedproto->getIsa() == protocol_class);
        ASSERT(installedproto->size >= sizeof(protocol_t));
        insertFn(protocol_map, installedproto->mangledName,
                 installedproto);

        if (PrintProtocols) {
            _objc_inform("PROTOCOLS: protocol at %p is %s",
                         installedproto, installedproto->nameForLogging());
            if (newproto != installedproto) {
                _objc_inform("PROTOCOLS: protocol at %p is %s "
                             "(duplicate of %p)",
                             newproto, installedproto->nameForLogging(),
                             installedproto);
            }
        }
    }
    else {
        // New protocol from an un-preoptimized image. Fix it up in place.
        // fixme duplicate protocols from unloadable bundle
        newproto->initIsa(protocol_class);  // fixme pinned
        insertFn(protocol_map, newproto->mangledName, newproto);
        if (PrintProtocols) {
            _objc_inform("PROTOCOLS: protocol at %p is %s",
                         newproto, newproto->nameForLogging());
        }
    }
}
3491
3492 /***********************************************************************
3493 * _read_images
3494 * Perform initial processing of the headers in the linked
3495 * list beginning with headerList.
3496 *
3497 * Called by: map_images_nolock
3498 *
3499 * Locking: runtimeLock acquired by map_images
3500 **********************************************************************/
// Perform initial processing of the images in hList (hCount entries).
// totalClasses / unoptimizedTotalClasses size the named-class table.
// The phases below run in a deliberate order: selectors, classes,
// class remapping, msgSend fixups, protocols, protocol refs,
// categories, then non-lazy/future class realization.
void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
{
    header_info *hi;
    uint32_t hIndex;
    size_t count;
    size_t i;
    Class *resolvedFutureClasses = nil;
    size_t resolvedFutureClassCount = 0;
    static bool doneOnce;
    bool launchTime = NO;
    TimeLogger ts(PrintImageTimes);

    runtimeLock.assertLocked();

// Loop header shared by every per-image pass below.
#define EACH_HEADER \
    hIndex = 0;         \
    hIndex < hCount && (hi = hList[hIndex]); \
    hIndex++

    if (!doneOnce) {
        doneOnce = YES;
        launchTime = YES;

#if SUPPORT_NONPOINTER_ISA
        // Disable non-pointer isa under some conditions.

# if SUPPORT_INDEXED_ISA
        // Disable nonpointer isa if any image contains old Swift code
        for (EACH_HEADER) {
            if (hi->info()->containsSwift() &&
                hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
            {
                DisableNonpointerIsa = true;
                if (PrintRawIsa) {
                    _objc_inform("RAW ISA: disabling non-pointer isa because "
                                 "the app or a framework contains Swift code "
                                 "older than Swift 3.0");
                }
                break;
            }
        }
# endif

# if TARGET_OS_OSX
        // Disable non-pointer isa if the app is too old
        // (linked before OS X 10.11)
        if (!dyld_program_sdk_at_least(dyld_platform_version_macOS_10_11)) {
            DisableNonpointerIsa = true;
            if (PrintRawIsa) {
                _objc_inform("RAW ISA: disabling non-pointer isa because "
                             "the app is too old.");
            }
        }

        // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
        // New apps that load old extensions may need this.
        for (EACH_HEADER) {
            if (hi->mhdr()->filetype != MH_EXECUTE) continue;
            unsigned long size;
            if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
                DisableNonpointerIsa = true;
                if (PrintRawIsa) {
                    _objc_inform("RAW ISA: disabling non-pointer isa because "
                                 "the app has a __DATA,__objc_rawisa section");
                }
            }
            break;  // assume only one MH_EXECUTE image
        }
# endif

#endif

        if (DisableTaggedPointers) {
            disableTaggedPointers();
        }

        initializeTaggedPointerObfuscator();

        if (PrintConnecting) {
            _objc_inform("CLASS: found %d classes during launch", totalClasses);
        }

        // namedClasses
        // Preoptimized classes don't go in this table.
        // 4/3 is NXMapTable's load factor
        int namedClassesSize =
            (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
        gdb_objc_realized_classes =
            NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);

        ts.log("IMAGE TIMES: first time tasks");
    }

    // Fix up @selector references
    // (Static so the count accumulates across calls for the
    // PrintPreopt statistics at the bottom.)
    static size_t UnfixedSelectors;
    {
        mutex_locker_t lock(selLock);
        for (EACH_HEADER) {
            if (hi->hasPreoptimizedSelectors()) continue;

            bool isBundle = hi->isBundle();
            SEL *sels = _getObjc2SelectorRefs(hi, &count);
            UnfixedSelectors += count;
            for (i = 0; i < count; i++) {
                const char *name = sel_cname(sels[i]);
                SEL sel = sel_registerNameNoLock(name, isBundle);
                if (sels[i] != sel) {
                    sels[i] = sel;
                }
            }
        }
    }

    ts.log("IMAGE TIMES: fix up selector references");

    // Discover classes. Fix up unresolved future classes. Mark bundle classes.
    bool hasDyldRoots = dyld_shared_cache_some_image_overridden();

    for (EACH_HEADER) {
        if (! mustReadClasses(hi, hasDyldRoots)) {
            // Image is sufficiently optimized that we need not call readClass()
            continue;
        }

        classref_t const *classlist = _getObjc2ClassList(hi, &count);

        bool headerIsBundle = hi->isBundle();
        bool headerIsPreoptimized = hi->hasPreoptimizedClasses();

        for (i = 0; i < count; i++) {
            Class cls = (Class)classlist[i];
            Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);

            if (newCls != cls && newCls) {
                // Class was moved but not deleted. Currently this occurs
                // only when the new class resolved a future class.
                // Non-lazily realize the class below.
                resolvedFutureClasses = (Class *)
                    realloc(resolvedFutureClasses,
                            (resolvedFutureClassCount+1) * sizeof(Class));
                resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
            }
        }
    }

    ts.log("IMAGE TIMES: discover classes");

    // Fix up remapped classes
    // Class list and nonlazy class list remain unremapped.
    // Class refs and super refs are remapped for message dispatching.

    if (!noClassesRemapped()) {
        for (EACH_HEADER) {
            Class *classrefs = _getObjc2ClassRefs(hi, &count);
            for (i = 0; i < count; i++) {
                remapClassRef(&classrefs[i]);
            }
            // fixme why doesn't test future1 catch the absence of this?
            classrefs = _getObjc2SuperRefs(hi, &count);
            for (i = 0; i < count; i++) {
                remapClassRef(&classrefs[i]);
            }
        }
    }

    ts.log("IMAGE TIMES: remap classes");

#if SUPPORT_FIXUP
    // Fix up old objc_msgSend_fixup call sites
    for (EACH_HEADER) {
        message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
        if (count == 0) continue;

        if (PrintVtables) {
            _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
                         "call sites in %s", count, hi->fname());
        }
        for (i = 0; i < count; i++) {
            fixupMessageRef(refs+i);
        }
    }

    ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
#endif


    // Discover protocols. Fix up protocol refs.
    for (EACH_HEADER) {
        extern objc_class OBJC_CLASS_$_Protocol;
        Class cls = (Class)&OBJC_CLASS_$_Protocol;
        ASSERT(cls);
        NXMapTable *protocol_map = protocols();
        bool isPreoptimized = hi->hasPreoptimizedProtocols();

        // Skip reading protocols if this is an image from the shared cache
        // and we support roots
        // Note, after launch we do need to walk the protocol as the protocol
        // in the shared cache is marked with isCanonical() and that may not
        // be true if some non-shared cache binary was chosen as the canonical
        // definition
        if (launchTime && isPreoptimized) {
            if (PrintProtocols) {
                _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
                             hi->fname());
            }
            continue;
        }

        bool isBundle = hi->isBundle();

        protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
        for (i = 0; i < count; i++) {
            readProtocol(protolist[i], cls, protocol_map,
                         isPreoptimized, isBundle);
        }
    }

    ts.log("IMAGE TIMES: discover protocols");

    // Fix up @protocol references
    // Preoptimized images may have the right
    // answer already but we don't know for sure.
    for (EACH_HEADER) {
        // At launch time, we know preoptimized image refs are pointing at the
        // shared cache definition of a protocol. We can skip the check on
        // launch, but have to visit @protocol refs for shared cache images
        // loaded later.
        if (launchTime && hi->isPreoptimized())
            continue;
        protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
        for (i = 0; i < count; i++) {
            remapProtocolRef(&protolist[i]);
        }
    }

    ts.log("IMAGE TIMES: fix up @protocol references");

    // Discover categories. Only do this after the initial category
    // attachment has been done. For categories present at startup,
    // discovery is deferred until the first load_images call after
    // the call to _dyld_objc_notify_register completes. rdar://problem/53119145
    if (didInitialAttachCategories) {
        for (EACH_HEADER) {
            load_categories_nolock(hi);
        }
    }

    ts.log("IMAGE TIMES: discover categories");

    // Category discovery MUST BE Late to avoid potential races
    // when other threads call the new category code before
    // this thread finishes its fixups.

    // +load handled by prepare_load_methods()

    // Realize non-lazy classes (for +load methods and static instances)
    for (EACH_HEADER) {
        classref_t const *classlist = hi->nlclslist(&count);
        for (i = 0; i < count; i++) {
            Class cls = remapClass(classlist[i]);
            if (!cls) continue;

            addClassTableEntry(cls);

            if (cls->isSwiftStable()) {
                if (cls->swiftMetadataInitializer()) {
                    _objc_fatal("Swift class %s with a metadata initializer "
                                "is not allowed to be non-lazy",
                                cls->nameForLogging());
                }
                // fixme also disallow relocatable classes
                // We can't disallow all Swift classes because of
                // classes like Swift.__EmptyArrayStorage
            }
            realizeClassWithoutSwift(cls, nil);
        }
    }

    ts.log("IMAGE TIMES: realize non-lazy classes");

    // Realize newly-resolved future classes, in case CF manipulates them
    if (resolvedFutureClasses) {
        for (i = 0; i < resolvedFutureClassCount; i++) {
            Class cls = resolvedFutureClasses[i];
            if (cls->isSwiftStable()) {
                _objc_fatal("Swift class is not allowed to be future");
            }
            realizeClassWithoutSwift(cls, nil);
            cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
        }
        free(resolvedFutureClasses);
    }

    ts.log("IMAGE TIMES: realize future classes");

    if (DebugNonFragileIvars) {
        realizeAllClasses();
    }


    // Print preoptimization statistics
    if (PrintPreopt) {
        static unsigned int PreoptTotalMethodLists;
        static unsigned int PreoptOptimizedMethodLists;
        static unsigned int PreoptTotalClasses;
        static unsigned int PreoptOptimizedClasses;

        for (EACH_HEADER) {
            if (hi->hasPreoptimizedSelectors()) {
                _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
                             "in %s", hi->fname());
            }
            else if (hi->info()->optimizedByDyld()) {
                _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
                             "in %s", hi->fname());
            }

            classref_t const *classlist = _getObjc2ClassList(hi, &count);
            for (i = 0; i < count; i++) {
                Class cls = remapClass(classlist[i]);
                if (!cls) continue;

                PreoptTotalClasses++;
                if (hi->hasPreoptimizedClasses()) {
                    PreoptOptimizedClasses++;
                }

                const method_list_t *mlist;
                if ((mlist = cls->bits.safe_ro()->baseMethods())) {
                    PreoptTotalMethodLists++;
                    if (mlist->isFixedUp()) {
                        PreoptOptimizedMethodLists++;
                    }
                }
                if ((mlist = cls->ISA()->bits.safe_ro()->baseMethods())) {
                    PreoptTotalMethodLists++;
                    if (mlist->isFixedUp()) {
                        PreoptOptimizedMethodLists++;
                    }
                }
            }
        }

        _objc_inform("PREOPTIMIZATION: %zu selector references not "
                     "pre-optimized", UnfixedSelectors);
        _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
                     PreoptOptimizedMethodLists, PreoptTotalMethodLists,
                     PreoptTotalMethodLists
                     ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
                     : 0.0);
        _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
                     PreoptOptimizedClasses, PreoptTotalClasses,
                     PreoptTotalClasses
                     ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
                     : 0.0);
        _objc_inform("PREOPTIMIZATION: %zu protocol references not "
                     "pre-optimized", UnfixedProtocolReferences);
    }

#undef EACH_HEADER
}
3862
3863
3864 /***********************************************************************
3865 * prepare_load_methods
3866 * Schedule +load for classes in this image, any un-+load-ed
3867 * superclasses in other images, and any categories in this image.
3868 **********************************************************************/
3869 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3870 // cls must already be connected.
static void schedule_class_load(Class cls)
{
    if (cls == Nil) return;
    ASSERT(cls->isRealized()); // _read_images should have realized it

    // Already on the +load list (or already loaded)? Nothing to do.
    if (cls->data()->flags & RW_LOADED) return;

    // Superclasses must be scheduled before their subclasses.
    schedule_class_load(cls->getSuperclass());

    add_class_to_loadable_list(cls);
    cls->setInfo(RW_LOADED);
}
3884
3885 // Quick scan for +load methods that doesn't take a lock.
bool hasLoadMethods(const headerType *mhdr)
{
    // A non-empty non-lazy class list or non-lazy category list
    // means something in this image wants +load.
    size_t n;
    bool found = (_getObjc2NonlazyClassList(mhdr, &n) && n > 0);
    if (!found) {
        found = (_getObjc2NonlazyCategoryList(mhdr, &n) && n > 0);
    }
    return found;
}
3893
void prepare_load_methods(const headerType *mhdr)
{
    runtimeLock.assertLocked();

    size_t count;

    // Schedule +load for every non-lazy class in this image
    // (schedule_class_load also pulls in un-+load-ed superclasses).
    classref_t const *classrefs = _getObjc2NonlazyClassList(mhdr, &count);
    for (size_t i = 0; i < count; i++) {
        schedule_class_load(remapClass(classrefs[i]));
    }

    // Schedule +load for every non-lazy category whose class survived.
    category_t * const *catrefs = _getObjc2NonlazyCategoryList(mhdr, &count);
    for (size_t i = 0; i < count; i++) {
        category_t *cat = catrefs[i];
        Class cls = remapClass(cat->cls);
        if (!cls) continue;  // category for ignored weak-linked class
        if (cls->isSwiftStable()) {
            _objc_fatal("Swift class extensions and categories on Swift "
                        "classes are not allowed to have +load methods");
        }
        // The class (and therefore its metaclass) must be realized
        // before its category's +load can run.
        realizeClassWithoutSwift(cls, nil);
        ASSERT(cls->ISA()->isRealized());
        add_category_to_loadable_list(cat);
    }
}
3920
3921
3922 /***********************************************************************
3923 * _unload_image
3924 * Only handles MH_BUNDLE for now.
3925 * Locking: write-lock and loadMethodLock acquired by unmap_image
3926 **********************************************************************/
void _unload_image(header_info *hi)
{
    size_t count, i;

    // Both locks are taken by unmap_image before calling us.
    loadMethodLock.assertLocked();
    runtimeLock.assertLocked();

    // Unload unattached categories and categories waiting for +load.

    // Ignore __objc_catlist2. We don't support unloading Swift
    // and we never will.
    category_t * const *catlist = hi->catlist(&count);
    for (i = 0; i < count; i++) {
        category_t *cat = catlist[i];
        Class cls = remapClass(cat->cls);
        if (!cls) continue; // category for ignored weak-linked class

        // fixme for MH_DYLIB cat's class may have been unloaded already

        // unattached list
        objc::unattachedCategories.eraseCategoryForClass(cat, cls);

        // +load queue
        remove_category_from_loadable_list(cat);
    }

    // Unload classes.

    // Gather classes from both __DATA,__objc_clslist
    // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
    // only, and we need to unload that class if we unload an arclite image.

    // The set deduplicates classes that appear in both lists.
    objc::DenseSet<Class> classes{};
    classref_t const *classlist;

    classlist = _getObjc2ClassList(hi, &count);
    for (i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) classes.insert(cls);
    }

    classlist = hi->nlclslist(&count);
    for (i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) classes.insert(cls);
    }

    // First detach classes from each other. Then free each class.
    // This avoid bugs where this loop unloads a subclass before its superclass

    for (Class cls: classes) {
        remove_class_from_loadable_list(cls);
        // Detach the metaclass as well as the class itself.
        detach_class(cls->ISA(), YES);
        detach_class(cls, NO);
    }
    for (Class cls: classes) {
        free_class(cls->ISA());
        free_class(cls);
    }

    // XXX FIXME -- Clean up protocols:
    // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time

    // fixme DebugUnload
}
3992
3993
3994 /***********************************************************************
3995 * method_getDescription
3996 * Returns a pointer to this method's objc_method_description.
3997 * Locking: none
3998 **********************************************************************/
struct objc_method_description *
method_getDescription(Method m)
{
    // nil method has no description.
    return m ? m->getDescription() : nil;
}
4005
4006
IMP
method_getImplementation(Method m)
{
    // nil method has no implementation.
    if (!m) return nil;
    return m->imp(true);
}
4012
IMPAndSEL _method_getImplementationAndName(Method m)
{
    // Tolerate nil, consistent with method_getImplementation() and
    // method_getName(); previously a nil method was dereferenced.
    if (!m) return { nil, nil };
    return { m->imp(true), m->name() };
}
4017
4018
4019 /***********************************************************************
4020 * method_getName
4021 * Returns this method's selector.
4022 * The method must not be nil.
4023 * The method must already have been fixed-up.
4024 * Locking: none
4025 **********************************************************************/
SEL
method_getName(Method m)
{
    if (!m) return nil;

    SEL sel = m->name();
    // The selector must already be uniqued/registered
    // (the method list must have been fixed up).
    ASSERT(sel == sel_registerName(sel_getName(sel)));
    return sel;
}
4034
4035
4036 /***********************************************************************
4037 * method_getTypeEncoding
4038 * Returns this method's old-style type encoding string.
4039 * The method must not be nil.
4040 * Locking: none
4041 **********************************************************************/
const char *
method_getTypeEncoding(Method m)
{
    // nil method has no type encoding.
    return m ? m->types() : nil;
}
4048
4049
4050 /***********************************************************************
4051 * method_setImplementation
4052 * Sets this method's implementation to imp.
4053 * The previous implementation is returned.
4054 **********************************************************************/
static IMP
_method_setImplementation(Class cls, method_t *m, IMP imp)
{
    runtimeLock.assertLocked();

    if (!m) return nil;
    if (!imp) return nil;

    // Capture the old IMP and the selector before the swap so the
    // cache flush below can target only entries that still resolve
    // this selector to the old implementation.
    IMP old = m->imp(false);
    SEL sel = m->name();

    m->setImp(imp);

    // Cache updates are slow if cls is nil (i.e. unknown)
    // RR/AWZ updates are slow if cls is nil (i.e. unknown)
    // fixme build list of classes whose Methods are known externally?

    flushCaches(cls, __func__, [sel, old](Class c){
        return c->cache.shouldFlush(sel, old);
    });

    // Replacing a retain/release/allocWithZone-style method may change
    // the class's custom-RR/AWZ status.
    adjustCustomFlagsForMethodChange(cls, m);

    return old;
}
4080
IMP
method_setImplementation(Method m, IMP imp)
{
    // The owning class is unknown here, so the RR/AWZ and cache
    // updates in _method_setImplementation take the slow path.
    // fixme build list of classes whose Methods are known externally?
    mutex_locker_t lock(runtimeLock);
    return _method_setImplementation(Nil, m, imp);
}
4089
// Set a method's IMP with no cache flushing and no custom-RR/AWZ flag
// adjustment (unlike method_setImplementation). Callers are responsible
// for ensuring those updates are unnecessary or done separately.
extern void _method_setImplementationRawUnsafe(Method m, IMP imp)
{
    mutex_locker_t lock(runtimeLock);
    m->setImp(imp);
}
4095
4096
void method_exchangeImplementations(Method m1, Method m2)
{
    if (!m1 || !m2) return;

    mutex_locker_t lock(runtimeLock);

    // Capture both IMPs and selectors before swapping so the cache
    // flush below can match entries against the pre-swap IMPs.
    IMP imp1 = m1->imp(false);
    IMP imp2 = m2->imp(false);
    SEL sel1 = m1->name();
    SEL sel2 = m2->name();

    m1->setImp(imp2);
    m2->setImp(imp1);


    // RR/AWZ updates are slow because class is unknown
    // Cache updates are slow because class is unknown
    // fixme build list of classes whose Methods are known externally?

    // Flush any cache entry that still maps either selector to its
    // old implementation.
    flushCaches(nil, __func__, [sel1, sel2, imp1, imp2](Class c){
        return c->cache.shouldFlush(sel1, imp1) || c->cache.shouldFlush(sel2, imp2);
    });

    adjustCustomFlagsForMethodChange(nil, m1);
    adjustCustomFlagsForMethodChange(nil, m2);
}
4123
4124
4125 /***********************************************************************
4126 * ivar_getOffset
4127 * fixme
4128 * Locking: none
4129 **********************************************************************/
ptrdiff_t
ivar_getOffset(Ivar ivar)
{
    // nil ivar reports offset 0.
    return ivar ? *ivar->offset : 0;
}
4136
4137
4138 /***********************************************************************
4139 * ivar_getName
4140 * fixme
4141 * Locking: none
4142 **********************************************************************/
const char *
ivar_getName(Ivar ivar)
{
    // nil ivar has no name.
    return ivar ? ivar->name : nil;
}
4149
4150
4151 /***********************************************************************
4152 * ivar_getTypeEncoding
4153 * fixme
4154 * Locking: none
4155 **********************************************************************/
const char *
ivar_getTypeEncoding(Ivar ivar)
{
    // nil ivar has no type encoding.
    return ivar ? ivar->type : nil;
}
4162
4163
4164
const char *property_getName(objc_property_t prop)
{
    // Tolerate nil, consistent with ivar_getName() and
    // property_copyAttributeList(); previously nil was dereferenced.
    if (!prop) return nil;
    return prop->name;
}
4169
const char *property_getAttributes(objc_property_t prop)
{
    // Tolerate nil, consistent with property_copyAttributeList();
    // previously nil was dereferenced.
    if (!prop) return nil;
    return prop->attributes;
}
4174
objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
                                                      unsigned int *outCount)
{
    // nil property yields an empty attribute list.
    if (prop == nil) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);
    return copyPropertyAttributeList(prop->attributes, outCount);
}
4186
char * property_copyAttributeValue(objc_property_t prop, const char *name)
{
    // Reject nil property, nil attribute name, and empty name.
    if (!prop) return nil;
    if (!name || name[0] == '\0') return nil;

    mutex_locker_t lock(runtimeLock);
    return copyPropertyAttributeValue(prop->attributes, name);
}
4194
4195
4196 /***********************************************************************
4197 * getExtendedTypesIndexesForMethod
4198 * Returns:
4199 * a is the count of methods in all method lists before m's method list
4200 * b is the index of m in m's method list
4201 * a+b is the index of m's extended types in the extended types array
4202 **********************************************************************/
static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
{
    a = 0;
    // Defensive: define b even when m is not found in the requested
    // method list (e.g. the list is absent). Previously b could be
    // returned uninitialized in that case.
    b = 0;

    // Walk the four method lists in their fixed layout order,
    // accumulating counts of the lists that precede m's list.
    if (proto->instanceMethods) {
        if (isRequiredMethod && isInstanceMethod) {
            b = proto->instanceMethods->indexOfMethod(m);
            return;
        }
        a += proto->instanceMethods->count;
    }

    if (proto->classMethods) {
        if (isRequiredMethod && !isInstanceMethod) {
            b = proto->classMethods->indexOfMethod(m);
            return;
        }
        a += proto->classMethods->count;
    }

    if (proto->optionalInstanceMethods) {
        if (!isRequiredMethod && isInstanceMethod) {
            b = proto->optionalInstanceMethods->indexOfMethod(m);
            return;
        }
        a += proto->optionalInstanceMethods->count;
    }

    if (proto->optionalClassMethods) {
        if (!isRequiredMethod && !isInstanceMethod) {
            b = proto->optionalClassMethods->indexOfMethod(m);
            return;
        }
        a += proto->optionalClassMethods->count;
    }
}
4239
4240
4241 /***********************************************************************
4242 * getExtendedTypesIndexForMethod
4243 * Returns the index of m's extended types in proto's extended types array.
4244 **********************************************************************/
static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
{
    // listStart = count of methods in all lists before m's list;
    // indexInList = m's position within its own list.
    uint32_t listStart, indexInList;
    getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
                                     isInstanceMethod, listStart, indexInList);
    return listStart + indexInList;
}
4253
4254
4255 /***********************************************************************
4256 * fixupProtocolMethodList
4257 * Fixes up a single method list in a protocol.
4258 **********************************************************************/
static void
fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
                        bool required, bool instance)
{
    runtimeLock.assertLocked();

    if (!mlist) return;
    if (mlist->isFixedUp()) return;

    const char **extTypes = proto->extendedMethodTypes();
    fixupMethodList(mlist, true/*always copy for simplicity*/,
                    !extTypes/*sort if no extended method types*/);

    if (extTypes && !mlist->isSmallList()) {
        // Sort method list and extended method types together.
        // fixupMethodList() can't do this.
        // fixme COW stomp
        uint32_t count = mlist->count;
        // prefix = index of this list's first entry within the
        // protocol-wide extended-types array; junk = the method's own
        // index within the list (unused here).
        uint32_t prefix;
        uint32_t junk;
        getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
                                         required, instance, prefix, junk);
        // Simple O(n^2) sort by selector address, swapping each method
        // together with its parallel extended-types entry so the two
        // arrays stay in lockstep.
        for (uint32_t i = 0; i < count; i++) {
            for (uint32_t j = i+1; j < count; j++) {
                auto& mi = mlist->get(i).big();
                auto& mj = mlist->get(j).big();
                if (mi.name > mj.name) {
                    std::swap(mi, mj);
                    std::swap(extTypes[prefix+i], extTypes[prefix+j]);
                }
            }
        }
    }
}
4293
4294
4295 /***********************************************************************
4296 * fixupProtocol
4297 * Fixes up all of a protocol's method lists.
4298 **********************************************************************/
static void
fixupProtocol(protocol_t *proto)
{
    runtimeLock.assertLocked();

    // Fix up incorporated protocols first.
    if (proto->protocols) {
        uintptr_t n = proto->protocols->count;
        for (uintptr_t i = 0; i < n; i++) {
            protocol_t *sub = remapProtocol(proto->protocols->list[i]);
            if (!sub->isFixedUp()) fixupProtocol(sub);
        }
    }

    // Then fix up each of this protocol's four method lists.
    fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
    fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
    fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
    fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);

    // fixme memory barrier so we can check this with no lock
    proto->setFixedUp();
}
4319
4320
4321 /***********************************************************************
4322 * fixupProtocolIfNeeded
4323 * Fixes up all of a protocol's method lists if they aren't fixed up already.
4324 * Locking: write-locks runtimeLock.
4325 **********************************************************************/
static void
fixupProtocolIfNeeded(protocol_t *proto)
{
    runtimeLock.assertUnlocked();
    ASSERT(proto);

    // Fast path: already fixed up, no lock needed.
    if (proto->isFixedUp()) return;

    mutex_locker_t lock(runtimeLock);
    fixupProtocol(proto);
}
4337
4338
// Select one of the protocol's four method lists by
// (required, instance) flags.
static method_list_t *
getProtocolMethodList(protocol_t *proto, bool required, bool instance)
{
    if (required) {
        return instance ? proto->instanceMethods : proto->classMethods;
    }
    return instance ? proto->optionalInstanceMethods
                    : proto->optionalClassMethods;
}
4359
4360
4361 /***********************************************************************
4362 * protocol_getMethod_nolock
4363 * Locking: runtimeLock must be held by the caller
4364 **********************************************************************/
static method_t *
protocol_getMethod_nolock(protocol_t *proto, SEL sel,
                          bool isRequiredMethod, bool isInstanceMethod,
                          bool recursive)
{
    runtimeLock.assertLocked();

    if (!proto || !sel) return nil;

    // Callers must fix up the protocol first (fixupProtocolIfNeeded).
    ASSERT(proto->isFixedUp());

    // Search the requested method list of this protocol.
    method_list_t *mlist =
        getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
    if (mlist) {
        method_t *m = search_method_list(mlist, sel);
        if (m) return m;
    }

    // Then search incorporated protocols, depth-first.
    if (recursive && proto->protocols) {
        method_t *m;
        for (uint32_t i = 0; i < proto->protocols->count; i++) {
            protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
            m = protocol_getMethod_nolock(realProto, sel,
                                          isRequiredMethod, isInstanceMethod,
                                          true);
            if (m) return m;
        }
    }

    return nil;
}
4396
4397
4398 /***********************************************************************
4399 * protocol_getMethod
4400 * fixme
4401 * Locking: acquires runtimeLock
4402 **********************************************************************/
Method
protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
{
    if (proto == nil) return nil;

    // Fix up the protocol's method lists before searching them.
    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);
    return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
                                     isInstanceMethod, recursive);
}
4413
4414
4415 /***********************************************************************
4416 * protocol_getMethodTypeEncoding_nolock
4417 * Return the @encode string for the requested protocol method.
4418 * Returns nil if the compiler did not emit any extended @encode data.
4419 * Locking: runtimeLock must be held by the caller
4420 **********************************************************************/
const char *
protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
                                      bool isRequiredMethod,
                                      bool isInstanceMethod)
{
    runtimeLock.assertLocked();

    if (!proto) return nil;
    // The compiler emitted no extended types for this protocol.
    if (!proto->extendedMethodTypes()) return nil;

    ASSERT(proto->isFixedUp());

    // Search this protocol's own lists only (non-recursive): each
    // protocol's extended-types array must be indexed with a method
    // from that same protocol.
    method_t *m =
        protocol_getMethod_nolock(proto, sel,
                                  isRequiredMethod, isInstanceMethod, false);
    if (m) {
        uint32_t i = getExtendedTypesIndexForMethod(proto, m,
                                                    isRequiredMethod,
                                                    isInstanceMethod);
        return proto->extendedMethodTypes()[i];
    }

    // No method with that name. Search incorporated protocols.
    if (proto->protocols) {
        for (uintptr_t i = 0; i < proto->protocols->count; i++) {
            const char *enc =
                protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
            if (enc) return enc;
        }
    }

    return nil;
}
4454
4455 /***********************************************************************
4456 * _protocol_getMethodTypeEncoding
4457 * Return the @encode string for the requested protocol method.
4458 * Returns nil if the compiler did not emit any extended @encode data.
4459 * Locking: acquires runtimeLock
4460 **********************************************************************/
const char *
_protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
                                BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    protocol_t *proto = newprotocol(proto_gen);
    if (proto == nil) return nil;

    // Extended types are indexed by sorted-method position, so the
    // protocol must be fixed up first.
    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);
    return protocol_getMethodTypeEncoding_nolock(proto, sel,
                                                 isRequiredMethod,
                                                 isInstanceMethod);
}
4475
4476
4477 /***********************************************************************
4478 * protocol_t::demangledName
4479 * Returns the (Swift-demangled) name of the given protocol.
4480 * Locking: none
4481 **********************************************************************/
const char *
protocol_t::demangledName()
{
    // Old-format protocols have no storage for a cached demangled
    // name; the mangled name is all we can return.
    if (!hasDemangledNameField())
        return mangledName;

    if (! _demangledName) {
        // Demangle lazily. If demangling produces nothing, cache the
        // mangled name itself so we only attempt this once.
        char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
        // Publish with an atomic CAS; if another thread won the race,
        // discard our copy and use the already-published value.
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
                                               (void**)&_demangledName))
        {
            if (de) free(de);
        }
    }
    return _demangledName;
}
4498
4499 /***********************************************************************
4500 * protocol_getName
4501 * Returns the (Swift-demangled) name of the given protocol.
4502 * Locking: runtimeLock must not be held by the caller
4503 **********************************************************************/
const char *
protocol_getName(Protocol *proto)
{
    // nil protocol is reported as the literal string "nil".
    if (proto == nil) return "nil";
    return newprotocol(proto)->demangledName();
}
4510
4511
4512 /***********************************************************************
4513 * protocol_getInstanceMethodDescription
4514 * Returns the description of a named instance method.
4515 * Locking: runtimeLock must not be held by the caller
4516 **********************************************************************/
struct objc_method_description
protocol_getMethodDescription(Protocol *p, SEL aSel,
                              BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    Method m = protocol_getMethod(newprotocol(p), aSel,
                                  isRequiredMethod, isInstanceMethod, true);
    // Build the description directly; method_getDescription is
    // inefficient for small methods.
    if (!m) return (struct objc_method_description){nil, nil};
    return (struct objc_method_description){m->name(), (char *)m->types()};
}
4529
4530
4531 /***********************************************************************
4532 * protocol_conformsToProtocol_nolock
4533 * Returns YES if self conforms to other.
4534 * Locking: runtimeLock must be held by the caller.
4535 **********************************************************************/
static bool
protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
{
    runtimeLock.assertLocked();

    if (!self || !other) {
        return NO;
    }

    // protocols need not be fixed up

    // Conformance is decided by mangled name, so two distinct protocol
    // objects with the same name conform to each other.
    if (0 == strcmp(self->mangledName, other->mangledName)) {
        return YES;
    }

    // Check incorporated protocols recursively.
    if (self->protocols) {
        uintptr_t i;
        for (i = 0; i < self->protocols->count; i++) {
            protocol_t *proto = remapProtocol(self->protocols->list[i]);
            // Pointer equality is a fast path before the name compare.
            if (other == proto) {
                return YES;
            }
            if (0 == strcmp(other->mangledName, proto->mangledName)) {
                return YES;
            }
            if (protocol_conformsToProtocol_nolock(proto, other)) {
                return YES;
            }
        }
    }

    return NO;
}
4569
4570
4571 /***********************************************************************
4572 * protocol_conformsToProtocol
4573 * Returns YES if self conforms to other.
4574 * Locking: acquires runtimeLock
4575 **********************************************************************/
BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
{
    // Locked wrapper around the recursive name-based check.
    mutex_locker_t lock(runtimeLock);
    return protocol_conformsToProtocol_nolock(newprotocol(self),
                                              newprotocol(other));
}
4582
4583
4584 /***********************************************************************
4585 * protocol_isEqual
4586 * Return YES if two protocols are equal (i.e. conform to each other)
4587 * Locking: acquires runtimeLock
4588 **********************************************************************/
BOOL protocol_isEqual(Protocol *self, Protocol *other)
{
    // Identical pointers are trivially equal.
    if (self == other) return YES;
    if (!self || !other) return NO;

    // Equality means mutual conformance.
    return protocol_conformsToProtocol(self, other)
        && protocol_conformsToProtocol(other, self);
}
4599
4600
4601 /***********************************************************************
4602 * protocol_copyMethodDescriptionList
4603 * Returns descriptions of a protocol's methods.
4604 * Locking: acquires runtimeLock
4605 **********************************************************************/
struct objc_method_description *
protocol_copyMethodDescriptionList(Protocol *p,
                                   BOOL isRequiredMethod,BOOL isInstanceMethod,
                                   unsigned int *outCount)
{
    protocol_t *proto = newprotocol(p);
    struct objc_method_description *result = nil;
    unsigned int count = 0;

    if (!proto) {
        if (outCount) *outCount = 0;
        return nil;
    }

    // Method lists must be fixed up before they are walked.
    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);

    method_list_t *mlist =
        getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);

    if (mlist) {
        // Allocate one extra zeroed entry as a terminator.
        result = (struct objc_method_description *)
            calloc(mlist->count + 1, sizeof(struct objc_method_description));
        for (const auto& meth : *mlist) {
            result[count].name = meth.name();
            result[count].types = (char *)meth.types();
            count++;
        }
    }

    if (outCount) *outCount = count;
    return result;
}
4640
4641
4642 /***********************************************************************
4643 * protocol_getProperty
4644 * fixme
4645 * Locking: runtimeLock must be held by the caller
4646 **********************************************************************/
static property_t *
protocol_getProperty_nolock(protocol_t *proto, const char *name,
                            bool isRequiredProperty, bool isInstanceProperty)
{
    runtimeLock.assertLocked();

    if (!isRequiredProperty) {
        // Only required properties are currently supported.
        return nil;
    }

    // Search this protocol's own property list first.
    property_list_t *plist = isInstanceProperty ?
        proto->instanceProperties : proto->classProperties();
    if (plist) {
        for (auto& prop : *plist) {
            if (0 == strcmp(name, prop.name)) {
                return &prop;
            }
        }
    }

    // Not found here; search incorporated protocols, depth-first.
    if (proto->protocols) {
        uintptr_t i;
        for (i = 0; i < proto->protocols->count; i++) {
            protocol_t *p = remapProtocol(proto->protocols->list[i]);
            property_t *prop =
                protocol_getProperty_nolock(p, name,
                                            isRequiredProperty,
                                            isInstanceProperty);
            if (prop) return prop;
        }
    }

    return nil;
}
4682
objc_property_t protocol_getProperty(Protocol *p, const char *name,
                                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    // nil protocol or nil name finds nothing.
    if (!p) return nil;
    if (!name) return nil;

    mutex_locker_t lock(runtimeLock);
    return (objc_property_t)
        protocol_getProperty_nolock(newprotocol(p), name,
                                    isRequiredProperty, isInstanceProperty);
}
4693
4694
4695 /***********************************************************************
4696 * protocol_copyPropertyList
4697 * protocol_copyPropertyList2
4698 * fixme
4699 * Locking: acquires runtimeLock
4700 **********************************************************************/
// Copy a property list into a freshly-malloc'd nil-terminated array of
// property_t pointers. Returns nil (count 0) for an empty/absent list.
static property_t **
copyPropertyList(property_list_t *plist, unsigned int *outCount)
{
    property_t **result = nil;
    unsigned int count = plist ? plist->count : 0;

    if (count > 0) {
        result = (property_t **)malloc((count+1) * sizeof(property_t *));

        unsigned int i = 0;
        for (auto& prop : *plist) {
            result[i++] = &prop;
        }
        result[i] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
4724
objc_property_t *
protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
                           BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    if (!proto || !isRequiredProperty) {
        // Optional properties are not currently supported.
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Pick the instance or class property list, then copy it.
    protocol_t *p = newprotocol(proto);
    property_list_t *plist =
        isInstanceProperty ? p->instanceProperties : p->classProperties();
    return (objc_property_t *)copyPropertyList(plist, outCount);
}
4742
objc_property_t *
protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
{
    // Equivalent to requesting required instance properties.
    return protocol_copyPropertyList2(proto, outCount,
                                      YES/*required*/, YES/*instance*/);
}
4749
4750
4751 /***********************************************************************
4752 * protocol_copyProtocolList
4753 * Copies this protocol's incorporated protocols.
4754 * Does not copy those protocol's incorporated protocols in turn.
4755 * Locking: acquires runtimeLock
4756 **********************************************************************/
Protocol * __unsafe_unretained *
protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
{
    protocol_t *proto = newprotocol(p);

    if (!proto) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    unsigned int count = 0;
    if (proto->protocols) {
        count = (unsigned int)proto->protocols->count;
    }

    Protocol **result = nil;
    if (count > 0) {
        // nil-terminated array of this protocol's incorporated protocols.
        result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
        unsigned int i;
        for (i = 0; i < count; i++) {
            result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
        }
        result[i] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
4787
4788
4789 /***********************************************************************
4790 * objc_allocateProtocol
4791 * Creates a new protocol. The protocol may not be used until
4792 * objc_registerProtocol() is called.
4793 * Returns nil if a protocol with the same name already exists.
4794 * Locking: acquires runtimeLock
4795 **********************************************************************/
Protocol *
objc_allocateProtocol(const char *name)
{
    mutex_locker_t lock(runtimeLock);

    // Fail if a protocol with this name already exists.
    if (getProtocol(name)) {
        return nil;
    }

    protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);

    // Under-construction protocols get a private isa so that
    // objc_registerProtocol() can tell them apart from finished ones.
    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    result->initProtocolIsa(cls);
    result->size = sizeof(protocol_t);
    // fixme mangle the name if it looks swift-y?
    result->mangledName = strdupIfMutable(name);

    // fixme reserve name without installing

    return (Protocol *)result;
}
4818
4819
4820 /***********************************************************************
4821 * objc_registerProtocol
4822 * Registers a newly-constructed protocol. The protocol is now
4823 * ready for use and immutable.
4824 * Locking: acquires runtimeLock
4825 **********************************************************************/
void objc_registerProtocol(Protocol *proto_gen)
{
    protocol_t *proto = newprotocol(proto_gen);

    mutex_locker_t lock(runtimeLock);

    // Under-construction protocols carry the __IncompleteProtocol isa;
    // registration flips them to the real Protocol class.
    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    extern objc_class OBJC_CLASS_$_Protocol;
    Class cls = (Class)&OBJC_CLASS_$_Protocol;

    if (proto->ISA() == cls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was already "
                     "registered!", proto->nameForLogging());
        return;
    }
    if (proto->ISA() != oldcls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
                     "with objc_allocateProtocol!", proto->nameForLogging());
        return;
    }

    // NOT initProtocolIsa(). The protocol object may already
    // have been retained and we must preserve that count.
    proto->changeIsa(cls);

    // Don't add this protocol if we already have it.
    // Should we warn on duplicates?
    if (getProtocol(proto->mangledName) == nil) {
        NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
    }
}
4858
4859
/***********************************************************************
* protocol_addProtocol
* Adds an incorporated protocol to another protocol.
* No method enforcement is performed.
* `proto` must be under construction. `addition` must not.
* Locking: acquires runtimeLock
**********************************************************************/
void
protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
{
    // newprotocol() is only a cast, so doing it before the nil checks
    // below is safe.
    protocol_t *proto = newprotocol(proto_gen);
    protocol_t *addition = newprotocol(addition_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;
    if (!addition_gen) return;

    mutex_locker_t lock(runtimeLock);

    // The protocol being modified must still be under construction...
    if (proto->ISA() != cls) {
        _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }
    // ...and the protocol being added must be fully registered.
    if (addition->ISA() == cls) {
        _objc_inform("protocol_addProtocol: added protocol '%s' is still "
                     "under construction!", addition->nameForLogging());
        return;
    }

    // Grow the incorporated-protocol list by one slot.
    // NOTE(review): calloc/realloc results are unchecked, so this crashes
    // on out-of-memory instead of failing gracefully — confirm that
    // matches the runtime's OOM policy.
    protocol_list_t *protolist = proto->protocols;
    if (!protolist) {
        protolist = (protocol_list_t *)
            calloc(1, sizeof(protocol_list_t)
                             + sizeof(protolist->list[0]));
    } else {
        protolist = (protocol_list_t *)
            realloc(protolist, protocol_list_size(protolist)
                              + sizeof(protolist->list[0]));
    }

    protolist->list[protolist->count++] = (protocol_ref_t)addition;
    proto->protocols = protolist;
}
4906
4907
4908 /***********************************************************************
4909 * protocol_addMethodDescription
4910 * Adds a method to a protocol. The protocol must be under construction.
4911 * Locking: acquires runtimeLock
4912 **********************************************************************/
// Appends one method description to `list`, creating the list if needed.
// Helper for protocol_addMethodDescription(); the caller must hold
// runtimeLock (hence the _nolock suffix).
static void
protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
{
    if (!list) {
        // Fresh list with room for exactly one big-style method entry.
        list = (method_list_t *)calloc(method_list_t::byteSize(sizeof(struct method_t::big), 1), 1);
        list->entsizeAndFlags = sizeof(struct method_t::big);
        // `name` is already a runtime SEL, so no selector fixup is needed.
        list->setFixedUp();
    } else {
        // Grow the existing list by one entry.
        // NOTE(review): realloc result is unchecked; crashes on OOM.
        size_t size = list->byteSize() + list->entsize();
        list = (method_list_t *)realloc(list, size);
    }

    auto &meth = list->get(list->count++).big();
    meth.name = name;
    meth.types = types ? strdupIfMutable(types) : "";
    // Protocol methods are declarations only — no implementation.
    meth.imp = nil;
}
4930
void
protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
                              BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    // newprotocol() is only a cast; the nil check below guards all uses.
    protocol_t *proto = newprotocol(proto_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;

    mutex_locker_t lock(runtimeLock);

    // Methods may only be added while the protocol is under construction.
    if (proto->ISA() != cls) {
        _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    // Pick the method list matching the required/optional and
    // instance/class flags, then append the new description to it.
    method_list_t **mlistp;
    if (isRequiredMethod) {
        mlistp = isInstanceMethod ? &proto->instanceMethods
                                  : &proto->classMethods;
    } else {
        mlistp = isInstanceMethod ? &proto->optionalInstanceMethods
                                  : &proto->optionalClassMethods;
    }
    protocol_addMethod_nolock(*mlistp, name, types);
}
4960
4961
4962 /***********************************************************************
4963 * protocol_addProperty
4964 * Adds a property to a protocol. The protocol must be under construction.
4965 * Locking: acquires runtimeLock
4966 **********************************************************************/
// Appends one property to `plist`, creating the list if needed.
// Helper for protocol_addProperty(); the caller must hold runtimeLock
// (hence the _nolock suffix).
static void
protocol_addProperty_nolock(property_list_t *&plist, const char *name,
                            const objc_property_attribute_t *attrs,
                            unsigned int count)
{
    if (!plist) {
        // Fresh list with room for exactly one property entry.
        plist = (property_list_t *)calloc(property_list_t::byteSize(sizeof(property_t), 1), 1);
        plist->entsizeAndFlags = sizeof(property_t);
        plist->count = 1;
    } else {
        // Bump count first so byteSize() reflects the new length.
        // NOTE(review): realloc result is unchecked; crashes on OOM.
        plist->count++;
        plist = (property_list_t *)realloc(plist, plist->byteSize());
    }

    // Fill in the newly added last slot.
    property_t& prop = plist->get(plist->count - 1);
    prop.name = strdupIfMutable(name);
    prop.attributes = copyPropertyAttributeString(attrs, count);
}
4985
// Adds a property to a protocol under construction.
// Only required properties are recorded; the optional-property branches
// below are intentionally disabled, so optional properties are silently
// ignored.
void
protocol_addProperty(Protocol *proto_gen, const char *name,
                     const objc_property_attribute_t *attrs,
                     unsigned int count,
                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    protocol_t *proto = newprotocol(proto_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto) return;
    if (!name) return;

    mutex_locker_t lock(runtimeLock);

    // Properties may only be added while the protocol is under construction.
    if (proto->ISA() != cls) {
        _objc_inform("protocol_addProperty: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    if (isRequiredProperty && isInstanceProperty) {
        protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
    }
    else if (isRequiredProperty && !isInstanceProperty) {
        protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
    }
    // NOTE(review): optional properties fall through and are dropped.
    //else if (!isRequiredProperty && isInstanceProperty) {
    //    protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
    //}
    //else /* !isRequiredProperty && !isInstanceProperty) */ {
    //    protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
    //}
}
5021
// Counts all realized classes, copying up to bufferLen of them into
// buffer (if non-nil). Returns the total count, which may exceed what
// was stored. Caller must hold runtimeLock.
static int
objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
{
    int total = 0;   // every realized class seen
    int stored = 0;  // classes actually written to buffer

    foreach_realized_class([&](Class cls) {
        total++;
        if (buffer && stored < bufferLen) {
            buffer[stored++] = cls;
        }
        return true;
    });

    return total;
}
5045
// Returns a nil-terminated, malloc'd array of all realized classes,
// or nil if there are none. Caller must hold runtimeLock, which keeps
// the class set stable between the two passes below.
static Class *
objc_copyRealizedClassList_nolock(unsigned int *outCount)
{
    Class *result = nil;
    unsigned int count = 0;

    // First pass: count the realized classes.
    foreach_realized_class([&count](Class cls) {
        count++;
        return true;
    });

    if (count > 0) {
        unsigned int c = 0;

        // Second pass: collect them. +1 for the nil terminator.
        result = (Class *)malloc((1+count) * sizeof(Class));
        foreach_realized_class([=, &c](Class cls) {
            result[c++] = cls;
            return true;
        });
        result[c] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
5071
/***********************************************************************
* objc_getClassList
* Returns pointers to all classes.
* This requires all classes be realized, which is regretfully non-lazy.
* Locking: acquires runtimeLock
**********************************************************************/
int
objc_getClassList(Class *buffer, int bufferLen)
{
    mutex_locker_t lock(runtimeLock);

    // Force-realize everything so the realized-class walk sees all classes.
    realizeAllClasses();

    return objc_getRealizedClassList_nolock(buffer, bufferLen);
}
5087
/***********************************************************************
* objc_copyRealizedClassList
* Returns pointers to currently-realized classes only.
* Unlike objc_copyClassList() this does NOT force realization of
* unrealized classes.
*
* outCount may be nil. *outCount is the number of classes returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks runtimeLock
**********************************************************************/
Class *
objc_copyRealizedClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    return objc_copyRealizedClassList_nolock(outCount);
}
5104
5105
/***********************************************************************
* objc_copyClassList
* Returns pointers to all classes.
* This requires all classes be realized, which is regretfully non-lazy.
*
* outCount may be nil. *outCount is the number of classes returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks runtimeLock
**********************************************************************/
Class *
objc_copyClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    // Force-realize everything first; the _nolock helper only walks
    // realized classes.
    realizeAllClasses();

    return objc_copyRealizedClassList_nolock(outCount);
}
5125
/***********************************************************************
* class_copyImpCache
* Returns the current content of the Class IMP Cache
*
* outCount may be nil. *outCount is the number of entries returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks cacheUpdateLock
**********************************************************************/
objc_imp_cache_entry *
class_copyImpCache(Class cls, int *outCount)
{
    objc_imp_cache_entry *buffer = nullptr;

    // The cache is guarded by cacheUpdateLock or runtimeLock depending on
    // build configuration; take whichever one protects it here.
#if CONFIG_USE_CACHE_LOCK
    mutex_locker_t lock(cacheUpdateLock);
#else
    mutex_locker_t lock(runtimeLock);
#endif

    cache_t &cache = cls->cache;
    int count = (int)cache.occupied();

    if (count) {
        // calloc of count+1 zeroed entries gives the trailing
        // nil/zero terminator for free.
        buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
        cache.copyCacheNolock(buffer, count);
    }

    if (outCount) *outCount = count;
    return buffer;
}
5157
5158
/***********************************************************************
* objc_copyProtocolList
* Returns pointers to all protocols: both dynamically-registered ones
* and those from pre-optimized (shared cache) images.
* Returned array is nil-terminated and must be freed with free().
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
objc_copyProtocolList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    NXMapTable *protocol_map = protocols();

    // Find all the protocols from the pre-optimized images. These protocols
    // won't be in the protocol map.
    objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
    {
        header_info *hi;
        for (hi = FirstHeader; hi; hi = hi->getNext()) {
            if (!hi->hasPreoptimizedProtocols())
                continue;

            size_t count, i;
            const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
            for (i = 0; i < count; i++) {
                const protocol_t* protocol = protolist[i];

                // Skip protocols we have in the run time map. These likely
                // correspond to protocols added dynamically which have the same
                // name as a protocol found later in a dlopen'ed shared cache image.
                if (NXMapGet(protocol_map, protocol->mangledName) != nil)
                    continue;

                // The protocols in the shared cache protolist point to their
                // original on-disk object, not the optimized one. We can use the name
                // to find the optimized one.
                Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
                preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
            }
        }
    }

    unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();
    if (count == 0) {
        if (outCount) *outCount = 0;
        return nil;
    }

    // +1 for the nil terminator.
    Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));

    // Copy the dynamically-registered protocols first.
    unsigned int i = 0;
    Protocol *proto;
    const char *name;
    NXMapState state = NXInitMapState(protocol_map);
    while (NXNextMapState(protocol_map, &state,
                          (const void **)&name, (const void **)&proto))
    {
        result[i++] = proto;
    }

    // Add any protocols found in the pre-optimized table
    for (auto it : preoptimizedProtocols) {
        result[i++] = it.second;
    }

    result[i++] = nil;
    ASSERT(i == count+1);

    if (outCount) *outCount = count;
    return result;
}
5229
5230
/***********************************************************************
* objc_getProtocol
* Get a protocol by name, or return nil
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol *objc_getProtocol(const char *name)
{
    mutex_locker_t lock(runtimeLock);
    return getProtocol(name);
}
5241
5242
/***********************************************************************
* class_copyMethodList
* Returns a nil-terminated, malloc'd array of the class's own methods
* (not its superclasses'), or nil if the class has none.
* Locking: read-locks runtimeLock
**********************************************************************/
Method *
class_copyMethodList(Class cls, unsigned int *outCount)
{
    unsigned int count = 0;
    Method *result = nil;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);
    // Snapshot the method list array while holding the lock.
    const auto methods = cls->data()->methods();

    ASSERT(cls->isRealized());

    count = methods.count();

    if (count > 0) {
        // +1 for the nil terminator.
        result = (Method *)malloc((count + 1) * sizeof(Method));

        // Reuse `count` as the write index, then as the final count.
        count = 0;
        for (auto& meth : methods) {
            result[count++] = &meth;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
5279
5280
/***********************************************************************
* class_copyIvarList
* Returns a nil-terminated, malloc'd array of the class's own ivars,
* or nil if it has none. Anonymous bitfields are omitted, so the
* returned count may be smaller than ivars->count.
* Locking: read-locks runtimeLock
**********************************************************************/
Ivar *
class_copyIvarList(Class cls, unsigned int *outCount)
{
    const ivar_list_t *ivars;
    Ivar *result = nil;
    unsigned int count = 0;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    ASSERT(cls->isRealized());

    if ((ivars = cls->data()->ro()->ivars) && ivars->count) {
        // Allocation is sized for the full list even though anonymous
        // bitfields below may leave some slots unused.
        result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));

        for (auto& ivar : *ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield
            result[count++] = &ivar;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
5315
5316
/***********************************************************************
* class_copyPropertyList. Returns a heap block containing the
* properties declared in the class, or nil if the class
* declares no properties. Caller must free the block.
* Does not copy any superclass's properties.
* Locking: read-locks runtimeLock
**********************************************************************/
objc_property_t *
class_copyPropertyList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    auto rw = cls->data();

    property_t **result = nil;
    auto const properties = rw->properties();
    unsigned int count = properties.count();
    if (count > 0) {
        // +1 for the nil terminator.
        result = (property_t **)malloc((count + 1) * sizeof(property_t *));

        // Reuse `count` as the write index, then as the final count.
        count = 0;
        for (auto& prop : properties) {
            result[count++] = &prop;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return (objc_property_t *)result;
}
5355
5356
/***********************************************************************
* objc_class::getLoadMethod
* Returns the IMP of this class's own +load method, or nil if none.
* Only the metaclass's base (non-category) method list is searched;
* category +load methods are handled via _category_getLoadMethod.
* Called only from add_class_to_loadable_list.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
IMP
objc_class::getLoadMethod()
{
    runtimeLock.assertLocked();

    const method_list_t *mlist;

    // +load is a class method, so we must be the non-meta class and
    // search our metaclass's methods.
    ASSERT(isRealized());
    ASSERT(ISA()->isRealized());
    ASSERT(!isMetaClass());
    ASSERT(ISA()->isMetaClass());

    mlist = ISA()->data()->ro()->baseMethods();
    if (mlist) {
        for (const auto& meth : *mlist) {
            const char *name = sel_cname(meth.name());
            if (0 == strcmp(name, "load")) {
                return meth.imp(false);
            }
        }
    }

    return nil;
}
5387
5388
/***********************************************************************
* _category_getName
* Returns a category's name.
* Locking: none
**********************************************************************/
const char *
_category_getName(Category cat)
{
    return cat->name;
}
5399
5400
/***********************************************************************
* _category_getClassName
* Returns a category's class's name
* Called only from add_category_to_loadable_list and
* remove_category_from_loadable_list for logging purposes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
const char *
_category_getClassName(Category cat)
{
    runtimeLock.assertLocked();
    // remapClass resolves class remapping before asking for the name.
    return remapClass(cat->cls)->nameForLogging();
}
5414
5415
/***********************************************************************
* _category_getClass
* Returns a category's class
* Called only by call_category_loads.
* Locking: read-locks runtimeLock
**********************************************************************/
Class
_category_getClass(Category cat)
{
    mutex_locker_t lock(runtimeLock);
    Class result = remapClass(cat->cls);
    ASSERT(result->isRealized());  // ok for call_category_loads' usage
    return result;
}
5430
5431
/***********************************************************************
* _category_getLoadMethod
* Returns the IMP of the category's +load method, or nil if none.
* Called only from add_category_to_loadable_list
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
IMP
_category_getLoadMethod(Category cat)
{
    runtimeLock.assertLocked();

    // +load is a class method, so search the category's class methods.
    const method_list_t *mlist = cat->classMethods;
    if (!mlist) return nil;

    for (const auto& meth : *mlist) {
        if (strcmp(sel_cname(meth.name()), "load") == 0) {
            return meth.imp(false);
        }
    }

    return nil;
}
5457
5458
/***********************************************************************
* category_t::propertiesForMeta
* Return a category's instance or class properties.
* hi is the image containing the category.
**********************************************************************/
property_list_t *
category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
{
    // Non-meta: instance properties are always present in the layout.
    if (!isMeta) return instanceProperties;

    // Meta: the _classProperties field only exists in images whose
    // header advertises category class-property support.
    if (hi->info()->hasCategoryClassProperties()) return _classProperties;
    return nil;
}
5471
5472
/***********************************************************************
* class_copyProtocolList
* Returns a nil-terminated, malloc'd array of the protocols the class
* itself adopts (not inherited ones), or nil if there are none.
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
class_copyProtocolList(Class cls, unsigned int *outCount)
{
    unsigned int count = 0;
    Protocol **result = nil;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);
    // Snapshot the protocol list array while holding the lock.
    const auto protocols = cls->data()->protocols();

    checkIsKnownClass(cls);

    ASSERT(cls->isRealized());

    count = protocols.count();

    if (count > 0) {
        // +1 for the nil terminator.
        result = (Protocol **)malloc((count+1) * sizeof(Protocol *));

        // Reuse `count` as the write index, then as the final count.
        count = 0;
        for (const auto& proto : protocols) {
            // remapProtocol resolves shared-cache/duplicate remapping.
            result[count++] = (Protocol *)remapProtocol(proto);
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
5511
5512
/***********************************************************************
* objc_copyImageNames
* Copies names of loaded images with ObjC contents.
* Returns a nil-terminated, malloc'd array (free() it), or nil if there
* are no images. The name strings themselves are NOT copied.
* Locking: acquires runtimeLock
**********************************************************************/
const char **objc_copyImageNames(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    // Count the headers so we can size the array in one allocation.
    int HeaderCount = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        HeaderCount++;
    }

#if TARGET_OS_WIN32
    const TCHAR **names = (const TCHAR **)
        malloc((HeaderCount+1) * sizeof(TCHAR *));
#else
    const char **names = (const char **)
        malloc((HeaderCount+1) * sizeof(char *));
#endif

    // Headers without a name are skipped, so count may be < HeaderCount.
    unsigned int count = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        if (hi->moduleName) {
            names[count++] = hi->moduleName;
        }
#else
        const char *fname = hi->fname();
        if (fname) {
            names[count++] = fname;
        }
#endif
    }
    names[count] = nil;

    if (count == 0) {
        // Return nil instead of empty list if there are no images
        free((void *)names);
        names = nil;
    }

    if (outCount) *outCount = count;
    return names;
}
5560
5561
/***********************************************************************
* copyClassNamesForImage_nolock
* Copies class names from the given image.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
* Returns a nil-terminated, malloc'd array; the strings are not copied.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
const char **
copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
{
    runtimeLock.assertLocked();
    ASSERT(hi);

    size_t count;
    classref_t const *classlist = _getObjc2ClassList(hi, &count);
    // Sized for the full list; weak-import gaps leave trailing slots unused.
    const char **names = (const char **)
        malloc((count+1) * sizeof(const char *));

    // `shift` counts skipped (nil-remapped) classes so surviving names
    // stay densely packed at the front of the array.
    size_t shift = 0;
    for (size_t i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) {
            names[i-shift] = cls->demangledName(/* needs lock */false);
        } else {
            shift++;  // ignored weak-linked class
        }
    }
    count -= shift;
    names[count] = nil;

    if (outCount) *outCount = (unsigned int)count;
    return names;
}
5596
// Like copyClassNamesForImage_nolock but returns the Class pointers
// themselves instead of their names. Missing weak-import classes are
// omitted. Returns a nil-terminated, malloc'd array.
// Locking: runtimeLock must be held by the caller.
Class *
copyClassesForImage_nolock(header_info *hi, unsigned int *outCount)
{
    runtimeLock.assertLocked();
    ASSERT(hi);

    size_t count;
    classref_t const *classlist = _getObjc2ClassList(hi, &count);
    // Sized for the full list; weak-import gaps leave trailing slots unused.
    Class *classes = (Class *)
        malloc((count+1) * sizeof(Class));

    // `shift` counts skipped (nil-remapped) classes so surviving entries
    // stay densely packed at the front of the array.
    size_t shift = 0;
    for (size_t i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) {
            classes[i-shift] = cls;
        } else {
            shift++;  // ignored weak-linked class
        }
    }
    count -= shift;
    classes[count] = nil;

    if (outCount) *outCount = (unsigned int)count;
    return classes;
}
5623
5624
/***********************************************************************
* objc_copyClassNamesForImage
* Copies class names from the named image.
* The image name must be identical to dladdr's dli_fname value.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
*
* Locking: acquires runtimeLock
**********************************************************************/
const char **
objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
{
    if (!image) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        // NOTE(review): `image` is declared char* but compared as a wide
        // string here — callers on Windows apparently pass TCHAR data.
        // Confirm before touching this path.
        if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
#else
        if (0 == strcmp(image, hi->fname())) break;
#endif
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}
5661
// Like objc_copyClassNamesForImage but returns Class pointers.
// The image name must be identical to dladdr's dli_fname value.
// NOTE(review): unlike the names variant this has no TARGET_OS_WIN32
// branch — confirm whether that's intentional.
// Locking: acquires runtimeLock.
Class *
objc_copyClassesForImage(const char *image, unsigned int *outCount)
{
    if (!image) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        if (0 == strcmp(image, hi->fname())) break;
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassesForImage_nolock(hi, outCount);
}
5685
/***********************************************************************
* objc_copyClassNamesForImageHeader
* Copies class names from the given image, identified by its mach
* header rather than its file name.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
*
* Locking: acquires runtimeLock
**********************************************************************/
const char **
objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
{
    if (!mh) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image by header pointer identity.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        if (hi->mhdr() == (const headerType *)mh) break;
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}
5717
5718
/***********************************************************************
* saveTemporaryString
* Save a string in a thread-local FIFO buffer.
* This is suitable for temporary strings generated for logging purposes.
* Takes ownership of `str` (malloc'd); it is freed once
* countof(printableNames) newer strings have been saved by this thread.
**********************************************************************/
static void
saveTemporaryString(char *str)
{
    // Fixed-size FIFO. We free the first string, shift
    // the rest, and add the new string to the end.
    _objc_pthread_data *data = _objc_fetch_pthread_data(true);
    if (data->printableNames[0]) {
        free(data->printableNames[0]);
    }
    int last = countof(data->printableNames) - 1;
    for (int i = 0; i < last; i++) {
        data->printableNames[i] = data->printableNames[i+1];
    }
    data->printableNames[last] = str;
}
5739
5740
/***********************************************************************
* objc_class::nameForLogging
* Returns the class's name, suitable for display.
* The returned memory is TEMPORARY. Print it or copy it immediately.
* Locking: none
**********************************************************************/
const char *
objc_class::nameForLogging()
{
    // Handle the easy case directly.
    if (isRealized() || isFuture()) {
        if (!isAnySwift()) {
            return data()->ro()->getName();
        }
        // Swift classes: use a cached demangled name if one exists.
        auto rwe = data()->ext();
        if (rwe && rwe->demangledName) {
            return rwe->demangledName;
        }
    }

    // Slow path: build a fresh string and park it in the thread-local
    // FIFO so the caller doesn't have to free it.
    char *result;

    if (isStubClass()) {
        asprintf(&result, "<stub class %p>", this);
    } else if (const char *name = nonlazyMangledName()) {
        char *de = copySwiftV1DemangledName(name);
        if (de) result = de;
        else result = strdup(name);
    } else {
        asprintf(&result, "<lazily named class %p>", this);
    }
    saveTemporaryString(result);
    return result;
}
5775
5776
/***********************************************************************
* objc_class::demangledName
* If realize=false, the class must already be realized or future.
* Locking: runtimeLock may or may not be held by the caller.
**********************************************************************/
// Side cache for demangled names of classes that are not yet realized
// (there is nowhere in the class to store the name yet).
mutex_t DemangleCacheLock;
static objc::DenseSet<const char *> *DemangleCache;
const char *
objc_class::demangledName(bool needsLock)
{
    if (!needsLock) {
        runtimeLock.assertLocked();
    }

    // Return previously demangled name if available.
    if (isRealized() || isFuture()) {
        // Swift metaclasses don't have the is-Swift bit.
        // We can't take this shortcut for them.
        if (isFuture() || (!isMetaClass() && !isAnySwift())) {
            return data()->ro()->getName();
        }
        auto rwe = data()->ext();
        if (rwe && rwe->demangledName) {
            return rwe->demangledName;
        }
    }

    // Try demangling the mangled name.
    const char *mangled = mangledName();
    char *de = copySwiftV1DemangledName(mangled);
    class_rw_ext_t *rwe;

    if (isRealized() || isFuture()) {
        // extAllocIfNeeded requires runtimeLock; take it only if the
        // caller doesn't already hold it. Note the lock is released
        // again before the store below.
        if (needsLock) {
            mutex_locker_t lock(runtimeLock);
            rwe = data()->extAllocIfNeeded();
        } else {
            rwe = data()->extAllocIfNeeded();
        }
        // Class is already realized or future.
        // Save demangling result in rw data.
        // We may not own runtimeLock so use an atomic operation instead.
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
                                               (void**)&rwe->demangledName))
        {
            // Another thread won the race; drop our copy.
            if (de) free(de);
        }
        return rwe->demangledName;
    }

    // Class is not yet realized.
    if (!de) {
        // Name is not mangled. Return it without caching.
        return mangled;
    }

    // Class is not yet realized and name is mangled.
    // Allocate the name but don't save it in the class.
    // Save the name in a side cache instead to prevent leaks.
    // When the class is actually realized we may allocate a second
    // copy of the name, but we don't care.
    // (Previously we would try to realize the class now and save the
    // name there, but realization is more complicated for Swift classes.)

    // Only objc_copyClassNamesForImage() should get here.
    // fixme lldb's calls to class_getName() can also get here when
    // interrogating the dyld shared cache. (rdar://27258517)
    // fixme ASSERT(realize);

    const char *cached;
    {
        mutex_locker_t lock(DemangleCacheLock);
        if (!DemangleCache) {
            DemangleCache = new objc::DenseSet<const char *>{};
        }
        cached = *DemangleCache->insert(de).first;
    }
    // If an equal name was already cached, free our duplicate.
    if (cached != de) free(de);
    return cached;
}
5857
5858
/***********************************************************************
* class_getName
* Returns the class's (demangled) name, or "nil" for a nil class.
* Locking: may acquire DemangleCacheLock
**********************************************************************/
const char *class_getName(Class cls)
{
    if (!cls) return "nil";
    // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
    // ASSERT(cls->isRealized() || cls->isFuture());
    return cls->demangledName(/* needs lock */true);
}
5871
5872 /***********************************************************************
5873 * objc_debug_class_getNameRaw
5874 * fixme
5875 * Locking: none
5876 **********************************************************************/
const char *objc_debug_class_getNameRaw(Class cls)
{
    // Debug/introspection entry point: return the mangled (raw) name
    // without any demangling or caching.
    return cls ? cls->mangledName() : "nil";
}
5882
5883
5884 /***********************************************************************
5885 * class_getVersion
5886 * fixme
5887 * Locking: none
5888 **********************************************************************/
int
class_getVersion(Class cls)
{
    if (cls == nil) return 0;
    ASSERT(cls->isRealized());

    // The version lives in the lazily allocated rw extension.
    if (auto rwe = cls->data()->ext()) {
        return rwe->version;
    }

    // No extension yet: report the historical defaults
    // (7 for metaclasses, 0 for classes).
    return cls->isMetaClass() ? 7 : 0;
}
5900
5901
5902 /***********************************************************************
5903 * class_setVersion
5904 * fixme
5905 * Locking: none
5906 **********************************************************************/
void
class_setVersion(Class cls, int version)
{
    if (cls == nil) return;
    ASSERT(cls->isRealized());

    // The version is stored in the rw extension; allocate it under the
    // runtime lock if this class does not have one yet.
    auto rwe = cls->data()->ext();
    if (rwe == nil) {
        mutex_locker_t lock(runtimeLock);
        rwe = cls->data()->extAllocIfNeeded();
    }

    rwe->version = version;
}
5920
5921 /***********************************************************************
5922 * search_method_list_inline
5923 **********************************************************************/
// Binary search for `key` in a sorted method list.
// `getName` extracts the SEL from a method_t entry; it is a template
// parameter so the same search body works for big-format, small-format,
// and shared-cache small-format lists.
template<class getNameFunc>
ALWAYS_INLINE static method_t *
findMethodInSortedMethodList(SEL key, const method_list_t *list, const getNameFunc &getName)
{
    ASSERT(list);

    auto first = list->begin();
    auto base = first;
    decltype(first) probe;

    // Selectors are compared as raw pointer values; the list is sorted
    // by that ordering.
    uintptr_t keyValue = (uintptr_t)key;
    uint32_t count;

    // Binary search over the window [base, base+count).
    for (count = list->count; count != 0; count >>= 1) {
        probe = base + (count >> 1);

        uintptr_t probeValue = (uintptr_t)getName(probe);

        if (keyValue == probeValue) {
            // `probe` is a match.
            // Rewind looking for the *first* occurrence of this value.
            // This is required for correct category overrides.
            while (probe > first && keyValue == (uintptr_t)getName((probe - 1))) {
                probe--;
            }
            return &*probe;
        }

        if (keyValue > probeValue) {
            // Search the upper half. The extra count-- excludes the
            // probe element itself before the halving in the loop header.
            base = probe + 1;
            count--;
        }
    }

    return nil;
}
5960
// Dispatch the sorted-list binary search with the SEL accessor matching
// this list's storage format (big, small-in-shared-cache, or small).
ALWAYS_INLINE static method_t *
findMethodInSortedMethodList(SEL key, const method_list_t *list)
{
    if (!list->isSmallList()) {
        return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.big().name; });
    }
    if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) {
        return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); });
    }
    return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); });
}
5974
// Linear scan of `list` for `sel`, using `getName` to read each entry's
// selector in the list's storage format.
template<class getNameFunc>
ALWAYS_INLINE static method_t *
findMethodInUnsortedMethodList(SEL sel, const method_list_t *list, const getNameFunc &getName)
{
    for (auto it = list->begin(), e = list->end(); it != e; ++it) {
        method_t &candidate = *it;
        if (getName(candidate) == sel) {
            return &candidate;
        }
    }
    return nil;
}
5984
// Dispatch the linear search with the SEL accessor matching this
// list's storage format (big, small-in-shared-cache, or small).
ALWAYS_INLINE static method_t *
findMethodInUnsortedMethodList(SEL key, const method_list_t *list)
{
    if (!list->isSmallList()) {
        return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.big().name; });
    }
    if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) {
        return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); });
    }
    return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); });
}
5998
// Look up `sel` in a single method list. Uses binary search when the
// list is sorted (fixed up) and has the entry size the sorted searcher
// expects; otherwise falls back to a linear scan.
ALWAYS_INLINE static method_t *
search_method_list_inline(const method_list_t *mlist, SEL sel)
{
    bool sorted = mlist->isFixedUp() && mlist->isExpectedSize();

    if (fastpath(sorted)) {
        return findMethodInSortedMethodList(sel, mlist);
    }

    // Linear search of unsorted method list.
    if (method_t *found = findMethodInUnsortedMethodList(sel, mlist)) {
        return found;
    }

#if DEBUG
    // sanity-check negative results
    if (mlist->isFixedUp()) {
        for (auto& meth : *mlist) {
            if (meth.name() == sel) {
                _objc_fatal("linear search worked when binary search did not");
            }
        }
    }
#endif

    return nil;
}
6026
// Out-of-line wrapper around search_method_list_inline for callers that
// should not pay the code-size cost of inlining the search.
NEVER_INLINE static method_t *
search_method_list(const method_list_t *list, SEL name)
{
    method_t *result = search_method_list_inline(list, name);
    return result;
}
6032
6033 /***********************************************************************
6034 * method_lists_contains_any
6035 **********************************************************************/
// Returns true if any method list in [mlists, end) contains any of the
// `selcount` selectors in `sels`.
// NOTE(review): this checks entsize() against sizeof(method_t::big)
// directly instead of calling isExpectedSize(), so small-format lists
// always take the linear path here — presumably intentional; confirm
// against method_list_t before unifying with search_method_list_inline.
template<typename T>
static NEVER_INLINE bool
method_lists_contains_any(T *mlists, T *end,
                          SEL sels[], size_t selcount)
{
    while (mlists < end) {
        const method_list_t *mlist = *mlists++;
        int methodListIsFixedUp = mlist->isFixedUp();
        int methodListHasExpectedSize = mlist->entsize() == sizeof(struct method_t::big);

        if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
            // Sorted big-format list: binary search per selector.
            for (size_t i = 0; i < selcount; i++) {
                if (findMethodInSortedMethodList(sels[i], mlist)) {
                    return true;
                }
            }
        } else {
            // Unsorted or non-big-format list: linear search per selector.
            for (size_t i = 0; i < selcount; i++) {
                if (findMethodInUnsortedMethodList(sels[i], mlist)) {
                    return true;
                }
            }
        }
    }
    return false;
}
6062
6063
6064 /***********************************************************************
6065 * getMethodNoSuper_nolock
6066 * fixme
6067 * Locking: runtimeLock must be read- or write-locked by the caller
6068 **********************************************************************/
// Search only this class's own method lists for `sel` (no superclass
// walk). Returns the first match across the class's list array, which
// gives category methods precedence per the list ordering.
static method_t *
getMethodNoSuper_nolock(Class cls, SEL sel)
{
    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    // fixme nil cls?
    // fixme nil sel?

    auto const methods = cls->data()->methods();
    for (auto mlists = methods.beginLists(),
              end = methods.endLists();
         mlists != end;
         ++mlists)
    {
        // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
        // caller of search_method_list, inlining it turns
        // getMethodNoSuper_nolock into a frame-less function and eliminates
        // any store from this codepath.
        method_t *m = search_method_list_inline(*mlists, sel);
        if (m) return m;
    }

    return nil;
}
6094
6095
6096 /***********************************************************************
6097 * getMethod_nolock
6098 * fixme
6099 * Locking: runtimeLock must be read- or write-locked by the caller
6100 **********************************************************************/
// Search `cls` and its superclasses for `sel`.
// Locking: runtimeLock must be held by the caller.
static method_t *
getMethod_nolock(Class cls, SEL sel)
{
    runtimeLock.assertLocked();

    // fixme nil cls?
    // fixme nil sel?

    ASSERT(cls->isRealized());

    // Walk up the superclass chain, checking each class's own method
    // lists until a match is found or the chain ends.
    for (Class c = cls; c != nil; c = c->getSuperclass()) {
        if (method_t *m = getMethodNoSuper_nolock(c, sel)) {
            return m;
        }
    }

    return nil;
}
6119
6120
6121 /***********************************************************************
6122 * _class_getMethod
6123 * fixme
6124 * Locking: read-locks runtimeLock
6125 **********************************************************************/
// Locked wrapper: take runtimeLock, then search cls and its
// superclasses for sel.
static Method _class_getMethod(Class cls, SEL sel)
{
    mutex_locker_t lock(runtimeLock);
    Method result = getMethod_nolock(cls, sel);
    return result;
}
6131
6132
6133 /***********************************************************************
6134 * class_getInstanceMethod. Return the instance method for the
6135 * specified class and selector.
6136 **********************************************************************/
Method class_getInstanceMethod(Class cls, SEL sel)
{
    if (!cls || !sel) return nil;

    // This deliberately avoids +initialize because it historically did so.

    // This implementation is a bit weird because it's the only place that
    // wants a Method instead of an IMP.

#warning fixme build and search caches

    // Search method lists, try method resolver, etc.
    // The returned IMP is discarded; this call is made for its side
    // effects (realizing the class, and giving the method resolver a
    // chance via LOOKUP_RESOLVER before the Method lookup below).
    lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);

#warning fixme build and search caches

    return _class_getMethod(cls, sel);
}
6155
6156
6157 /***********************************************************************
6158 * resolveClassMethod
6159 * Call +resolveClassMethod, looking for a method to be added to class cls.
6160 * cls should be a metaclass.
6161 * Does not check if the method already exists.
6162 **********************************************************************/
static void resolveClassMethod(id inst, SEL sel, Class cls)
{
    runtimeLock.assertUnlocked();
    ASSERT(cls->isRealized());
    ASSERT(cls->isMetaClass());

    // Bail out quickly if +resolveClassMethod: is not implemented.
    if (!lookUpImpOrNilTryCache(inst, @selector(resolveClassMethod:), cls)) {
        // Resolver not implemented.
        return;
    }

    // +resolveClassMethod: must be sent to the non-meta class, looked
    // up under the runtime lock.
    Class nonmeta;
    {
        mutex_locker_t lock(runtimeLock);
        nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
        // +initialize path should have realized nonmeta already
        if (!nonmeta->isRealized()) {
            _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
                        nonmeta->nameForLogging(), nonmeta);
        }
    }
    // Cast objc_msgSend to the resolver's concrete signature.
    BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
    bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);

    // Cache the result (good or bad) so the resolver doesn't fire next time.
    // +resolveClassMethod adds to self->ISA() a.k.a. cls
    IMP imp = lookUpImpOrNilTryCache(inst, sel, cls);

    if (resolved && PrintResolving) {
        if (imp) {
            _objc_inform("RESOLVE: method %c[%s %s] "
                         "dynamically resolved to %p",
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel), imp);
        }
        else {
            // Method resolver didn't add anything?
            _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
                         ", but no new implementation of %c[%s %s] was found",
                         cls->nameForLogging(), sel_getName(sel),
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel));
        }
    }
}
6208
6209
6210 /***********************************************************************
6211 * resolveInstanceMethod
6212 * Call +resolveInstanceMethod, looking for a method to be added to class cls.
6213 * cls may be a metaclass or a non-meta class.
6214 * Does not check if the method already exists.
6215 **********************************************************************/
static void resolveInstanceMethod(id inst, SEL sel, Class cls)
{
    runtimeLock.assertUnlocked();
    ASSERT(cls->isRealized());
    SEL resolve_sel = @selector(resolveInstanceMethod:);

    // Bail out quickly if +resolveInstanceMethod: is not implemented
    // (it is a class method, hence the lookup on cls->ISA()).
    if (!lookUpImpOrNilTryCache(cls, resolve_sel, cls->ISA(/*authenticated*/true))) {
        // Resolver not implemented.
        return;
    }

    // Cast objc_msgSend to the resolver's concrete signature.
    BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
    bool resolved = msg(cls, resolve_sel, sel);

    // Cache the result (good or bad) so the resolver doesn't fire next time.
    // +resolveInstanceMethod adds to self a.k.a. cls
    IMP imp = lookUpImpOrNilTryCache(inst, sel, cls);

    if (resolved && PrintResolving) {
        if (imp) {
            _objc_inform("RESOLVE: method %c[%s %s] "
                         "dynamically resolved to %p",
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel), imp);
        }
        else {
            // Method resolver didn't add anything?
            _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
                         ", but no new implementation of %c[%s %s] was found",
                         cls->nameForLogging(), sel_getName(sel),
                         cls->isMetaClass() ? '+' : '-',
                         cls->nameForLogging(), sel_getName(sel));
        }
    }
}
6251
6252
6253 /***********************************************************************
6254 * resolveMethod_locked
6255 * Call +resolveClassMethod or +resolveInstanceMethod.
6256 *
6257 * Called with the runtimeLock held to avoid pressure in the caller
 * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller
6259 **********************************************************************/
static NEVER_INLINE IMP
resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
{
    runtimeLock.assertLocked();
    ASSERT(cls->isRealized());

    // Drop the caller's lock before messaging the class: the resolver
    // runs arbitrary user code.
    runtimeLock.unlock();

    if (! cls->isMetaClass()) {
        // try [cls resolveInstanceMethod:sel]
        resolveInstanceMethod(inst, sel, cls);
    }
    else {
        // try [nonMetaClass resolveClassMethod:sel]
        // and [cls resolveInstanceMethod:sel]
        resolveClassMethod(inst, sel, cls);
        if (!lookUpImpOrNilTryCache(inst, sel, cls)) {
            resolveInstanceMethod(inst, sel, cls);
        }
    }

    // chances are that calling the resolver has populated the cache
    // so attempt using it
    return lookUpImpOrForwardTryCache(inst, sel, cls, behavior);
}
6285
6286
6287 /***********************************************************************
6288 * log_and_fill_cache
6289 * Log this method call. If the logger permits it, fill the method cache.
6290 * cls is the method whose cache should be filled.
6291 * implementer is the class that owns the implementation in question.
6292 **********************************************************************/
static void
log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
{
#if SUPPORT_MESSAGE_LOGGING
    if (slowpath(objcMsgLogEnabled && implementer)) {
        // The message logger may veto caching for this send.
        bool cacheIt = logMessageSend(implementer->isMetaClass(),
                                      cls->nameForLogging(),
                                      implementer->nameForLogging(),
                                      sel);
        if (!cacheIt) return;
    }
#endif
    // Fill cls's cache (cls may differ from implementer when the IMP
    // was found in a superclass).
    cls->cache.insert(sel, imp, receiver);
}
6307
6308
6309 /***********************************************************************
6310 * realizeAndInitializeIfNeeded_locked
6311 * Realize the given class if not already realized, and initialize it if
6312 * not already initialized.
6313 * inst is an instance of cls or a subclass, or nil if none is known.
6314 * cls is the class to initialize and realize.
 * initialize is true to initialize the class, false to skip initialization.
6316 **********************************************************************/
// Realize cls if needed, then (optionally) +initialize it. Both helper
// calls may drop and retake runtimeLock and may return a different
// (remapped) class pointer, which is why cls is reassigned.
static Class
realizeAndInitializeIfNeeded_locked(id inst, Class cls, bool initialize)
{
    runtimeLock.assertLocked();
    if (slowpath(!cls->isRealized())) {
        cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
        // runtimeLock may have been dropped but is now locked again
    }

    if (slowpath(initialize && !cls->isInitialized())) {
        cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
        // runtimeLock may have been dropped but is now locked again

        // If sel == initialize, class_initialize will send +initialize and
        // then the messenger will send +initialize again after this
        // procedure finishes. Of course, if this is not being called
        // from the messenger then it won't happen. 2778172
    }
    return cls;
}
6337
6338 /***********************************************************************
6339 * lookUpImpOrForward / lookUpImpOrForwardTryCache / lookUpImpOrNilTryCache
6340 * The standard IMP lookup.
6341 *
6342 * The TryCache variant attempts a fast-path lookup in the IMP Cache.
6343 * Most callers should use lookUpImpOrForwardTryCache with LOOKUP_INITIALIZE
6344 *
6345 * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
6346 * With LOOKUP_NIL: returns nil on negative cache hits
6347 *
6348 * inst is an instance of cls or a subclass thereof, or nil if none is known.
6349 * If cls is an un-initialized metaclass then a non-nil inst is faster.
6350 * May return _objc_msgForward_impcache. IMPs destined for external use
6351 * must be converted to _objc_msgForward or _objc_msgForward_stret.
6352 * If you don't want forwarding at all, use LOOKUP_NIL.
6353 **********************************************************************/
ALWAYS_INLINE
static IMP _lookUpImpTryCache(id inst, SEL sel, Class cls, int behavior)
{
    runtimeLock.assertUnlocked();

    if (slowpath(!cls->isInitialized())) {
        // see comment in lookUpImpOrForward
        return lookUpImpOrForward(inst, sel, cls, behavior);
    }

    // Fast path: probe this class's cache without taking any lock.
    IMP imp = cache_getImp(cls, sel);
    if (imp != NULL) goto done;
#if CONFIG_USE_PREOPT_CACHES
    if (fastpath(cls->cache.isConstantOptimizedCache(/* strict */true))) {
        // Constant cache miss: also probe the fallback class's cache.
        imp = cache_getImp(cls->cache.preoptFallbackClass(), sel);
    }
#endif
    if (slowpath(imp == NULL)) {
        // All caches missed: take the slow path (may fill the cache).
        return lookUpImpOrForward(inst, sel, cls, behavior);
    }

done:
    // With LOOKUP_NIL, report forwarding entries as nil.
    if ((behavior & LOOKUP_NIL) && imp == (IMP)_objc_msgForward_impcache) {
        return nil;
    }
    return imp;
}
6381
IMP lookUpImpOrForwardTryCache(id inst, SEL sel, Class cls, int behavior)
{
    // Cache-first lookup; may return a forwarding IMP on a total miss.
    IMP imp = _lookUpImpTryCache(inst, sel, cls, behavior);
    return imp;
}
6386
IMP lookUpImpOrNilTryCache(id inst, SEL sel, Class cls, int behavior)
{
    // Cache-first lookup that reports misses as nil instead of the
    // forwarding IMP (forces LOOKUP_NIL).
    int flags = behavior | LOOKUP_NIL;
    return _lookUpImpTryCache(inst, sel, cls, flags);
}
6391
NEVER_INLINE
IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
{
    const IMP forward_imp = (IMP)_objc_msgForward_impcache;
    IMP imp = nil;
    Class curClass;

    runtimeLock.assertUnlocked();

    if (slowpath(!cls->isInitialized())) {
        // The first message sent to a class is often +new or +alloc, or +self
        // which goes through objc_opt_* or various optimized entry points.
        //
        // However, the class isn't realized/initialized yet at this point,
        // and the optimized entry points fall down through objc_msgSend,
        // which ends up here.
        //
        // We really want to avoid caching these, as it can cause IMP caches
        // to be made with a single entry forever.
        //
        // Note that this check is racy as several threads might try to
        // message a given class for the first time at the same time,
        // in which case we might cache anyway.
        behavior |= LOOKUP_NOCACHE;
    }

    // runtimeLock is held during isRealized and isInitialized checking
    // to prevent races against concurrent realization.

    // runtimeLock is held during method search to make
    // method-lookup + cache-fill atomic with respect to method addition.
    // Otherwise, a category could be added but ignored indefinitely because
    // the cache was re-filled with the old value after the cache flush on
    // behalf of the category.

    runtimeLock.lock();

    // We don't want people to be able to craft a binary blob that looks like
    // a class but really isn't one and do a CFI attack.
    //
    // To make these harder we want to make sure this is a class that was
    // either built into the binary or legitimately registered through
    // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
    checkIsKnownClass(cls);

    cls = realizeAndInitializeIfNeeded_locked(inst, cls, behavior & LOOKUP_INITIALIZE);
    // runtimeLock may have been dropped but is now locked again
    runtimeLock.assertLocked();
    curClass = cls;

    // The code used to lookup the class's cache again right after
    // we take the lock but for the vast majority of the cases
    // evidence shows this is a miss most of the time, hence a time loss.
    //
    // The only codepath calling into this without having performed some
    // kind of cache lookup is class_getInstanceMethod().

    for (unsigned attempts = unreasonableClassCount();;) {
        if (curClass->cache.isConstantOptimizedCache(/* strict */true)) {
#if CONFIG_USE_PREOPT_CACHES
            // Preoptimized cache: a miss means continuing the search at
            // the cache's designated fallback class, not the method lists.
            imp = cache_getImp(curClass, sel);
            if (imp) goto done_unlock;
            curClass = curClass->cache.preoptFallbackClass();
#endif
        } else {
            // curClass method list.
            Method meth = getMethodNoSuper_nolock(curClass, sel);
            if (meth) {
                imp = meth->imp(false);
                goto done;
            }

            if (slowpath((curClass = curClass->getSuperclass()) == nil)) {
                // No implementation found, and method resolver didn't help.
                // Use forwarding.
                imp = forward_imp;
                break;
            }
        }

        // Halt if there is a cycle in the superclass chain.
        if (slowpath(--attempts == 0)) {
            _objc_fatal("Memory corruption in class list.");
        }

        // Superclass cache.
        imp = cache_getImp(curClass, sel);
        if (slowpath(imp == forward_imp)) {
            // Found a forward:: entry in a superclass.
            // Stop searching, but don't cache yet; call method
            // resolver for this class first.
            break;
        }
        if (fastpath(imp)) {
            // Found the method in a superclass. Cache it in this class.
            goto done;
        }
    }

    // No implementation found. Try method resolver once.

    if (slowpath(behavior & LOOKUP_RESOLVER)) {
        // Clear LOOKUP_RESOLVER so the resolver runs at most once.
        behavior ^= LOOKUP_RESOLVER;
        return resolveMethod_locked(inst, sel, cls, behavior);
    }

 done:
    if (fastpath((behavior & LOOKUP_NOCACHE) == 0)) {
#if CONFIG_USE_PREOPT_CACHES
        // A constant cache can't be written to; fill the fallback
        // class's cache instead.
        while (cls->cache.isConstantOptimizedCache(/* strict */true)) {
            cls = cls->cache.preoptFallbackClass();
        }
#endif
        log_and_fill_cache(cls, imp, sel, inst, curClass);
    }
 done_unlock:
    runtimeLock.unlock();
    if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
        return nil;
    }
    return imp;
}
6514
6515 /***********************************************************************
6516 * lookupMethodInClassAndLoadCache.
6517 * Like lookUpImpOrForward, but does not search superclasses.
6518 * Caches and returns objc_msgForward if the method is not found in the class.
6519 **********************************************************************/
IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
{
    IMP imp;

    // fixme this is incomplete - no resolver, +initialize -
    // but it's only used for .cxx_construct/destruct so we don't care
    ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);

    // Search cache first.
    //
    // If the cache used for the lookup is preoptimized,
    // we ask for `_objc_msgForward_impcache` to be returned on cache misses,
    // so that there's no TOCTOU race between using `isConstantOptimizedCache`
    // and calling cache_getImp() when not under the runtime lock.
    //
    // For dynamic caches, a miss will return `nil`
    imp = cache_getImp(cls, sel, _objc_msgForward_impcache);

    if (slowpath(imp == nil)) {
        // Cache miss. Search method list.

        mutex_locker_t lock(runtimeLock);

        if (auto meth = getMethodNoSuper_nolock(cls, sel)) {
            // Hit in method list. Cache it.
            imp = meth->imp(false);
        } else {
            // Not implemented by this class: cache the forwarding IMP
            // so future lookups resolve without this slow path.
            imp = _objc_msgForward_impcache;
        }

        // Note, because we do not hold the runtime lock above
        // isConstantOptimizedCache might flip, so we need to double check
        if (!cls->cache.isConstantOptimizedCache(true /* strict */)) {
            cls->cache.insert(sel, imp, nil);
        }
    }

    return imp;
}
6559
6560
6561 /***********************************************************************
6562 * class_getProperty
6563 * fixme
6564 * Locking: read-locks runtimeLock
6565 **********************************************************************/
objc_property_t class_getProperty(Class cls, const char *name)
{
    if (!cls || !name) return nil;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    ASSERT(cls->isRealized());

    // Walk the class hierarchy; within each class, scan its property
    // lists for a name match.
    for (Class c = cls; c != nil; c = c->getSuperclass()) {
        for (auto& prop : c->data()->properties()) {
            if (strcmp(name, prop.name) == 0) {
                return (objc_property_t)&prop;
            }
        }
    }

    return nil;
}
6586
6587
6588 /***********************************************************************
6589 * Locking: fixme
6590 **********************************************************************/
6591
// Re-resolve a (possibly stale) class pointer by name, for debugger use.
// Returns Nil when the class has no usable name or is not registered.
Class gdb_class_getClass(Class cls)
{
    const char *className = cls->mangledName();
    // O(1) emptiness check instead of the previous strlen() walk.
    if (!className || *className == '\0') return Nil;
    return look_up_class(className, NO, NO);
}
6599
Class gdb_object_getClass(id obj)
{
    // nil objects have no class.
    if (obj == nil) return nil;

    // Resolve the isa through the by-name lookup so debuggers get a
    // canonical registered class pointer.
    Class isa = obj->getIsa();
    return gdb_class_getClass(isa);
}
6605
6606
6607 /***********************************************************************
6608 * Locking: write-locks runtimeLock
6609 **********************************************************************/
// Mark this class as +initialized. The flag itself lives on the
// metaclass; scanners and cache conversion run first, and the flag
// flip is deliberately last.
void
objc_class::setInitialized()
{
    Class metacls;
    Class cls;

    // Must be called on the non-meta class.
    ASSERT(!isMetaClass());

    cls = (Class)this;
    metacls = cls->ISA();

    mutex_locker_t lock(runtimeLock);

    // Special cases:
    // - NSObject AWZ class methods are default.
    // - NSObject RR class and instance methods are default.
    // - NSObject Core class and instance methods are default.
    // adjustCustomFlagsForMethodChange() also knows these special cases.
    // attachMethodLists() also knows these special cases.

    objc::AWZScanner::scanInitializedClass(cls, metacls);
    objc::RRScanner::scanInitializedClass(cls, metacls);
    objc::CoreScanner::scanInitializedClass(cls, metacls);

#if CONFIG_USE_PREOPT_CACHES
    cls->cache.maybeConvertToPreoptimized();
    metacls->cache.maybeConvertToPreoptimized();
#endif

    if (PrintInitializing) {
        _objc_inform("INITIALIZE: thread %p: setInitialized(%s)",
                     objc_thread_self(), cls->nameForLogging());
    }
    // Update the +initialize flags.
    // Do this last.
    metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
}
6647
6648
void
objc_class::printInstancesRequireRawIsa(bool inherited)
{
    ASSERT(PrintRawIsa);
    ASSERT(instancesRequireRawIsa());

    // Annotate metaclasses and inherited settings in the log line.
    const char *metaSuffix = isMetaClass() ? " (meta)" : "";
    const char *inheritedSuffix = inherited ? " (inherited)" : "";
    _objc_inform("RAW ISA: %s%s%s", nameForLogging(), metaSuffix, inheritedSuffix);
}
6658
6659 /***********************************************************************
6660 * Mark this class and all of its subclasses as requiring raw isa pointers
6661 **********************************************************************/
void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
{
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    if (instancesRequireRawIsa()) return;

    foreach_realized_class_and_subclass(cls, [=](Class c){
        if (c->instancesRequireRawIsa()) {
            // Already marked: returning false stops descending into
            // this subtree (see foreach_realized_class_and_subclass).
            return false;
        }

        c->setInstancesRequireRawIsa();

        // A subclass of the starting class counts as "inherited".
        if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
        return true;
    });
}
6680
6681 #if CONFIG_USE_PREOPT_CACHES
// Disallow preoptimized caches for this class and its subclasses,
// erasing any preopt cache a class has already adopted.
void objc_class::setDisallowPreoptCachesRecursively(const char *why)
{
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    if (!allowsPreoptCaches()) return;

    foreach_realized_class_and_subclass(cls, [=](Class c){
        if (!c->allowsPreoptCaches()) {
            // Already disallowed: returning false stops descending
            // into this subtree.
            return false;
        }

        if (c->cache.isConstantOptimizedCache(/* strict */true)) {
            // Already adopted a preopt cache: drop it.
            c->cache.eraseNolock(why);
        } else {
            if (PrintCaches) {
                _objc_inform("CACHES: %sclass %s: disallow preopt cache (from %s)",
                             isMetaClass() ? "meta" : "",
                             nameForLogging(), why);
            }
            c->setDisallowPreoptCaches();
        }
        return true;
    });
}
6707
// Disallow sel-inlined preoptimized caches for this class and its
// subclasses, erasing any such cache already in use.
void objc_class::setDisallowPreoptInlinedSelsRecursively(const char *why)
{
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    if (!allowsPreoptInlinedSels()) return;

    foreach_realized_class_and_subclass(cls, [=](Class c){
        if (!c->allowsPreoptInlinedSels()) {
            // Already disallowed: returning false stops descending
            // into this subtree.
            return false;
        }

        if (PrintCaches) {
            _objc_inform("CACHES: %sclass %s: disallow sel-inlined preopt cache (from %s)",
                         isMetaClass() ? "meta" : "",
                         nameForLogging(), why);
        }

        c->setDisallowPreoptInlinedSels();
        if (c->cache.isConstantOptimizedCacheWithInlinedSels()) {
            // Already using an inlined-sel preopt cache: drop it.
            c->cache.eraseNolock(why);
        }
        return true;
    });
}
6733 #endif
6734
6735 /***********************************************************************
6736 * Choose a class index.
6737 * Set instancesRequireRawIsa if no more class indexes are available.
6738 **********************************************************************/
void objc_class::chooseClassArrayIndex()
{
#if SUPPORT_INDEXED_ISA
    Class cls = (Class)this;
    runtimeLock.assertLocked();

    if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
        // No more indexes available.
        ASSERT(cls->classArrayIndex() == 0);
        // Out of indexes: this class and its subclasses must fall
        // back to raw isa pointers.
        cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
        return;
    }

    unsigned index = objc_indexed_classes_count++;
    if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
    classForIndex(index) = cls;
    cls->setClassArrayIndex(index);
#endif
}
6758
// Default lazy-class-namer hook: provides no name.
static const char *empty_lazyClassNamer(Class cls __unused) {
    return nullptr;
}
6762
6763 static ChainedHookFunction<objc_hook_lazyClassNamer> LazyClassNamerHook{empty_lazyClassNamer};
6764
// Install a new lazy-class-namer hook; the previous hook is returned
// through oldOutValue so the new hook can chain to it.
void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue,
                                 _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue) {
    LazyClassNamerHook.set(newValue, oldOutValue);
}
6769
// Ask the registered lazy-namer hook for this class's mangled name and
// install it in both the class and its metaclass, racing safely against
// other threads doing the same. Returns the name that won the race.
const char * objc_class::installMangledNameForLazilyNamedClass() {
    auto lazyClassNamer = LazyClassNamerHook.get();
    if (!*lazyClassNamer) {
        _objc_fatal("Lazily named class %p with no lazy name handler registered", this);
    }

    // If this is called on a metaclass, extract the original class
    // and make it do the installation instead. It will install
    // the metaclass's name too.
    if (isMetaClass()) {
        Class nonMeta = bits.safe_ro()->getNonMetaclass();
        return nonMeta->installMangledNameForLazilyNamedClass();
    }

    Class cls = (Class)this;
    Class metaclass = ISA();

    const char *name = lazyClassNamer((Class)this);
    if (!name) {
        _objc_fatal("Lazily named class %p wasn't named by lazy name handler", this);
    }

    // Emplace the name into the class_ro_t. If we lose the race,
    // then we'll free our name and use whatever got placed there
    // instead of our name.
    const char *previously = NULL;
    class_ro_t *ro = (class_ro_t *)cls->bits.safe_ro();
    bool wonRace = ro->name.compare_exchange_strong(previously, name, std::memory_order_release, std::memory_order_acquire);
    if (!wonRace) {
        free((void *)name);
        name = previously;
    }

    // Emplace whatever name won the race in the metaclass too.
    class_ro_t *metaRO = (class_ro_t *)metaclass->bits.safe_ro();

    // Write our pointer if the current value is NULL. There's no
    // need to loop or check success, since the only way this can
    // fail is if another thread succeeded in writing the exact
    // same pointer.
    const char *expected = NULL;
    metaRO->name.compare_exchange_strong(expected, name, std::memory_order_release, std::memory_order_acquire);

    return name;
}
6815
/***********************************************************************
* Update custom RR and AWZ when a method changes its IMP
**********************************************************************/
static void
adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
{
    // Re-run each method scanner so the class's custom retain/release (RR),
    // alloc-with-zone (AWZ), and core-method flags stay accurate after
    // meth's implementation changed.
    objc::AWZScanner::scanChangedMethod(cls, meth);
    objc::RRScanner::scanChangedMethod(cls, meth);
    objc::CoreScanner::scanChangedMethod(cls, meth);
}
6826
6827
/***********************************************************************
* class_getIvarLayout
* Called by the garbage collector.
* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
class_getIvarLayout(Class cls)
{
    // A nil class has no layout.
    return cls ? cls->data()->ro()->getIvarLayout() : nil;
}
6840
6841
/***********************************************************************
* class_getWeakIvarLayout
* Called by the garbage collector.
* The class must be nil or already realized.
* Locking: none
**********************************************************************/
const uint8_t *
class_getWeakIvarLayout(Class cls)
{
    // A nil class has no weak layout.
    return cls ? cls->data()->ro()->weakIvarLayout : nil;
}
6854
6855
/***********************************************************************
* class_setIvarLayout
* Changes the class's ivar layout.
* nil layout means no unscanned ivars
* The class must be under construction.
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
class_setIvarLayout(Class cls, const uint8_t *layout)
{
    if (!cls) return;

    ASSERT(!cls->isMetaClass());

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent object_setIvar)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set ivar layout for already-registered "
                     "class '%s'", cls->nameForLogging());
        return;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // Release the previous layout and install a copy of the new one
    // (ustrdupMaybeNil tolerates a nil layout).
    try_free(ro_w->getIvarLayout());
    ro_w->ivarLayout = ustrdupMaybeNil(layout);
}
6890
6891
/***********************************************************************
* class_setWeakIvarLayout
* Changes the class's weak ivar layout.
* nil layout means no weak ivars
* The class must be under construction.
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
class_setWeakIvarLayout(Class cls, const uint8_t *layout)
{
    if (!cls) return;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent object_setIvar)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set weak ivar layout for already-registered "
                     "class '%s'", cls->nameForLogging());
        return;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // Release the previous layout and install a copy of the new one
    // (ustrdupMaybeNil tolerates a nil layout).
    try_free(ro_w->weakIvarLayout);
    ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
}
6924
6925
/***********************************************************************
* getIvar
* Look up an ivar by name.
* Searches only cls itself, not its superclasses.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
static ivar_t *getIvar(Class cls, const char *name)
{
    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());

    const ivar_list_t *list = cls->data()->ro()->ivars;
    if (list) {
        for (auto& candidate : *list) {
            // Anonymous bitfields have no offset variable; skip them.
            if (!candidate.offset) continue;

            // candidate.name may also be nil for anonymous bitfields etc.
            if (candidate.name != nil && strcmp(name, candidate.name) == 0) {
                return &candidate;
            }
        }
    }

    return nil;
}
6950
6951
/***********************************************************************
* _class_getClassForIvar
* Given a class and an ivar that is in it or one of its superclasses,
* find the actual class that defined the ivar.
**********************************************************************/
Class _class_getClassForIvar(Class cls, Ivar ivar)
{
    mutex_locker_t lock(runtimeLock);

    // Walk up the superclass chain until a class's own ivar list
    // contains the ivar.
    while (cls) {
        const ivar_list_t *ivars = cls->data()->ro()->ivars;
        if (ivars && ivars->containsIvar(ivar)) {
            return cls;
        }
        cls = cls->getSuperclass();
    }

    return nil;
}
6971
6972
6973 /***********************************************************************
6974 * _class_getVariable
6975 * fixme
6976 * Locking: read-locks runtimeLock
6977 **********************************************************************/
6978 Ivar
6979 _class_getVariable(Class cls, const char *name)
6980 {
6981 mutex_locker_t lock(runtimeLock);
6982
6983 for ( ; cls; cls = cls->getSuperclass()) {
6984 ivar_t *ivar = getIvar(cls, name);
6985 if (ivar) {
6986 return ivar;
6987 }
6988 }
6989
6990 return nil;
6991 }
6992
6993
6994 /***********************************************************************
6995 * class_conformsToProtocol
6996 * fixme
6997 * Locking: read-locks runtimeLock
6998 **********************************************************************/
6999 BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
7000 {
7001 protocol_t *proto = newprotocol(proto_gen);
7002
7003 if (!cls) return NO;
7004 if (!proto_gen) return NO;
7005
7006 mutex_locker_t lock(runtimeLock);
7007
7008 checkIsKnownClass(cls);
7009
7010 ASSERT(cls->isRealized());
7011
7012 for (const auto& proto_ref : cls->data()->protocols()) {
7013 protocol_t *p = remapProtocol(proto_ref);
7014 if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
7015 return YES;
7016 }
7017 }
7018
7019 return NO;
7020 }
7021
// Attach a freshly built method list to cls and flush the affected caches.
// Ownership of newlist is consumed: it is handed to the class's method
// array by attachLists.
// Locking: runtimeLock must be held by the caller (callers assert it).
static void
addMethods_finish(Class cls, method_list_t *newlist)
{
    auto rwe = cls->data()->extAllocIfNeeded();

    // Sort the new entries by selector address before attaching.
    if (newlist->count > 1) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter);
    }

    prepareMethodLists(cls, &newlist, 1, NO, NO, __func__);
    rwe->methods.attachLists(&newlist, 1);

    // If the class being modified has a constant cache,
    // then all children classes are flattened constant caches
    // and need to be flushed as well.
    flushCaches(cls, __func__, [](Class c){
        // constant caches have been dealt with in prepareMethodLists
        // if the class still is constant here, it's fine to keep
        return !c->cache.isConstantOptimizedCache();
    });
}
7044
7045
/**********************************************************************
* addMethod
* Add or replace a single method on cls.
* Returns nil when the method was newly added; otherwise returns the
* previous IMP (which is replaced by `imp` only when `replace` is true).
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static IMP
addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
{
    IMP result = nil;

    runtimeLock.assertLocked();

    checkIsKnownClass(cls);

    ASSERT(types);
    ASSERT(cls->isRealized());

    method_t *m;
    if ((m = getMethodNoSuper_nolock(cls, name))) {
        // already exists
        if (!replace) {
            // Leave the existing method alone and report its IMP.
            result = m->imp(false);
        } else {
            result = _method_setImplementation(cls, m, imp);
        }
    } else {
        // fixme optimize
        // Build a one-entry method list and attach it to the class.
        method_list_t *newlist;
        newlist = (method_list_t *)calloc(method_list_t::byteSize(method_t::bigSize, 1), 1);
        newlist->entsizeAndFlags =
            (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
        newlist->count = 1;
        auto &first = newlist->begin()->big();
        first.name = name;
        first.types = strdupIfMutable(types);
        first.imp = imp;

        addMethods_finish(cls, newlist);
        result = nil;
    }

    return result;
}
7089
/**********************************************************************
* addMethods
* Add the given methods to a class in bulk.
* Returns the selectors which could not be added, when replace == NO and a
* method already exists. The returned selectors are NULL terminated and must be
* freed by the caller. They are NULL if no failures occurred.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static SEL *
addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
           uint32_t count, bool replace, uint32_t *outFailedCount)
{
    runtimeLock.assertLocked();

    ASSERT(names);
    ASSERT(imps);
    ASSERT(types);
    ASSERT(cls->isRealized());

    // Pre-allocate a list big enough for every method; only the entries
    // actually appended below are counted via newlist->count.
    method_list_t *newlist;
    size_t newlistSize = method_list_t::byteSize(sizeof(struct method_t::big), count);
    newlist = (method_list_t *)calloc(newlistSize, 1);
    newlist->entsizeAndFlags =
        (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
    newlist->count = 0;

    SEL *failedNames = nil;
    uint32_t failedCount = 0;

    for (uint32_t i = 0; i < count; i++) {
        method_t *m;
        if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
            // already exists
            if (!replace) {
                // report failure
                if (failedNames == nil) {
                    // allocate an extra entry for a trailing NULL in case
                    // every method fails
                    failedNames = (SEL *)calloc(sizeof(*failedNames),
                                                count + 1);
                }
                failedNames[failedCount] = m->name();
                failedCount++;
            } else {
                _method_setImplementation(cls, m, imps[i]);
            }
        } else {
            // Append to the new list; end() is the next free slot because
            // count tracks how many entries have been filled so far.
            auto &newmethod = newlist->end()->big();
            newmethod.name = names[i];
            newmethod.types = strdupIfMutable(types[i]);
            newmethod.imp = imps[i];
            newlist->count++;
        }
    }

    if (newlist->count > 0) {
        // fixme resize newlist because it may have been over-allocated above.
        // Note that realloc() alone doesn't work due to ptrauth.
        addMethods_finish(cls, newlist);
    } else {
        // Attaching the method list to the class consumes it. If we don't
        // do that, we have to free the memory ourselves.
        free(newlist);
    }

    if (outFailedCount) *outFailedCount = failedCount;

    return failedNames;
}
7159
7160
// Add a method to cls. Returns YES only if the method did not already
// exist (addMethod returns the previous IMP when it did).
BOOL
class_addMethod(Class cls, SEL name, IMP imp, const char *types)
{
    if (cls == nil) return NO;

    mutex_locker_t lock(runtimeLock);
    IMP previous = addMethod(cls, name, imp, types ? types : "", NO);
    return previous == nil;
}
7169
7170
// Add or replace a method on cls. Returns the previous IMP, or nil if
// the method was newly added.
IMP
class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
{
    if (cls == nil) return nil;

    mutex_locker_t lock(runtimeLock);
    const char *encoding = types ? types : "";
    return addMethod(cls, name, imp, encoding, YES);
}
7179
7180
// Bulk-add methods to cls. Returns the NULL-terminated list of selectors
// that could not be added (caller frees), or NULL if all succeeded.
SEL *
class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
                     const char **types, uint32_t count,
                     uint32_t *outFailedCount)
{
    if (cls == nil) {
        // Every addition fails on a nil class: report all names back.
        if (outFailedCount) *outFailedCount = count;
        return (SEL *)memdup(names, count * sizeof(*names));
    }

    mutex_locker_t lock(runtimeLock);
    return addMethods(cls, names, imps, types, count, NO, outFailedCount);
}
7194
// Bulk-add-or-replace methods on cls. With replace == YES no additions
// can fail, so no failure list is requested.
void
class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
                         const char **types, uint32_t count)
{
    if (cls == nil) return;

    mutex_locker_t lock(runtimeLock);
    addMethods(cls, names, imps, types, count, YES, nil);
}
7204
7205
/***********************************************************************
* class_addIvar
* Adds an ivar to a class.
* Only allowed on in-construction, non-meta classes; fails (NO) otherwise,
* or if an ivar with the same name already exists, or size > UINT32_MAX.
* `alignment` is log2 of the required byte alignment.
* Locking: acquires runtimeLock
**********************************************************************/
BOOL
class_addIvar(Class cls, const char *name, size_t size,
              uint8_t alignment, const char *type)
{
    if (!cls) return NO;

    // Normalize inputs: nil type becomes the empty encoding; an empty
    // name means an anonymous ivar.
    if (!type) type = "";
    if (name && 0 == strcmp(name, "")) name = nil;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    // No class variables
    if (cls->isMetaClass()) {
        return NO;
    }

    // Can only add ivars to in-construction classes.
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        return NO;
    }

    // Check for existing ivar with this name, unless it's anonymous.
    // Check for too-big ivar.
    // fixme check for superclass ivar too?
    if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
        return NO;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // fixme allocate less memory here

    // Grow the existing ivar list by one entry, or create a one-entry list.
    ivar_list_t *oldlist, *newlist;
    if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) {
        size_t oldsize = oldlist->byteSize();
        newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
        memcpy(newlist, oldlist, oldsize);
        free(oldlist);
    } else {
        newlist = (ivar_list_t *)calloc(ivar_list_t::byteSize(sizeof(ivar_t), 1), 1);
        newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
    }

    // Place the new ivar at the current instance size, rounded up to the
    // requested alignment (1 << alignment bytes).
    uint32_t offset = cls->unalignedInstanceSize();
    uint32_t alignMask = (1<<alignment)-1;
    offset = (offset + alignMask) & ~alignMask;

    ivar_t& ivar = newlist->get(newlist->count++);
#if __x86_64__
    // Deliberately over-allocate the ivar offset variable.
    // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
    ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
#else
    ivar.offset = (int32_t *)malloc(sizeof(int32_t));
#endif
    *ivar.offset = offset;
    ivar.name = name ? strdupIfMutable(name) : nil;
    ivar.type = strdupIfMutable(type);
    ivar.alignment_raw = alignment;
    ivar.size = (uint32_t)size;

    ro_w->ivars = newlist;
    cls->setInstanceSize((uint32_t)(offset + size));

    // Ivar layout updated in registerClass.

    return YES;
}
7282
7283
/***********************************************************************
* class_addProtocol
* Adds a protocol to a class.
* Returns NO if cls is nil or already conforms to the protocol.
* Locking: acquires runtimeLock
**********************************************************************/
BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
{
    protocol_t *protocol = newprotocol(protocol_gen);

    if (!cls) return NO;
    if (class_conformsToProtocol(cls, protocol_gen)) return NO;

    mutex_locker_t lock(runtimeLock);
    auto rwe = cls->data()->extAllocIfNeeded();

    ASSERT(cls->isRealized());

    // fixme optimize
    // Build a one-element protocol list and attach it to the class.
    protocol_list_t *protolist = (protocol_list_t *)
        malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
    protolist->count = 1;
    protolist->list[0] = (protocol_ref_t)protocol;

    rwe->protocols.attachLists(&protolist, 1);

    // fixme metaclass?

    return YES;
}
7313
7314
/***********************************************************************
* class_addProperty
* Adds a property to a class, or (when replace is true) overwrites the
* attribute string of an existing property with the same name.
* Returns NO if cls or name is nil, or if the property already exists
* and replace is false.
* Locking: acquires runtimeLock
**********************************************************************/
static bool
_class_addProperty(Class cls, const char *name,
                   const objc_property_attribute_t *attrs, unsigned int count,
                   bool replace)
{
    if (!cls) return NO;
    if (!name) return NO;

    property_t *prop = class_getProperty(cls, name);
    if (prop && !replace) {
        // already exists, refuse to replace
        return NO;
    }
    else if (prop) {
        // replace existing
        mutex_locker_t lock(runtimeLock);
        try_free(prop->attributes);
        prop->attributes = copyPropertyAttributeString(attrs, count);
        return YES;
    }
    else {
        mutex_locker_t lock(runtimeLock);
        auto rwe = cls->data()->extAllocIfNeeded();

        ASSERT(cls->isRealized());

        // Build a one-element property list and attach it to the class.
        property_list_t *proplist = (property_list_t *)
            malloc(property_list_t::byteSize(sizeof(property_t), 1));
        proplist->count = 1;
        proplist->entsizeAndFlags = sizeof(property_t);
        proplist->begin()->name = strdupIfMutable(name);
        proplist->begin()->attributes = copyPropertyAttributeString(attrs, count);

        rwe->properties.attachLists(&proplist, 1);

        return YES;
    }
}
7358
// Add a property to cls; never replaces an existing one.
BOOL
class_addProperty(Class cls, const char *name,
                  const objc_property_attribute_t *attrs, unsigned int n)
{
    bool added = _class_addProperty(cls, name, attrs, n, NO);
    return added;
}
7365
// Add a property to cls, overwriting an existing one's attributes.
void
class_replaceProperty(Class cls, const char *name,
                      const objc_property_attribute_t *attrs, unsigned int n)
{
    (void)_class_addProperty(cls, name, attrs, n, YES);
}
7372
7373
/***********************************************************************
* look_up_class
* Look up a class by name, and realize it.
* Locking: acquires runtimeLock
**********************************************************************/
// Default getClass hook: reports that no class was found.
static BOOL empty_getClass(const char *name, Class *outClass)
{
    *outClass = nil;
    return NO;
}
7384
// Chain of registered getClass hooks, terminated by empty_getClass.
static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};

// Install a new getClass hook. The previously installed hook is returned
// through outOldValue so the new hook can chain to it.
void objc_setHook_getClass(objc_hook_getClass newValue,
                           objc_hook_getClass *outOldValue)
{
    GetClassHook.set(newValue, outOldValue);
}
7392
// Look up a class by name and realize it, consulting the getClass hook
// chain (e.g. Swift) when the runtime's own tables have no match.
// Returns nil for a nil name or when nothing is found.
// Locking: acquires (and may release inside realization) runtimeLock.
Class
look_up_class(const char *name,
              bool includeUnconnected __attribute__((unused)),
              bool includeClassHandler __attribute__((unused)))
{
    if (!name) return nil;

    Class result;
    bool unrealized;
    {
        runtimeLock.lock();
        result = getClassExceptSomeSwift(name);
        unrealized = result && !result->isRealized();
        if (unrealized) {
            result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
            // runtimeLock is now unlocked
        } else {
            runtimeLock.unlock();
        }
    }

    if (!result) {
        // Ask Swift about its un-instantiated classes.

        // We use thread-local storage to prevent infinite recursion
        // if the hook function provokes another lookup of the same name
        // (for example, if the hook calls objc_allocateClassPair)

        auto *tls = _objc_fetch_pthread_data(true);

        // Stop if this thread is already looking up this name.
        for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
            if (0 == strcmp(name, tls->classNameLookups[i])) {
                return nil;
            }
        }

        // Save this lookup in tls.
        if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
            // Double the capacity (starting at 1 when empty).
            tls->classNameLookupsAllocated =
                (tls->classNameLookupsAllocated * 2 ?: 1);
            size_t size = tls->classNameLookupsAllocated *
                sizeof(tls->classNameLookups[0]);
            tls->classNameLookups = (const char **)
                realloc(tls->classNameLookups, size);
        }
        tls->classNameLookups[tls->classNameLookupsUsed++] = name;

        // Call the hook.
        Class swiftcls = nil;
        if (GetClassHook.get()(name, &swiftcls)) {
            ASSERT(swiftcls->isRealized());
            result = swiftcls;
        }

        // Erase the name from tls.
        // NOTE(review): slot is unsigned, so the `slot >= 0` half of this
        // assertion is vacuously true.
        unsigned slot = --tls->classNameLookupsUsed;
        ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
        ASSERT(name == tls->classNameLookups[slot]);
        tls->classNameLookups[slot] = nil;
    }

    return result;
}
7457
7458
/***********************************************************************
* objc_duplicateClass
* Create a realized copy of `original` under a new name, sharing its
* metaclass (duplicate->isa == original->isa).
* Locking: acquires runtimeLock
**********************************************************************/
Class
objc_duplicateClass(Class original, const char *name,
                    size_t extraBytes)
{
    Class duplicate;

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(original);

    auto orig_rw = original->data();
    auto orig_rwe = orig_rw->ext();
    auto orig_ro = orig_rw->ro();

    ASSERT(original->isRealized());
    ASSERT(!original->isMetaClass());

    duplicate = alloc_class_for_subclass(original, extraBytes);

    duplicate->initClassIsa(original->ISA());
    duplicate->setSuperclass(original->getSuperclass());

    duplicate->cache.initializeToEmpty();

    // Build fresh rw data; the duplicate starts with no subclasses.
    class_rw_t *rw = objc::zalloc<class_rw_t>();
    rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING);
    rw->firstSubclass = nil;
    rw->nextSiblingClass = nil;

    duplicate->bits = original->bits;
    duplicate->setData(rw);

    // Copy the ro data and give the copy the new name.
    auto ro = orig_ro->duplicate();
    *(char **)&ro->name = strdupIfMutable(name);
    rw->set_ro(ro);

    if (orig_rwe) {
        auto rwe = rw->extAllocIfNeeded();
        rwe->version = orig_rwe->version;
        orig_rwe->methods.duplicateInto(rwe->methods);

        // fixme dies when categories are added to the base
        rwe->properties = orig_rwe->properties;
        rwe->protocols = orig_rwe->protocols;
    } else if (ro->baseMethods()) {
        // if we have base methods, we need to make a deep copy
        // which requires a class_rw_ext_t to be allocated
        rw->deepCopy(ro);
    }

    duplicate->chooseClassArrayIndex();

    if (duplicate->getSuperclass()) {
        addSubclass(duplicate->getSuperclass(), duplicate);
        // duplicate->isa == original->isa so don't addSubclass() for it
    } else {
        addRootClass(duplicate);
    }

    // Don't methodize class - construction above is correct

    addNamedClass(duplicate, ro->getName());
    addClassTableEntry(duplicate, /*addMeta=*/false);

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
                     name, original->nameForLogging(), (void*)duplicate, ro);
    }

    duplicate->clearInfo(RW_REALIZING);

    return duplicate;
}
7537
/***********************************************************************
* objc_initializeClassPair
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/

// &UnsetLayout is the default ivar layout during class construction
static const uint8_t UnsetLayout = 0;
7545
// Initialize a freshly allocated class/metaclass pair: build their rw/ro
// data, set construction flags, wire up isa and superclass links, and
// register them in the class tables. The pair is left in the
// RW_CONSTRUCTING state until objc_registerClassPair.
// Locking: runtimeLock must be write-locked by the caller.
static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
{
    runtimeLock.assertLocked();

    class_ro_t *cls_ro_w, *meta_ro_w;
    class_rw_t *cls_rw_w, *meta_rw_w;

    cls_rw_w   = objc::zalloc<class_rw_t>();
    meta_rw_w  = objc::zalloc<class_rw_t>();
    cls_ro_w   = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
    meta_ro_w  = (class_ro_t *)calloc(sizeof(class_ro_t), 1);

    cls->setData(cls_rw_w);
    cls_rw_w->set_ro(cls_ro_w);
    meta->setData(meta_rw_w);
    meta_rw_w->set_ro(meta_ro_w);

    // Set basic info

    cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
    meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META;

    cls_ro_w->flags = 0;
    meta_ro_w->flags = RO_META;
    if (superclass) {
        // Inherit selected flags and start instance layout where the
        // superclass's layout ends.
        uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
        cls_rw_w->flags |= superclass->data()->flags & flagsToCopy;
        cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
        meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
        cls->setInstanceSize(cls_ro_w->instanceStart);
        meta->setInstanceSize(meta_ro_w->instanceStart);
    } else {
        cls_ro_w->flags |= RO_ROOT;
        meta_ro_w->flags |= RO_ROOT;
        cls_ro_w->instanceStart = 0;
        meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
        cls->setInstanceSize((uint32_t)sizeof(id));  // just an isa
        meta->setInstanceSize(meta_ro_w->instanceStart);
    }

    cls_ro_w->name.store(strdupIfMutable(name), std::memory_order_release);
    meta_ro_w->name.store(strdupIfMutable(name), std::memory_order_release);

    cls_ro_w->ivarLayout = &UnsetLayout;
    cls_ro_w->weakIvarLayout = &UnsetLayout;

    meta->chooseClassArrayIndex();
    cls->chooseClassArrayIndex();

    // This absolutely needs to be done before addSubclass
    // as initializeToEmpty() clobbers the FAST_CACHE bits
    cls->cache.initializeToEmpty();
    meta->cache.initializeToEmpty();

#if FAST_CACHE_META
    meta->cache.setBit(FAST_CACHE_META);
#endif
    meta->setInstancesRequireRawIsa();

    // Connect to superclasses and metaclasses
    cls->initClassIsa(meta);

    if (superclass) {
        meta->initClassIsa(superclass->ISA()->ISA());
        cls->setSuperclass(superclass);
        meta->setSuperclass(superclass->ISA());
        addSubclass(superclass, cls);
        addSubclass(superclass->ISA(), meta);
    } else {
        // Root class: the metaclass is its own isa, and the class is the
        // metaclass's superclass.
        meta->initClassIsa(meta);
        cls->setSuperclass(Nil);
        meta->setSuperclass(cls);
        addRootClass(cls);
        addSubclass(cls, meta);
    }

    addClassTableEntry(cls);
}
7624
7625
/***********************************************************************
* verifySuperclass
* Sanity-check the superclass provided to
* objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
**********************************************************************/
bool
verifySuperclass(Class superclass, bool rootOK)
{
    if (superclass == nil) {
        // No superclass means the new class would be a root class;
        // that's acceptable only when the caller permits roots.
        return rootOK;
    }

    if (!superclass->isRealized()) {
        // Superclass must be realized.
        return false;
    }

    if (superclass->data()->flags & RW_CONSTRUCTING) {
        // Superclass must not be under construction.
        return false;
    }

    return true;
}
7649
7650
/***********************************************************************
* objc_initializeClassPair
* Initialize caller-allocated cls/meta storage as a new class pair.
* Returns nil if the name is already taken or the superclass is invalid.
**********************************************************************/
Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
{
    // Pre-flight outside the lock: fail fast if the class name is in use.
    if (look_up_class(name, NO, NO)) return nil;

    mutex_locker_t lock(runtimeLock);

    // Re-check the name under the lock, then validate the superclass.
    if (getClassExceptSomeSwift(name) != nil
        || !verifySuperclass(superclass, true /*rootOK*/)) {
        return nil;
    }

    objc_initializeClassPair_internal(superclass, name, cls, meta);

    return cls;
}
7673
7674
/***********************************************************************
* objc_allocateClassPair
* Allocate and initialize a new class/metaclass pair.
* Returns nil if the name is already taken or the superclass is invalid.
* Locking: acquires runtimeLock
**********************************************************************/
Class objc_allocateClassPair(Class superclass, const char *name,
                             size_t extraBytes)
{
    Class cls, meta;

    // Fail if the class name is in use.
    if (look_up_class(name, NO, NO)) return nil;

    mutex_locker_t lock(runtimeLock);

    // Fail if the class name is in use.
    // Fail if the superclass isn't kosher.
    if (getClassExceptSomeSwift(name) ||
        !verifySuperclass(superclass, true/*rootOK*/))
    {
        return nil;
    }

    // Allocate new classes.
    cls  = alloc_class_for_subclass(superclass, extraBytes);
    meta = alloc_class_for_subclass(superclass, extraBytes);

    // fixme mangle the name if it looks swift-y?
    objc_initializeClassPair_internal(superclass, name, cls, meta);

    return cls;
}
7707
7708
/***********************************************************************
* objc_registerClassPair
* Finish construction of a class pair created by objc_allocateClassPair:
* flip the CONSTRUCTING flags to CONSTRUCTED and publish the class in the
* named-class table. Logs and returns early if the class was already
* registered or was not allocated via objc_allocateClassPair.
* Locking: acquires runtimeLock
**********************************************************************/
void objc_registerClassPair(Class cls)
{
    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    if ((cls->data()->flags & RW_CONSTRUCTED) ||
        (cls->ISA()->data()->flags & RW_CONSTRUCTED))
    {
        _objc_inform("objc_registerClassPair: class '%s' was already "
                     "registered!", cls->data()->ro()->getName());
        return;
    }

    if (!(cls->data()->flags & RW_CONSTRUCTING)  ||
        !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
    {
        _objc_inform("objc_registerClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!",
                     cls->data()->ro()->getName());
        return;
    }

    // Clear "under construction" bit, set "done constructing" bit
    cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
    cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);

    // Add to named class table.
    addNamedClass(cls, cls->data()->ro()->getName());
}
7744
7745
/***********************************************************************
* objc_readClassPair()
* Read a class and metaclass as written by a compiler.
* Assumes the class and metaclass are not referenced by other things
* that might need to be fixed up (such as categories and subclasses).
* Does not call +load.
* Returns the class pointer, or nil.
*
* Locking: runtimeLock acquired by map_images
**********************************************************************/
Class objc_readClassPair(Class bits, const struct objc_image_info *info)
{
    mutex_locker_t lock(runtimeLock);

    // No info bits are significant yet.
    (void)info;

    // Fail if the superclass isn't kosher.
    bool rootOK = bits->data()->flags & RO_ROOT;
    if (!verifySuperclass(bits->getSuperclass(), rootOK)){
        return nil;
    }

    // Duplicate classes are allowed, just like they are for image loading.
    // readClass will complain about the duplicate.

    Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
    if (cls != bits) {
        // This function isn't allowed to remap anything.
        _objc_fatal("objc_readClassPair for class %s changed %p to %p",
                    cls->nameForLogging(), bits, cls);
    }

    // The only client of this function is old Swift.
    // Stable Swift won't use it.
    // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
    cls = realizeClassWithoutSwift(cls, nil);

    return cls;
}
7786
7787
/***********************************************************************
* detach_class
* Disconnect a class from other data structures.
* Exception: does not remove the class from the +load list
* Call this before free_class.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void detach_class(Class cls, bool isMeta)
{
    runtimeLock.assertLocked();

    // categories not yet attached to this class
    objc::unattachedCategories.eraseClass(cls);

    // superclass's subclass list
    if (cls->isRealized()) {
        Class supercls = cls->getSuperclass();
        if (supercls) {
            removeSubclass(supercls, cls);
        } else {
            removeRootClass(cls);
        }
    }

    // class tables and +load queue
    // (only non-meta classes appear in the named class table)
    if (!isMeta) {
        removeNamedClass(cls, cls->mangledName());
    }
    objc::allocatedClasses.get().erase(cls);
}
7818
7819
/***********************************************************************
* free_class
* Frees a class's data structures: method/property/protocol lists,
* ivar metadata, cache, rw/ro data, and the class storage itself.
* No-op for unrealized classes.
* Call this after detach_class.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void free_class(Class cls)
{
    runtimeLock.assertLocked();

    if (! cls->isRealized()) return;

    auto rw = cls->data();
    auto rwe = rw->ext();
    auto ro = rw->ro();

    cls->cache.destroy();

    if (rwe) {
        // Free each method's type string, then the method array itself.
        for (auto& meth : rwe->methods) {
            try_free(meth.types());
        }
        rwe->methods.tryFree();
    }

    const ivar_list_t *ivars = ro->ivars;
    if (ivars) {
        for (auto& ivar : *ivars) {
            try_free(ivar.offset);
            try_free(ivar.name);
            try_free(ivar.type);
        }
        try_free(ivars);
    }

    if (rwe) {
        for (auto& prop : rwe->properties) {
            try_free(prop.name);
            try_free(prop.attributes);
        }
        rwe->properties.tryFree();

        rwe->protocols.tryFree();
    }

    try_free(ro->getIvarLayout());
    try_free(ro->weakIvarLayout);
    try_free(ro->getName());
    try_free(ro);
    objc::zfree(rwe);
    objc::zfree(rw);
    try_free(cls);
}
7873
7874
// Dispose of a class pair created by objc_allocateClassPair().
// Logs and refuses for classes not created that way, for metaclasses,
// and warns (but proceeds) if live subclasses remain.
void objc_disposeClassPair(Class cls)
{
    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);

    Class meta = cls->ISA();
    uint32_t constructedBits = RW_CONSTRUCTED | RW_CONSTRUCTING;

    if (!(cls->data()->flags & constructedBits) ||
        !(meta->data()->flags & constructedBits))
    {
        // class not allocated with objc_allocateClassPair
        // disposing still-unregistered class is OK!
        _objc_inform("objc_disposeClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!",
                     cls->data()->ro()->getName());
        return;
    }

    if (cls->isMetaClass()) {
        _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
                     "not a class!", cls->data()->ro()->getName());
        return;
    }

    // Shouldn't have any live subclasses.
    if (cls->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro()->getName(),
                     cls->data()->firstSubclass->nameForLogging());
    }
    if (meta->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro()->getName(),
                     meta->data()->firstSubclass->nameForLogging());
    }

    // don't remove_class_from_loadable_list()
    // - it's not there and we don't have the lock
    detach_class(meta, YES);
    detach_class(cls, NO);
    free_class(meta);
    free_class(cls);
}
7917
7918
/***********************************************************************
* objc_constructInstance
* Creates an instance of `cls` at the location pointed to by `bytes`.
* `bytes` must point to at least class_getInstanceSize(cls) bytes of
* well-aligned zero-filled memory.
* The new object's isa is set. Any C++ constructors are called.
* Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
* nil, or if C++ constructors fail.
* Note: class_createInstance() and class_createInstances() preflight this.
**********************************************************************/
id
objc_constructInstance(Class cls, void *bytes)
{
    if (!cls || !bytes) return nil;

    id instance = (id)bytes;

    // Snapshot the class's info bits up front so each is read only once.
    bool wantsCxxConstruct = cls->hasCxxCtor();
    bool wantsCxxDestruct  = cls->hasCxxDtor();

    // Non-pointer isa when the class allows it; raw isa otherwise.
    if (cls->canAllocNonpointer()) {
        instance->initInstanceIsa(cls, wantsCxxDestruct);
    } else {
        instance->initIsa(cls);
    }

    if (!wantsCxxConstruct) return instance;
    return object_cxxConstructFromClass(instance, cls, OBJECT_CONSTRUCT_NONE);
}
7953
7954
/***********************************************************************
* class_createInstance
* fixme
* Locking: none
*
* Note: this function has been carefully written so that the fastpath
* takes no branch.
**********************************************************************/
static ALWAYS_INLINE id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
                              int construct_flags = OBJECT_CONSTRUCT_NONE,
                              bool cxxConstruct = true,
                              size_t *outAllocatedSize = nil)
{
    ASSERT(cls->isRealized());

    // Read class's info bits all at once for performance
    bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
    bool hasCxxDtor = cls->hasCxxDtor();
    bool fast = cls->canAllocNonpointer();
    size_t size;

    // Instance size includes any caller-requested trailing extra bytes.
    size = cls->instanceSize(extraBytes);
    if (outAllocatedSize) *outAllocatedSize = size;

    id obj;
    if (zone) {
        obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
    } else {
        obj = (id)calloc(1, size);
    }
    if (slowpath(!obj)) {
        // Allocation failed. Only callers that passed
        // OBJECT_CONSTRUCT_CALL_BADALLOC (e.g. _objc_rootAllocWithZone)
        // get the bad-alloc handler; everyone else gets nil.
        if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
            return _objc_callBadAllocHandler(cls);
        }
        return nil;
    }

    if (!zone && fast) {
        obj->initInstanceIsa(cls, hasCxxDtor);
    } else {
        // Use raw pointer isa on the assumption that they might be
        // doing something weird with the zone or RR.
        obj->initIsa(cls);
    }

    if (fastpath(!hasCxxCtor)) {
        return obj;
    }

    // If a C++ constructor fails, the freshly allocated object must be
    // freed rather than returned.
    construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
    return object_cxxConstructFromClass(obj, cls, construct_flags);
}
8008
// Allocate one instance of cls with extraBytes of trailing storage,
// from the default zone. Returns nil for a nil class.
id
class_createInstance(Class cls, size_t extraBytes)
{
    if (slowpath(!cls)) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, /*zone*/nil);
}
8015
// Core +allocWithZone: entry point. The zone argument is ignored under
// __OBJC2__; on allocation failure the bad-alloc handler is invoked
// (via OBJECT_CONSTRUCT_CALL_BADALLOC) instead of returning nil.
NEVER_INLINE
id
_objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
{
    // allocWithZone under __OBJC2__ ignores the zone parameter
    return _class_createInstanceFromZone(cls, 0, nil,
                                         OBJECT_CONSTRUCT_CALL_BADALLOC);
}
8024
8025 /***********************************************************************
8026 * class_createInstances
8027 * fixme
8028 * Locking: none
8029 **********************************************************************/
8030 #if SUPPORT_NONPOINTER_ISA
8031 #warning fixme optimize class_createInstances
8032 #endif
// Batch-allocate up to num_requested instances of cls into results[],
// returning the number actually created. Delegates to the zone-aware
// worker with no explicit zone.
unsigned
class_createInstances(Class cls, size_t extraBytes,
                      id *results, unsigned num_requested)
{
    return _class_createInstancesFromZone(cls, extraBytes, nil,
                                          results, num_requested);
}
8040
/***********************************************************************
* object_copyFromZone
* fixme
* Locking: none
**********************************************************************/
static id
_object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    // Tagged pointers and nil are immutable values; return them as-is.
    if (oldObj->isTaggedPointerOrNil()) return oldObj;

    // fixme this doesn't handle C++ ivars correctly (#4619414)

    Class cls = oldObj->ISA(/*authenticated*/true);
    size_t allocated;
    id newObj = _class_createInstanceFromZone(cls, extraBytes, zone,
                                              OBJECT_CONSTRUCT_NONE,
                                              false, &allocated);
    if (!newObj) return nil;

    // The isa was already initialized by the allocator above;
    // copy every byte after it.
    size_t isaSize = sizeof(Class);
    memmove((uint8_t *)newObj + isaSize,
            (uint8_t *)oldObj + isaSize,
            allocated - isaSize);

    fixupCopiedIvars(newObj, oldObj);

    return newObj;
}
8069
8070
/***********************************************************************
* object_copy
* fixme
* Locking: none
**********************************************************************/
// Shallow-copies oldObj (plus extraBytes of trailing storage) out of
// the default malloc zone.
id
object_copy(id oldObj, size_t extraBytes)
{
    return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
}
8081
8082
8083 #if SUPPORT_ZONES
8084
/***********************************************************************
* class_createInstanceFromZone
* fixme
* Locking: none
**********************************************************************/
// Zone-taking public allocator; nil class yields nil.
id
class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    if (slowpath(!cls)) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, zone);
}
8096
/***********************************************************************
* object_copyFromZone
* fixme
* Locking: none
**********************************************************************/
// Public zone-taking copy entry point; thin wrapper over the static
// worker of the same name.
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    return _object_copyFromZone(oldObj, extraBytes, zone);
}
8107
8108 #endif
8109
8110
/***********************************************************************
* objc_destructInstance
* Destroys an instance without freeing memory.
* Calls C++ destructors.
* Calls ARC ivar cleanup.
* Removes associative references.
* Returns `obj`. Does nothing if `obj` is nil.
**********************************************************************/
void *objc_destructInstance(id obj)
{
    if (obj) {
        // Read all of the flags at once for performance.
        bool cxx = obj->hasCxxDtor();
        bool assoc = obj->hasAssociatedObjects();

        // This order is important.
        // NOTE(review): rationale for the ordering is not stated here;
        // presumably destructors must run while associated refs are
        // still intact, and both before clearDeallocating() — confirm.
        if (cxx) object_cxxDestruct(obj);
        if (assoc) _object_remove_assocations(obj, /*deallocating*/true);
        obj->clearDeallocating();
    }

    return obj;
}
8134
8135
/***********************************************************************
* object_dispose
* fixme
* Locking: none
**********************************************************************/
// Tear down obj (dtors, ARC cleanup, associations) and free its memory.
// Always returns nil; nil input is a no-op.
id
object_dispose(id obj)
{
    if (obj) {
        objc_destructInstance(obj);
        free(obj);
    }
    return nil;
}
8151
8152
/***********************************************************************
* _objc_getFreedObjectClass
* fixme
* Locking: none
**********************************************************************/
// Always returns nil in this runtime.
Class _objc_getFreedObjectClass (void)
{
    return nil;
}
8162
8163
8164
8165 /***********************************************************************
8166 * Tagged pointer objects.
8167 *
8168 * Tagged pointer objects store the class and the object value in the
8169 * object pointer; the "pointer" does not actually point to anything.
8170 *
8171 * Tagged pointer objects currently use this representation:
8172 * (LSB)
8173 * 1 bit set if tagged, clear if ordinary object pointer
8174 * 3 bits tag index
8175 * 60 bits payload
8176 * (MSB)
8177 * The tag index defines the object's class.
8178 * The payload format is defined by the object's class.
8179 *
8180 * If the tag index is 0b111, the tagged pointer object uses an
8181 * "extended" representation, allowing more classes but with smaller payloads:
8182 * (LSB)
8183 * 1 bit set if tagged, clear if ordinary object pointer
8184 * 3 bits 0b111
8185 * 8 bits extended tag index
8186 * 52 bits payload
8187 * (MSB)
8188 *
8189 * Some architectures reverse the MSB and LSB in these representations.
8190 *
8191 * This representation is subject to change. Representation-agnostic SPI is:
8192 * objc-internal.h for class implementers.
8193 * objc-gdb.h for debuggers.
8194 **********************************************************************/
8195 #if !SUPPORT_TAGGED_POINTERS
8196
// These variables are always provided for debuggers.
// With tagged pointers unsupported, all masks and shifts are zero and
// each class table holds a single nil entry.
uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
unsigned objc_debug_taggedpointer_payload_lshift = 0;
unsigned objc_debug_taggedpointer_payload_rshift = 0;
Class objc_debug_taggedpointer_classes[1] = { nil };

uintptr_t objc_debug_taggedpointer_ext_mask = 0;
unsigned objc_debug_taggedpointer_ext_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
unsigned objc_debug_taggedpointer_ext_payload_lshift = 0;
unsigned objc_debug_taggedpointer_ext_payload_rshift = 0;
Class objc_debug_taggedpointer_ext_classes[1] = { nil };

uintptr_t objc_debug_constant_cfstring_tag_bits = 0;

// No-op stubs so callers don't need to be conditionalized.
static void
disableTaggedPointers() { }

static void
initializeTaggedPointerObfuscator(void) { }
8220
8221 #else
8222
// The "slot" used in the class table and given to the debugger
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.

// Debugger-visible copies of the tagged pointer layout constants.
uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
unsigned objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
unsigned objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_classes is defined in objc-msg-*.s

uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
unsigned objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
unsigned objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
unsigned objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s

#if OBJC_SPLIT_TAGGED_POINTERS
// Identity permutation by default; shuffled at startup by
// initializeTaggedPointerObfuscator().
uint8_t objc_debug_tag60_permutations[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
uintptr_t objc_debug_constant_cfstring_tag_bits = _OBJC_TAG_EXT_MASK | ((uintptr_t)(OBJC_TAG_Constant_CFString - OBJC_TAG_First52BitPayload) << _OBJC_TAG_EXT_SLOT_SHIFT);
#else
uintptr_t objc_debug_constant_cfstring_tag_bits = 0;
#endif
8248
// Zero every debugger-visible tagged pointer mask and shift.
// A zero objc_debug_taggedpointer_mask is also how
// _objc_registerTaggedPointerClass() detects that tagged pointers
// are disabled.
static void
disableTaggedPointers()
{
    objc_debug_taggedpointer_mask = 0;
    objc_debug_taggedpointer_slot_shift = 0;
    objc_debug_taggedpointer_slot_mask = 0;
    objc_debug_taggedpointer_payload_lshift = 0;
    objc_debug_taggedpointer_payload_rshift = 0;

    objc_debug_taggedpointer_ext_mask = 0;
    objc_debug_taggedpointer_ext_slot_shift = 0;
    objc_debug_taggedpointer_ext_slot_mask = 0;
    objc_debug_taggedpointer_ext_payload_lshift = 0;
    objc_debug_taggedpointer_ext_payload_rshift = 0;
}
8264
8265
// Returns a pointer to the class's storage in the tagged class arrays.
// Assumes the tag is a valid basic tag.
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
#if OBJC_SPLIT_TAGGED_POINTERS
    // Split representation: permutation-based obfuscation.
    uintptr_t obfuscatedTag = _objc_basicTagToObfuscatedTag(tag);
    return &objc_tag_classes[obfuscatedTag];
#else
    // XOR the tag with the relevant bits of the global obfuscator.
    uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                >> _OBJC_TAG_INDEX_SHIFT)
                               & _OBJC_TAG_INDEX_MASK);
    uintptr_t obfuscatedTag = tag ^ tagObfuscator;

    // Array index in objc_tag_classes includes the tagged bit itself
# if SUPPORT_MSB_TAGGED_POINTERS
    return &objc_tag_classes[0x8 | obfuscatedTag];
# else
    return &objc_tag_classes[(obfuscatedTag << 1) | 1];
# endif
#endif
}
8288
8289
// Returns a pointer to the class's storage in the tagged class arrays,
// or nil if the tag is out of range.
static Class *
classSlotForTagIndex(objc_tag_index_t tag)
{
    // Basic (60-bit payload) tags live in objc_tag_classes.
    if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
        return classSlotForBasicTagIndex(tag);
    }

    // Extended (52-bit payload) tags live in objc_tag_ext_classes.
    if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
        int index = tag - OBJC_TAG_First52BitPayload;
#if OBJC_SPLIT_TAGGED_POINTERS
        // Tags in the unobfuscated split range skip obfuscation entirely.
        if (tag >= OBJC_TAG_FirstUnobfuscatedSplitTag)
            return &objc_tag_ext_classes[index];
#endif
        uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                    >> _OBJC_TAG_EXT_INDEX_SHIFT)
                                   & _OBJC_TAG_EXT_INDEX_MASK);
        return &objc_tag_ext_classes[index ^ tagObfuscator];
    }

    // Tag outside both valid ranges.
    return nil;
}
8313
/***********************************************************************
* initializeTaggedPointerObfuscator
* Initialize objc_debug_taggedpointer_obfuscator with randomness.
*
* The tagged pointer obfuscator is intended to make it more difficult
* for an attacker to construct a particular object as a tagged pointer,
* in the presence of a buffer overflow or other write control over some
* memory. The obfuscator is XORed with the tagged pointers when setting
* or retrieving payload values. They are filled with randomness on first
* use.
**********************************************************************/
static void
initializeTaggedPointerObfuscator(void)
{
    if (!DisableTaggedPointerObfuscation && dyld_program_sdk_at_least(dyld_fall_2018_os_versions)) {
        // Pull random data into the variable, then shift away all non-payload bits.
        arc4random_buf(&objc_debug_taggedpointer_obfuscator,
                       sizeof(objc_debug_taggedpointer_obfuscator));
        objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;

#if OBJC_SPLIT_TAGGED_POINTERS
        // The obfuscator doesn't apply to any of the extended tag mask or the no-obfuscation bit.
        objc_debug_taggedpointer_obfuscator &= ~(_OBJC_TAG_EXT_MASK | _OBJC_TAG_NO_OBFUSCATION_MASK);

        // Shuffle the first seven entries of the tag permutator.
        // Fisher-Yates over indices 0..6; entry 7 stays in place.
        int max = 7;
        for (int i = max - 1; i >= 0; i--) {
            int target = arc4random_uniform(i + 1);
            swap(objc_debug_tag60_permutations[i],
                 objc_debug_tag60_permutations[target]);
        }
#endif
    } else {
        // Set the obfuscator to zero for apps linked against older SDKs,
        // in case they're relying on the tagged pointer representation.
        objc_debug_taggedpointer_obfuscator = 0;
    }
}
8352
8353
/***********************************************************************
* _objc_registerTaggedPointerClass
* Set the class to use for the given tagged pointer index.
* Aborts if the tag is out of range, or if the tag is already
* used by some other class.
**********************************************************************/
void
_objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
{
    // A zeroed mask means disableTaggedPointers() ran.
    if (objc_debug_taggedpointer_mask == 0) {
        _objc_fatal("tagged pointers are disabled");
    }

    Class *slot = classSlotForTagIndex(tag);
    if (slot == nil) {
        _objc_fatal("tag index %u is invalid", (unsigned int)tag);
    }

    // Re-registering the same class (or registering over nil) is fine;
    // two different classes on one tag is fatal.
    Class previous = *slot;
    if (cls && previous && cls != previous) {
        _objc_fatal("tag index %u used for two different classes "
                    "(was %p %s, now %p %s)", tag,
                    previous, previous->nameForLogging(),
                    cls, cls->nameForLogging());
    }

    *slot = cls;

    // Store a placeholder class in the basic tag slot that is
    // reserved for the extended tag space, if it isn't set already.
    // Do this lazily when the first extended tag is registered so
    // that old debuggers characterize bogus pointers correctly more often.
    bool isBasicTag = (tag >= OBJC_TAG_First60BitPayload &&
                       tag <= OBJC_TAG_Last60BitPayload);
    if (!isBasicTag) {
        Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
        if (*extSlot == nil) {
            extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
            *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
        }
    }
}
8395
8396
/***********************************************************************
* _objc_getClassForTag
* Returns the class that is using the given tagged pointer tag.
* Returns nil if no class is using that tag or the tag is out of range.
**********************************************************************/
Class
_objc_getClassForTag(objc_tag_index_t tag)
{
    Class *slot = classSlotForTagIndex(tag);
    return slot ? *slot : nil;
}
8409
8410 #endif
8411
8412
8413 #if SUPPORT_FIXUP
8414
// Legacy vtable-dispatch entry points (assembly), in un-fixed and
// fixed-up pairs. fixupMessageRef() below rewrites call sites from the
// former to the latter.
OBJC_EXTERN void objc_msgSend_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
OBJC_EXTERN void objc_msgSend_stret_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
#endif

OBJC_EXTERN void objc_msgSend_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
#endif
8436
/***********************************************************************
* fixupMessageRef
* Repairs an old vtable dispatch call site.
* vtable dispatch itself is not supported.
**********************************************************************/
static void
fixupMessageRef(message_ref_t *msg)
{
    // The stored sel field is actually a selector name string; register
    // it to get the canonical SEL.
    msg->sel = sel_registerName((const char *)msg->sel);

    if (msg->imp == &objc_msgSend_fixup) {
        // Common memory-management selectors are rewired to their
        // direct runtime entry points instead of generic dispatch.
        if (msg->sel == @selector(alloc)) {
            msg->imp = (IMP)&objc_alloc;
        } else if (msg->sel == @selector(allocWithZone:)) {
            msg->imp = (IMP)&objc_allocWithZone;
        } else if (msg->sel == @selector(retain)) {
            msg->imp = (IMP)&objc_retain;
        } else if (msg->sel == @selector(release)) {
            msg->imp = (IMP)&objc_release;
        } else if (msg->sel == @selector(autorelease)) {
            msg->imp = (IMP)&objc_autorelease;
        } else {
            msg->imp = &objc_msgSend_fixedup;
        }
    }
    // Each remaining variant maps 1:1 to its fixed-up counterpart.
    else if (msg->imp == &objc_msgSendSuper2_fixup) {
        msg->imp = &objc_msgSendSuper2_fixedup;
    }
    else if (msg->imp == &objc_msgSend_stret_fixup) {
        msg->imp = &objc_msgSend_stret_fixedup;
    }
    else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
        msg->imp = &objc_msgSendSuper2_stret_fixedup;
    }
#if defined(__i386__) || defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fpret_fixup) {
        msg->imp = &objc_msgSend_fpret_fixedup;
    }
#endif
#if defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
        msg->imp = &objc_msgSend_fp2ret_fixedup;
    }
#endif
}
8482
8483 // SUPPORT_FIXUP
8484 #endif
8485
8486
// ProKit SPI
// Rewires cls (and its metaclass) under newSuper: detaches both from
// the old superclass pair's subclass lists, sets the new superclasses,
// re-attaches, then flushes the affected method caches.
// Returns the previous superclass.
// Locking: runtimeLock must be held by the caller.
static Class setSuperclass(Class cls, Class newSuper)
{
    Class oldSuper;

    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    ASSERT(newSuper->isRealized());

    // Detach cls and its metaclass from the old superclass pair.
    oldSuper = cls->getSuperclass();
    removeSubclass(oldSuper, cls);
    removeSubclass(oldSuper->ISA(), cls->ISA());

    // Attach both halves of the class pair to the new superclass pair.
    cls->setSuperclass(newSuper);
    cls->ISA()->setSuperclass(newSuper->ISA(/*authenticated*/true));
    addSubclass(newSuper, cls);
    addSubclass(newSuper->ISA(), cls->ISA());

    // Flush subclass's method caches.
    flushCaches(cls, __func__, [](Class c){ return true; });
    flushCaches(cls->ISA(), __func__, [](Class c){ return true; });

    return oldSuper;
}
8512
8513
// Public wrapper: takes runtimeLock, then delegates to setSuperclass().
// Returns the previous superclass.
Class class_setSuperclass(Class cls, Class newSuper)
{
    mutex_locker_t lock(runtimeLock);
    return setSuperclass(cls, newSuper);
}
8519
// One-time initialization of the global unattached-category and
// allocated-class tables.
void runtime_init(void)
{
    objc::unattachedCategories.init(32);  // initial capacity
    objc::allocatedClasses.init();
}
8525
8526 // __OBJC2__
8527 #endif