1 /*
2 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.mm
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "DenseMapExtras.h"
32 #include "objc-private.h"
33 #include "objc-runtime-new.h"
34 #include "objc-file.h"
35 #include "objc-cache.h"
36 #include "objc-zalloc.h"
37 #include <Block.h>
38 #include <objc/message.h>
39 #include <mach/shared_region.h>
40
41 #define newprotocol(p) ((protocol_t *)p)
42
43 static void disableTaggedPointers();
44 static void detach_class(Class cls, bool isMeta);
45 static void free_class(Class cls);
46 static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
47 static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
48 static method_t *search_method_list(const method_list_t *mlist, SEL sel);
49 static bool method_lists_contains_any(method_list_t * const *mlists, method_list_t * const *end,
50 SEL sels[], size_t selcount);
51 static void flushCaches(Class cls);
52 static void initializeTaggedPointerObfuscator(void);
53 #if SUPPORT_FIXUP
54 static void fixupMessageRef(message_ref_t *msg);
55 #endif
56 static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
57 static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);
58
59 struct locstamped_category_t {
60 category_t *cat;
61 struct header_info *hi;
62 };
63 enum {
64 ATTACH_CLASS = 1 << 0,
65 ATTACH_METACLASS = 1 << 1,
66 ATTACH_CLASS_AND_METACLASS = 1 << 2,
67 ATTACH_EXISTING = 1 << 3,
68 };
69 static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);
70
71
72 /***********************************************************************
73 * Lock management
74 **********************************************************************/
75 mutex_t runtimeLock;
76 mutex_t selLock;
77 #if CONFIG_USE_CACHE_LOCK
78 mutex_t cacheUpdateLock;
79 #endif
80 recursive_mutex_t loadMethodLock;
81
82 /***********************************************************************
83 * Class structure decoding
84 **********************************************************************/
85
86 const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;
87
88
89 /***********************************************************************
90 * Non-pointer isa decoding
91 **********************************************************************/
92 #if SUPPORT_INDEXED_ISA
93
94 // Indexed non-pointer isa.
95
96 // These are used to mask the ISA and see whether it has an index or not.
97 const uintptr_t objc_debug_indexed_isa_magic_mask = ISA_INDEX_MAGIC_MASK;
98 const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;
99
100 // die if masks overlap
101 STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);
102
103 // die if magic is wrong
104 STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);
105
106 // Then these are used to extract the index from the ISA.
107 const uintptr_t objc_debug_indexed_isa_index_mask = ISA_INDEX_MASK;
108 const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;
109
110 asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
111 "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
112 asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
113 "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
114 asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
115 "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
116 asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
117 "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));
118
119
120 // And then we can use that index to get the class from this array. Note
121 // the size is provided so that clients can ensure the index they get is in
122 // bounds and not read off the end of the array.
123 // Defined in the objc-msg-*.s files
124 // const Class objc_indexed_classes[]
125
126 // When we don't have enough bits to store a class*, we can instead store an
127 // index into this array. Classes are added here when they are realized.
128 // Note, an index of 0 is illegal.
129 uintptr_t objc_indexed_classes_count = 0;
130
131 // SUPPORT_INDEXED_ISA
132 #else
133 // not SUPPORT_INDEXED_ISA
134
135 // These variables exist but are all set to 0 so that they are ignored.
136 const uintptr_t objc_debug_indexed_isa_magic_mask = 0;
137 const uintptr_t objc_debug_indexed_isa_magic_value = 0;
138 const uintptr_t objc_debug_indexed_isa_index_mask = 0;
139 const uintptr_t objc_debug_indexed_isa_index_shift = 0;
140 Class objc_indexed_classes[1] = { nil };
141 uintptr_t objc_indexed_classes_count = 0;
142
143 // not SUPPORT_INDEXED_ISA
144 #endif
145
146
147 #if SUPPORT_PACKED_ISA
148
149 // Packed non-pointer isa.
150
151 asm("\n .globl _objc_absolute_packed_isa_class_mask" \
152 "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));
153
154 const uintptr_t objc_debug_isa_class_mask = ISA_MASK;
155 const uintptr_t objc_debug_isa_magic_mask = ISA_MAGIC_MASK;
156 const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;
157
158 // die if masks overlap
159 STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);
160
161 // die if magic is wrong
162 STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);
163
164 // die if virtual address space bound goes up
165 STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0 ||
166 ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);
167
168 // SUPPORT_PACKED_ISA
169 #else
170 // not SUPPORT_PACKED_ISA
171
172 // These variables exist but enforce pointer alignment only.
173 const uintptr_t objc_debug_isa_class_mask = (~WORD_MASK);
174 const uintptr_t objc_debug_isa_magic_mask = WORD_MASK;
175 const uintptr_t objc_debug_isa_magic_value = 0;
176
177 // not SUPPORT_PACKED_ISA
178 #endif
179
180
181 /***********************************************************************
182 * Swift marker bits
183 **********************************************************************/
184 const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;
185
186
187 /***********************************************************************
188 * allocatedClasses
189 * A table of all classes (and metaclasses) which have been allocated
190 * with objc_allocateClassPair.
191 **********************************************************************/
192 namespace objc {
193 static ExplicitInitDenseSet<Class> allocatedClasses;
194 }
195
196 /***********************************************************************
197 * _firstRealizedClass
198 * The root of all realized classes
199 **********************************************************************/
200 static Class _firstRealizedClass = nil;
201
202 /***********************************************************************
203 * didInitialAttachCategories
204 * Whether the initial attachment of categories present at startup has
205 * been done.
206 **********************************************************************/
207 static bool didInitialAttachCategories = false;
208
209 /***********************************************************************
210 * didCallDyldNotifyRegister
211 * Whether the call to _dyld_objc_notify_register has completed.
212 **********************************************************************/
213 bool didCallDyldNotifyRegister = false;
214
215 /*
216 Low two bits of mlist->entsize are used as the fixed-up marker.
217 PREOPTIMIZED VERSION:
218 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
219 (Protocol method lists are not sorted because of their extra parallel data)
220 Runtime fixed-up method lists get 3.
221 UN-PREOPTIMIZED VERSION:
222 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted)
223 Shared cache's sorting and uniquing are not trusted, but do affect the
224 location of the selector name string.
225 Runtime fixed-up method lists get 2.
226
227 High two bits of protocol->flags are used as the fixed-up marker.
228 PREOPTIMIZED VERSION:
229 Protocols from shared cache are 1<<30.
230 Runtime fixed-up protocols get 1<<30.
231 UN-PREOPTIMIZED VERSION:
232 Protocols from shared cache are 1<<30.
233 Shared cache's fixups are not trusted.
234 Runtime fixed-up protocols get 3<<30.
235 */
236
237 static uint32_t fixed_up_method_list = 3;
238 static uint32_t uniqued_method_list = 1;
239 static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
240 static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;
241
242 void
243 disableSharedCacheOptimizations(void)
244 {
245 fixed_up_method_list = 2;
246 // It is safe to set uniqued_method_list to 0: isUniqued() is only consulted
247 // for method lists that are already in need of being fixed up
248 uniqued_method_list = 0;
249 fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
250 // It's safe to just set canonical_protocol to 0 as we'll never call
251 // clearIsCanonical() unless isCanonical() returned true, which can't happen
252 // with a 0 mask
253 canonical_protocol = 0;
254 }
255
256 bool method_list_t::isUniqued() const {
257 return (flags() & uniqued_method_list) != 0;
258 }
259
260 bool method_list_t::isFixedUp() const {
261 return flags() == fixed_up_method_list;
262 }
263
264 void method_list_t::setFixedUp() {
265 runtimeLock.assertLocked();
266 ASSERT(!isFixedUp());
267 entsizeAndFlags = entsize() | fixed_up_method_list;
268 }
269
270 bool protocol_t::isFixedUp() const {
271 return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
272 }
273
274 void protocol_t::setFixedUp() {
275 runtimeLock.assertLocked();
276 ASSERT(!isFixedUp());
277 flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
278 }
279
280 bool protocol_t::isCanonical() const {
281 return (flags & canonical_protocol) != 0;
282 }
283
284 void protocol_t::clearIsCanonical() {
285 runtimeLock.assertLocked();
286 ASSERT(isCanonical());
287 flags = flags & ~canonical_protocol;
288 }
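
/*
 * A sketch of how the markers above are consumed (illustrative only; this
 * helper is hypothetical and simply mirrors what prepareMethodLists() does
 * later in this file): a method list whose flags() do not match
 * fixed_up_method_list is re-uniqued (only if isUniqued() is false) and
 * re-sorted, then stamped with the current fixed_up_method_list value.
 */
#if 0  // not part of the build
static void sketch_fixupIfNeeded(method_list_t *mlist, bool fromBundle)
{
    runtimeLock.assertLocked();
    if (!mlist->isFixedUp()) {
        // bundleCopy = fromBundle, sort = true; see fixupMethodList() below
        fixupMethodList(mlist, fromBundle, true);
    }
}
#endif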
289
290
291 method_list_t * const *method_array_t::endCategoryMethodLists(Class cls) const
292 {
293 auto mlists = beginLists();
294 auto mlistsEnd = endLists();
295
296 if (mlists == mlistsEnd || !cls->data()->ro()->baseMethods())
297 {
298 // No methods, or no base methods.
299 // Everything here is a category method.
300 return mlistsEnd;
301 }
302
303 // Have base methods. Category methods are
304 // everything except the last method list.
305 return mlistsEnd - 1;
306 }
307
308 static const char *sel_cname(SEL sel)
309 {
310 return (const char *)(void *)sel;
311 }
312
313
314 static size_t protocol_list_size(const protocol_list_t *plist)
315 {
316 return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
317 }
318
319
320 static void try_free(const void *p)
321 {
322 if (p && malloc_size(p)) free((void *)p);
323 }
324
325
326 using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
327 Class _Nonnull newClass);
328 // Normally there's only one handler registered.
329 static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;
330
331 void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
332 (Class _Nonnull oldClass, Class _Nonnull newClass)) {
333 mutex_locker_t lock(runtimeLock);
334
335 classCopyFixupHandlers.append(newFixupHandler);
336 }
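
/*
 * Usage sketch (the handler below is hypothetical): a caller registers a
 * fixup handler that runs whenever alloc_class_for_subclass() (below) copies
 * a Swift superclass's bits into a newly allocated subclass.
 */
#if 0  // not part of the build
static void exampleClassCopyFixup(Class oldClass, Class newClass)
{
    // Patch anything that was memcpy'd from oldClass and must not be shared.
    (void)oldClass; (void)newClass;
}
// During startup: _objc_setClassCopyFixupHandler(exampleClassCopyFixup);
#endif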
337
338 static Class
339 alloc_class_for_subclass(Class supercls, size_t extraBytes)
340 {
341 if (!supercls || !supercls->isAnySwift()) {
342 return _calloc_class(sizeof(objc_class) + extraBytes);
343 }
344
345 // Superclass is a Swift class. New subclass must duplicate its extra bits.
346
347 // Allocate the new class, with space for super's prefix and suffix
348 // and self's extraBytes.
349 swift_class_t *swiftSupercls = (swift_class_t *)supercls;
350 size_t superSize = swiftSupercls->classSize;
351 void *superBits = swiftSupercls->baseAddress();
352 void *bits = malloc(superSize + extraBytes);
353
354 // Copy all of the superclass's data to the new class.
355 memcpy(bits, superBits, superSize);
356
357 // Erase the objc data and the Swift description in the new class.
358 swift_class_t *swcls = (swift_class_t *)
359 ((uint8_t *)bits + swiftSupercls->classAddressOffset);
360 bzero(swcls, sizeof(objc_class));
361 swcls->description = nil;
362
363 for (auto handler : classCopyFixupHandlers) {
364 handler(supercls, (Class)swcls);
365 }
366
367 // Mark this class as Swift-enhanced.
368 if (supercls->isSwiftStable()) {
369 swcls->bits.setIsSwiftStable();
370 }
371 if (supercls->isSwiftLegacy()) {
372 swcls->bits.setIsSwiftLegacy();
373 }
374
375 return (Class)swcls;
376 }
377
378
379 /***********************************************************************
380 * object_getIndexedIvars.
381 **********************************************************************/
382 void *object_getIndexedIvars(id obj)
383 {
384 uint8_t *base = (uint8_t *)obj;
385
386 if (!obj) return nil;
387 if (obj->isTaggedPointer()) return nil;
388
389 if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();
390
391 Class cls = (Class)obj;
392 if (!cls->isAnySwift()) return base + sizeof(objc_class);
393
394 swift_class_t *swcls = (swift_class_t *)cls;
395 return base - swcls->classAddressOffset + word_align(swcls->classSize);
396 }
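
/*
 * Usage sketch with public API (illustrative only; the sizes and values are
 * arbitrary): extra bytes requested at allocation time sit after the
 * instance's declared ivars and are reached via object_getIndexedIvars().
 */
#if 0  // not part of the build
static void indexedIvarsExample(void)
{
    id obj = class_createInstance(objc_getClass("NSObject"), 16 /*extraBytes*/);
    uint32_t *scratch = (uint32_t *)object_getIndexedIvars(obj);
    scratch[0] = 42;   // per-instance storage invisible to declared ivars
    object_dispose(obj);
}
#endif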
397
398
399 /***********************************************************************
400 * make_ro_writeable
401 * Reallocates rw->ro if necessary to make it writeable.
402 * Locking: runtimeLock must be held by the caller.
403 **********************************************************************/
404 static class_ro_t *make_ro_writeable(class_rw_t *rw)
405 {
406 runtimeLock.assertLocked();
407
408 if (rw->flags & RW_COPIED_RO) {
409 // already writeable, do nothing
410 } else {
411 rw->set_ro(rw->ro()->duplicate());
412 rw->flags |= RW_COPIED_RO;
413 }
414 return const_cast<class_ro_t *>(rw->ro());
415 }
416
417
418 /***********************************************************************
419 * dataSegmentsContain
420 * Returns true if the given address lies within a data segment in any
421 * loaded image.
422 **********************************************************************/
423 NEVER_INLINE
424 static bool
425 dataSegmentsContain(Class cls)
426 {
427 uint32_t index;
428 if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
429 // if the class is realized (hence has a class_rw_t),
430 // memorize where we found the range
431 if (cls->isRealized()) {
432 cls->data()->witness = (uint16_t)index;
433 }
434 return true;
435 }
436 return false;
437 }
438
439
440 /***********************************************************************
441 * isKnownClass
442 * Return true if the class is known to the runtime (located within the
443 * shared cache, within the data segment of a loaded image, or has been
444 * allocated with objc_allocateClassPair).
445 *
446 * The result of this operation is cached on the class in a "witness"
447 * value that is cheaply checked in the fastpath.
448 **********************************************************************/
449 ALWAYS_INLINE
450 static bool
451 isKnownClass(Class cls)
452 {
453 if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
454 return true;
455 }
456 auto &set = objc::allocatedClasses.get();
457 return set.find(cls) != set.end() || dataSegmentsContain(cls);
458 }
459
460
461 /***********************************************************************
462 * addClassTableEntry
463 * Add a class to the table of all classes. If addMeta is true,
464 * automatically adds the metaclass of the class as well.
465 * Locking: runtimeLock must be held by the caller.
466 **********************************************************************/
467 static void
468 addClassTableEntry(Class cls, bool addMeta = true)
469 {
470 runtimeLock.assertLocked();
471
472 // This class is allowed to be a known class via the shared cache or via
473 // data segments, but it is not allowed to be in the dynamic table already.
474 auto &set = objc::allocatedClasses.get();
475
476 ASSERT(set.find(cls) == set.end());
477
478 if (!isKnownClass(cls))
479 set.insert(cls);
480 if (addMeta)
481 addClassTableEntry(cls->ISA(), false);
482 }
483
484
485 /***********************************************************************
486 * checkIsKnownClass
487 * Checks the given class against the list of all known classes. Dies
488 * with a fatal error if the class is not known.
489 * Locking: runtimeLock must be held by the caller.
490 **********************************************************************/
491 ALWAYS_INLINE
492 static void
493 checkIsKnownClass(Class cls)
494 {
495 if (slowpath(!isKnownClass(cls))) {
496 _objc_fatal("Attempt to use unknown class %p.", cls);
497 }
498 }
499
500 /***********************************************************************
501 * classNSObject
502 * Returns class NSObject.
503 * Locking: none
504 **********************************************************************/
505 static Class classNSObject(void)
506 {
507 extern objc_class OBJC_CLASS_$_NSObject;
508 return (Class)&OBJC_CLASS_$_NSObject;
509 }
510
511 static Class metaclassNSObject(void)
512 {
513 extern objc_class OBJC_METACLASS_$_NSObject;
514 return (Class)&OBJC_METACLASS_$_NSObject;
515 }
516
517 /***********************************************************************
518 * printReplacements
519 * Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
520 * Warn about methods from cats that override other methods in cats or cls.
521 * Assumes no methods from cats have been added to cls yet.
522 **********************************************************************/
523 __attribute__((cold, noinline))
524 static void
525 printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
526 {
527 uint32_t c;
528 bool isMeta = cls->isMetaClass();
529
530 // Newest categories are LAST in cats
531 // Later categories override earlier ones.
532 for (c = 0; c < cats_count; c++) {
533 category_t *cat = cats_list[c].cat;
534
535 method_list_t *mlist = cat->methodsForMeta(isMeta);
536 if (!mlist) continue;
537
538 for (const auto& meth : *mlist) {
539 SEL s = sel_registerName(sel_cname(meth.name));
540
541 // Search for replaced methods in method lookup order.
542 // Complain about the first duplicate only.
543
544 // Look for method in earlier categories
545 for (uint32_t c2 = 0; c2 < c; c2++) {
546 category_t *cat2 = cats_list[c2].cat;
547
548 const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
549 if (!mlist2) continue;
550
551 for (const auto& meth2 : *mlist2) {
552 SEL s2 = sel_registerName(sel_cname(meth2.name));
553 if (s == s2) {
554 logReplacedMethod(cls->nameForLogging(), s,
555 cls->isMetaClass(), cat->name,
556 meth2.imp, meth.imp);
557 goto complained;
558 }
559 }
560 }
561
562 // Look for method in cls
563 for (const auto& meth2 : cls->data()->methods()) {
564 SEL s2 = sel_registerName(sel_cname(meth2.name));
565 if (s == s2) {
566 logReplacedMethod(cls->nameForLogging(), s,
567 cls->isMetaClass(), cat->name,
568 meth2.imp, meth.imp);
569 goto complained;
570 }
571 }
572
573 complained:
574 ;
575 }
576 }
577 }
578
579
580 /***********************************************************************
581 * unreasonableClassCount
582 * Provides an upper bound for any iteration of classes,
583 * to prevent spins when runtime metadata is corrupted.
584 **********************************************************************/
585 static unsigned unreasonableClassCount()
586 {
587 runtimeLock.assertLocked();
588
589 int base = NXCountMapTable(gdb_objc_realized_classes) +
590 getPreoptimizedClassUnreasonableCount();
591
592 // Provide lots of slack here. Some iterations touch metaclasses too.
593 // Some iterations backtrack (like realized class iteration).
594 // We don't need an efficient bound, merely one that prevents spins.
595 return (base + 1) * 16;
596 }
597
598
599 /***********************************************************************
600 * Class enumerators
601 * The passed-in block returns `false` if subclasses can be skipped
602 * Locking: runtimeLock must be held by the caller.
603 **********************************************************************/
604 static inline void
605 foreach_realized_class_and_subclass_2(Class top, unsigned &count,
606 bool skip_metaclass,
607 bool (^code)(Class) __attribute((noescape)))
608 {
609 Class cls = top;
610
611 runtimeLock.assertLocked();
612 ASSERT(top);
613
614 while (1) {
615 if (--count == 0) {
616 _objc_fatal("Memory corruption in class list.");
617 }
618
619 bool skip_subclasses;
620
621 if (skip_metaclass && cls->isMetaClass()) {
622 skip_subclasses = true;
623 } else {
624 skip_subclasses = !code(cls);
625 }
626
627 if (!skip_subclasses && cls->data()->firstSubclass) {
628 cls = cls->data()->firstSubclass;
629 } else {
630 while (!cls->data()->nextSiblingClass && cls != top) {
631 cls = cls->superclass;
632 if (--count == 0) {
633 _objc_fatal("Memory corruption in class list.");
634 }
635 }
636 if (cls == top) break;
637 cls = cls->data()->nextSiblingClass;
638 }
639 }
640 }
641
642 // Enumerates a class and all of its realized subclasses.
643 static void
644 foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
645 {
646 unsigned int count = unreasonableClassCount();
647
648 foreach_realized_class_and_subclass_2(top, count, false, code);
649 }
650
651 // Enumerates all realized classes and metaclasses.
652 static void
653 foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
654 {
655 unsigned int count = unreasonableClassCount();
656
657 for (Class top = _firstRealizedClass;
658 top != nil;
659 top = top->data()->nextSiblingClass)
660 {
661 foreach_realized_class_and_subclass_2(top, count, false, code);
662 }
663 }
664
665 // Enumerates all realized classes (ignoring metaclasses).
666 static void
667 foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
668 {
669 unsigned int count = unreasonableClassCount();
670
671 for (Class top = _firstRealizedClass;
672 top != nil;
673 top = top->data()->nextSiblingClass)
674 {
675 foreach_realized_class_and_subclass_2(top, count, true, code);
676 }
677 }
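
/*
 * Sketch of how these enumerators are driven (illustrative only; the counting
 * helper is hypothetical): take runtimeLock, walk the realized-class tree, and
 * return false from the block to prune a subtree.
 */
#if 0  // not part of the build
static unsigned countRealizedClasses_sketch(void)
{
    mutex_locker_t lock(runtimeLock);
    __block unsigned n = 0;
    foreach_realized_class(^bool(Class c) {
        (void)c;
        n++;
        return true;   // keep descending into subclasses
    });
    return n;
}
#endif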
678
679
680 /***********************************************************************
681 * Method Scanners / Optimization tracking
682 * Implementation of scanning for various implementations of methods.
683 **********************************************************************/
684
685 namespace objc {
686
687 enum SelectorBundle {
688 AWZ,
689 RR,
690 Core,
691 };
692
693 namespace scanner {
694
695 // The current state of NSObject swizzling for every scanner
696 //
697 // It allows for cheap checks of global swizzles, and also lets things
698 // like IMP swizzling that happens before NSObject is initialized be
699 // remembered, as setInitialized() would otherwise miss it.
700 //
701 // Every pair of bits describes a SelectorBundle.
702 // even bits: is NSObject class swizzled for this bundle
703 // odd bits: is NSObject meta class swizzled for this bundle
704 static uintptr_t NSObjectSwizzledMask;
705
706 static ALWAYS_INLINE uintptr_t
707 swizzlingBit(SelectorBundle bundle, bool isMeta)
708 {
709 return 1UL << (2 * bundle + isMeta);
710 }
711
712 static void __attribute__((cold, noinline))
713 printCustom(Class cls, SelectorBundle bundle, bool inherited)
714 {
715 static char const * const SelectorBundleName[] = {
716 [AWZ] = "CUSTOM AWZ",
717 [RR] = "CUSTOM RR",
718 [Core] = "CUSTOM Core",
719 };
720
721 _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
722 cls->nameForLogging(),
723 cls->isMetaClass() ? " (meta)" : "",
724 inherited ? " (inherited)" : "");
725 }
726
727 enum class Scope { Instances, Classes, Both };
728
729 template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
730 class Mixin {
731
732 // work around compiler being broken with templates using Class/objc_class,
733 // probably some weird confusion with Class being builtin
734 ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
735 return (objc_class *)cls;
736 }
737
738 static void
739 setCustomRecursively(Class cls, bool inherited = false)
740 {
741 foreach_realized_class_and_subclass(cls, [=](Class c){
742 if (c != cls && !as_objc_class(c)->isInitialized()) {
743 // Subclass not yet initialized. Wait for setInitialized() to do it
744 return false;
745 }
746 if (Traits::isCustom(c)) {
747 return false;
748 }
749 Traits::setCustom(c);
750 if (ShouldPrint) {
751 printCustom(cls, Bundle, inherited || c != cls);
752 }
753 return true;
754 });
755 }
756
757 static bool
758 isNSObjectSwizzled(bool isMeta)
759 {
760 return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
761 }
762
763 static void
764 setNSObjectSwizzled(Class NSOClass, bool isMeta)
765 {
766 NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
767 if (as_objc_class(NSOClass)->isInitialized()) {
768 setCustomRecursively(NSOClass);
769 }
770 }
771
772 static void
773 scanChangedMethodForUnknownClass(const method_t *meth)
774 {
775 Class cls;
776
777 cls = classNSObject();
778 if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
779 for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
780 if (meth == &meth2) {
781 setNSObjectSwizzled(cls, NO);
782 break;
783 }
784 }
785 }
786
787 cls = metaclassNSObject();
788 if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
789 for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
790 if (meth == &meth2) {
791 setNSObjectSwizzled(cls, YES);
792 break;
793 }
794 }
795 }
796 }
797
798 static void
799 scanAddedClassImpl(Class cls, bool isMeta)
800 {
801 Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
802 bool setCustom = NO, inherited = NO;
803
804 if (isNSObjectSwizzled(isMeta)) {
805 setCustom = YES;
806 } else if (cls == NSOClass) {
807 // NSObject is default but we need to check categories
808 auto &methods = as_objc_class(cls)->data()->methods();
809 setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
810 methods.endCategoryMethodLists(cls));
811 } else if (!isMeta && !as_objc_class(cls)->superclass) {
812 // Custom Root class
813 setCustom = YES;
814 } else if (Traits::isCustom(as_objc_class(cls)->superclass)) {
815 // Superclass is custom, therefore we are too.
816 setCustom = YES;
817 inherited = YES;
818 } else {
819 // Not NSObject.
820 auto &methods = as_objc_class(cls)->data()->methods();
821 setCustom = Traits::scanMethodLists(methods.beginLists(),
822 methods.endLists());
823 }
824 if (slowpath(setCustom)) {
825 if (ShouldPrint) printCustom(cls, Bundle, inherited);
826 } else {
827 Traits::setDefault(cls);
828 }
829 }
830
831 public:
832 // Scan a class that is about to be marked Initialized for particular
833 // bundles of selectors, and mark the class and its children
834 // accordingly.
835 //
836 // This also handles inheriting properties from its superclass.
837 //
838 // Caller: objc_class::setInitialized()
839 static void
840 scanInitializedClass(Class cls, Class metacls)
841 {
842 if (Domain != Scope::Classes) {
843 scanAddedClassImpl(cls, false);
844 }
845 if (Domain != Scope::Instances) {
846 scanAddedClassImpl(metacls, true);
847 }
848 }
849
850 // Inherit various properties from the superclass when a class
851 // is being added to the graph.
852 //
853 // Caller: addSubclass()
854 static void
855 scanAddedSubClass(Class subcls, Class supercls)
856 {
857 if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
858 setCustomRecursively(subcls, true);
859 }
860 }
861
862 // Scan Method lists for selectors that would override things
863 // in a Bundle.
864 //
865 // This is used to detect when categories that override problematic selectors
866 // are injected into a class after it has been initialized.
867 //
868 // Caller: prepareMethodLists()
869 static void
870 scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
871 {
872 if (slowpath(Traits::isCustom(cls))) {
873 return;
874 }
875 if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
876 setCustomRecursively(cls);
877 }
878 }
879
880 // Handle IMP Swizzling (the IMP for an existing method being changed).
881 //
882 // In almost all cases, IMP swizzling does not affect custom bits.
883 // Custom search will already find the method whether or not
884 // it is swizzled, so it does not transition from non-custom to custom.
885 //
886 // The only cases where IMP swizzling can affect the custom bits is
887 // if the swizzled method is one of the methods that is assumed to be
888 // non-custom. These special cases are listed in setInitialized().
889 // We look for such cases here.
890 //
891 // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
892 static void
893 scanChangedMethod(Class cls, const method_t *meth)
894 {
895 if (fastpath(!Traits::isInterestingSelector(meth->name))) {
896 return;
897 }
898
899 if (cls) {
900 bool isMeta = as_objc_class(cls)->isMetaClass();
901 if (isMeta && Domain != Scope::Instances) {
902 if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
903 setNSObjectSwizzled(cls, isMeta);
904 }
905 }
906 if (!isMeta && Domain != Scope::Classes) {
907 if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
908 setNSObjectSwizzled(cls, isMeta);
909 }
910 }
911 } else {
912 // We're called from method_exchangeImplementations; only the NSObject
913 // class and metaclass may be problematic (exchanging the default
914 // builtin IMP of an interesting selector is a swizzling that
915 // may flip our scanned property. For other classes, the previous
916 // value had already flipped the property).
917 //
918 // However, as we don't know the class, we need to scan all of
919 // NSObject class and metaclass methods (this is SLOW).
920 scanChangedMethodForUnknownClass(meth);
921 }
922 }
923 };
924
925 } // namespace scanner
926
927 // AWZ methods: +alloc / +allocWithZone:
928 struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
929 static bool isCustom(Class cls) {
930 return cls->hasCustomAWZ();
931 }
932 static void setCustom(Class cls) {
933 cls->setHasCustomAWZ();
934 }
935 static void setDefault(Class cls) {
936 cls->setHasDefaultAWZ();
937 }
938 static bool isInterestingSelector(SEL sel) {
939 return sel == @selector(alloc) || sel == @selector(allocWithZone:);
940 }
941 static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
942 SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
943 return method_lists_contains_any(mlists, end, sels, 2);
944 }
945 };
946
947 // Retain/Release methods that are extremely rarely overridden
948 //
949 // retain/release/autorelease/retainCount/
950 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
951 struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
952 #if !SUPPORT_NONPOINTER_ISA
953 , scanner::Scope::Instances
954 #endif
955 > {
956 static bool isCustom(Class cls) {
957 return cls->hasCustomRR();
958 }
959 static void setCustom(Class cls) {
960 cls->setHasCustomRR();
961 }
962 static void setDefault(Class cls) {
963 cls->setHasDefaultRR();
964 }
965 static bool isInterestingSelector(SEL sel) {
966 return sel == @selector(retain) ||
967 sel == @selector(release) ||
968 sel == @selector(autorelease) ||
969 sel == @selector(_tryRetain) ||
970 sel == @selector(_isDeallocating) ||
971 sel == @selector(retainCount) ||
972 sel == @selector(allowsWeakReference) ||
973 sel == @selector(retainWeakReference);
974 }
975 static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
976 SEL sels[8] = {
977 @selector(retain),
978 @selector(release),
979 @selector(autorelease),
980 @selector(_tryRetain),
981 @selector(_isDeallocating),
982 @selector(retainCount),
983 @selector(allowsWeakReference),
984 @selector(retainWeakReference),
985 };
986 return method_lists_contains_any(mlists, end, sels, 8);
987 }
988 };
989
990 // Core NSObject methods that are extremely rarely overridden
991 //
992 // +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector:
993 struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
994 static bool isCustom(Class cls) {
995 return cls->hasCustomCore();
996 }
997 static void setCustom(Class cls) {
998 cls->setHasCustomCore();
999 }
1000 static void setDefault(Class cls) {
1001 cls->setHasDefaultCore();
1002 }
1003 static bool isInterestingSelector(SEL sel) {
1004 return sel == @selector(new) ||
1005 sel == @selector(self) ||
1006 sel == @selector(class) ||
1007 sel == @selector(isKindOfClass:) ||
1008 sel == @selector(respondsToSelector:);
1009 }
1010 static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
1011 SEL sels[5] = {
1012 @selector(new),
1013 @selector(self),
1014 @selector(class),
1015 @selector(isKindOfClass:),
1016 @selector(respondsToSelector:)
1017 };
1018 return method_lists_contains_any(mlists, end, sels, 5);
1019 }
1020 };
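
/*
 * What these scanners ultimately detect (illustrative only; myAllocWithZone
 * and the helper are hypothetical): replacing the IMP of a tracked selector,
 * e.g. +allocWithZone: on NSObject, is expected to reach
 * adjustCustomFlagsForMethodChange() and mark the affected classes as
 * "custom", so the optimized fast paths fall back to ordinary messaging.
 */
#if 0  // not part of the build
static id myAllocWithZone(id self, SEL _cmd, void *zone)
{
    (void)_cmd; (void)zone;
    return class_createInstance((Class)self, 0);  // crude stand-in
}
static void swizzleAllocWithZone_sketch(void)
{
    Method m = class_getClassMethod(objc_getClass("NSObject"),
                                    sel_registerName("allocWithZone:"));
    method_setImplementation(m, (IMP)myAllocWithZone);
}
#endif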
1021
1022 class category_list : nocopy_t {
1023 union {
1024 locstamped_category_t lc;
1025 struct {
1026 locstamped_category_t *array;
1027 // these bitfields alias locstamped_category_t::hi, an aligned pointer,
1028 // so is_array reads as 0 in the single-category (non-array) case
1029 uint32_t is_array : 1;
1030 uint32_t count : 31;
1031 uint32_t size : 32;
1032 };
1033 } _u;
1034
1035 public:
1036 category_list() : _u{{nullptr, nullptr}} { }
1037 category_list(locstamped_category_t lc) : _u{{lc}} { }
1038 category_list(category_list &&other) : category_list() {
1039 std::swap(_u, other._u);
1040 }
1041 ~category_list()
1042 {
1043 if (_u.is_array) {
1044 free(_u.array);
1045 }
1046 }
1047
1048 uint32_t count() const
1049 {
1050 if (_u.is_array) return _u.count;
1051 return _u.lc.cat ? 1 : 0;
1052 }
1053
1054 uint32_t arrayByteSize(uint32_t size) const
1055 {
1056 return sizeof(locstamped_category_t) * size;
1057 }
1058
1059 const locstamped_category_t *array() const
1060 {
1061 return _u.is_array ? _u.array : &_u.lc;
1062 }
1063
1064 void append(locstamped_category_t lc)
1065 {
1066 if (_u.is_array) {
1067 if (_u.count == _u.size) {
1068 // Have a typical malloc growth:
1069 // - size <= 8: grow by 2
1070 // - size <= 16: grow by 4
1071 // - size <= 32: grow by 8
1072 // ... etc
1073 _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
1074 _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
1075 }
1076 _u.array[_u.count++] = lc;
1077 } else if (_u.lc.cat == NULL) {
1078 _u.lc = lc;
1079 } else {
1080 locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
1081 arr[0] = _u.lc;
1082 arr[1] = lc;
1083
1084 _u.array = arr;
1085 _u.is_array = true;
1086 _u.count = 2;
1087 _u.size = 2;
1088 }
1089 }
1090
1091 void erase(category_t *cat)
1092 {
1093 if (_u.is_array) {
1094 for (int i = 0; i < _u.count; i++) {
1095 if (_u.array[i].cat == cat) {
1096 // shift entries to preserve list order
1097 memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
_u.count--;  // keep count in sync so the stale last entry is not reattached
1098 return;
1099 }
1100 }
1101 } else if (_u.lc.cat == cat) {
1102 _u.lc.cat = NULL;
1103 _u.lc.hi = NULL;
1104 }
1105 }
1106 };
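
/*
 * A sketch of the inline-to-array transition (illustrative only; the helper
 * is hypothetical): the first category is stored inline in _u.lc; the second
 * append spills to a malloc'd array and sets is_array; later appends grow the
 * array as described in the growth comment inside append().
 */
#if 0  // not part of the build
static void categoryListGrowth_sketch(locstamped_category_t a,
                                      locstamped_category_t b)
{
    category_list list;
    list.append(a);            // inline: count() == 1, no allocation
    list.append(b);            // spills to an array: count() == 2
    ASSERT(list.count() == 2);
}
#endif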
1107
1108 class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
1109 {
1110 public:
1111 void addForClass(locstamped_category_t lc, Class cls)
1112 {
1113 runtimeLock.assertLocked();
1114
1115 if (slowpath(PrintConnecting)) {
1116 _objc_inform("CLASS: found category %c%s(%s)",
1117 cls->isMetaClass() ? '+' : '-',
1118 cls->nameForLogging(), lc.cat->name);
1119 }
1120
1121 auto result = get().try_emplace(cls, lc);
1122 if (!result.second) {
1123 result.first->second.append(lc);
1124 }
1125 }
1126
1127 void attachToClass(Class cls, Class previously, int flags)
1128 {
1129 runtimeLock.assertLocked();
1130 ASSERT((flags & ATTACH_CLASS) ||
1131 (flags & ATTACH_METACLASS) ||
1132 (flags & ATTACH_CLASS_AND_METACLASS));
1133
1134 auto &map = get();
1135 auto it = map.find(previously);
1136
1137 if (it != map.end()) {
1138 category_list &list = it->second;
1139 if (flags & ATTACH_CLASS_AND_METACLASS) {
1140 int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
1141 attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
1142 attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
1143 } else {
1144 attachCategories(cls, list.array(), list.count(), flags);
1145 }
1146 map.erase(it);
1147 }
1148 }
1149
1150 void eraseCategoryForClass(category_t *cat, Class cls)
1151 {
1152 runtimeLock.assertLocked();
1153
1154 auto &map = get();
1155 auto it = map.find(cls);
1156 if (it != map.end()) {
1157 category_list &list = it->second;
1158 list.erase(cat);
1159 if (list.count() == 0) {
1160 map.erase(it);
1161 }
1162 }
1163 }
1164
1165 void eraseClass(Class cls)
1166 {
1167 runtimeLock.assertLocked();
1168
1169 get().erase(cls);
1170 }
1171 };
1172
1173 static UnattachedCategories unattachedCategories;
1174
1175 } // namespace objc
1176
1177 static bool isBundleClass(Class cls)
1178 {
1179 return cls->data()->ro()->flags & RO_FROM_BUNDLE;
1180 }
1181
1182
1183 static void
1184 fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
1185 {
1186 runtimeLock.assertLocked();
1187 ASSERT(!mlist->isFixedUp());
1188
1189 // fixme lock less in attachMethodLists ?
1190 // dyld3 may have already uniqued, but not sorted, the list
1191 if (!mlist->isUniqued()) {
1192 mutex_locker_t lock(selLock);
1193
1194 // Unique selectors in list.
1195 for (auto& meth : *mlist) {
1196 const char *name = sel_cname(meth.name);
1197 meth.name = sel_registerNameNoLock(name, bundleCopy);
1198 }
1199 }
1200
1201 // Sort by selector address.
1202 if (sort) {
1203 method_t::SortBySELAddress sorter;
1204 std::stable_sort(mlist->begin(), mlist->end(), sorter);
1205 }
1206
1207 // Mark method list as uniqued and sorted
1208 mlist->setFixedUp();
1209 }
1210
1211
1212 static void
1213 prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
1214 bool baseMethods, bool methodsFromBundle)
1215 {
1216 runtimeLock.assertLocked();
1217
1218 if (addedCount == 0) return;
1219
1220 // There exist RR/AWZ/Core special cases for some classes' base methods.
1221 // But this code should never need to scan base methods for RR/AWZ/Core:
1222 // default RR/AWZ/Core cannot be set before setInitialized().
1223 // Therefore we need not handle any special cases here.
1224 if (baseMethods) {
1225 ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
1226 }
1227
1228 // Add method lists to array.
1229 // Reallocate un-fixed method lists.
1230 // The new methods are PREPENDED to the method list array.
1231
1232 for (int i = 0; i < addedCount; i++) {
1233 method_list_t *mlist = addedLists[i];
1234 ASSERT(mlist);
1235
1236 // Fixup selectors if necessary
1237 if (!mlist->isFixedUp()) {
1238 fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
1239 }
1240 }
1241
1242 // If the class is initialized, then scan for method implementations
1243 // tracked by the class's flags. If it's not initialized yet,
1244 // then objc_class::setInitialized() will take care of it.
1245 if (cls->isInitialized()) {
1246 objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1247 objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1248 objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1249 }
1250 }
1251
1252 class_rw_ext_t *
1253 class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy)
1254 {
1255 runtimeLock.assertLocked();
1256
1257 auto rwe = objc::zalloc<class_rw_ext_t>();
1258
1259 rwe->version = (ro->flags & RO_META) ? 7 : 0;
1260
1261 method_list_t *list = ro->baseMethods();
1262 if (list) {
1263 if (deepCopy) list = list->duplicate();
1264 rwe->methods.attachLists(&list, 1);
1265 }
1266
1267 // See comments in objc_duplicateClass
1268 // property lists and protocol lists historically
1269 // have not been deep-copied
1270 //
1271 // This is probably wrong and ought to be fixed some day
1272 property_list_t *proplist = ro->baseProperties;
1273 if (proplist) {
1274 rwe->properties.attachLists(&proplist, 1);
1275 }
1276
1277 protocol_list_t *protolist = ro->baseProtocols;
1278 if (protolist) {
1279 rwe->protocols.attachLists(&protolist, 1);
1280 }
1281
1282 set_ro_or_rwe(rwe, ro);
1283 return rwe;
1284 }
1285
1286 // Attach method lists and properties and protocols from categories to a class.
1287 // Assumes the categories in cats are all loaded and sorted by load order,
1288 // oldest categories first.
1289 static void
1290 attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
1291 int flags)
1292 {
1293 if (slowpath(PrintReplacedMethods)) {
1294 printReplacements(cls, cats_list, cats_count);
1295 }
1296 if (slowpath(PrintConnecting)) {
1297 _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
1298 cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
1299 cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
1300 }
1301
1302 /*
1303 * Only a few classes have more than 64 categories during launch.
1304 * This uses a little stack, and avoids malloc.
1305 *
1306 * Categories must be added in the proper order, which is back
1307 * to front. To do that with the chunking, we iterate cats_list
1308 * from front to back, build up the local buffers backwards,
1309 * and call attachLists on the chunks. attachLists prepends the
1310 * lists, so the final result is in the expected order.
1311 */
1312 constexpr uint32_t ATTACH_BUFSIZ = 64;
1313 method_list_t *mlists[ATTACH_BUFSIZ];
1314 property_list_t *proplists[ATTACH_BUFSIZ];
1315 protocol_list_t *protolists[ATTACH_BUFSIZ];
1316
1317 uint32_t mcount = 0;
1318 uint32_t propcount = 0;
1319 uint32_t protocount = 0;
1320 bool fromBundle = NO;
1321 bool isMeta = (flags & ATTACH_METACLASS);
1322 auto rwe = cls->data()->extAllocIfNeeded();
1323
1324 for (uint32_t i = 0; i < cats_count; i++) {
1325 auto& entry = cats_list[i];
1326
1327 method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
1328 if (mlist) {
1329 if (mcount == ATTACH_BUFSIZ) {
1330 prepareMethodLists(cls, mlists, mcount, NO, fromBundle);
1331 rwe->methods.attachLists(mlists, mcount);
1332 mcount = 0;
1333 }
1334 mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
1335 fromBundle |= entry.hi->isBundle();
1336 }
1337
1338 property_list_t *proplist =
1339 entry.cat->propertiesForMeta(isMeta, entry.hi);
1340 if (proplist) {
1341 if (propcount == ATTACH_BUFSIZ) {
1342 rwe->properties.attachLists(proplists, propcount);
1343 propcount = 0;
1344 }
1345 proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
1346 }
1347
1348 protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
1349 if (protolist) {
1350 if (protocount == ATTACH_BUFSIZ) {
1351 rwe->protocols.attachLists(protolists, protocount);
1352 protocount = 0;
1353 }
1354 protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
1355 }
1356 }
1357
1358 if (mcount > 0) {
1359 prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle);
1360 rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
1361 if (flags & ATTACH_EXISTING) flushCaches(cls);
1362 }
1363
1364 rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);
1365
1366 rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
1367 }
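
/*
 * The chunking trick used above, reduced to plain arrays (illustrative only;
 * every name here is hypothetical): iterate the source front to back, fill a
 * fixed-size buffer from its back, and flush full chunks to a prepend-style
 * sink such as attachLists(). Because each chunk is prepended, newer entries
 * still end up ahead of older ones overall.
 */
#if 0  // not part of the build
template <typename T, typename Prepend>
static void chunkedPrepend_sketch(const T *src, uint32_t n, Prepend prepend)
{
    constexpr uint32_t CHUNK = 4;
    T buf[CHUNK];
    uint32_t count = 0;
    for (uint32_t i = 0; i < n; i++) {
        if (count == CHUNK) {
            prepend(buf, count);               // flush a full chunk
            count = 0;
        }
        buf[CHUNK - ++count] = src[i];         // fill backwards
    }
    if (count) prepend(buf + CHUNK - count, count);  // flush the remainder
}
#endif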
1368
1369
1370 /***********************************************************************
1371 * methodizeClass
1372 * Fixes up cls's method list, protocol list, and property list.
1373 * Attaches any outstanding categories.
1374 * Locking: runtimeLock must be held by the caller
1375 **********************************************************************/
1376 static void methodizeClass(Class cls, Class previously)
1377 {
1378 runtimeLock.assertLocked();
1379
1380 bool isMeta = cls->isMetaClass();
1381 auto rw = cls->data();
1382 auto ro = rw->ro();
1383 auto rwe = rw->ext();
1384
1385 // Methodizing for the first time
1386 if (PrintConnecting) {
1387 _objc_inform("CLASS: methodizing class '%s' %s",
1388 cls->nameForLogging(), isMeta ? "(meta)" : "");
1389 }
1390
1391 // Install methods and properties that the class implements itself.
1392 method_list_t *list = ro->baseMethods();
1393 if (list) {
1394 prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls));
1395 if (rwe) rwe->methods.attachLists(&list, 1);
1396 }
1397
1398 property_list_t *proplist = ro->baseProperties;
1399 if (rwe && proplist) {
1400 rwe->properties.attachLists(&proplist, 1);
1401 }
1402
1403 protocol_list_t *protolist = ro->baseProtocols;
1404 if (rwe && protolist) {
1405 rwe->protocols.attachLists(&protolist, 1);
1406 }
1407
1408 // Root classes get bonus method implementations if they don't have
1409 // them already. These apply before category replacements.
1410 if (cls->isRootMetaclass()) {
1411 // root metaclass
1412 addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
1413 }
1414
1415 // Attach categories.
1416 if (previously) {
1417 if (isMeta) {
1418 objc::unattachedCategories.attachToClass(cls, previously,
1419 ATTACH_METACLASS);
1420 } else {
1421 // When a class relocates, categories with class methods
1422 // may be registered on the class itself rather than on
1423 // the metaclass. Tell attachToClass to look for those.
1424 objc::unattachedCategories.attachToClass(cls, previously,
1425 ATTACH_CLASS_AND_METACLASS);
1426 }
1427 }
1428 objc::unattachedCategories.attachToClass(cls, cls,
1429 isMeta ? ATTACH_METACLASS : ATTACH_CLASS);
1430
1431 #if DEBUG
1432 // Debug: sanity-check all SELs; log method list contents
1433 for (const auto& meth : rw->methods()) {
1434 if (PrintConnecting) {
1435 _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
1436 cls->nameForLogging(), sel_getName(meth.name));
1437 }
1438 ASSERT(sel_registerName(sel_getName(meth.name)) == meth.name);
1439 }
1440 #endif
1441 }
1442
1443
1444 /***********************************************************************
1445 * nonMetaClasses
1446 * Returns the secondary metaclass => class map
1447 * Used for some cases of +initialize and +resolveClassMethod:.
1448 * This map does not contain all class and metaclass pairs. It only
1449 * contains metaclasses whose classes would be in the runtime-allocated
1450 * named-class table, but are not because some other class with the same name
1451 * is in that table.
1452 * Classes with no duplicates are not included.
1453 * Classes in the preoptimized named-class table are not included.
1454 * Classes whose duplicates are in the preoptimized table are not included.
1455 * Most code should use getMaybeUnrealizedNonMetaClass()
1456 * instead of reading this table.
1457 * Locking: runtimeLock must be read- or write-locked by the caller
1458 **********************************************************************/
1459 static NXMapTable *nonmeta_class_map = nil;
1460 static NXMapTable *nonMetaClasses(void)
1461 {
1462 runtimeLock.assertLocked();
1463
1464 if (nonmeta_class_map) return nonmeta_class_map;
1465
1466 // nonmeta_class_map is typically small
1467 INIT_ONCE_PTR(nonmeta_class_map,
1468 NXCreateMapTable(NXPtrValueMapPrototype, 32),
1469 NXFreeMapTable(v));
1470
1471 return nonmeta_class_map;
1472 }
1473
1474
1475 /***********************************************************************
1476 * addNonMetaClass
1477 * Adds metacls => cls to the secondary metaclass map
1478 * Locking: runtimeLock must be held by the caller
1479 **********************************************************************/
1480 static void addNonMetaClass(Class cls)
1481 {
1482 runtimeLock.assertLocked();
1483 void *old;
1484 old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
1485
1486 ASSERT(!cls->isMetaClassMaybeUnrealized());
1487 ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
1488 ASSERT(!old);
1489 }
1490
1491
1492 static void removeNonMetaClass(Class cls)
1493 {
1494 runtimeLock.assertLocked();
1495 NXMapRemove(nonMetaClasses(), cls->ISA());
1496 }
1497
1498
1499 static bool scanMangledField(const char *&string, const char *end,
1500 const char *&field, int& length)
1501 {
1502 // Leading zero not allowed.
1503 if (*string == '0') return false;
1504
1505 length = 0;
1506 field = string;
1507 while (field < end) {
1508 char c = *field;
1509 if (!isdigit(c)) break;
1510 field++;
1511 if (__builtin_smul_overflow(length, 10, &length)) return false;
1512 if (__builtin_sadd_overflow(length, c - '0', &length)) return false;
1513 }
1514
1515 string = field + length;
1516 return length > 0 && string <= end;
1517 }
1518
1519
1520 /***********************************************************************
1521 * copySwiftV1DemangledName
1522 * Returns the pretty form of the given Swift-v1-mangled class or protocol name.
1523 * Returns nil if the string doesn't look like a mangled Swift v1 name.
1524 * The result must be freed with free().
1525 **********************************************************************/
1526 static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
1527 {
1528 if (!string) return nil;
1529
1530 // Swift mangling prefix.
1531 if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
1532 string += 4;
1533
1534 const char *end = string + strlen(string);
1535
1536 // Module name.
1537 const char *prefix;
1538 int prefixLength;
1539 if (string[0] == 's') {
1540 // "s" is the Swift module.
1541 prefix = "Swift";
1542 prefixLength = 5;
1543 string += 1;
1544 } else {
1545 if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
1546 }
1547
1548 // Class or protocol name.
1549 const char *suffix;
1550 int suffixLength;
1551 if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
1552
1553 if (isProtocol) {
1554 // Remainder must be "_".
1555 if (strcmp(string, "_") != 0) return nil;
1556 } else {
1557 // Remainder must be empty.
1558 if (string != end) return nil;
1559 }
1560
1561 char *result;
1562 asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
1563 return result;
1564 }
1565
1566
1567 /***********************************************************************
1568 * copySwiftV1MangledName
1569 * Returns the Swift 1.0 mangled form of the given class or protocol name.
1570 * Returns nil if the string doesn't look like an unmangled Swift name.
1571 * The result must be freed with free().
1572 **********************************************************************/
1573 static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
1574 {
1575 if (!string) return nil;
1576
1577 size_t dotCount = 0;
1578 size_t dotIndex;
1579 const char *s;
1580 for (s = string; *s; s++) {
1581 if (*s == '.') {
1582 dotCount++;
1583 dotIndex = s - string;
1584 }
1585 }
1586 size_t stringLength = s - string;
1587
1588 if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) {
1589 return nil;
1590 }
1591
1592 const char *prefix = string;
1593 size_t prefixLength = dotIndex;
1594 const char *suffix = string + dotIndex + 1;
1595 size_t suffixLength = stringLength - (dotIndex + 1);
1596
1597 char *name;
1598
1599 if (prefixLength == 5 && memcmp(prefix, "Swift", 5) == 0) {
1600 asprintf(&name, "_Tt%cs%zu%.*s%s",
1601 isProtocol ? 'P' : 'C',
1602 suffixLength, (int)suffixLength, suffix,
1603 isProtocol ? "_" : "");
1604 } else {
1605 asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
1606 isProtocol ? 'P' : 'C',
1607 prefixLength, (int)prefixLength, prefix,
1608 suffixLength, (int)suffixLength, suffix,
1609 isProtocol ? "_" : "");
1610 }
1611 return name;
1612 }
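
/*
 * Round-trip examples for the two helpers above (illustrative only; the
 * module and type names are hypothetical). Results are malloc'd and must be
 * freed by the caller.
 */
#if 0  // not part of the build
static void swiftV1Names_sketch(void)
{
    char *c = copySwiftV1MangledName("MyModule.MyClass");         // "_TtC8MyModule7MyClass"
    char *p = copySwiftV1MangledName("MyModule.MyProto", true);   // "_TtP8MyModule7MyProto_"
    char *d = copySwiftV1DemangledName("_TtCs6Object");           // "Swift.Object"
    free(c); free(p); free(d);
}
#endif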
1613
1614
1615 /***********************************************************************
1616 * getClassExceptSomeSwift
1617 * Looks up a class by name. The class MIGHT NOT be realized.
1618 * Demangled Swift names are recognized.
1619 * Classes known to the Swift runtime but not yet used are NOT recognized.
1620 * (such as subclasses of un-instantiated generics)
1621 * Use look_up_class() to find them as well.
1622 * Locking: runtimeLock must be read- or write-locked by the caller.
1623 **********************************************************************/
1624
1625 // This is a misnomer: gdb_objc_realized_classes is actually a list of
1626 // named classes not in the dyld shared cache, whether realized or not.
1627 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1628 uintptr_t objc_debug_realized_class_generation_count;
1629
1630 static Class getClass_impl(const char *name)
1631 {
1632 runtimeLock.assertLocked();
1633
1634 // allocated in _read_images
1635 ASSERT(gdb_objc_realized_classes);
1636
1637 // Try runtime-allocated table
1638 Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
1639 if (result) return result;
1640
1641 // Try table from dyld shared cache.
1642 // Note we do this last to handle the case where we dlopen'ed a shared cache
1643 // dylib with duplicates of classes already present in the main executable.
1644 // In that case, we put the class from the main executable in
1645 // gdb_objc_realized_classes and want to check that before considering any
1646 // newly loaded shared cache binaries.
1647 return getPreoptimizedClass(name);
1648 }
1649
1650 static Class getClassExceptSomeSwift(const char *name)
1651 {
1652 runtimeLock.assertLocked();
1653
1654 // Try name as-is
1655 Class result = getClass_impl(name);
1656 if (result) return result;
1657
1658 // Try Swift-mangled equivalent of the given name.
1659 if (char *swName = copySwiftV1MangledName(name)) {
1660 result = getClass_impl(swName);
1661 free(swName);
1662 return result;
1663 }
1664
1665 return nil;
1666 }
1667
1668
1669 /***********************************************************************
1670 * addNamedClass
1671 * Adds name => cls to the named non-meta class map.
1672 * Warns about duplicate class names and keeps the old mapping.
1673 * Locking: runtimeLock must be held by the caller
1674 **********************************************************************/
1675 static void addNamedClass(Class cls, const char *name, Class replacing = nil)
1676 {
1677 runtimeLock.assertLocked();
1678 Class old;
1679 if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
1680 inform_duplicate(name, old, cls);
1681
1682 // getMaybeUnrealizedNonMetaClass uses name lookups.
1683 // Classes not found by name lookup must be in the
1684 // secondary meta->nonmeta table.
1685 addNonMetaClass(cls);
1686 } else {
1687 NXMapInsert(gdb_objc_realized_classes, name, cls);
1688 }
1689 ASSERT(!(cls->data()->flags & RO_META));
1690
1691 // wrong: constructed classes are already realized when they get here
1692 // ASSERT(!cls->isRealized());
1693 }
1694
1695
1696 /***********************************************************************
1697 * removeNamedClass
1698 * Removes cls from the name => cls map.
1699 * Locking: runtimeLock must be held by the caller
1700 **********************************************************************/
1701 static void removeNamedClass(Class cls, const char *name)
1702 {
1703 runtimeLock.assertLocked();
1704 ASSERT(!(cls->data()->flags & RO_META));
1705 if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
1706 NXMapRemove(gdb_objc_realized_classes, name);
1707 } else {
1708 // cls has a name collision with another class - don't remove the other
1709 // but do remove cls from the secondary metaclass->class map.
1710 removeNonMetaClass(cls);
1711 }
1712 }
1713
1714
1715 /***********************************************************************
1716 * futureNamedClasses
1717 * Returns the classname => future class map for unrealized future classes.
1718 * Locking: runtimeLock must be held by the caller
1719 **********************************************************************/
1720 static NXMapTable *future_named_class_map = nil;
1721 static NXMapTable *futureNamedClasses()
1722 {
1723 runtimeLock.assertLocked();
1724
1725 if (future_named_class_map) return future_named_class_map;
1726
1727 // future_named_class_map is big enough for CF's classes and a few others
1728 future_named_class_map =
1729 NXCreateMapTable(NXStrValueMapPrototype, 32);
1730
1731 return future_named_class_map;
1732 }
1733
1734
1735 static bool haveFutureNamedClasses() {
1736 return future_named_class_map && NXCountMapTable(future_named_class_map);
1737 }
1738
1739
1740 /***********************************************************************
1741 * addFutureNamedClass
1742 * Installs cls as the class structure to use for the named class if it appears.
1743 * Locking: runtimeLock must be held by the caller
1744 **********************************************************************/
1745 static void addFutureNamedClass(const char *name, Class cls)
1746 {
1747 void *old;
1748
1749 runtimeLock.assertLocked();
1750
1751 if (PrintFuture) {
1752 _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
1753 }
1754
1755 class_rw_t *rw = objc::zalloc<class_rw_t>();
1756 class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
1757 ro->name = strdupIfMutable(name);
1758 rw->set_ro(ro);
1759 cls->setData(rw);
1760 cls->data()->flags = RO_FUTURE;
1761
1762 old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
1763 ASSERT(!old);
1764 }
1765
1766
1767 /***********************************************************************
1768 * popFutureNamedClass
1769 * Removes the named class from the unrealized future class list,
1770 * because it has been realized.
1771 * Returns nil if the name is not used by a future class.
1772 * Locking: runtimeLock must be held by the caller
1773 **********************************************************************/
1774 static Class popFutureNamedClass(const char *name)
1775 {
1776 runtimeLock.assertLocked();
1777
1778 Class cls = nil;
1779
1780 if (future_named_class_map) {
1781 cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
1782 if (cls && NXCountMapTable(future_named_class_map) == 0) {
1783 NXFreeMapTable(future_named_class_map);
1784 future_named_class_map = nil;
1785 }
1786 }
1787
1788 return cls;
1789 }
1790
1791
1792 /***********************************************************************
1793 * remappedClasses
1794 * Returns the oldClass => newClass map for realized future classes.
1795 * Returns the oldClass => nil map for ignored weak-linked classes.
1796 * Locking: runtimeLock must be read- or write-locked by the caller
1797 **********************************************************************/
1798 static objc::DenseMap<Class, Class> *remappedClasses(bool create)
1799 {
1800 static objc::LazyInitDenseMap<Class, Class> remapped_class_map;
1801
1802 runtimeLock.assertLocked();
1803
1804 // start big enough to hold CF's classes and a few others
1805 return remapped_class_map.get(create, 32);
1806 }
1807
1808
1809 /***********************************************************************
1810 * noClassesRemapped
1811 * Returns YES if no classes have been remapped
1812 * Locking: runtimeLock must be read- or write-locked by the caller
1813 **********************************************************************/
1814 static bool noClassesRemapped(void)
1815 {
1816 runtimeLock.assertLocked();
1817
1818 bool result = (remappedClasses(NO) == nil);
1819 #if DEBUG
1820 // Catch construction of an empty table, which defeats optimization.
1821 auto *map = remappedClasses(NO);
1822 if (map) ASSERT(map->size() > 0);
1823 #endif
1824 return result;
1825 }
1826
1827
1828 /***********************************************************************
1829 * addRemappedClass
1830 * newcls is a realized future class, replacing oldcls.
1831 * OR newcls is nil, replacing ignored weak-linked class oldcls.
1832 * Locking: runtimeLock must be write-locked by the caller
1833 **********************************************************************/
1834 static void addRemappedClass(Class oldcls, Class newcls)
1835 {
1836 runtimeLock.assertLocked();
1837
1838 if (PrintFuture) {
1839 _objc_inform("FUTURE: using %p instead of %p for %s",
1840 (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
1841 }
1842
1843 auto result = remappedClasses(YES)->insert({ oldcls, newcls });
1844 #if DEBUG
1845 if (!std::get<1>(result)) {
1846 // An existing mapping was overwritten. This is not allowed
1847 // unless it was to nil.
1848 auto iterator = std::get<0>(result);
1849 auto value = std::get<1>(*iterator);
1850 ASSERT(value == nil);
1851 }
1852 #else
1853 (void)result;
1854 #endif
1855 }
1856
1857
1858 /***********************************************************************
1859 * remapClass
1860 * Returns the live class pointer for cls, which may be pointing to
1861 * a class struct that has been reallocated.
1862 * Returns nil if cls is ignored because of weak linking.
1863 * Locking: runtimeLock must be read- or write-locked by the caller
1864 **********************************************************************/
1865 static Class remapClass(Class cls)
1866 {
1867 runtimeLock.assertLocked();
1868
1869 if (!cls) return nil;
1870
1871 auto *map = remappedClasses(NO);
1872 if (!map)
1873 return cls;
1874
1875 auto iterator = map->find(cls);
1876 if (iterator == map->end())
1877 return cls;
1878 return std::get<1>(*iterator);
1879 }
1880
1881 static Class remapClass(classref_t cls)
1882 {
1883 return remapClass((Class)cls);
1884 }
1885
1886 Class _class_remap(Class cls)
1887 {
1888 mutex_locker_t lock(runtimeLock);
1889 return remapClass(cls);
1890 }
1891
1892 /***********************************************************************
1893 * remapClassRef
1894 * Fix up a class ref, in case the class referenced has been reallocated
1895 * or is an ignored weak-linked class.
1896 * Locking: runtimeLock must be read- or write-locked by the caller
1897 **********************************************************************/
1898 static void remapClassRef(Class *clsref)
1899 {
1900 runtimeLock.assertLocked();
1901
1902 Class newcls = remapClass(*clsref);
1903 if (*clsref != newcls) *clsref = newcls;
1904 }
1905
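/* Editorial sketch: the typical consumer of the remap table. When an image
   is read, compiler-emitted class refs are passed through remapClass() so a
   ref to an ignored weak-linked class becomes nil and a ref to a reallocated
   (future) class points at its replacement. Roughly, as _read_images() does
   for each header:

   size_t count;
   Class *refs = _getObjc2ClassRefs(hi, &count);
   for (size_t i = 0; i < count; i++) {
       remapClassRef(&refs[i]);
   }
*/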
1906
1907 _Nullable Class
1908 objc_loadClassref(_Nullable Class * _Nonnull clsref)
1909 {
1910 auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);
1911
1912 uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
1913 if (fastpath((cls & 1) == 0))
1914 return (Class)cls;
1915
1916 auto stub = (stub_class_t *)(cls & ~1ULL);
1917 Class initialized = stub->initializer((Class)stub, nil);
1918 atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
1919 return initialized;
1920 }
1921
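/* Editorial note: a classref either holds a Class pointer directly (low bit
   clear, the fast path above) or, for Swift stub classes, a pointer to a
   stub_class_t with its low bit set. Callers therefore load such refs
   through objc_loadClassref() instead of dereferencing them:

   extern Class exampleClassref;                    // hypothetical stub classref
   Class cls = objc_loadClassref(&exampleClassref); // runs the stub initializer once
   id obj = [[cls alloc] init];                     // now an ordinary class
*/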
1922
1923 /***********************************************************************
1924 * getMaybeUnrealizedNonMetaClass
1925 * Return the ordinary class for this class or metaclass.
1926 * `inst` is an instance of `cls` or a subclass thereof, or nil.
1927 * Non-nil inst is faster.
1928 * The result may be unrealized.
1929 * Used by +initialize.
1930 * Locking: runtimeLock must be read- or write-locked by the caller
1931 **********************************************************************/
1932 static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
1933 {
1934 static int total, named, secondary, sharedcache, dyld3;
1935 runtimeLock.assertLocked();
1936 ASSERT(metacls->isRealized());
1937
1938 total++;
1939
1940 // return cls itself if it's already a non-meta class
1941 if (!metacls->isMetaClass()) return metacls;
1942
1943 // metacls really is a metaclass
1944 // which means inst (if any) is a class
1945
1946 // special case for root metaclass
1947 // where inst == inst->ISA() == metacls is possible
1948 if (metacls->ISA() == metacls) {
1949 Class cls = metacls->superclass;
1950 ASSERT(cls->isRealized());
1951 ASSERT(!cls->isMetaClass());
1952 ASSERT(cls->ISA() == metacls);
1953 if (cls->ISA() == metacls) return cls;
1954 }
1955
1956 // use inst if available
1957 if (inst) {
1958 Class cls = remapClass((Class)inst);
1959 // cls may be a subclass - find the real class for metacls
1960 // fixme this probably stops working once Swift starts
1961 // reallocating classes if cls is unrealized.
1962 while (cls) {
1963 if (cls->ISA() == metacls) {
1964 ASSERT(!cls->isMetaClassMaybeUnrealized());
1965 return cls;
1966 }
1967 cls = cls->superclass;
1968 }
1969 #if DEBUG
1970 _objc_fatal("cls is not an instance of metacls");
1971 #else
1972 // release build: be forgiving and fall through to slow lookups
1973 #endif
1974 }
1975
1976 // try name lookup
1977 {
1978 Class cls = getClassExceptSomeSwift(metacls->mangledName());
1979 if (cls && cls->ISA() == metacls) {
1980 named++;
1981 if (PrintInitializing) {
1982 _objc_inform("INITIALIZE: %d/%d (%g%%) "
1983 "successful by-name metaclass lookups",
1984 named, total, named*100.0/total);
1985 }
1986 return cls;
1987 }
1988 }
1989
1990 // try secondary table
1991 {
1992 Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
1993 if (cls) {
1994 secondary++;
1995 if (PrintInitializing) {
1996 _objc_inform("INITIALIZE: %d/%d (%g%%) "
1997 "successful secondary metaclass lookups",
1998 secondary, total, secondary*100.0/total);
1999 }
2000
2001 ASSERT(cls->ISA() == metacls);
2002 return cls;
2003 }
2004 }
2005
2006 // try the dyld closure table
2007 if (isPreoptimized())
2008 {
2009 // Try table from dyld closure first. It was built to ignore the dupes it
2010 // knows will come from the cache, so anything left in here was there when
2011 // we launched
2012 Class cls = nil;
2013 // Note: we have to pass the lambda directly here; otherwise we would end up
2014 // trying to copy and autorelease it as a block.
2015 _dyld_for_each_objc_class(metacls->mangledName(),
2016 [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
2017 // Skip images which aren't loaded. This supports the case where dyld
2018 // might soft-link an image from the main binary, so it's possibly not
2019 // loaded yet.
2020 if (!isLoaded)
2021 return;
2022
2023 // Found a loaded image with this class name, so check if it's the right one
2024 Class result = (Class)classPtr;
2025 if (result->ISA() == metacls) {
2026 cls = result;
2027 *stop = true;
2028 }
2029 });
2030
2031 if (cls) {
2032 dyld3++;
2033 if (PrintInitializing) {
2034 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2035 "successful dyld closure metaclass lookups",
2036 dyld3, total, dyld3*100.0/total);
2037 }
2038
2039 return cls;
2040 }
2041 }
2042
2043 // try any duplicates in the dyld shared cache
2044 {
2045 Class cls = nil;
2046
2047 int count;
2048 Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count);
2049 if (classes) {
2050 for (int i = 0; i < count; i++) {
2051 if (classes[i]->ISA() == metacls) {
2052 cls = classes[i];
2053 break;
2054 }
2055 }
2056 free(classes);
2057 }
2058
2059 if (cls) {
2060 sharedcache++;
2061 if (PrintInitializing) {
2062 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2063 "successful shared cache metaclass lookups",
2064 sharedcache, total, sharedcache*100.0/total);
2065 }
2066
2067 return cls;
2068 }
2069 }
2070
2071 _objc_fatal("no class for metaclass %p", (void*)metacls);
2072 }
2073
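/* Editorial example: this lookup matters when the very first message sent to
   a class is a class method, so method lookup runs on the metaclass but
   +initialize must still be delivered to the ordinary class object:

   [ExampleClass sharedInstance];   // hypothetical first use of ExampleClass
   //  -> lookup runs with cls == ExampleClass's metaclass
   //  -> getMaybeUnrealizedNonMetaClass(metacls, inst) recovers ExampleClass
   //     via the fast cases above, or via the name / secondary / dyld closure /
   //     shared cache tables when necessary
   //  -> initializeNonMetaClass() then runs +initialize exactly once
*/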
2074
2075 /***********************************************************************
2076 * class_initialize. Send the '+initialize' message on demand to any
2077 * uninitialized class. Force initialization of superclasses first.
2078 * inst is an instance of cls, or nil. Non-nil is better for performance.
2079 * Returns the class pointer. If the class was unrealized then
2080 * it may be reallocated.
2081 * Locking:
2082 * runtimeLock must be held by the caller
2083 * This function may drop the lock.
2084 * On exit the lock is re-acquired or dropped as requested by leaveLocked.
2085 **********************************************************************/
2086 static Class initializeAndMaybeRelock(Class cls, id inst,
2087 mutex_t& lock, bool leaveLocked)
2088 {
2089 lock.assertLocked();
2090 ASSERT(cls->isRealized());
2091
2092 if (cls->isInitialized()) {
2093 if (!leaveLocked) lock.unlock();
2094 return cls;
2095 }
2096
2097 // Find the non-meta class for cls, if it is not already one.
2098 // The +initialize message is sent to the non-meta class object.
2099 Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
2100
2101 // Realize the non-meta class if necessary.
2102 if (nonmeta->isRealized()) {
2103 // nonmeta is cls, which was already realized
2104 // OR nonmeta is distinct, but is already realized
2105 // - nothing else to do
2106 lock.unlock();
2107 } else {
2108 nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
2109 // runtimeLock is now unlocked
2110 // fixme Swift can't relocate the class today,
2111 // but someday it will:
2112 cls = object_getClass(nonmeta);
2113 }
2114
2115 // runtimeLock is now unlocked, for +initialize dispatch
2116 ASSERT(nonmeta->isRealized());
2117 initializeNonMetaClass(nonmeta);
2118
2119 if (leaveLocked) runtimeLock.lock();
2120 return cls;
2121 }
2122
2123 // Locking: acquires runtimeLock
2124 Class class_initialize(Class cls, id obj)
2125 {
2126 runtimeLock.lock();
2127 return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
2128 }
2129
2130 // Locking: caller must hold runtimeLock; this may drop and re-acquire it
2131 static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
2132 {
2133 return initializeAndMaybeRelock(cls, obj, lock, true);
2134 }
2135
2136
2137 /***********************************************************************
2138 * addRootClass
2139 * Adds cls as a new realized root class.
2140 * Locking: runtimeLock must be held by the caller.
2141 **********************************************************************/
2142 static void addRootClass(Class cls)
2143 {
2144 runtimeLock.assertLocked();
2145
2146 ASSERT(cls->isRealized());
2147
2148 objc_debug_realized_class_generation_count++;
2149
2150 cls->data()->nextSiblingClass = _firstRealizedClass;
2151 _firstRealizedClass = cls;
2152 }
2153
2154 static void removeRootClass(Class cls)
2155 {
2156 runtimeLock.assertLocked();
2157
2158 objc_debug_realized_class_generation_count++;
2159
2160 Class *classp;
2161 for (classp = &_firstRealizedClass;
2162 *classp != cls;
2163 classp = &(*classp)->data()->nextSiblingClass)
2164 { }
2165
2166 *classp = (*classp)->data()->nextSiblingClass;
2167 }
2168
2169
2170 /***********************************************************************
2171 * addSubclass
2172 * Adds subcls as a subclass of supercls.
2173 * Locking: runtimeLock must be held by the caller.
2174 **********************************************************************/
2175 static void addSubclass(Class supercls, Class subcls)
2176 {
2177 runtimeLock.assertLocked();
2178
2179 if (supercls && subcls) {
2180 ASSERT(supercls->isRealized());
2181 ASSERT(subcls->isRealized());
2182
2183 objc_debug_realized_class_generation_count++;
2184
2185 subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
2186 supercls->data()->firstSubclass = subcls;
2187
2188 if (supercls->hasCxxCtor()) {
2189 subcls->setHasCxxCtor();
2190 }
2191
2192 if (supercls->hasCxxDtor()) {
2193 subcls->setHasCxxDtor();
2194 }
2195
2196 objc::AWZScanner::scanAddedSubClass(subcls, supercls);
2197 objc::RRScanner::scanAddedSubClass(subcls, supercls);
2198 objc::CoreScanner::scanAddedSubClass(subcls, supercls);
2199
2200 // Special case: instancesRequireRawIsa does not propagate
2201 // from root class to root metaclass
2202 if (supercls->instancesRequireRawIsa() && supercls->superclass) {
2203 subcls->setInstancesRequireRawIsaRecursively(true);
2204 }
2205 }
2206 }
2207
2208
2209 /***********************************************************************
2210 * removeSubclass
2211 * Removes subcls as a subclass of supercls.
2212 * Locking: runtimeLock must be held by the caller.
2213 **********************************************************************/
2214 static void removeSubclass(Class supercls, Class subcls)
2215 {
2216 runtimeLock.assertLocked();
2217 ASSERT(supercls->isRealized());
2218 ASSERT(subcls->isRealized());
2219 ASSERT(subcls->superclass == supercls);
2220
2221 objc_debug_realized_class_generation_count++;
2222
2223 Class *cp;
2224 for (cp = &supercls->data()->firstSubclass;
2225 *cp && *cp != subcls;
2226 cp = &(*cp)->data()->nextSiblingClass)
2227 ;
2228 ASSERT(*cp == subcls);
2229 *cp = subcls->data()->nextSiblingClass;
2230 }
2231
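/* Editorial sketch: firstSubclass / nextSiblingClass, maintained by the two
   functions above, form an intrusive tree over all realized classes. A
   depth-first walk of one class and its realized subclasses (the shape that
   foreach_realized_class_and_subclass() provides to flushCaches() and the
   scanners) looks roughly like this:

   static void exampleWalkSubclasses(Class top)   // illustrative only
   {
       runtimeLock.assertLocked();
       Class c = top;
       while (c) {
           // ... visit c here ...
           if (c->data()->firstSubclass) {
               c = c->data()->firstSubclass;                  // descend
           } else {
               while (c != top && !c->data()->nextSiblingClass) {
                   c = c->superclass;                         // backtrack
               }
               c = (c == top) ? nil : c->data()->nextSiblingClass;
           }
       }
   }
*/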
2232
2233
2234 /***********************************************************************
2235 * protocols
2236 * Returns the protocol name => protocol map for protocols.
2237 * Locking: runtimeLock must be read- or write-locked by the caller
2238 **********************************************************************/
2239 static NXMapTable *protocols(void)
2240 {
2241 static NXMapTable *protocol_map = nil;
2242
2243 runtimeLock.assertLocked();
2244
2245 INIT_ONCE_PTR(protocol_map,
2246 NXCreateMapTable(NXStrValueMapPrototype, 16),
2247 NXFreeMapTable(v) );
2248
2249 return protocol_map;
2250 }
2251
2252
2253 /***********************************************************************
2254 * getProtocol
2255 * Looks up a protocol by name. Demangled Swift names are recognized.
2256 * Locking: runtimeLock must be read- or write-locked by the caller.
2257 **********************************************************************/
2258 static NEVER_INLINE Protocol *getProtocol(const char *name)
2259 {
2260 runtimeLock.assertLocked();
2261
2262 // Try name as-is.
2263 Protocol *result = (Protocol *)NXMapGet(protocols(), name);
2264 if (result) return result;
2265
2266 // Try Swift-mangled equivalent of the given name.
2267 if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
2268 result = (Protocol *)NXMapGet(protocols(), swName);
2269 free(swName);
2270 if (result) return result;
2271 }
2272
2273 // Try table from dyld shared cache
2274 // Temporarily check that we are using the new table. Eventually this check
2275 // will always be true.
2276 // FIXME: Remove this check when we can
2277 if (sharedCacheSupportsProtocolRoots()) {
2278 result = getPreoptimizedProtocol(name);
2279 if (result) return result;
2280 }
2281
2282 return nil;
2283 }
2284
2285
2286 /***********************************************************************
2287 * remapProtocol
2288 * Returns the live protocol pointer for proto, which may be pointing to
2289 * a protocol struct that has been reallocated.
2290 * Locking: runtimeLock must be read- or write-locked by the caller
2291 **********************************************************************/
2292 static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
2293 {
2294 runtimeLock.assertLocked();
2295
2296 // Protocols in shared cache images have a canonical bit to mark that they
2297 // are the definition we should use
2298 if (((protocol_t *)proto)->isCanonical())
2299 return (protocol_t *)proto;
2300
2301 protocol_t *newproto = (protocol_t *)
2302 getProtocol(((protocol_t *)proto)->mangledName);
2303 return newproto ? newproto : (protocol_t *)proto;
2304 }
2305
2306
2307 /***********************************************************************
2308 * remapProtocolRef
2309 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
2310 * Locking: runtimeLock must be read- or write-locked by the caller
2311 **********************************************************************/
2312 static size_t UnfixedProtocolReferences;
2313 static void remapProtocolRef(protocol_t **protoref)
2314 {
2315 runtimeLock.assertLocked();
2316
2317 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
2318 if (*protoref != newproto) {
2319 *protoref = newproto;
2320 UnfixedProtocolReferences++;
2321 }
2322 }
2323
2324
2325 /***********************************************************************
2326 * moveIvars
2327 * Slides a class's ivars to accommodate the given superclass size.
2328 * Ivars are NOT compacted to compensate for a superclass that shrank.
2329 * Locking: runtimeLock must be held by the caller.
2330 **********************************************************************/
2331 static void moveIvars(class_ro_t *ro, uint32_t superSize)
2332 {
2333 runtimeLock.assertLocked();
2334
2335 uint32_t diff;
2336
2337 ASSERT(superSize > ro->instanceStart);
2338 diff = superSize - ro->instanceStart;
2339
2340 if (ro->ivars) {
2341 // Find maximum alignment in this class's ivars
2342 uint32_t maxAlignment = 1;
2343 for (const auto& ivar : *ro->ivars) {
2344 if (!ivar.offset) continue; // anonymous bitfield
2345
2346 uint32_t alignment = ivar.alignment();
2347 if (alignment > maxAlignment) maxAlignment = alignment;
2348 }
2349
2350 // Compute a slide value that preserves that alignment
2351 uint32_t alignMask = maxAlignment - 1;
2352 diff = (diff + alignMask) & ~alignMask;
2353
2354 // Slide all of this class's ivars en masse
2355 for (const auto& ivar : *ro->ivars) {
2356 if (!ivar.offset) continue; // anonymous bitfield
2357
2358 uint32_t oldOffset = (uint32_t)*ivar.offset;
2359 uint32_t newOffset = oldOffset + diff;
2360 *ivar.offset = newOffset;
2361
2362 if (PrintIvars) {
2363 _objc_inform("IVARS: offset %u -> %u for %s "
2364 "(size %u, align %u)",
2365 oldOffset, newOffset, ivar.name,
2366 ivar.size, ivar.alignment());
2367 }
2368 }
2369 }
2370
2371 *(uint32_t *)&ro->instanceStart += diff;
2372 *(uint32_t *)&ro->instanceSize += diff;
2373 }
2374
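/* Worked example (editorial): suppose the superclass grew from 8 to 24 bytes
   and this class declared   id _a;   long long _b;   at offsets 8 and 16,
   with instanceStart == 8. Then diff = 24 - 8 = 16; the maximum ivar
   alignment is 8, so the rounded slide is still 16, and moveIvars() writes:

       *_a.offset:    8 -> 24
       *_b.offset:   16 -> 32
       instanceStart: 8 -> 24,  instanceSize grows by 16 as well.

   Anonymous bitfields (nil ivar.offset) keep their compiler-assigned
   placement and are skipped, exactly as in the loops above. */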
2375
2376 static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
2377 {
2378 class_rw_t *rw = cls->data();
2379
2380 ASSERT(supercls);
2381 ASSERT(!cls->isMetaClass());
2382
2383 /* debug: print them all before sliding
2384 if (ro->ivars) {
2385 for (const auto& ivar : *ro->ivars) {
2386 if (!ivar.offset) continue; // anonymous bitfield
2387
2388 _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
2389 ro->name, ivar.name,
2390 *ivar.offset, ivar.size, ivar.alignment());
2391 }
2392 }
2393 */
2394
2395 // Non-fragile ivars - reconcile this class with its superclass
2396 const class_ro_t *super_ro = supercls->data()->ro();
2397
2398 if (DebugNonFragileIvars) {
2399 // Debugging: Force non-fragile ivars to slide.
2400 // Intended to find compiler, runtime, and program bugs.
2401 // If it fails with this and works without, you have a problem.
2402
2403 // Operation: Reset everything to 0 + misalignment.
2404 // Then force the normal sliding logic to push everything back.
2405
2406 // Exceptions: root classes, metaclasses, *NSCF* classes,
2407 // __CF* classes, NSConstantString, NSSimpleCString
2408
2409 // (already know it's not root because supercls != nil)
2410 const char *clsname = cls->mangledName();
2411 if (!strstr(clsname, "NSCF") &&
2412 0 != strncmp(clsname, "__CF", 4) &&
2413 0 != strcmp(clsname, "NSConstantString") &&
2414 0 != strcmp(clsname, "NSSimpleCString"))
2415 {
2416 uint32_t oldStart = ro->instanceStart;
2417 class_ro_t *ro_w = make_ro_writeable(rw);
2418 ro = rw->ro();
2419
2420 // Find max ivar alignment in class.
2421 // default to word size to simplify ivar update
2422 uint32_t alignment = 1<<WORD_SHIFT;
2423 if (ro->ivars) {
2424 for (const auto& ivar : *ro->ivars) {
2425 if (ivar.alignment() > alignment) {
2426 alignment = ivar.alignment();
2427 }
2428 }
2429 }
2430 uint32_t misalignment = ro->instanceStart % alignment;
2431 uint32_t delta = ro->instanceStart - misalignment;
2432 ro_w->instanceStart = misalignment;
2433 ro_w->instanceSize -= delta;
2434
2435 if (PrintIvars) {
2436 _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
2437 "to slide (instanceStart %zu -> %zu)",
2438 cls->nameForLogging(), (size_t)oldStart,
2439 (size_t)ro->instanceStart);
2440 }
2441
2442 if (ro->ivars) {
2443 for (const auto& ivar : *ro->ivars) {
2444 if (!ivar.offset) continue; // anonymous bitfield
2445 *ivar.offset -= delta;
2446 }
2447 }
2448 }
2449 }
2450
2451 if (ro->instanceStart >= super_ro->instanceSize) {
2452 // Superclass has not overgrown its space. We're done here.
2453 return;
2454 }
2455 // fixme can optimize for "class has no new ivars", etc
2456
2457 if (ro->instanceStart < super_ro->instanceSize) {
2458 // Superclass has changed size. This class's ivars must move.
2459 // Also slide layout bits in parallel.
2460 // This code is incapable of compacting the subclass to
2461 // compensate for a superclass that shrank, so don't do that.
2462 if (PrintIvars) {
2463 _objc_inform("IVARS: sliding ivars for class %s "
2464 "(superclass was %u bytes, now %u)",
2465 cls->nameForLogging(), ro->instanceStart,
2466 super_ro->instanceSize);
2467 }
2468 class_ro_t *ro_w = make_ro_writeable(rw);
2469 ro = rw->ro();
2470 moveIvars(ro_w, super_ro->instanceSize);
2471 gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
2472 }
2473 }
2474
2475
2476 /***********************************************************************
2477 * realizeClassWithoutSwift
2478 * Performs first-time initialization on class cls,
2479 * including allocating its read-write data.
2480 * Does not perform any Swift-side initialization.
2481 * Returns the real class structure for the class.
2482 * Locking: runtimeLock must be write-locked by the caller
2483 **********************************************************************/
2484 static Class realizeClassWithoutSwift(Class cls, Class previously)
2485 {
2486 runtimeLock.assertLocked();
2487
2488 class_rw_t *rw;
2489 Class supercls;
2490 Class metacls;
2491
2492 if (!cls) return nil;
2493 if (cls->isRealized()) return cls;
2494 ASSERT(cls == remapClass(cls));
2495
2496 // fixme verify class is not in an un-dlopened part of the shared cache?
2497
2498 auto ro = (const class_ro_t *)cls->data();
2499 auto isMeta = ro->flags & RO_META;
2500 if (ro->flags & RO_FUTURE) {
2501 // This was a future class. rw data is already allocated.
2502 rw = cls->data();
2503 ro = cls->data()->ro();
2504 ASSERT(!isMeta);
2505 cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
2506 } else {
2507 // Normal class. Allocate writeable class data.
2508 rw = objc::zalloc<class_rw_t>();
2509 rw->set_ro(ro);
2510 rw->flags = RW_REALIZED|RW_REALIZING|isMeta;
2511 cls->setData(rw);
2512 }
2513
2514 #if FAST_CACHE_META
2515 if (isMeta) cls->cache.setBit(FAST_CACHE_META);
2516 #endif
2517
2518 // Choose an index for this class.
2519 // Sets cls->instancesRequireRawIsa if no more indexes are available
2520 cls->chooseClassArrayIndex();
2521
2522 if (PrintConnecting) {
2523 _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
2524 cls->nameForLogging(), isMeta ? " (meta)" : "",
2525 (void*)cls, ro, cls->classArrayIndex(),
2526 cls->isSwiftStable() ? "(swift)" : "",
2527 cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
2528 }
2529
2530 // Realize superclass and metaclass, if they aren't already.
2531 // This needs to be done after RW_REALIZED is set above, for root classes.
2532 // This needs to be done after class index is chosen, for root metaclasses.
2533 // This assumes that none of those classes have Swift contents,
2534 // or that Swift's initializers have already been called.
2535 // fixme that assumption will be wrong if we add support
2536 // for ObjC subclasses of Swift classes.
2537 supercls = realizeClassWithoutSwift(remapClass(cls->superclass), nil);
2538 metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);
2539
2540 #if SUPPORT_NONPOINTER_ISA
2541 if (isMeta) {
2542 // Metaclasses do not need any features from non-pointer isa.
2543 // This allows for a fastpath for classes in objc_retain/objc_release.
2544 cls->setInstancesRequireRawIsa();
2545 } else {
2546 // Disable non-pointer isa for some classes and/or platforms.
2547 // Set instancesRequireRawIsa.
2548 bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
2549 bool rawIsaIsInherited = false;
2550 static bool hackedDispatch = false;
2551
2552 if (DisableNonpointerIsa) {
2553 // Non-pointer isa disabled by environment or app SDK version
2554 instancesRequireRawIsa = true;
2555 }
2556 else if (!hackedDispatch && 0 == strcmp(ro->name, "OS_object"))
2557 {
2558 // hack for libdispatch et al - isa also acts as vtable pointer
2559 hackedDispatch = true;
2560 instancesRequireRawIsa = true;
2561 }
2562 else if (supercls && supercls->superclass &&
2563 supercls->instancesRequireRawIsa())
2564 {
2565 // This is also propagated by addSubclass()
2566 // but nonpointer isa setup needs it earlier.
2567 // Special case: instancesRequireRawIsa does not propagate
2568 // from root class to root metaclass
2569 instancesRequireRawIsa = true;
2570 rawIsaIsInherited = true;
2571 }
2572
2573 if (instancesRequireRawIsa) {
2574 cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
2575 }
2576 }
2577 // SUPPORT_NONPOINTER_ISA
2578 #endif
2579
2580 // Update superclass and metaclass in case of remapping
2581 cls->superclass = supercls;
2582 cls->initClassIsa(metacls);
2583
2584 // Reconcile instance variable offsets / layout.
2585 // This may reallocate class_ro_t, updating our ro variable.
2586 if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro);
2587
2588 // Set fastInstanceSize if it wasn't set already.
2589 cls->setInstanceSize(ro->instanceSize);
2590
2591 // Copy some flags from ro to rw
2592 if (ro->flags & RO_HAS_CXX_STRUCTORS) {
2593 cls->setHasCxxDtor();
2594 if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
2595 cls->setHasCxxCtor();
2596 }
2597 }
2598
2599 // Propagate the associated objects forbidden flag from ro or from
2600 // the superclass.
2601 if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) ||
2602 (supercls && supercls->forbidsAssociatedObjects()))
2603 {
2604 rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
2605 }
2606
2607 // Connect this class to its superclass's subclass lists
2608 if (supercls) {
2609 addSubclass(supercls, cls);
2610 } else {
2611 addRootClass(cls);
2612 }
2613
2614 // Attach categories
2615 methodizeClass(cls, previously);
2616
2617 return cls;
2618 }
2619
2620
2621 /***********************************************************************
2622 * _objc_realizeClassFromSwift
2623 * Called by Swift when it needs the ObjC part of a class to be realized.
2624 * There are four cases:
2625 * 1. cls != nil; previously == cls
2626 * Class cls is being realized in place
2627 * 2. cls != nil; previously == nil
2628 * Class cls is being constructed at runtime
2629 * 3. cls != nil; previously != cls
2630 * The class that was at previously has been reallocated to cls
2631 * 4. cls == nil, previously != nil
2632 * The class at previously is hereby disavowed
2633 *
2634 * Only variants #1 and #2 are supported today.
2635 *
2636 * Locking: acquires runtimeLock
2637 **********************************************************************/
2638 Class _objc_realizeClassFromSwift(Class cls, void *previously)
2639 {
2640 if (cls) {
2641 if (previously && previously != (void*)cls) {
2642 // #3: relocation
2643 mutex_locker_t lock(runtimeLock);
2644 addRemappedClass((Class)previously, cls);
2645 addClassTableEntry(cls);
2646 addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
2647 return realizeClassWithoutSwift(cls, (Class)previously);
2648 } else {
2649 // #1 and #2: realization in place, or new class
2650 mutex_locker_t lock(runtimeLock);
2651
2652 if (!previously) {
2653 // #2: new class
2654 cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
2655 }
2656
2657 // #1 and #2: realization in place, or new class
2658 // We ignore the Swift metadata initializer callback.
2659 // We assume that's all handled since we're being called from Swift.
2660 return realizeClassWithoutSwift(cls, nil);
2661 }
2662 }
2663 else {
2664 // #4: disavowal
2665 // In the future this will mean remapping the old address to nil
2666 // and if necessary removing the old address from any other tables.
2667 _objc_fatal("Swift requested that class %p be ignored, "
2668 "but libobjc does not support that.", previously);
2669 }
2670 }
2671
2672 /***********************************************************************
2673 * realizeSwiftClass
2674 * Performs first-time initialization on class cls,
2675 * including allocating its read-write data,
2676 * and any Swift-side initialization.
2677 * Returns the real class structure for the class.
2678 * Locking: acquires runtimeLock indirectly
2679 **********************************************************************/
2680 static Class realizeSwiftClass(Class cls)
2681 {
2682 runtimeLock.assertUnlocked();
2683
2684 // Some assumptions:
2685 // * Metaclasses never have a Swift initializer.
2686 // * Root classes never have a Swift initializer.
2687 // (These two together avoid initialization order problems at the root.)
2688 // * Unrealized non-Swift classes have no Swift ancestry.
2689 // * Unrealized Swift classes with no initializer have no ancestry that
2690 // does have the initializer.
2691 // (These two together mean we don't need to scan superclasses here
2692 // and we don't need to worry about Swift superclasses inside
2693 // realizeClassWithoutSwift()).
2694
2695 // fixme some of these assumptions will be wrong
2696 // if we add support for ObjC subclasses of Swift classes.
2697
2698 #if DEBUG
2699 runtimeLock.lock();
2700 ASSERT(remapClass(cls) == cls);
2701 ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
2702 ASSERT(!cls->isMetaClassMaybeUnrealized());
2703 ASSERT(cls->superclass);
2704 runtimeLock.unlock();
2705 #endif
2706
2707 // Look for a Swift metadata initialization function
2708 // installed on the class. If it is present we call it.
2709 // That function in turn initializes the Swift metadata,
2710 // prepares the "compiler-generated" ObjC metadata if not
2711 // already present, and calls _objc_realizeClassFromSwift() to finish
2712 // our own initialization.
2713
2714 if (auto init = cls->swiftMetadataInitializer()) {
2715 if (PrintConnecting) {
2716 _objc_inform("CLASS: calling Swift metadata initializer "
2717 "for class '%s' (%p)", cls->nameForLogging(), cls);
2718 }
2719
2720 Class newcls = init(cls, nil);
2721
2722 // fixme someday Swift will need to relocate classes at this point,
2723 // but we don't accept that yet.
2724 if (cls != newcls) {
2725 mutex_locker_t lock(runtimeLock);
2726 addRemappedClass(cls, newcls);
2727 }
2728
2729 return newcls;
2730 }
2731 else {
2732 // No Swift-side initialization callback.
2733 // Perform our own realization directly.
2734 mutex_locker_t lock(runtimeLock);
2735 return realizeClassWithoutSwift(cls, nil);
2736 }
2737 }
2738
2739
2740 /***********************************************************************
2741 * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
2742 * Realize a class that might be a Swift class.
2743 * Returns the real class structure for the class.
2744 * Locking:
2745 * runtimeLock must be held on entry
2746 * runtimeLock may be dropped during execution
2747 * ...AndUnlock function leaves runtimeLock unlocked on exit
2748 * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
2749 * This complication avoids repeated lock transitions in some cases.
2750 **********************************************************************/
2751 static Class
2752 realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
2753 {
2754 lock.assertLocked();
2755
2756 if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
2757 // Non-Swift class. Realize it now with the lock still held.
2758 // fixme wrong in the future for objc subclasses of swift classes
2759 realizeClassWithoutSwift(cls, nil);
2760 if (!leaveLocked) lock.unlock();
2761 } else {
2762 // Swift class. We need to drop locks and call the Swift
2763 // runtime to initialize it.
2764 lock.unlock();
2765 cls = realizeSwiftClass(cls);
2766 ASSERT(cls->isRealized()); // callback must have provoked realization
2767 if (leaveLocked) lock.lock();
2768 }
2769
2770 return cls;
2771 }
2772
2773 static Class
2774 realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
2775 {
2776 return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
2777 }
2778
2779 static Class
2780 realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
2781 {
2782 return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
2783 }
2784
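/* Usage sketch (editorial): the ...AndLeaveLocked flavor exists for callers
   that must keep working under runtimeLock but may trip over an unrealized
   Swift class, e.g. (names illustrative):

   runtimeLock.lock();
   Class cls = ...;                                      // possibly unrealized
   cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
   // runtimeLock is held again here, but if cls was a Swift class the lock
   // was dropped in between, so any cached state must be re-validated.
   runtimeLock.unlock();
*/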
2785
2786 /***********************************************************************
2787 * missingWeakSuperclass
2788 * Return YES if some superclass of cls was weak-linked and is missing.
2789 **********************************************************************/
2790 static bool
2791 missingWeakSuperclass(Class cls)
2792 {
2793 ASSERT(!cls->isRealized());
2794
2795 if (!cls->superclass) {
2796 // superclass nil. This is normal for root classes only.
2797 return (!(cls->data()->flags & RO_ROOT));
2798 } else {
2799 // superclass not nil. Check if a higher superclass is missing.
2800 Class supercls = remapClass(cls->superclass);
2801 ASSERT(cls != cls->superclass);
2802 ASSERT(cls != supercls);
2803 if (!supercls) return YES;
2804 if (supercls->isRealized()) return NO;
2805 return missingWeakSuperclass(supercls);
2806 }
2807 }
2808
2809
2810 /***********************************************************************
2811 * realizeAllClassesInImage
2812 * Non-lazily realizes all unrealized classes in the given image.
2813 * Locking: runtimeLock must be held by the caller.
2814 * Locking: this function may drop and re-acquire the lock.
2815 **********************************************************************/
2816 static void realizeAllClassesInImage(header_info *hi)
2817 {
2818 runtimeLock.assertLocked();
2819
2820 size_t count, i;
2821 classref_t const *classlist;
2822
2823 if (hi->areAllClassesRealized()) return;
2824
2825 classlist = _getObjc2ClassList(hi, &count);
2826
2827 for (i = 0; i < count; i++) {
2828 Class cls = remapClass(classlist[i]);
2829 if (cls) {
2830 realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
2831 }
2832 }
2833
2834 hi->setAllClassesRealized(YES);
2835 }
2836
2837
2838 /***********************************************************************
2839 * realizeAllClasses
2840 * Non-lazily realizes all unrealized classes in all known images.
2841 * Locking: runtimeLock must be held by the caller.
2842 * Locking: this function may drop and re-acquire the lock.
2843 * Dropping the lock makes this function thread-unsafe with respect
2844 * to concurrent image unload, but the callers of this function
2845 * already ultimately do something that is also thread-unsafe with
2846 * respect to image unload (such as using the list of all classes).
2847 **********************************************************************/
2848 static void realizeAllClasses(void)
2849 {
2850 runtimeLock.assertLocked();
2851
2852 header_info *hi;
2853 for (hi = FirstHeader; hi; hi = hi->getNext()) {
2854 realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
2855 }
2856 }
2857
2858
2859 /***********************************************************************
2860 * _objc_allocateFutureClass
2861 * Allocate an unresolved future class for the given class name.
2862 * Returns any existing allocation if one was already made.
2863 * Assumes the named class doesn't exist yet.
2864 * Locking: acquires runtimeLock
2865 **********************************************************************/
2866 Class _objc_allocateFutureClass(const char *name)
2867 {
2868 mutex_locker_t lock(runtimeLock);
2869
2870 Class cls;
2871 NXMapTable *map = futureNamedClasses();
2872
2873 if ((cls = (Class)NXMapGet(map, name))) {
2874 // Already have a future class for this name.
2875 return cls;
2876 }
2877
2878 cls = _calloc_class(sizeof(objc_class));
2879 addFutureNamedClass(name, cls);
2880
2881 return cls;
2882 }
2883
2884
2885 /***********************************************************************
2886 * objc_getFutureClass. Return the id of the named class.
2887 * If the class does not exist, return an uninitialized class
2888 * structure that will be used for the class when and if it
2889 * does get loaded.
2890 * Not thread safe.
2891 **********************************************************************/
2892 Class objc_getFutureClass(const char *name)
2893 {
2894 Class cls;
2895
2896 // YES unconnected, NO class handler
2897 // (unconnected is OK because it will someday be the real class)
2898 cls = look_up_class(name, YES, NO);
2899 if (cls) {
2900 if (PrintFuture) {
2901 _objc_inform("FUTURE: found %p already in use for %s",
2902 (void*)cls, name);
2903 }
2904
2905 return cls;
2906 }
2907
2908 // No class or future class with that name yet. Make one.
2909 // fixme not thread-safe with respect to
2910 // simultaneous library load or getFutureClass.
2911 return _objc_allocateFutureClass(name);
2912 }
2913
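/* Usage sketch (editorial): future classes exist so that a framework such as
   CoreFoundation can hand out a stable Class pointer before libobjc has seen
   the real class. The name below is hypothetical:

   Class future = objc_getFutureClass("ExampleBridgedClass");
   // ... stash `future` in a bridging table ...
   // When an image that defines ExampleBridgedClass is later read,
   // readClass() pops the reservation and copies the real class into this
   // same allocation, so the stashed pointer remains valid.
*/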
2914
2915 BOOL _class_isFutureClass(Class cls)
2916 {
2917 return cls && cls->isFuture();
2918 }
2919
2920
2921 /***********************************************************************
2922 * _objc_flush_caches
2923 * Flushes all caches.
2924 * (Historical behavior: flush caches for cls, its metaclass,
2925 * and subclasses thereof. Nil flushes all classes.)
2926 * Locking: acquires runtimeLock
2927 **********************************************************************/
2928 static void flushCaches(Class cls)
2929 {
2930 runtimeLock.assertLocked();
2931 #if CONFIG_USE_CACHE_LOCK
2932 mutex_locker_t lock(cacheUpdateLock);
2933 #endif
2934
2935 if (cls) {
2936 foreach_realized_class_and_subclass(cls, [](Class c){
2937 cache_erase_nolock(c);
2938 return true;
2939 });
2940 }
2941 else {
2942 foreach_realized_class_and_metaclass([](Class c){
2943 cache_erase_nolock(c);
2944 return true;
2945 });
2946 }
2947 }
2948
2949
2950 void _objc_flush_caches(Class cls)
2951 {
2952 {
2953 mutex_locker_t lock(runtimeLock);
2954 flushCaches(cls);
2955 if (cls && cls->superclass && cls != cls->getIsa()) {
2956 flushCaches(cls->getIsa());
2957 } else {
2958 // cls is a root class or root metaclass. Its metaclass is itself
2959 // or a subclass so the metaclass caches were already flushed.
2960 }
2961 }
2962
2963 if (!cls) {
2964 // collectALot if cls==nil
2965 #if CONFIG_USE_CACHE_LOCK
2966 mutex_locker_t lock(cacheUpdateLock);
2967 #else
2968 mutex_locker_t lock(runtimeLock);
2969 #endif
2970 cache_collect(true);
2971 }
2972 }
2973
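/* Editorial note: method-list mutators elsewhere in this file (for example
   addMethod() and attachCategories()) call the static flushCaches() above
   while already holding runtimeLock, so stale IMPs cannot be returned after
   a change. The public entry point performs the whole-hierarchy flush:

   _objc_flush_caches(cls);   // cls, its metaclass, and all their subclasses
   _objc_flush_caches(nil);   // every realized class, plus collection of old
                              // cache allocations via cache_collect()
*/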
2974
2975 /***********************************************************************
2976 * map_images
2977 * Process the given images which are being mapped in by dyld.
2978 * Calls ABI-agnostic code after taking ABI-specific locks.
2979 *
2980 * Locking: write-locks runtimeLock
2981 **********************************************************************/
2982 void
2983 map_images(unsigned count, const char * const paths[],
2984 const struct mach_header * const mhdrs[])
2985 {
2986 mutex_locker_t lock(runtimeLock);
2987 return map_images_nolock(count, paths, mhdrs);
2988 }
2989
2990
2991 static void load_categories_nolock(header_info *hi) {
2992 bool hasClassProperties = hi->info()->hasCategoryClassProperties();
2993
2994 size_t count;
2995 auto processCatlist = [&](category_t * const *catlist) {
2996 for (unsigned i = 0; i < count; i++) {
2997 category_t *cat = catlist[i];
2998 Class cls = remapClass(cat->cls);
2999 locstamped_category_t lc{cat, hi};
3000
3001 if (!cls) {
3002 // Category's target class is missing (probably weak-linked).
3003 // Ignore the category.
3004 if (PrintConnecting) {
3005 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
3006 "missing weak-linked target class",
3007 cat->name, cat);
3008 }
3009 continue;
3010 }
3011
3012 // Process this category.
3013 if (cls->isStubClass()) {
3014 // Stub classes are never realized. Stub classes
3015 // don't know their metaclass until they're
3016 // initialized, so we have to add categories with
3017 // class methods or properties to the stub itself.
3018 // methodizeClass() will find them and add them to
3019 // the metaclass as appropriate.
3020 if (cat->instanceMethods ||
3021 cat->protocols ||
3022 cat->instanceProperties ||
3023 cat->classMethods ||
3024 cat->protocols ||
3025 (hasClassProperties && cat->_classProperties))
3026 {
3027 objc::unattachedCategories.addForClass(lc, cls);
3028 }
3029 } else {
3030 // First, register the category with its target class.
3031 // Then, rebuild the class's method lists (etc) if
3032 // the class is realized.
3033 if (cat->instanceMethods || cat->protocols
3034 || cat->instanceProperties)
3035 {
3036 if (cls->isRealized()) {
3037 attachCategories(cls, &lc, 1, ATTACH_EXISTING);
3038 } else {
3039 objc::unattachedCategories.addForClass(lc, cls);
3040 }
3041 }
3042
3043 if (cat->classMethods || cat->protocols
3044 || (hasClassProperties && cat->_classProperties))
3045 {
3046 if (cls->ISA()->isRealized()) {
3047 attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
3048 } else {
3049 objc::unattachedCategories.addForClass(lc, cls->ISA());
3050 }
3051 }
3052 }
3053 }
3054 };
3055
3056 processCatlist(_getObjc2CategoryList(hi, &count));
3057 processCatlist(_getObjc2CategoryList2(hi, &count));
3058 }
3059
3060 static void loadAllCategories() {
3061 mutex_locker_t lock(runtimeLock);
3062
3063 for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
3064 load_categories_nolock(hi);
3065 }
3066 }
3067
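/* Editorial summary of the routing performed by load_categories_nolock():

   category payload                          target realized?   action
   ----------------------------------------  -----------------  ----------------------------------------------------
   target class is a stub                    (never realized)   unattachedCategories.addForClass(lc, cls)
   instance methods/protocols/properties     yes                attachCategories(cls, ..., ATTACH_EXISTING)
   instance methods/protocols/properties     no                 unattachedCategories.addForClass(lc, cls)
   class methods/protocols/class properties  yes (metaclass)    attachCategories(cls->ISA(), ..., ATTACH_METACLASS)
   class methods/protocols/class properties  no  (metaclass)    unattachedCategories.addForClass(lc, cls->ISA())

   loadAllCategories() simply replays this for every header seen so far; it
   runs once, the first time load_images() is called after dyld registration.
*/
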
3068 /***********************************************************************
3069 * load_images
3070 * Process +load in the given images which are being mapped in by dyld.
3071 *
3072 * Locking: write-locks runtimeLock and loadMethodLock
3073 **********************************************************************/
3074 extern bool hasLoadMethods(const headerType *mhdr);
3075 extern void prepare_load_methods(const headerType *mhdr);
3076
3077 void
3078 load_images(const char *path __unused, const struct mach_header *mh)
3079 {
3080 if (!didInitialAttachCategories && didCallDyldNotifyRegister) {
3081 didInitialAttachCategories = true;
3082 loadAllCategories();
3083 }
3084
3085 // Return without taking locks if there are no +load methods here.
3086 if (!hasLoadMethods((const headerType *)mh)) return;
3087
3088 recursive_mutex_locker_t lock(loadMethodLock);
3089
3090 // Discover load methods
3091 {
3092 mutex_locker_t lock2(runtimeLock);
3093 prepare_load_methods((const headerType *)mh);
3094 }
3095
3096 // Call +load methods (without runtimeLock - re-entrant)
3097 call_load_methods();
3098 }
3099
3100
3101 /***********************************************************************
3102 * unmap_image
3103 * Process the given image which is about to be unmapped by dyld.
3104 *
3105 * Locking: write-locks runtimeLock and loadMethodLock
3106 **********************************************************************/
3107 void
3108 unmap_image(const char *path __unused, const struct mach_header *mh)
3109 {
3110 recursive_mutex_locker_t lock(loadMethodLock);
3111 mutex_locker_t lock2(runtimeLock);
3112 unmap_image_nolock(mh);
3113 }
3114
3115
3116 /***********************************************************************
3117 * mustReadClasses
3118 * Preflight check in advance of readClass() from an image.
3119 **********************************************************************/
3120 bool mustReadClasses(header_info *hi, bool hasDyldRoots)
3121 {
3122 const char *reason;
3123
3124 // If the image is not preoptimized then we must read classes.
3125 if (!hi->hasPreoptimizedClasses()) {
3126 reason = nil; // Don't log this one because it is noisy.
3127 goto readthem;
3128 }
3129
3130 // If iOS simulator then we must read classes.
3131 #if TARGET_OS_SIMULATOR
3132 reason = "the image is for iOS simulator";
3133 goto readthem;
3134 #endif
3135
3136 ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache
3137
3138 // If the image may have missing weak superclasses then we must read classes
3139 if (!noMissingWeakSuperclasses() || hasDyldRoots) {
3140 reason = "the image may contain classes with missing weak superclasses";
3141 goto readthem;
3142 }
3143
3144 // If there are unresolved future classes then we must read classes.
3145 if (haveFutureNamedClasses()) {
3146 reason = "there are unresolved future classes pending";
3147 goto readthem;
3148 }
3149
3150 // readClass() rewrites bits in backward-deploying Swift stable ABI code.
3151 // The assumption here is that there are no such classes
3152 // in the dyld shared cache.
3153 #if DEBUG
3154 {
3155 size_t count;
3156 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3157 for (size_t i = 0; i < count; i++) {
3158 Class cls = remapClass(classlist[i]);
3159 ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
3160 }
3161 }
3162 #endif
3163
3164 // readClass() does not need to do anything.
3165 return NO;
3166
3167 readthem:
3168 if (PrintPreopt && reason) {
3169 _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
3170 "because %s", hi->fname(), reason);
3171 }
3172 return YES;
3173 }
3174
3175
3176 /***********************************************************************
3177 * readClass
3178 * Read a class and metaclass as written by a compiler.
3179 * Returns the new class pointer. This could be:
3180 * - cls
3181 * - nil (cls has a missing weak-linked superclass)
3182 * - something else (space for this class was reserved by a future class)
3183 *
3184 * Note that all work performed by this function is preflighted by
3185 * mustReadClasses(). Do not change this function without updating that one.
3186 *
3187 * Locking: runtimeLock acquired by map_images or objc_readClassPair
3188 **********************************************************************/
3189 Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
3190 {
3191 const char *mangledName = cls->mangledName();
3192
3193 if (missingWeakSuperclass(cls)) {
3194 // No superclass (probably weak-linked).
3195 // Disavow any knowledge of this subclass.
3196 if (PrintConnecting) {
3197 _objc_inform("CLASS: IGNORING class '%s' with "
3198 "missing weak-linked superclass",
3199 cls->nameForLogging());
3200 }
3201 addRemappedClass(cls, nil);
3202 cls->superclass = nil;
3203 return nil;
3204 }
3205
3206 cls->fixupBackwardDeployingStableSwift();
3207
3208 Class replacing = nil;
3209 if (Class newCls = popFutureNamedClass(mangledName)) {
3210 // This name was previously allocated as a future class.
3211 // Copy objc_class to future class's struct.
3212 // Preserve future's rw data block.
3213
3214 if (newCls->isAnySwift()) {
3215 _objc_fatal("Can't complete future class request for '%s' "
3216 "because the real class is too big.",
3217 cls->nameForLogging());
3218 }
3219
3220 class_rw_t *rw = newCls->data();
3221 const class_ro_t *old_ro = rw->ro();
3222 memcpy(newCls, cls, sizeof(objc_class));
3223 rw->set_ro((class_ro_t *)newCls->data());
3224 newCls->setData(rw);
3225 freeIfMutable((char *)old_ro->name);
3226 free((void *)old_ro);
3227
3228 addRemappedClass(cls, newCls);
3229
3230 replacing = cls;
3231 cls = newCls;
3232 }
3233
3234 if (headerIsPreoptimized && !replacing) {
3235 // class list built in shared cache
3236 // fixme strict assert doesn't work because of duplicates
3237 // ASSERT(cls == getClass(name));
3238 ASSERT(getClassExceptSomeSwift(mangledName));
3239 } else {
3240 addNamedClass(cls, mangledName, replacing);
3241 addClassTableEntry(cls);
3242 }
3243
3244 // for future reference: shared cache never contains MH_BUNDLEs
3245 if (headerIsBundle) {
3246 cls->data()->flags |= RO_FROM_BUNDLE;
3247 cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
3248 }
3249
3250 return cls;
3251 }
3252
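/* Editorial sketch of how callers interpret readClass()'s result (compare
   the loop in _read_images() below and _objc_realizeClassFromSwift() above):

   Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
   if (!newCls) {
       // missing weak-linked superclass: the class was disavowed and
       // remapped to nil
   } else if (newCls != cls) {
       // a future class reservation was fulfilled; cls was copied into the
       // reserved struct and the old address must be remapped to newCls
   } else {
       // common case: cls itself was registered by name
   }
*/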
3253
3254 /***********************************************************************
3255 * readProtocol
3256 * Read a protocol as written by a compiler.
3257 **********************************************************************/
3258 static void
3259 readProtocol(protocol_t *newproto, Class protocol_class,
3260 NXMapTable *protocol_map,
3261 bool headerIsPreoptimized, bool headerIsBundle)
3262 {
3263 // This is not enough to make protocols in unloaded bundles safe,
3264 // but it does prevent crashes when looking up unrelated protocols.
3265 auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;
3266
3267 protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);
3268
3269 if (oldproto) {
3270 if (oldproto != newproto) {
3271 // Some other definition already won.
3272 if (PrintProtocols) {
3273 _objc_inform("PROTOCOLS: protocol at %p is %s "
3274 "(duplicate of %p)",
3275 newproto, oldproto->nameForLogging(), oldproto);
3276 }
3277
3278 // If we are a shared cache binary then we have a definition of this
3279 // protocol, but if another one was chosen then we need to clear our
3280 // isCanonical bit so that no-one trusts it.
3281 // Note, if getProtocol returned a shared cache protocol then the
3282 // canonical definition is already in the shared cache and we don't
3283 // need to do anything.
3284 if (headerIsPreoptimized && !oldproto->isCanonical()) {
3285 // Note newproto is an entry in our __objc_protolist section which
3286 // for shared cache binaries points to the original protocol in
3287 // that binary, not the shared cache uniqued one.
3288 auto cacheproto = (protocol_t *)
3289 getSharedCachePreoptimizedProtocol(newproto->mangledName);
3290 if (cacheproto && cacheproto->isCanonical())
3291 cacheproto->clearIsCanonical();
3292 }
3293 }
3294 }
3295 else if (headerIsPreoptimized) {
3296 // Shared cache initialized the protocol object itself,
3297 // but in order to allow out-of-cache replacement we need
3298 // to add it to the protocol table now.
3299
3300 protocol_t *cacheproto = (protocol_t *)
3301 getPreoptimizedProtocol(newproto->mangledName);
3302 protocol_t *installedproto;
3303 if (cacheproto && cacheproto != newproto) {
3304 // Another definition in the shared cache wins (because
3305 // everything in the cache was fixed up to point to it).
3306 installedproto = cacheproto;
3307 }
3308 else {
3309 // This definition wins.
3310 installedproto = newproto;
3311 }
3312
3313 ASSERT(installedproto->getIsa() == protocol_class);
3314 ASSERT(installedproto->size >= sizeof(protocol_t));
3315 insertFn(protocol_map, installedproto->mangledName,
3316 installedproto);
3317
3318 if (PrintProtocols) {
3319 _objc_inform("PROTOCOLS: protocol at %p is %s",
3320 installedproto, installedproto->nameForLogging());
3321 if (newproto != installedproto) {
3322 _objc_inform("PROTOCOLS: protocol at %p is %s "
3323 "(duplicate of %p)",
3324 newproto, installedproto->nameForLogging(),
3325 installedproto);
3326 }
3327 }
3328 }
3329 else if (newproto->size >= sizeof(protocol_t)) {
3330 // New protocol from an un-preoptimized image
3331 // with sufficient storage. Fix it up in place.
3332 // fixme duplicate protocols from unloadable bundle
3333 newproto->initIsa(protocol_class); // fixme pinned
3334 insertFn(protocol_map, newproto->mangledName, newproto);
3335 if (PrintProtocols) {
3336 _objc_inform("PROTOCOLS: protocol at %p is %s",
3337 newproto, newproto->nameForLogging());
3338 }
3339 }
3340 else {
3341 // New protocol from an un-preoptimized image
3342 // with insufficient storage. Reallocate it.
3343 // fixme duplicate protocols from unloadable bundle
3344 size_t size = max(sizeof(protocol_t), (size_t)newproto->size);
3345 protocol_t *installedproto = (protocol_t *)calloc(size, 1);
3346 memcpy(installedproto, newproto, newproto->size);
3347 installedproto->size = (typeof(installedproto->size))size;
3348
3349 installedproto->initIsa(protocol_class); // fixme pinned
3350 insertFn(protocol_map, installedproto->mangledName, installedproto);
3351 if (PrintProtocols) {
3352 _objc_inform("PROTOCOLS: protocol at %p is %s ",
3353 installedproto, installedproto->nameForLogging());
3354 _objc_inform("PROTOCOLS: protocol at %p is %s "
3355 "(reallocated to %p)",
3356 newproto, installedproto->nameForLogging(),
3357 installedproto);
3358 }
3359 }
3360 }
3361
3362 /***********************************************************************
3363 * _read_images
3364 * Perform initial processing of the headers in the linked
3365 * list beginning with headerList.
3366 *
3367 * Called by: map_images_nolock
3368 *
3369 * Locking: runtimeLock acquired by map_images
3370 **********************************************************************/
3371 void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
3372 {
3373 header_info *hi;
3374 uint32_t hIndex;
3375 size_t count;
3376 size_t i;
3377 Class *resolvedFutureClasses = nil;
3378 size_t resolvedFutureClassCount = 0;
3379 static bool doneOnce;
3380 bool launchTime = NO;
3381 TimeLogger ts(PrintImageTimes);
3382
3383 runtimeLock.assertLocked();
3384
3385 #define EACH_HEADER \
3386 hIndex = 0; \
3387 hIndex < hCount && (hi = hList[hIndex]); \
3388 hIndex++
3389
3390 if (!doneOnce) {
3391 doneOnce = YES;
3392 launchTime = YES;
3393
3394 #if SUPPORT_NONPOINTER_ISA
3395 // Disable non-pointer isa under some conditions.
3396
3397 # if SUPPORT_INDEXED_ISA
3398 // Disable nonpointer isa if any image contains old Swift code
3399 for (EACH_HEADER) {
3400 if (hi->info()->containsSwift() &&
3401 hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
3402 {
3403 DisableNonpointerIsa = true;
3404 if (PrintRawIsa) {
3405 _objc_inform("RAW ISA: disabling non-pointer isa because "
3406 "the app or a framework contains Swift code "
3407 "older than Swift 3.0");
3408 }
3409 break;
3410 }
3411 }
3412 # endif
3413
3414 # if TARGET_OS_OSX
3415 // Disable non-pointer isa if the app is too old
3416 // (linked before OS X 10.11)
3417 if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_11) {
3418 DisableNonpointerIsa = true;
3419 if (PrintRawIsa) {
3420 _objc_inform("RAW ISA: disabling non-pointer isa because "
3421 "the app is too old (SDK version " SDK_FORMAT ")",
3422 FORMAT_SDK(dyld_get_program_sdk_version()));
3423 }
3424 }
3425
3426 // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
3427 // New apps that load old extensions may need this.
3428 for (EACH_HEADER) {
3429 if (hi->mhdr()->filetype != MH_EXECUTE) continue;
3430 unsigned long size;
3431 if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
3432 DisableNonpointerIsa = true;
3433 if (PrintRawIsa) {
3434 _objc_inform("RAW ISA: disabling non-pointer isa because "
3435 "the app has a __DATA,__objc_rawisa section");
3436 }
3437 }
3438 break; // assume only one MH_EXECUTE image
3439 }
3440 # endif
3441
3442 #endif
3443
3444 if (DisableTaggedPointers) {
3445 disableTaggedPointers();
3446 }
3447
3448 initializeTaggedPointerObfuscator();
3449
3450 if (PrintConnecting) {
3451 _objc_inform("CLASS: found %d classes during launch", totalClasses);
3452 }
3453
3454 // namedClasses
3455 // Preoptimized classes don't go in this table.
3456 // 4/3 is NXMapTable's load factor
3457 int namedClassesSize =
3458 (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
3459 gdb_objc_realized_classes =
3460 NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
3461
3462 ts.log("IMAGE TIMES: first time tasks");
3463 }
3464
3465 // Fix up @selector references
3466 static size_t UnfixedSelectors;
3467 {
3468 mutex_locker_t lock(selLock);
3469 for (EACH_HEADER) {
3470 if (hi->hasPreoptimizedSelectors()) continue;
3471
3472 bool isBundle = hi->isBundle();
3473 SEL *sels = _getObjc2SelectorRefs(hi, &count);
3474 UnfixedSelectors += count;
3475 for (i = 0; i < count; i++) {
3476 const char *name = sel_cname(sels[i]);
3477 SEL sel = sel_registerNameNoLock(name, isBundle);
3478 if (sels[i] != sel) {
3479 sels[i] = sel;
3480 }
3481 }
3482 }
3483 }
3484
3485 ts.log("IMAGE TIMES: fix up selector references");
3486
3487 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
3488 bool hasDyldRoots = dyld_shared_cache_some_image_overridden();
3489
3490 for (EACH_HEADER) {
3491 if (! mustReadClasses(hi, hasDyldRoots)) {
3492 // Image is sufficiently optimized that we need not call readClass()
3493 continue;
3494 }
3495
3496 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3497
3498 bool headerIsBundle = hi->isBundle();
3499 bool headerIsPreoptimized = hi->hasPreoptimizedClasses();
3500
3501 for (i = 0; i < count; i++) {
3502 Class cls = (Class)classlist[i];
3503 Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
3504
3505 if (newCls != cls && newCls) {
3506 // Class was moved but not deleted. Currently this occurs
3507 // only when the new class resolved a future class.
3508 // Non-lazily realize the class below.
3509 resolvedFutureClasses = (Class *)
3510 realloc(resolvedFutureClasses,
3511 (resolvedFutureClassCount+1) * sizeof(Class));
3512 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
3513 }
3514 }
3515 }
3516
3517 ts.log("IMAGE TIMES: discover classes");
3518
3519 // Fix up remapped classes
3520 // Class list and nonlazy class list remain unremapped.
3521 // Class refs and super refs are remapped for message dispatching.
3522
3523 if (!noClassesRemapped()) {
3524 for (EACH_HEADER) {
3525 Class *classrefs = _getObjc2ClassRefs(hi, &count);
3526 for (i = 0; i < count; i++) {
3527 remapClassRef(&classrefs[i]);
3528 }
3529 // fixme why doesn't test future1 catch the absence of this?
3530 classrefs = _getObjc2SuperRefs(hi, &count);
3531 for (i = 0; i < count; i++) {
3532 remapClassRef(&classrefs[i]);
3533 }
3534 }
3535 }
3536
3537 ts.log("IMAGE TIMES: remap classes");
3538
3539 #if SUPPORT_FIXUP
3540 // Fix up old objc_msgSend_fixup call sites
3541 for (EACH_HEADER) {
3542 message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
3543 if (count == 0) continue;
3544
3545 if (PrintVtables) {
3546 _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
3547 "call sites in %s", count, hi->fname());
3548 }
3549 for (i = 0; i < count; i++) {
3550 fixupMessageRef(refs+i);
3551 }
3552 }
3553
3554 ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
3555 #endif
3556
3557 bool cacheSupportsProtocolRoots = sharedCacheSupportsProtocolRoots();
3558
3559 // Discover protocols. Fix up protocol refs.
3560 for (EACH_HEADER) {
3561 extern objc_class OBJC_CLASS_$_Protocol;
3562 Class cls = (Class)&OBJC_CLASS_$_Protocol;
3563 ASSERT(cls);
3564 NXMapTable *protocol_map = protocols();
3565 bool isPreoptimized = hi->hasPreoptimizedProtocols();
3566
3567 // Skip reading protocols if this is an image from the shared cache
3568 // and we support protocol roots.
3569 // Note: after launch we still need to walk the protocols, because a
3570 // protocol in the shared cache is marked isCanonical() and that may
3571 // no longer be true if some non-shared-cache binary was chosen as
3572 // the canonical definition.
3573 if (launchTime && isPreoptimized && cacheSupportsProtocolRoots) {
3574 if (PrintProtocols) {
3575 _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
3576 hi->fname());
3577 }
3578 continue;
3579 }
3580
3581 bool isBundle = hi->isBundle();
3582
3583 protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
3584 for (i = 0; i < count; i++) {
3585 readProtocol(protolist[i], cls, protocol_map,
3586 isPreoptimized, isBundle);
3587 }
3588 }
3589
3590 ts.log("IMAGE TIMES: discover protocols");
3591
3592 // Fix up @protocol references
3593 // Preoptimized images may have the right
3594 // answer already but we don't know for sure.
3595 for (EACH_HEADER) {
3596 // At launch time, we know preoptimized image refs are pointing at the
3597 // shared cache definition of a protocol. We can skip the check on
3598 // launch, but have to visit @protocol refs for shared cache images
3599 // loaded later.
3600 if (launchTime && cacheSupportsProtocolRoots && hi->isPreoptimized())
3601 continue;
3602 protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
3603 for (i = 0; i < count; i++) {
3604 remapProtocolRef(&protolist[i]);
3605 }
3606 }
3607
3608 ts.log("IMAGE TIMES: fix up @protocol references");
3609
3610 // Discover categories. Only do this after the initial category
3611 // attachment has been done. For categories present at startup,
3612 // discovery is deferred until the first load_images call after
3613 // the call to _dyld_objc_notify_register completes. rdar://problem/53119145
3614 if (didInitialAttachCategories) {
3615 for (EACH_HEADER) {
3616 load_categories_nolock(hi);
3617 }
3618 }
3619
3620 ts.log("IMAGE TIMES: discover categories");
3621
3622 // Category discovery MUST BE LATE to avoid potential races
3623 // when other threads call the new category code before
3624 // this thread finishes its fixups.
3625
3626 // +load handled by prepare_load_methods()
3627
3628 // Realize non-lazy classes (for +load methods and static instances)
3629 for (EACH_HEADER) {
3630 classref_t const *classlist =
3631 _getObjc2NonlazyClassList(hi, &count);
3632 for (i = 0; i < count; i++) {
3633 Class cls = remapClass(classlist[i]);
3634 if (!cls) continue;
3635
3636 addClassTableEntry(cls);
3637
3638 if (cls->isSwiftStable()) {
3639 if (cls->swiftMetadataInitializer()) {
3640 _objc_fatal("Swift class %s with a metadata initializer "
3641 "is not allowed to be non-lazy",
3642 cls->nameForLogging());
3643 }
3644 // fixme also disallow relocatable classes
3645 // We can't disallow all Swift classes because of
3646 // classes like Swift.__EmptyArrayStorage
3647 }
3648 realizeClassWithoutSwift(cls, nil);
3649 }
3650 }
3651
3652 ts.log("IMAGE TIMES: realize non-lazy classes");
3653
3654 // Realize newly-resolved future classes, in case CF manipulates them
3655 if (resolvedFutureClasses) {
3656 for (i = 0; i < resolvedFutureClassCount; i++) {
3657 Class cls = resolvedFutureClasses[i];
3658 if (cls->isSwiftStable()) {
3659 _objc_fatal("Swift class is not allowed to be future");
3660 }
3661 realizeClassWithoutSwift(cls, nil);
3662 cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
3663 }
3664 free(resolvedFutureClasses);
3665 }
3666
3667 ts.log("IMAGE TIMES: realize future classes");
3668
3669 if (DebugNonFragileIvars) {
3670 realizeAllClasses();
3671 }
3672
3673
3674 // Print preoptimization statistics
3675 if (PrintPreopt) {
3676 static unsigned int PreoptTotalMethodLists;
3677 static unsigned int PreoptOptimizedMethodLists;
3678 static unsigned int PreoptTotalClasses;
3679 static unsigned int PreoptOptimizedClasses;
3680
3681 for (EACH_HEADER) {
3682 if (hi->hasPreoptimizedSelectors()) {
3683 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
3684 "in %s", hi->fname());
3685 }
3686 else if (hi->info()->optimizedByDyld()) {
3687 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
3688 "in %s", hi->fname());
3689 }
3690
3691 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3692 for (i = 0; i < count; i++) {
3693 Class cls = remapClass(classlist[i]);
3694 if (!cls) continue;
3695
3696 PreoptTotalClasses++;
3697 if (hi->hasPreoptimizedClasses()) {
3698 PreoptOptimizedClasses++;
3699 }
3700
3701 const method_list_t *mlist;
3702 if ((mlist = ((class_ro_t *)cls->data())->baseMethods())) {
3703 PreoptTotalMethodLists++;
3704 if (mlist->isFixedUp()) {
3705 PreoptOptimizedMethodLists++;
3706 }
3707 }
3708 if ((mlist=((class_ro_t *)cls->ISA()->data())->baseMethods())) {
3709 PreoptTotalMethodLists++;
3710 if (mlist->isFixedUp()) {
3711 PreoptOptimizedMethodLists++;
3712 }
3713 }
3714 }
3715 }
3716
3717 _objc_inform("PREOPTIMIZATION: %zu selector references not "
3718 "pre-optimized", UnfixedSelectors);
3719 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
3720 PreoptOptimizedMethodLists, PreoptTotalMethodLists,
3721 PreoptTotalMethodLists
3722 ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
3723 : 0.0);
3724 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
3725 PreoptOptimizedClasses, PreoptTotalClasses,
3726 PreoptTotalClasses
3727 ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
3728 : 0.0);
3729 _objc_inform("PREOPTIMIZATION: %zu protocol references not "
3730 "pre-optimized", UnfixedProtocolReferences);
3731 }
3732
3733 #undef EACH_HEADER
3734 }
3735
3736
3737 /***********************************************************************
3738 * prepare_load_methods
3739 * Schedule +load for classes in this image, any un-+load-ed
3740 * superclasses in other images, and any categories in this image.
3741 **********************************************************************/
3742 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3743 // cls must already be connected.
3744 static void schedule_class_load(Class cls)
3745 {
3746 if (!cls) return;
3747 ASSERT(cls->isRealized()); // _read_images should realize
3748
3749 if (cls->data()->flags & RW_LOADED) return;
3750
3751 // Ensure superclass-first ordering
3752 schedule_class_load(cls->superclass);
3753
3754 add_class_to_loadable_list(cls);
3755 cls->setInfo(RW_LOADED);
3756 }
3757
3758 // Quick scan for +load methods that doesn't take a lock.
3759 bool hasLoadMethods(const headerType *mhdr)
3760 {
3761 size_t count;
3762 if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true;
3763 if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true;
3764 return false;
3765 }
3766
3767 void prepare_load_methods(const headerType *mhdr)
3768 {
3769 size_t count, i;
3770
3771 runtimeLock.assertLocked();
3772
3773 classref_t const *classlist =
3774 _getObjc2NonlazyClassList(mhdr, &count);
3775 for (i = 0; i < count; i++) {
3776 schedule_class_load(remapClass(classlist[i]));
3777 }
3778
3779 category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count);
3780 for (i = 0; i < count; i++) {
3781 category_t *cat = categorylist[i];
3782 Class cls = remapClass(cat->cls);
3783 if (!cls) continue; // category for ignored weak-linked class
3784 if (cls->isSwiftStable()) {
3785 _objc_fatal("Swift class extensions and categories on Swift "
3786 "classes are not allowed to have +load methods");
3787 }
3788 realizeClassWithoutSwift(cls, nil);
3789 ASSERT(cls->ISA()->isRealized());
3790 add_category_to_loadable_list(cat);
3791 }
3792 }
3793
3794
3795 /***********************************************************************
3796 * _unload_image
3797 * Only handles MH_BUNDLE for now.
3798 * Locking: runtimeLock and loadMethodLock acquired by unmap_image
3799 **********************************************************************/
3800 void _unload_image(header_info *hi)
3801 {
3802 size_t count, i;
3803
3804 loadMethodLock.assertLocked();
3805 runtimeLock.assertLocked();
3806
3807 // Unload unattached categories and categories waiting for +load.
3808
3809 // Ignore __objc_catlist2. We don't support unloading Swift
3810 // and we never will.
3811 category_t * const *catlist = _getObjc2CategoryList(hi, &count);
3812 for (i = 0; i < count; i++) {
3813 category_t *cat = catlist[i];
3814 Class cls = remapClass(cat->cls);
3815 if (!cls) continue; // category for ignored weak-linked class
3816
3817 // fixme for MH_DYLIB cat's class may have been unloaded already
3818
3819 // unattached list
3820 objc::unattachedCategories.eraseCategoryForClass(cat, cls);
3821
3822 // +load queue
3823 remove_category_from_loadable_list(cat);
3824 }
3825
3826 // Unload classes.
3827
3828 // Gather classes from both __DATA,__objc_clslist
3829 // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
3830 // only, and we need to unload that class if we unload an arclite image.
3831
3832 objc::DenseSet<Class> classes{};
3833 classref_t const *classlist;
3834
3835 classlist = _getObjc2ClassList(hi, &count);
3836 for (i = 0; i < count; i++) {
3837 Class cls = remapClass(classlist[i]);
3838 if (cls) classes.insert(cls);
3839 }
3840
3841 classlist = _getObjc2NonlazyClassList(hi, &count);
3842 for (i = 0; i < count; i++) {
3843 Class cls = remapClass(classlist[i]);
3844 if (cls) classes.insert(cls);
3845 }
3846
3847 // First detach classes from each other. Then free each class.
3848 // This avoids bugs where this loop unloads a subclass before its superclass.
3849
3850 for (Class cls: classes) {
3851 remove_class_from_loadable_list(cls);
3852 detach_class(cls->ISA(), YES);
3853 detach_class(cls, NO);
3854 }
3855 for (Class cls: classes) {
3856 free_class(cls->ISA());
3857 free_class(cls);
3858 }
3859
3860 // XXX FIXME -- Clean up protocols:
3861 // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time
3862
3863 // fixme DebugUnload
3864 }
3865
3866
3867 /***********************************************************************
3868 * method_getDescription
3869 * Returns a pointer to this method's objc_method_description.
3870 * Locking: none
3871 **********************************************************************/
3872 struct objc_method_description *
3873 method_getDescription(Method m)
3874 {
3875 if (!m) return nil;
3876 return (struct objc_method_description *)m;
3877 }
3878
3879
3880 IMP
3881 method_getImplementation(Method m)
3882 {
3883 return m ? m->imp : nil;
3884 }
3885
3886
3887 /***********************************************************************
3888 * method_getName
3889 * Returns this method's selector.
3890 * The method must not be nil.
3891 * The method must already have been fixed-up.
3892 * Locking: none
3893 **********************************************************************/
3894 SEL
3895 method_getName(Method m)
3896 {
3897 if (!m) return nil;
3898
3899 ASSERT(m->name == sel_registerName(sel_getName(m->name)));
3900 return m->name;
3901 }
3902
3903
3904 /***********************************************************************
3905 * method_getTypeEncoding
3906 * Returns this method's old-style type encoding string.
3907 * The method must not be nil.
3908 * Locking: none
3909 **********************************************************************/
3910 const char *
3911 method_getTypeEncoding(Method m)
3912 {
3913 if (!m) return nil;
3914 return m->types;
3915 }
3916
3917
3918 /***********************************************************************
3919 * method_setImplementation
3920 * Sets this method's implementation to imp.
3921 * The previous implementation is returned.
3922 **********************************************************************/
3923 static IMP
3924 _method_setImplementation(Class cls, method_t *m, IMP imp)
3925 {
3926 runtimeLock.assertLocked();
3927
3928 if (!m) return nil;
3929 if (!imp) return nil;
3930
3931 IMP old = m->imp;
3932 m->imp = imp;
3933
3934 // Cache updates are slow if cls is nil (i.e. unknown)
3935 // RR/AWZ updates are slow if cls is nil (i.e. unknown)
3936 // fixme build list of classes whose Methods are known externally?
3937
3938 flushCaches(cls);
3939
3940 adjustCustomFlagsForMethodChange(cls, m);
3941
3942 return old;
3943 }
3944
3945 IMP
3946 method_setImplementation(Method m, IMP imp)
3947 {
3948 // Don't know the class - will be slow if RR/AWZ are affected
3949 // fixme build list of classes whose Methods are known externally?
3950 mutex_locker_t lock(runtimeLock);
3951 return _method_setImplementation(Nil, m, imp);
3952 }
3953
3954
3955 void method_exchangeImplementations(Method m1, Method m2)
3956 {
3957 if (!m1 || !m2) return;
3958
3959 mutex_locker_t lock(runtimeLock);
3960
3961 IMP m1_imp = m1->imp;
3962 m1->imp = m2->imp;
3963 m2->imp = m1_imp;
3964
3965
3966 // RR/AWZ updates are slow because class is unknown
3967 // Cache updates are slow because class is unknown
3968 // fixme build list of classes whose Methods are known externally?
3969
3970 flushCaches(nil);
3971
3972 adjustCustomFlagsForMethodChange(nil, m1);
3973 adjustCustomFlagsForMethodChange(nil, m2);
3974 }
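// For reference, a minimal client-side sketch of the swizzling pattern built
// on this entry point. `MyClass`, -original and -replacement are hypothetical
// names, not part of this file:
//
//     Method orig = class_getInstanceMethod([MyClass class], @selector(original));
//     Method repl = class_getInstanceMethod([MyClass class], @selector(replacement));
//     if (orig && repl) {
//         method_exchangeImplementations(orig, repl);
//     }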
3975
3976
3977 /***********************************************************************
3978 * ivar_getOffset
3979 * fixme
3980 * Locking: none
3981 **********************************************************************/
3982 ptrdiff_t
3983 ivar_getOffset(Ivar ivar)
3984 {
3985 if (!ivar) return 0;
3986 return *ivar->offset;
3987 }
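// For reference, a hedged usage sketch: pairing class_getInstanceVariable with
// ivar_getOffset to locate an ivar's storage. `obj` and "_value" are
// hypothetical:
//
//     Ivar iv = class_getInstanceVariable(object_getClass(obj), "_value");
//     if (iv) {
//         ptrdiff_t off = ivar_getOffset(iv);
//         // the ivar's storage begins `off` bytes from the start of the object
//     }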
3988
3989
3990 /***********************************************************************
3991 * ivar_getName
3992 * fixme
3993 * Locking: none
3994 **********************************************************************/
3995 const char *
3996 ivar_getName(Ivar ivar)
3997 {
3998 if (!ivar) return nil;
3999 return ivar->name;
4000 }
4001
4002
4003 /***********************************************************************
4004 * ivar_getTypeEncoding
4005 * fixme
4006 * Locking: none
4007 **********************************************************************/
4008 const char *
4009 ivar_getTypeEncoding(Ivar ivar)
4010 {
4011 if (!ivar) return nil;
4012 return ivar->type;
4013 }
4014
4015
4016
4017 const char *property_getName(objc_property_t prop)
4018 {
4019 return prop->name;
4020 }
4021
4022 const char *property_getAttributes(objc_property_t prop)
4023 {
4024 return prop->attributes;
4025 }
4026
4027 objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
4028 unsigned int *outCount)
4029 {
4030 if (!prop) {
4031 if (outCount) *outCount = 0;
4032 return nil;
4033 }
4034
4035 mutex_locker_t lock(runtimeLock);
4036 return copyPropertyAttributeList(prop->attributes,outCount);
4037 }
4038
4039 char * property_copyAttributeValue(objc_property_t prop, const char *name)
4040 {
4041 if (!prop || !name || *name == '\0') return nil;
4042
4043 mutex_locker_t lock(runtimeLock);
4044 return copyPropertyAttributeValue(prop->attributes, name);
4045 }
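// For reference, a hedged usage sketch: reading a property's type encoding
// ("T") and backing ivar name ("V"). `cls` and "title" are hypothetical:
//
//     objc_property_t prop = class_getProperty(cls, "title");
//     if (prop) {
//         char *type = property_copyAttributeValue(prop, "T");
//         char *ivarName = property_copyAttributeValue(prop, "V");
//         // ... use type and ivarName ...
//         free(type);
//         free(ivarName);
//     }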
4046
4047
4048 /***********************************************************************
4049 * getExtendedTypesIndexesForMethod
4050 * Returns:
4051 * a is the count of methods in all method lists before m's method list
4052 * b is the index of m in m's method list
4053 * a+b is the index of m's extended types in the extended types array
4054 **********************************************************************/
4055 static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
4056 {
4057 a = 0;
4058
4059 if (proto->instanceMethods) {
4060 if (isRequiredMethod && isInstanceMethod) {
4061 b = proto->instanceMethods->indexOfMethod(m);
4062 return;
4063 }
4064 a += proto->instanceMethods->count;
4065 }
4066
4067 if (proto->classMethods) {
4068 if (isRequiredMethod && !isInstanceMethod) {
4069 b = proto->classMethods->indexOfMethod(m);
4070 return;
4071 }
4072 a += proto->classMethods->count;
4073 }
4074
4075 if (proto->optionalInstanceMethods) {
4076 if (!isRequiredMethod && isInstanceMethod) {
4077 b = proto->optionalInstanceMethods->indexOfMethod(m);
4078 return;
4079 }
4080 a += proto->optionalInstanceMethods->count;
4081 }
4082
4083 if (proto->optionalClassMethods) {
4084 if (!isRequiredMethod && !isInstanceMethod) {
4085 b = proto->optionalClassMethods->indexOfMethod(m);
4086 return;
4087 }
4088 a += proto->optionalClassMethods->count;
4089 }
4090 }
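// Worked example (illustrative): for a protocol with 3 required instance
// methods, 2 required class methods, and 1 optional instance method, the
// extended types array is laid out as
//
//     [reqInst0, reqInst1, reqInst2, reqClass0, reqClass1, optInst0]
//
// so for the optional instance method, a == 3 + 2 == 5, b == 0, and its
// extended @encode string lives at index a + b == 5.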
4091
4092
4093 /***********************************************************************
4094 * getExtendedTypesIndexForMethod
4095 * Returns the index of m's extended types in proto's extended types array.
4096 **********************************************************************/
4097 static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
4098 {
4099 uint32_t a;
4100 uint32_t b;
4101 getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
4102 isInstanceMethod, a, b);
4103 return a + b;
4104 }
4105
4106
4107 /***********************************************************************
4108 * fixupProtocolMethodList
4109 * Fixes up a single method list in a protocol.
4110 **********************************************************************/
4111 static void
4112 fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
4113 bool required, bool instance)
4114 {
4115 runtimeLock.assertLocked();
4116
4117 if (!mlist) return;
4118 if (mlist->isFixedUp()) return;
4119
4120 const char **extTypes = proto->extendedMethodTypes();
4121 fixupMethodList(mlist, true/*always copy for simplicity*/,
4122 !extTypes/*sort if no extended method types*/);
4123
4124 if (extTypes) {
4125 // Sort method list and extended method types together.
4126 // fixupMethodList() can't do this.
4127 // fixme COW stomp
4128 uint32_t count = mlist->count;
4129 uint32_t prefix;
4130 uint32_t junk;
4131 getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
4132 required, instance, prefix, junk);
4133 for (uint32_t i = 0; i < count; i++) {
4134 for (uint32_t j = i+1; j < count; j++) {
4135 method_t& mi = mlist->get(i);
4136 method_t& mj = mlist->get(j);
4137 if (mi.name > mj.name) {
4138 std::swap(mi, mj);
4139 std::swap(extTypes[prefix+i], extTypes[prefix+j]);
4140 }
4141 }
4142 }
4143 }
4144 }
4145
4146
4147 /***********************************************************************
4148 * fixupProtocol
4149 * Fixes up all of a protocol's method lists.
4150 **********************************************************************/
4151 static void
4152 fixupProtocol(protocol_t *proto)
4153 {
4154 runtimeLock.assertLocked();
4155
4156 if (proto->protocols) {
4157 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4158 protocol_t *sub = remapProtocol(proto->protocols->list[i]);
4159 if (!sub->isFixedUp()) fixupProtocol(sub);
4160 }
4161 }
4162
4163 fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
4164 fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
4165 fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
4166 fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);
4167
4168 // fixme memory barrier so we can check this with no lock
4169 proto->setFixedUp();
4170 }
4171
4172
4173 /***********************************************************************
4174 * fixupProtocolIfNeeded
4175 * Fixes up all of a protocol's method lists if they aren't fixed up already.
4176 * Locking: write-locks runtimeLock.
4177 **********************************************************************/
4178 static void
4179 fixupProtocolIfNeeded(protocol_t *proto)
4180 {
4181 runtimeLock.assertUnlocked();
4182 ASSERT(proto);
4183
4184 if (!proto->isFixedUp()) {
4185 mutex_locker_t lock(runtimeLock);
4186 fixupProtocol(proto);
4187 }
4188 }
4189
4190
4191 static method_list_t *
4192 getProtocolMethodList(protocol_t *proto, bool required, bool instance)
4193 {
4194 method_list_t **mlistp = nil;
4195 if (required) {
4196 if (instance) {
4197 mlistp = &proto->instanceMethods;
4198 } else {
4199 mlistp = &proto->classMethods;
4200 }
4201 } else {
4202 if (instance) {
4203 mlistp = &proto->optionalInstanceMethods;
4204 } else {
4205 mlistp = &proto->optionalClassMethods;
4206 }
4207 }
4208
4209 return *mlistp;
4210 }
4211
4212
4213 /***********************************************************************
4214 * protocol_getMethod_nolock
4215 * Locking: runtimeLock must be held by the caller
4216 **********************************************************************/
4217 static method_t *
4218 protocol_getMethod_nolock(protocol_t *proto, SEL sel,
4219 bool isRequiredMethod, bool isInstanceMethod,
4220 bool recursive)
4221 {
4222 runtimeLock.assertLocked();
4223
4224 if (!proto || !sel) return nil;
4225
4226 ASSERT(proto->isFixedUp());
4227
4228 method_list_t *mlist =
4229 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4230 if (mlist) {
4231 method_t *m = search_method_list(mlist, sel);
4232 if (m) return m;
4233 }
4234
4235 if (recursive && proto->protocols) {
4236 method_t *m;
4237 for (uint32_t i = 0; i < proto->protocols->count; i++) {
4238 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
4239 m = protocol_getMethod_nolock(realProto, sel,
4240 isRequiredMethod, isInstanceMethod,
4241 true);
4242 if (m) return m;
4243 }
4244 }
4245
4246 return nil;
4247 }
4248
4249
4250 /***********************************************************************
4251 * protocol_getMethod
4252 * fixme
4253 * Locking: acquires runtimeLock
4254 **********************************************************************/
4255 Method
4256 protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
4257 {
4258 if (!proto) return nil;
4259 fixupProtocolIfNeeded(proto);
4260
4261 mutex_locker_t lock(runtimeLock);
4262 return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
4263 isInstanceMethod, recursive);
4264 }
4265
4266
4267 /***********************************************************************
4268 * protocol_getMethodTypeEncoding_nolock
4269 * Return the @encode string for the requested protocol method.
4270 * Returns nil if the compiler did not emit any extended @encode data.
4271 * Locking: runtimeLock must be held by the caller
4272 **********************************************************************/
4273 const char *
4274 protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
4275 bool isRequiredMethod,
4276 bool isInstanceMethod)
4277 {
4278 runtimeLock.assertLocked();
4279
4280 if (!proto) return nil;
4281 if (!proto->extendedMethodTypes()) return nil;
4282
4283 ASSERT(proto->isFixedUp());
4284
4285 method_t *m =
4286 protocol_getMethod_nolock(proto, sel,
4287 isRequiredMethod, isInstanceMethod, false);
4288 if (m) {
4289 uint32_t i = getExtendedTypesIndexForMethod(proto, m,
4290 isRequiredMethod,
4291 isInstanceMethod);
4292 return proto->extendedMethodTypes()[i];
4293 }
4294
4295 // No method with that name. Search incorporated protocols.
4296 if (proto->protocols) {
4297 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4298 const char *enc =
4299 protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
4300 if (enc) return enc;
4301 }
4302 }
4303
4304 return nil;
4305 }
4306
4307 /***********************************************************************
4308 * _protocol_getMethodTypeEncoding
4309 * Return the @encode string for the requested protocol method.
4310 * Returns nil if the compiler did not emit any extended @encode data.
4311 * Locking: acquires runtimeLock
4312 **********************************************************************/
4313 const char *
4314 _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
4315 BOOL isRequiredMethod, BOOL isInstanceMethod)
4316 {
4317 protocol_t *proto = newprotocol(proto_gen);
4318
4319 if (!proto) return nil;
4320 fixupProtocolIfNeeded(proto);
4321
4322 mutex_locker_t lock(runtimeLock);
4323 return protocol_getMethodTypeEncoding_nolock(proto, sel,
4324 isRequiredMethod,
4325 isInstanceMethod);
4326 }
4327
4328
4329 /***********************************************************************
4330 * protocol_t::demangledName
4331 * Returns the (Swift-demangled) name of the given protocol.
4332 * Locking: none
4333 **********************************************************************/
4334 const char *
4335 protocol_t::demangledName()
4336 {
4337 ASSERT(hasDemangledNameField());
4338
4339 if (! _demangledName) {
4340 char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
4341 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
4342 (void**)&_demangledName))
4343 {
4344 if (de) free(de);
4345 }
4346 }
4347 return _demangledName;
4348 }
4349
4350 /***********************************************************************
4351 * protocol_getName
4352 * Returns the (Swift-demangled) name of the given protocol.
4353 * Locking: runtimeLock must not be held by the caller
4354 **********************************************************************/
4355 const char *
4356 protocol_getName(Protocol *proto)
4357 {
4358 if (!proto) return "nil";
4359 else return newprotocol(proto)->demangledName();
4360 }
4361
4362
4363 /***********************************************************************
4364 * protocol_getMethodDescription
4365 * Returns the description of the named method in the given protocol.
4366 * Locking: runtimeLock must not be held by the caller
4367 **********************************************************************/
4368 struct objc_method_description
4369 protocol_getMethodDescription(Protocol *p, SEL aSel,
4370 BOOL isRequiredMethod, BOOL isInstanceMethod)
4371 {
4372 Method m =
4373 protocol_getMethod(newprotocol(p), aSel,
4374 isRequiredMethod, isInstanceMethod, true);
4375 if (m) return *method_getDescription(m);
4376 else return (struct objc_method_description){nil, nil};
4377 }
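// For reference, a hedged usage sketch: looking up a required instance method
// of a well-known protocol. -description is a required method of <NSObject>:
//
//     struct objc_method_description d =
//         protocol_getMethodDescription(@protocol(NSObject), @selector(description),
//                                       YES /*required*/, YES /*instance*/);
//     if (d.name) {
//         // d.types holds the method's type encoding
//     }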
4378
4379
4380 /***********************************************************************
4381 * protocol_conformsToProtocol_nolock
4382 * Returns YES if self conforms to other.
4383 * Locking: runtimeLock must be held by the caller.
4384 **********************************************************************/
4385 static bool
4386 protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
4387 {
4388 runtimeLock.assertLocked();
4389
4390 if (!self || !other) {
4391 return NO;
4392 }
4393
4394 // protocols need not be fixed up
4395
4396 if (0 == strcmp(self->mangledName, other->mangledName)) {
4397 return YES;
4398 }
4399
4400 if (self->protocols) {
4401 uintptr_t i;
4402 for (i = 0; i < self->protocols->count; i++) {
4403 protocol_t *proto = remapProtocol(self->protocols->list[i]);
4404 if (other == proto) {
4405 return YES;
4406 }
4407 if (0 == strcmp(other->mangledName, proto->mangledName)) {
4408 return YES;
4409 }
4410 if (protocol_conformsToProtocol_nolock(proto, other)) {
4411 return YES;
4412 }
4413 }
4414 }
4415
4416 return NO;
4417 }
4418
4419
4420 /***********************************************************************
4421 * protocol_conformsToProtocol
4422 * Returns YES if self conforms to other.
4423 * Locking: acquires runtimeLock
4424 **********************************************************************/
4425 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
4426 {
4427 mutex_locker_t lock(runtimeLock);
4428 return protocol_conformsToProtocol_nolock(newprotocol(self),
4429 newprotocol(other));
4430 }
4431
4432
4433 /***********************************************************************
4434 * protocol_isEqual
4435 * Returns YES if two protocols are equal (i.e. they conform to each other).
4436 * Locking: acquires runtimeLock
4437 **********************************************************************/
4438 BOOL protocol_isEqual(Protocol *self, Protocol *other)
4439 {
4440 if (self == other) return YES;
4441 if (!self || !other) return NO;
4442
4443 if (!protocol_conformsToProtocol(self, other)) return NO;
4444 if (!protocol_conformsToProtocol(other, self)) return NO;
4445
4446 return YES;
4447 }
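// For reference, a hedged usage sketch: two references to the same named
// protocol compare equal regardless of how they were obtained:
//
//     Protocol *a = @protocol(NSObject);
//     Protocol *b = objc_getProtocol("NSObject");
//     BOOL same = protocol_isEqual(a, b);   // expected YES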
4448
4449
4450 /***********************************************************************
4451 * protocol_copyMethodDescriptionList
4452 * Returns descriptions of a protocol's methods.
4453 * Locking: acquires runtimeLock
4454 **********************************************************************/
4455 struct objc_method_description *
4456 protocol_copyMethodDescriptionList(Protocol *p,
4457 BOOL isRequiredMethod,BOOL isInstanceMethod,
4458 unsigned int *outCount)
4459 {
4460 protocol_t *proto = newprotocol(p);
4461 struct objc_method_description *result = nil;
4462 unsigned int count = 0;
4463
4464 if (!proto) {
4465 if (outCount) *outCount = 0;
4466 return nil;
4467 }
4468
4469 fixupProtocolIfNeeded(proto);
4470
4471 mutex_locker_t lock(runtimeLock);
4472
4473 method_list_t *mlist =
4474 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4475
4476 if (mlist) {
4477 result = (struct objc_method_description *)
4478 calloc(mlist->count + 1, sizeof(struct objc_method_description));
4479 for (const auto& meth : *mlist) {
4480 result[count].name = meth.name;
4481 result[count].types = (char *)meth.types;
4482 count++;
4483 }
4484 }
4485
4486 if (outCount) *outCount = count;
4487 return result;
4488 }
4489
4490
4491 /***********************************************************************
4492 * protocol_getProperty
4493 * fixme
4494 * Locking: runtimeLock must be held by the caller
4495 **********************************************************************/
4496 static property_t *
4497 protocol_getProperty_nolock(protocol_t *proto, const char *name,
4498 bool isRequiredProperty, bool isInstanceProperty)
4499 {
4500 runtimeLock.assertLocked();
4501
4502 if (!isRequiredProperty) {
4503 // Only required properties are currently supported.
4504 return nil;
4505 }
4506
4507 property_list_t *plist = isInstanceProperty ?
4508 proto->instanceProperties : proto->classProperties();
4509 if (plist) {
4510 for (auto& prop : *plist) {
4511 if (0 == strcmp(name, prop.name)) {
4512 return &prop;
4513 }
4514 }
4515 }
4516
4517 if (proto->protocols) {
4518 uintptr_t i;
4519 for (i = 0; i < proto->protocols->count; i++) {
4520 protocol_t *p = remapProtocol(proto->protocols->list[i]);
4521 property_t *prop =
4522 protocol_getProperty_nolock(p, name,
4523 isRequiredProperty,
4524 isInstanceProperty);
4525 if (prop) return prop;
4526 }
4527 }
4528
4529 return nil;
4530 }
4531
4532 objc_property_t protocol_getProperty(Protocol *p, const char *name,
4533 BOOL isRequiredProperty, BOOL isInstanceProperty)
4534 {
4535 if (!p || !name) return nil;
4536
4537 mutex_locker_t lock(runtimeLock);
4538 return (objc_property_t)
4539 protocol_getProperty_nolock(newprotocol(p), name,
4540 isRequiredProperty, isInstanceProperty);
4541 }
4542
4543
4544 /***********************************************************************
4545 * protocol_copyPropertyList
4546 * protocol_copyPropertyList2
4547 * fixme
4548 * Locking: acquires runtimeLock
4549 **********************************************************************/
4550 static property_t **
4551 copyPropertyList(property_list_t *plist, unsigned int *outCount)
4552 {
4553 property_t **result = nil;
4554 unsigned int count = 0;
4555
4556 if (plist) {
4557 count = plist->count;
4558 }
4559
4560 if (count > 0) {
4561 result = (property_t **)malloc((count+1) * sizeof(property_t *));
4562
4563 count = 0;
4564 for (auto& prop : *plist) {
4565 result[count++] = &prop;
4566 }
4567 result[count] = nil;
4568 }
4569
4570 if (outCount) *outCount = count;
4571 return result;
4572 }
4573
4574 objc_property_t *
4575 protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
4576 BOOL isRequiredProperty, BOOL isInstanceProperty)
4577 {
4578 if (!proto || !isRequiredProperty) {
4579 // Optional properties are not currently supported.
4580 if (outCount) *outCount = 0;
4581 return nil;
4582 }
4583
4584 mutex_locker_t lock(runtimeLock);
4585
4586 property_list_t *plist = isInstanceProperty
4587 ? newprotocol(proto)->instanceProperties
4588 : newprotocol(proto)->classProperties();
4589 return (objc_property_t *)copyPropertyList(plist, outCount);
4590 }
4591
4592 objc_property_t *
4593 protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
4594 {
4595 return protocol_copyPropertyList2(proto, outCount,
4596 YES/*required*/, YES/*instance*/);
4597 }
4598
4599
4600 /***********************************************************************
4601 * protocol_copyProtocolList
4602 * Copies this protocol's incorporated protocols.
4603 * Does not copy those protocols' incorporated protocols in turn.
4604 * Locking: acquires runtimeLock
4605 **********************************************************************/
4606 Protocol * __unsafe_unretained *
4607 protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
4608 {
4609 unsigned int count = 0;
4610 Protocol **result = nil;
4611 protocol_t *proto = newprotocol(p);
4612
4613 if (!proto) {
4614 if (outCount) *outCount = 0;
4615 return nil;
4616 }
4617
4618 mutex_locker_t lock(runtimeLock);
4619
4620 if (proto->protocols) {
4621 count = (unsigned int)proto->protocols->count;
4622 }
4623 if (count > 0) {
4624 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
4625
4626 unsigned int i;
4627 for (i = 0; i < count; i++) {
4628 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
4629 }
4630 result[i] = nil;
4631 }
4632
4633 if (outCount) *outCount = count;
4634 return result;
4635 }
4636
4637
4638 /***********************************************************************
4639 * objc_allocateProtocol
4640 * Creates a new protocol. The protocol may not be used until
4641 * objc_registerProtocol() is called.
4642 * Returns nil if a protocol with the same name already exists.
4643 * Locking: acquires runtimeLock
4644 **********************************************************************/
4645 Protocol *
4646 objc_allocateProtocol(const char *name)
4647 {
4648 mutex_locker_t lock(runtimeLock);
4649
4650 if (getProtocol(name)) {
4651 return nil;
4652 }
4653
4654 protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);
4655
4656 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4657 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4658 result->initProtocolIsa(cls);
4659 result->size = sizeof(protocol_t);
4660 // fixme mangle the name if it looks swift-y?
4661 result->mangledName = strdupIfMutable(name);
4662
4663 // fixme reserve name without installing
4664
4665 return (Protocol *)result;
4666 }
4667
4668
4669 /***********************************************************************
4670 * objc_registerProtocol
4671 * Registers a newly-constructed protocol. The protocol is now
4672 * ready for use and immutable.
4673 * Locking: acquires runtimeLock
4674 **********************************************************************/
4675 void objc_registerProtocol(Protocol *proto_gen)
4676 {
4677 protocol_t *proto = newprotocol(proto_gen);
4678
4679 mutex_locker_t lock(runtimeLock);
4680
4681 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4682 Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4683 extern objc_class OBJC_CLASS_$_Protocol;
4684 Class cls = (Class)&OBJC_CLASS_$_Protocol;
4685
4686 if (proto->ISA() == cls) {
4687 _objc_inform("objc_registerProtocol: protocol '%s' was already "
4688 "registered!", proto->nameForLogging());
4689 return;
4690 }
4691 if (proto->ISA() != oldcls) {
4692 _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
4693 "with objc_allocateProtocol!", proto->nameForLogging());
4694 return;
4695 }
4696
4697 // NOT initProtocolIsa(). The protocol object may already
4698 // have been retained and we must preserve that count.
4699 proto->changeIsa(cls);
4700
4701 // Don't add this protocol if we already have it.
4702 // Should we warn on duplicates?
4703 if (getProtocol(proto->mangledName) == nil) {
4704 NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
4705 }
4706 }
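// For reference, a hedged sketch of the intended construction sequence for a
// dynamically built protocol. "MyDynamicProtocol", -doSomething, and the "v@:"
// type encoding (void return, no extra arguments) are hypothetical choices:
//
//     Protocol *p = objc_allocateProtocol("MyDynamicProtocol");
//     if (p) {
//         protocol_addMethodDescription(p, @selector(doSomething), "v@:",
//                                       YES /*required*/, YES /*instance*/);
//         protocol_addProtocol(p, @protocol(NSObject));
//         objc_registerProtocol(p);
//     }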
4707
4708
4709 /***********************************************************************
4710 * protocol_addProtocol
4711 * Adds an incorporated protocol to another protocol.
4712 * No method enforcement is performed.
4713 * `proto` must be under construction. `addition` must not.
4714 * Locking: acquires runtimeLock
4715 **********************************************************************/
4716 void
4717 protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
4718 {
4719 protocol_t *proto = newprotocol(proto_gen);
4720 protocol_t *addition = newprotocol(addition_gen);
4721
4722 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4723 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4724
4725 if (!proto_gen) return;
4726 if (!addition_gen) return;
4727
4728 mutex_locker_t lock(runtimeLock);
4729
4730 if (proto->ISA() != cls) {
4731 _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
4732 "under construction!", proto->nameForLogging());
4733 return;
4734 }
4735 if (addition->ISA() == cls) {
4736 _objc_inform("protocol_addProtocol: added protocol '%s' is still "
4737 "under construction!", addition->nameForLogging());
4738 return;
4739 }
4740
4741 protocol_list_t *protolist = proto->protocols;
4742 if (!protolist) {
4743 protolist = (protocol_list_t *)
4744 calloc(1, sizeof(protocol_list_t)
4745 + sizeof(protolist->list[0]));
4746 } else {
4747 protolist = (protocol_list_t *)
4748 realloc(protolist, protocol_list_size(protolist)
4749 + sizeof(protolist->list[0]));
4750 }
4751
4752 protolist->list[protolist->count++] = (protocol_ref_t)addition;
4753 proto->protocols = protolist;
4754 }
4755
4756
4757 /***********************************************************************
4758 * protocol_addMethodDescription
4759 * Adds a method to a protocol. The protocol must be under construction.
4760 * Locking: acquires runtimeLock
4761 **********************************************************************/
4762 static void
4763 protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
4764 {
4765 if (!list) {
4766 list = (method_list_t *)calloc(sizeof(method_list_t), 1);
4767 list->entsizeAndFlags = sizeof(list->first);
4768 list->setFixedUp();
4769 } else {
4770 size_t size = list->byteSize() + list->entsize();
4771 list = (method_list_t *)realloc(list, size);
4772 }
4773
4774 method_t& meth = list->get(list->count++);
4775 meth.name = name;
4776 meth.types = types ? strdupIfMutable(types) : "";
4777 meth.imp = nil;
4778 }
4779
4780 void
4781 protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
4782 BOOL isRequiredMethod, BOOL isInstanceMethod)
4783 {
4784 protocol_t *proto = newprotocol(proto_gen);
4785
4786 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4787 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4788
4789 if (!proto_gen) return;
4790
4791 mutex_locker_t lock(runtimeLock);
4792
4793 if (proto->ISA() != cls) {
4794 _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
4795 "under construction!", proto->nameForLogging());
4796 return;
4797 }
4798
4799 if (isRequiredMethod && isInstanceMethod) {
4800 protocol_addMethod_nolock(proto->instanceMethods, name, types);
4801 } else if (isRequiredMethod && !isInstanceMethod) {
4802 protocol_addMethod_nolock(proto->classMethods, name, types);
4803 } else if (!isRequiredMethod && isInstanceMethod) {
4804 protocol_addMethod_nolock(proto->optionalInstanceMethods, name,types);
4805 } else /* !isRequiredMethod && !isInstanceMethod) */ {
4806 protocol_addMethod_nolock(proto->optionalClassMethods, name, types);
4807 }
4808 }
4809
4810
4811 /***********************************************************************
4812 * protocol_addProperty
4813 * Adds a property to a protocol. The protocol must be under construction.
4814 * Locking: acquires runtimeLock
4815 **********************************************************************/
4816 static void
4817 protocol_addProperty_nolock(property_list_t *&plist, const char *name,
4818 const objc_property_attribute_t *attrs,
4819 unsigned int count)
4820 {
4821 if (!plist) {
4822 plist = (property_list_t *)calloc(sizeof(property_list_t), 1);
4823 plist->entsizeAndFlags = sizeof(property_t);
4824 } else {
4825 plist = (property_list_t *)
4826 realloc(plist, sizeof(property_list_t)
4827 + plist->count * plist->entsize());
4828 }
4829
4830 property_t& prop = plist->get(plist->count++);
4831 prop.name = strdupIfMutable(name);
4832 prop.attributes = copyPropertyAttributeString(attrs, count);
4833 }
4834
4835 void
4836 protocol_addProperty(Protocol *proto_gen, const char *name,
4837 const objc_property_attribute_t *attrs,
4838 unsigned int count,
4839 BOOL isRequiredProperty, BOOL isInstanceProperty)
4840 {
4841 protocol_t *proto = newprotocol(proto_gen);
4842
4843 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4844 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4845
4846 if (!proto) return;
4847 if (!name) return;
4848
4849 mutex_locker_t lock(runtimeLock);
4850
4851 if (proto->ISA() != cls) {
4852 _objc_inform("protocol_addProperty: protocol '%s' is not "
4853 "under construction!", proto->nameForLogging());
4854 return;
4855 }
4856
4857 if (isRequiredProperty && isInstanceProperty) {
4858 protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
4859 }
4860 else if (isRequiredProperty && !isInstanceProperty) {
4861 protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
4862 }
4863 //else if (!isRequiredProperty && isInstanceProperty) {
4864 // protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
4865 //}
4866 //else /* !isRequiredProperty && !isInstanceProperty) */ {
4867 // protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
4868 //}
4869 }
4870
4871 static int
4872 objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
4873 {
4874 int count = 0;
4875
4876 if (buffer) {
4877 int c = 0;
4878 foreach_realized_class([=, &count, &c](Class cls) {
4879 count++;
4880 if (c < bufferLen) {
4881 buffer[c++] = cls;
4882 }
4883 return true;
4884 });
4885 } else {
4886 foreach_realized_class([&count](Class cls) {
4887 count++;
4888 return true;
4889 });
4890 }
4891
4892 return count;
4893 }
4894
4895 static Class *
4896 objc_copyRealizedClassList_nolock(unsigned int *outCount)
4897 {
4898 Class *result = nil;
4899 unsigned int count = 0;
4900
4901 foreach_realized_class([&count](Class cls) {
4902 count++;
4903 return true;
4904 });
4905
4906 if (count > 0) {
4907 unsigned int c = 0;
4908
4909 result = (Class *)malloc((1+count) * sizeof(Class));
4910 foreach_realized_class([=, &c](Class cls) {
4911 result[c++] = cls;
4912 return true;
4913 });
4914 result[c] = nil;
4915 }
4916
4917 if (outCount) *outCount = count;
4918 return result;
4919 }
4920
4921 static void
4922 class_getImpCache_nolock(Class cls, cache_t &cache, objc_imp_cache_entry *buffer, int len)
4923 {
4924 bucket_t *buckets = cache.buckets();
4925
4926 uintptr_t count = cache.capacity();
4927 uintptr_t index;
4928 int wpos = 0;
4929
4930 for (index = 0; index < count && wpos < len; index += 1) {
4931 if (buckets[index].sel()) {
4932 buffer[wpos].imp = buckets[index].imp(cls);
4933 buffer[wpos].sel = buckets[index].sel();
4934 wpos++;
4935 }
4936 }
4937 }
4938
4939 /***********************************************************************
4940 * objc_getClassList
4941 * Returns pointers to all classes.
4942 * This requires all classes to be realized, which is regrettably non-lazy.
4943 * Locking: acquires runtimeLock
4944 **********************************************************************/
4945 int
4946 objc_getClassList(Class *buffer, int bufferLen)
4947 {
4948 mutex_locker_t lock(runtimeLock);
4949
4950 realizeAllClasses();
4951
4952 return objc_getRealizedClassList_nolock(buffer, bufferLen);
4953 }
4954
4955 /***********************************************************************
4956 * objc_copyRealizedClassList
4957 * Returns pointers to all currently realized classes.
4958 *
4959 * outCount may be nil. *outCount is the number of classes returned.
4960 * If the returned array is not nil, it is nil-terminated and must be
4961 * freed with free().
4962 * Locking: write-locks runtimeLock
4963 **********************************************************************/
4964 Class *
4965 objc_copyRealizedClassList(unsigned int *outCount)
4966 {
4967 mutex_locker_t lock(runtimeLock);
4968
4969 return objc_copyRealizedClassList_nolock(outCount);
4970 }
4971
4972
4973 /***********************************************************************
4974 * objc_copyClassList
4975 * Returns pointers to all classes.
4976 * This requires all classes to be realized, which is regrettably non-lazy.
4977 *
4978 * outCount may be nil. *outCount is the number of classes returned.
4979 * If the returned array is not nil, it is nil-terminated and must be
4980 * freed with free().
4981 * Locking: write-locks runtimeLock
4982 **********************************************************************/
4983 Class *
4984 objc_copyClassList(unsigned int *outCount)
4985 {
4986 mutex_locker_t lock(runtimeLock);
4987
4988 realizeAllClasses();
4989
4990 return objc_copyRealizedClassList_nolock(outCount);
4991 }
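// For reference, a hedged usage sketch of the copy-list contract described
// above: the returned buffer is nil-terminated and owned by the caller:
//
//     unsigned int count = 0;
//     Class *classes = objc_copyClassList(&count);
//     for (unsigned int i = 0; i < count; i++) {
//         printf("%s\n", class_getName(classes[i]));
//     }
//     free(classes);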
4992
4993 /***********************************************************************
4994 * class_copyImpCache
4995 * Returns the current content of the Class IMP Cache
4996 *
4997 * outCount may be nil. *outCount is the number of entries returned.
4998 * If the returned array is not nil, it is nil-terminated and must be
4999 * freed with free().
5000 * Locking: locks cacheUpdateLock, or runtimeLock if CONFIG_USE_CACHE_LOCK is off
5001 **********************************************************************/
5002 objc_imp_cache_entry *
5003 class_copyImpCache(Class cls, int *outCount)
5004 {
5005 objc_imp_cache_entry *buffer = nullptr;
5006
5007 #if CONFIG_USE_CACHE_LOCK
5008 mutex_locker_t lock(cacheUpdateLock);
5009 #else
5010 mutex_locker_t lock(runtimeLock);
5011 #endif
5012
5013 cache_t &cache = cls->cache;
5014 int count = (int)cache.occupied();
5015
5016 if (count) {
5017 buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
5018 class_getImpCache_nolock(cls, cache, buffer, count);
5019 }
5020
5021 if (outCount) *outCount = count;
5022 return buffer;
5023 }
5024
5025
5026 /***********************************************************************
5027 * objc_copyProtocolList
5028 * Returns pointers to all protocols.
5029 * Locking: read-locks runtimeLock
5030 **********************************************************************/
5031 Protocol * __unsafe_unretained *
5032 objc_copyProtocolList(unsigned int *outCount)
5033 {
5034 mutex_locker_t lock(runtimeLock);
5035
5036 NXMapTable *protocol_map = protocols();
5037
5038 // Find all the protocols from the pre-optimized images. These protocols
5039 // won't be in the protocol map.
5040 objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
5041 if (sharedCacheSupportsProtocolRoots()) {
5042 header_info *hi;
5043 for (hi = FirstHeader; hi; hi = hi->getNext()) {
5044 if (!hi->hasPreoptimizedProtocols())
5045 continue;
5046
5047 size_t count, i;
5048 const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
5049 for (i = 0; i < count; i++) {
5050 const protocol_t* protocol = protolist[i];
5051
5052 // Skip protocols we already have in the runtime map. These likely
5053 // correspond to protocols added dynamically that have the same
5054 // name as a protocol found later in a dlopen'ed shared cache image.
5055 if (NXMapGet(protocol_map, protocol->mangledName) != nil)
5056 continue;
5057
5058 // The protocols in the shared cache protolist point to their
5059 // original on-disk object, not the optimized one. We can use the name
5060 // to find the optimized one.
5061 Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
5062 preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
5063 }
5064 }
5065 }
5066
5067 unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();
5068 if (count == 0) {
5069 if (outCount) *outCount = 0;
5070 return nil;
5071 }
5072
5073 Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));
5074
5075 unsigned int i = 0;
5076 Protocol *proto;
5077 const char *name;
5078 NXMapState state = NXInitMapState(protocol_map);
5079 while (NXNextMapState(protocol_map, &state,
5080 (const void **)&name, (const void **)&proto))
5081 {
5082 result[i++] = proto;
5083 }
5084
5085 // Add any protocols found in the pre-optimized table
5086 for (auto it : preoptimizedProtocols) {
5087 result[i++] = it.second;
5088 }
5089
5090 result[i++] = nil;
5091 ASSERT(i == count+1);
5092
5093 if (outCount) *outCount = count;
5094 return result;
5095 }
5096
5097
5098 /***********************************************************************
5099 * objc_getProtocol
5100 * Get a protocol by name, or return nil
5101 * Locking: read-locks runtimeLock
5102 **********************************************************************/
5103 Protocol *objc_getProtocol(const char *name)
5104 {
5105 mutex_locker_t lock(runtimeLock);
5106 return getProtocol(name);
5107 }
5108
5109
5110 /***********************************************************************
5111 * class_copyMethodList
5112 * fixme
5113 * Locking: read-locks runtimeLock
5114 **********************************************************************/
5115 Method *
5116 class_copyMethodList(Class cls, unsigned int *outCount)
5117 {
5118 unsigned int count = 0;
5119 Method *result = nil;
5120
5121 if (!cls) {
5122 if (outCount) *outCount = 0;
5123 return nil;
5124 }
5125
5126 mutex_locker_t lock(runtimeLock);
5127 const auto methods = cls->data()->methods();
5128
5129 ASSERT(cls->isRealized());
5130
5131 count = methods.count();
5132
5133 if (count > 0) {
5134 result = (Method *)malloc((count + 1) * sizeof(Method));
5135
5136 count = 0;
5137 for (auto& meth : methods) {
5138 result[count++] = &meth;
5139 }
5140 result[count] = nil;
5141 }
5142
5143 if (outCount) *outCount = count;
5144 return result;
5145 }
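// For reference, a hedged usage sketch: enumerating a class's own methods
// (superclass methods are not included). `cls` is assumed to be a valid class:
//
//     unsigned int count = 0;
//     Method *methods = class_copyMethodList(cls, &count);
//     for (unsigned int i = 0; i < count; i++) {
//         SEL sel = method_getName(methods[i]);
//         const char *types = method_getTypeEncoding(methods[i]);
//         printf("%s  %s\n", sel_getName(sel), types ? types : "");
//     }
//     free(methods);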
5146
5147
5148 /***********************************************************************
5149 * class_copyIvarList
5150 * fixme
5151 * Locking: read-locks runtimeLock
5152 **********************************************************************/
5153 Ivar *
5154 class_copyIvarList(Class cls, unsigned int *outCount)
5155 {
5156 const ivar_list_t *ivars;
5157 Ivar *result = nil;
5158 unsigned int count = 0;
5159
5160 if (!cls) {
5161 if (outCount) *outCount = 0;
5162 return nil;
5163 }
5164
5165 mutex_locker_t lock(runtimeLock);
5166
5167 ASSERT(cls->isRealized());
5168
5169 if ((ivars = cls->data()->ro()->ivars) && ivars->count) {
5170 result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
5171
5172 for (auto& ivar : *ivars) {
5173 if (!ivar.offset) continue; // anonymous bitfield
5174 result[count++] = &ivar;
5175 }
5176 result[count] = nil;
5177 }
5178
5179 if (outCount) *outCount = count;
5180 return result;
5181 }
5182
5183
5184 /***********************************************************************
5185 * class_copyPropertyList. Returns a heap block containing the
5186 * properties declared in the class, or nil if the class
5187 * declares no properties. Caller must free the block.
5188 * Does not copy any superclass's properties.
5189 * Locking: read-locks runtimeLock
5190 **********************************************************************/
5191 objc_property_t *
5192 class_copyPropertyList(Class cls, unsigned int *outCount)
5193 {
5194 if (!cls) {
5195 if (outCount) *outCount = 0;
5196 return nil;
5197 }
5198
5199 mutex_locker_t lock(runtimeLock);
5200
5201 checkIsKnownClass(cls);
5202 ASSERT(cls->isRealized());
5203
5204 auto rw = cls->data();
5205
5206 property_t **result = nil;
5207 auto const properties = rw->properties();
5208 unsigned int count = properties.count();
5209 if (count > 0) {
5210 result = (property_t **)malloc((count + 1) * sizeof(property_t *));
5211
5212 count = 0;
5213 for (auto& prop : properties) {
5214 result[count++] = &prop;
5215 }
5216 result[count] = nil;
5217 }
5218
5219 if (outCount) *outCount = count;
5220 return (objc_property_t *)result;
5221 }
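// Illustrative usage (a sketch; "MyClass" is hypothetical). Each entry can
// be inspected with the public property accessors:
//
//   unsigned int count;
//   objc_property_t *props = class_copyPropertyList(objc_getClass("MyClass"), &count);
//   for (unsigned int i = 0; i < count; i++) {
//       printf("%s -> %s\n", property_getName(props[i]),
//              property_getAttributes(props[i]));
//   }
//   free(props);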
5222
5223
5224 /***********************************************************************
5225 * objc_class::getLoadMethod
5226 * fixme
5227 * Called only from add_class_to_loadable_list.
5228 * Locking: runtimeLock must be read- or write-locked by the caller.
5229 **********************************************************************/
5230 IMP
5231 objc_class::getLoadMethod()
5232 {
5233 runtimeLock.assertLocked();
5234
5235 const method_list_t *mlist;
5236
5237 ASSERT(isRealized());
5238 ASSERT(ISA()->isRealized());
5239 ASSERT(!isMetaClass());
5240 ASSERT(ISA()->isMetaClass());
5241
5242 mlist = ISA()->data()->ro()->baseMethods();
5243 if (mlist) {
5244 for (const auto& meth : *mlist) {
5245 const char *name = sel_cname(meth.name);
5246 if (0 == strcmp(name, "load")) {
5247 return meth.imp;
5248 }
5249 }
5250 }
5251
5252 return nil;
5253 }
5254
5255
5256 /***********************************************************************
5257 * _category_getName
5258 * Returns a category's name.
5259 * Locking: none
5260 **********************************************************************/
5261 const char *
5262 _category_getName(Category cat)
5263 {
5264 return cat->name;
5265 }
5266
5267
5268 /***********************************************************************
5269 * _category_getClassName
5270 * Returns a category's class's name
5271 * Called only from add_category_to_loadable_list and
5272 * remove_category_from_loadable_list for logging purposes.
5273 * Locking: runtimeLock must be read- or write-locked by the caller
5274 **********************************************************************/
5275 const char *
5276 _category_getClassName(Category cat)
5277 {
5278 runtimeLock.assertLocked();
5279 return remapClass(cat->cls)->nameForLogging();
5280 }
5281
5282
5283 /***********************************************************************
5284 * _category_getClass
5285 * Returns a category's class
5286 * Called only by call_category_loads.
5287 * Locking: read-locks runtimeLock
5288 **********************************************************************/
5289 Class
5290 _category_getClass(Category cat)
5291 {
5292 mutex_locker_t lock(runtimeLock);
5293 Class result = remapClass(cat->cls);
5294 ASSERT(result->isRealized()); // ok for call_category_loads' usage
5295 return result;
5296 }
5297
5298
5299 /***********************************************************************
5300 * _category_getLoadMethod
5301 * fixme
5302 * Called only from add_category_to_loadable_list
5303 * Locking: runtimeLock must be read- or write-locked by the caller
5304 **********************************************************************/
5305 IMP
5306 _category_getLoadMethod(Category cat)
5307 {
5308 runtimeLock.assertLocked();
5309
5310 const method_list_t *mlist;
5311
5312 mlist = cat->classMethods;
5313 if (mlist) {
5314 for (const auto& meth : *mlist) {
5315 const char *name = sel_cname(meth.name);
5316 if (0 == strcmp(name, "load")) {
5317 return meth.imp;
5318 }
5319 }
5320 }
5321
5322 return nil;
5323 }
5324
5325
5326 /***********************************************************************
5327 * category_t::propertiesForMeta
5328 * Return a category's instance or class properties.
5329 * hi is the image containing the category.
5330 **********************************************************************/
5331 property_list_t *
5332 category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
5333 {
5334 if (!isMeta) return instanceProperties;
5335 else if (hi->info()->hasCategoryClassProperties()) return _classProperties;
5336 else return nil;
5337 }
5338
5339
5340 /***********************************************************************
5341 * class_copyProtocolList
5342 * fixme
5343 * Locking: read-locks runtimeLock
5344 **********************************************************************/
5345 Protocol * __unsafe_unretained *
5346 class_copyProtocolList(Class cls, unsigned int *outCount)
5347 {
5348 unsigned int count = 0;
5349 Protocol **result = nil;
5350
5351 if (!cls) {
5352 if (outCount) *outCount = 0;
5353 return nil;
5354 }
5355
5356 mutex_locker_t lock(runtimeLock);
5357 const auto protocols = cls->data()->protocols();
5358
5359 checkIsKnownClass(cls);
5360
5361 ASSERT(cls->isRealized());
5362
5363 count = protocols.count();
5364
5365 if (count > 0) {
5366 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
5367
5368 count = 0;
5369 for (const auto& proto : protocols) {
5370 result[count++] = (Protocol *)remapProtocol(proto);
5371 }
5372 result[count] = nil;
5373 }
5374
5375 if (outCount) *outCount = count;
5376 return result;
5377 }
5378
5379
5380 /***********************************************************************
5381 * objc_copyImageNames
5382 * Copies names of loaded images with ObjC contents.
5383 *
5384 * Locking: acquires runtimeLock
5385 **********************************************************************/
5386 const char **objc_copyImageNames(unsigned int *outCount)
5387 {
5388 mutex_locker_t lock(runtimeLock);
5389
5390 int HeaderCount = 0;
5391 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5392 HeaderCount++;
5393 }
5394
5395 #if TARGET_OS_WIN32
5396 const TCHAR **names = (const TCHAR **)
5397 malloc((HeaderCount+1) * sizeof(TCHAR *));
5398 #else
5399 const char **names = (const char **)
5400 malloc((HeaderCount+1) * sizeof(char *));
5401 #endif
5402
5403 unsigned int count = 0;
5404 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5405 #if TARGET_OS_WIN32
5406 if (hi->moduleName) {
5407 names[count++] = hi->moduleName;
5408 }
5409 #else
5410 const char *fname = hi->fname();
5411 if (fname) {
5412 names[count++] = fname;
5413 }
5414 #endif
5415 }
5416 names[count] = nil;
5417
5418 if (count == 0) {
5419 // Return nil instead of an empty list if there are no images
5420 free((void *)names);
5421 names = nil;
5422 }
5423
5424 if (outCount) *outCount = count;
5425 return names;
5426 }
5427
5428
5429 /***********************************************************************
5430 * copyClassNamesForImage_nolock
5431 * Copies class names from the given image.
5432 * Missing weak-import classes are omitted.
5433 * Swift class names are demangled.
5434 *
5435 * Locking: runtimeLock must be held by the caller
5436 **********************************************************************/
5437 const char **
5438 copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
5439 {
5440 runtimeLock.assertLocked();
5441 ASSERT(hi);
5442
5443 size_t count;
5444 classref_t const *classlist = _getObjc2ClassList(hi, &count);
5445 const char **names = (const char **)
5446 malloc((count+1) * sizeof(const char *));
5447
5448 size_t shift = 0;
5449 for (size_t i = 0; i < count; i++) {
5450 Class cls = remapClass(classlist[i]);
5451 if (cls) {
5452 names[i-shift] = cls->demangledName(/* needs lock */false);
5453 } else {
5454 shift++; // ignored weak-linked class
5455 }
5456 }
5457 count -= shift;
5458 names[count] = nil;
5459
5460 if (outCount) *outCount = (unsigned int)count;
5461 return names;
5462 }
5463
5464
5465
5466 /***********************************************************************
5467 * objc_copyClassNamesForImage
5468 * Copies class names from the named image.
5469 * The image name must be identical to dladdr's dli_fname value.
5470 * Missing weak-import classes are omitted.
5471 * Swift class names are demangled.
5472 *
5473 * Locking: acquires runtimeLock
5474 **********************************************************************/
5475 const char **
5476 objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
5477 {
5478 if (!image) {
5479 if (outCount) *outCount = 0;
5480 return nil;
5481 }
5482
5483 mutex_locker_t lock(runtimeLock);
5484
5485 // Find the image.
5486 header_info *hi;
5487 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5488 #if TARGET_OS_WIN32
5489 if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
5490 #else
5491 if (0 == strcmp(image, hi->fname())) break;
5492 #endif
5493 }
5494
5495 if (!hi) {
5496 if (outCount) *outCount = 0;
5497 return nil;
5498 }
5499
5500 return copyClassNamesForImage_nolock(hi, outCount);
5501 }
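// Illustrative usage, pairing this with objc_copyImageNames() above
// (a sketch; both lists are malloc'd and owned by the caller):
//
//   unsigned int imageCount;
//   const char **images = objc_copyImageNames(&imageCount);
//   for (unsigned int i = 0; i < imageCount; i++) {
//       unsigned int classCount;
//       const char **classes = objc_copyClassNamesForImage(images[i], &classCount);
//       printf("%s: %u classes\n", images[i], classCount);
//       free(classes);
//   }
//   free(images);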
5502
5503
5504 /***********************************************************************
5505 * objc_copyClassNamesForImageHeader
5506 * Copies class names from the given image.
5507 * Missing weak-import classes are omitted.
5508 * Swift class names are demangled.
5509 *
5510 * Locking: acquires runtimeLock
5511 **********************************************************************/
5512 const char **
5513 objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
5514 {
5515 if (!mh) {
5516 if (outCount) *outCount = 0;
5517 return nil;
5518 }
5519
5520 mutex_locker_t lock(runtimeLock);
5521
5522 // Find the image.
5523 header_info *hi;
5524 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5525 if (hi->mhdr() == (const headerType *)mh) break;
5526 }
5527
5528 if (!hi) {
5529 if (outCount) *outCount = 0;
5530 return nil;
5531 }
5532
5533 return copyClassNamesForImage_nolock(hi, outCount);
5534 }
5535
5536
5537 /***********************************************************************
5538 * saveTemporaryString
5539 * Save a string in a thread-local FIFO buffer.
5540 * This is suitable for temporary strings generated for logging purposes.
5541 **********************************************************************/
5542 static void
5543 saveTemporaryString(char *str)
5544 {
5545 // Fixed-size FIFO. We free the first string, shift
5546 // the rest, and add the new string to the end.
5547 _objc_pthread_data *data = _objc_fetch_pthread_data(true);
5548 if (data->printableNames[0]) {
5549 free(data->printableNames[0]);
5550 }
5551 int last = countof(data->printableNames) - 1;
5552 for (int i = 0; i < last; i++) {
5553 data->printableNames[i] = data->printableNames[i+1];
5554 }
5555 data->printableNames[last] = str;
5556 }
5557
5558
5559 /***********************************************************************
5560 * objc_class::nameForLogging
5561 * Returns the class's name, suitable for display.
5562 * The returned memory is TEMPORARY. Print it or copy it immediately.
5563 * Locking: none
5564 **********************************************************************/
5565 const char *
5566 objc_class::nameForLogging()
5567 {
5568 // Handle the easy case directly.
5569 if (isRealized() || isFuture()) {
5570 if (!isAnySwift()) {
5571 return data()->ro()->name;
5572 }
5573 auto rwe = data()->ext();
5574 if (rwe && rwe->demangledName) {
5575 return rwe->demangledName;
5576 }
5577 }
5578
5579 char *result;
5580
5581 const char *name = mangledName();
5582 char *de = copySwiftV1DemangledName(name);
5583 if (de) result = de;
5584 else result = strdup(name);
5585
5586 saveTemporaryString(result);
5587 return result;
5588 }
5589
5590
5591 /***********************************************************************
5592 * objc_class::demangledName
5593 * If needsLock is false, the caller must already hold runtimeLock.
5594 * Locking: runtimeLock may or may not be held by the caller.
5595 **********************************************************************/
5596 mutex_t DemangleCacheLock;
5597 static objc::DenseSet<const char *> *DemangleCache;
5598 const char *
5599 objc_class::demangledName(bool needsLock)
5600 {
5601 if (!needsLock) {
5602 runtimeLock.assertLocked();
5603 }
5604
5605 // Return previously demangled name if available.
5606 if (isRealized() || isFuture()) {
5607 if (!isAnySwift()) {
5608 return data()->ro()->name;
5609 }
5610 auto rwe = data()->ext();
5611 if (rwe && rwe->demangledName) {
5612 return rwe->demangledName;
5613 }
5614 }
5615
5616 // Try demangling the mangled name.
5617 const char *mangled = mangledName();
5618 char *de = copySwiftV1DemangledName(mangled);
5619 class_rw_ext_t *rwe;
5620
5621 if (isRealized() || isFuture()) {
5622 if (needsLock) {
5623 mutex_locker_t lock(runtimeLock);
5624 rwe = data()->extAllocIfNeeded();
5625 } else {
5626 rwe = data()->extAllocIfNeeded();
5627 }
5628 // Class is already realized or future.
5629 // Save demangling result in rw data.
5630 // We may not own runtimeLock so use an atomic operation instead.
5631 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
5632 (void**)&rwe->demangledName))
5633 {
5634 if (de) free(de);
5635 }
5636 return rwe->demangledName;
5637 }
5638
5639 // Class is not yet realized.
5640 if (!de) {
5641 // Name is not mangled. Return it without caching.
5642 return mangled;
5643 }
5644
5645 // Class is not yet realized and name is mangled.
5646 // Allocate the name but don't save it in the class.
5647 // Save the name in a side cache instead to prevent leaks.
5648 // When the class is actually realized we may allocate a second
5649 // copy of the name, but we don't care.
5650 // (Previously we would try to realize the class now and save the
5651 // name there, but realization is more complicated for Swift classes.)
5652
5653 // Only objc_copyClassNamesForImage() should get here.
5654 // fixme lldb's calls to class_getName() can also get here when
5655 // interrogating the dyld shared cache. (rdar://27258517)
5656 // fixme ASSERT(realize);
5657
5658 const char *cached;
5659 {
5660 mutex_locker_t lock(DemangleCacheLock);
5661 if (!DemangleCache) {
5662 DemangleCache = new objc::DenseSet<const char *>{};
5663 }
5664 cached = *DemangleCache->insert(de).first;
5665 }
5666 if (cached != de) free(de);
5667 return cached;
5668 }
5669
5670
5671 /***********************************************************************
5672 * class_getName
5673 * fixme
5674 * Locking: may acquire DemangleCacheLock
5675 **********************************************************************/
5676 const char *class_getName(Class cls)
5677 {
5678 if (!cls) return "nil";
5679 // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
5680 // ASSERT(cls->isRealized() || cls->isFuture());
5681 return cls->demangledName(/* needs lock */true);
5682 }
5683
5684 /***********************************************************************
5685 * objc_debug_class_getNameRaw
5686 * fixme
5687 * Locking: none
5688 **********************************************************************/
5689 const char *objc_debug_class_getNameRaw(Class cls)
5690 {
5691 if (!cls) return "nil";
5692 return cls->mangledName();
5693 }
5694
5695
5696 /***********************************************************************
5697 * class_getVersion
5698 * fixme
5699 * Locking: none
5700 **********************************************************************/
5701 int
5702 class_getVersion(Class cls)
5703 {
5704 if (!cls) return 0;
5705 ASSERT(cls->isRealized());
5706 auto rwe = cls->data()->ext();
5707 if (rwe) {
5708 return rwe->version;
5709 }
5710 return cls->isMetaClass() ? 7 : 0;
5711 }
5712
5713
5714 /***********************************************************************
5715 * class_setVersion
5716 * fixme
5717 * Locking: none
5718 **********************************************************************/
5719 void
5720 class_setVersion(Class cls, int version)
5721 {
5722 if (!cls) return;
5723 ASSERT(cls->isRealized());
5724 auto rwe = cls->data()->ext();
5725 if (!rwe) {
5726 mutex_locker_t lock(runtimeLock);
5727 rwe = cls->data()->extAllocIfNeeded();
5728 }
5729
5730 rwe->version = version;
5731 }
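// Illustrative usage (a sketch; "MyDocument" is hypothetical). The version
// is an arbitrary integer that callers such as archivers may use to track
// changes in a class's layout:
//
//   Class cls = objc_getClass("MyDocument");
//   class_setVersion(cls, 2);
//   assert(class_getVersion(cls) == 2);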
5732
5733 /***********************************************************************
5734 * search_method_list_inline
5735 **********************************************************************/
5736 ALWAYS_INLINE static method_t *
5737 findMethodInSortedMethodList(SEL key, const method_list_t *list)
5738 {
5739 ASSERT(list);
5740
5741 const method_t * const first = &list->first;
5742 const method_t *base = first;
5743 const method_t *probe;
5744 uintptr_t keyValue = (uintptr_t)key;
5745 uint32_t count;
5746
5747 for (count = list->count; count != 0; count >>= 1) {
5748 probe = base + (count >> 1);
5749
5750 uintptr_t probeValue = (uintptr_t)probe->name;
5751
5752 if (keyValue == probeValue) {
5753 // `probe` is a match.
5754 // Rewind looking for the *first* occurrence of this value.
5755 // This is required for correct category overrides.
5756 while (probe > first && keyValue == (uintptr_t)probe[-1].name) {
5757 probe--;
5758 }
5759 return (method_t *)probe;
5760 }
5761
5762 if (keyValue > probeValue) {
5763 base = probe + 1;
5764 count--;
5765 }
5766 }
5767
5768 return nil;
5769 }
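// A minimal standalone sketch of the same idea (a hypothetical helper, not
// used by the runtime): binary search over a sorted array of pointer-sized
// keys, then rewind to the first equal entry so that the earliest
// (category-overriding) occurrence wins.
//
//   static const uintptr_t *
//   find_first_equal(const uintptr_t *list, uint32_t count, uintptr_t key)
//   {
//       const uintptr_t *base = list;
//       for (; count != 0; count >>= 1) {
//           const uintptr_t *probe = base + (count >> 1);
//           if (key == *probe) {
//               while (probe > list && key == probe[-1]) probe--;  // rewind
//               return probe;
//           }
//           if (key > *probe) {
//               base = probe + 1;
//               count--;
//           }
//       }
//       return NULL;
//   }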
5770
5771 ALWAYS_INLINE static method_t *
5772 search_method_list_inline(const method_list_t *mlist, SEL sel)
5773 {
5774 int methodListIsFixedUp = mlist->isFixedUp();
5775 int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
5776
5777 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
5778 return findMethodInSortedMethodList(sel, mlist);
5779 } else {
5780 // Linear search of unsorted method list
5781 for (auto& meth : *mlist) {
5782 if (meth.name == sel) return &meth;
5783 }
5784 }
5785
5786 #if DEBUG
5787 // sanity-check negative results
5788 if (mlist->isFixedUp()) {
5789 for (auto& meth : *mlist) {
5790 if (meth.name == sel) {
5791 _objc_fatal("linear search worked when binary search did not");
5792 }
5793 }
5794 }
5795 #endif
5796
5797 return nil;
5798 }
5799
5800 NEVER_INLINE static method_t *
5801 search_method_list(const method_list_t *mlist, SEL sel)
5802 {
5803 return search_method_list_inline(mlist, sel);
5804 }
5805
5806 /***********************************************************************
5807 * method_lists_contains_any
5808 **********************************************************************/
5809 static NEVER_INLINE bool
5810 method_lists_contains_any(method_list_t * const *mlists, method_list_t * const *end,
5811 SEL sels[], size_t selcount)
5812 {
5813 while (mlists < end) {
5814 const method_list_t *mlist = *mlists++;
5815 int methodListIsFixedUp = mlist->isFixedUp();
5816 int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
5817
5818 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
5819 for (size_t i = 0; i < selcount; i++) {
5820 if (findMethodInSortedMethodList(sels[i], mlist)) {
5821 return true;
5822 }
5823 }
5824 } else {
5825 for (auto& meth : *mlist) {
5826 for (size_t i = 0; i < selcount; i++) {
5827 if (meth.name == sels[i]) {
5828 return true;
5829 }
5830 }
5831 }
5832 }
5833 }
5834 return false;
5835 }
5836
5837 /***********************************************************************
5838 * getMethodNoSuper_nolock
5839 * fixme
5840 * Locking: runtimeLock must be read- or write-locked by the caller
5841 **********************************************************************/
5842 static method_t *
5843 getMethodNoSuper_nolock(Class cls, SEL sel)
5844 {
5845 runtimeLock.assertLocked();
5846
5847 ASSERT(cls->isRealized());
5848 // fixme nil cls?
5849 // fixme nil sel?
5850
5851 auto const methods = cls->data()->methods();
5852 for (auto mlists = methods.beginLists(),
5853 end = methods.endLists();
5854 mlists != end;
5855 ++mlists)
5856 {
5857 // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
5858 // caller of search_method_list; inlining it turns
5859 // getMethodNoSuper_nolock into a frameless function and eliminates
5860 // any stores from this codepath.
5861 method_t *m = search_method_list_inline(*mlists, sel);
5862 if (m) return m;
5863 }
5864
5865 return nil;
5866 }
5867
5868
5869 /***********************************************************************
5870 * getMethod_nolock
5871 * fixme
5872 * Locking: runtimeLock must be read- or write-locked by the caller
5873 **********************************************************************/
5874 static method_t *
5875 getMethod_nolock(Class cls, SEL sel)
5876 {
5877 method_t *m = nil;
5878
5879 runtimeLock.assertLocked();
5880
5881 // fixme nil cls?
5882 // fixme nil sel?
5883
5884 ASSERT(cls->isRealized());
5885
5886 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) {
5887 cls = cls->superclass;
5888 }
5889
5890 return m;
5891 }
5892
5893
5894 /***********************************************************************
5895 * _class_getMethod
5896 * fixme
5897 * Locking: read-locks runtimeLock
5898 **********************************************************************/
5899 static Method _class_getMethod(Class cls, SEL sel)
5900 {
5901 mutex_locker_t lock(runtimeLock);
5902 return getMethod_nolock(cls, sel);
5903 }
5904
5905
5906 /***********************************************************************
5907 * class_getInstanceMethod. Return the instance method for the
5908 * specified class and selector.
5909 **********************************************************************/
5910 Method class_getInstanceMethod(Class cls, SEL sel)
5911 {
5912 if (!cls || !sel) return nil;
5913
5914 // This deliberately avoids +initialize because it historically did so.
5915
5916 // This implementation is a bit weird because it's the only place that
5917 // wants a Method instead of an IMP.
5918
5919 #warning fixme build and search caches
5920
5921 // Search method lists, try method resolver, etc.
5922 lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);
5923
5924 #warning fixme build and search caches
5925
5926 return _class_getMethod(cls, sel);
5927 }
5928
5929
5930 /***********************************************************************
5931 * resolveClassMethod
5932 * Call +resolveClassMethod, looking for a method to be added to class cls.
5933 * cls should be a metaclass.
5934 * Does not check if the method already exists.
5935 **********************************************************************/
5936 static void resolveClassMethod(id inst, SEL sel, Class cls)
5937 {
5938 runtimeLock.assertUnlocked();
5939 ASSERT(cls->isRealized());
5940 ASSERT(cls->isMetaClass());
5941
5942 if (!lookUpImpOrNil(inst, @selector(resolveClassMethod:), cls)) {
5943 // Resolver not implemented.
5944 return;
5945 }
5946
5947 Class nonmeta;
5948 {
5949 mutex_locker_t lock(runtimeLock);
5950 nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
5951 // +initialize path should have realized nonmeta already
5952 if (!nonmeta->isRealized()) {
5953 _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
5954 nonmeta->nameForLogging(), nonmeta);
5955 }
5956 }
5957 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
5958 bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);
5959
5960 // Cache the result (good or bad) so the resolver doesn't fire next time.
5961 // +resolveClassMethod adds to self->ISA() a.k.a. cls
5962 IMP imp = lookUpImpOrNil(inst, sel, cls);
5963
5964 if (resolved && PrintResolving) {
5965 if (imp) {
5966 _objc_inform("RESOLVE: method %c[%s %s] "
5967 "dynamically resolved to %p",
5968 cls->isMetaClass() ? '+' : '-',
5969 cls->nameForLogging(), sel_getName(sel), imp);
5970 }
5971 else {
5972 // Method resolver didn't add anything?
5973 _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
5974 ", but no new implementation of %c[%s %s] was found",
5975 cls->nameForLogging(), sel_getName(sel),
5976 cls->isMetaClass() ? '+' : '-',
5977 cls->nameForLogging(), sel_getName(sel));
5978 }
5979 }
5980 }
5981
5982
5983 /***********************************************************************
5984 * resolveInstanceMethod
5985 * Call +resolveInstanceMethod, looking for a method to be added to class cls.
5986 * cls may be a metaclass or a non-meta class.
5987 * Does not check if the method already exists.
5988 **********************************************************************/
5989 static void resolveInstanceMethod(id inst, SEL sel, Class cls)
5990 {
5991 runtimeLock.assertUnlocked();
5992 ASSERT(cls->isRealized());
5993 SEL resolve_sel = @selector(resolveInstanceMethod:);
5994
5995 if (!lookUpImpOrNil(cls, resolve_sel, cls->ISA())) {
5996 // Resolver not implemented.
5997 return;
5998 }
5999
6000 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
6001 bool resolved = msg(cls, resolve_sel, sel);
6002
6003 // Cache the result (good or bad) so the resolver doesn't fire next time.
6004 // +resolveInstanceMethod adds to self a.k.a. cls
6005 IMP imp = lookUpImpOrNil(inst, sel, cls);
6006
6007 if (resolved && PrintResolving) {
6008 if (imp) {
6009 _objc_inform("RESOLVE: method %c[%s %s] "
6010 "dynamically resolved to %p",
6011 cls->isMetaClass() ? '+' : '-',
6012 cls->nameForLogging(), sel_getName(sel), imp);
6013 }
6014 else {
6015 // Method resolver didn't add anything?
6016 _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
6017 ", but no new implementation of %c[%s %s] was found",
6018 cls->nameForLogging(), sel_getName(sel),
6019 cls->isMetaClass() ? '+' : '-',
6020 cls->nameForLogging(), sel_getName(sel));
6021 }
6022 }
6023 }
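// A hedged example of the caller-side contract this supports ("Dyn" and
// dyn_hello are hypothetical): the class adds an IMP in
// +resolveInstanceMethod: and returns YES, and the runtime then retries
// the lookup and caches the result.
//
//   static void dyn_hello(id self, SEL _cmd) { /* dynamically added IMP */ }
//
//   @implementation Dyn
//   + (BOOL)resolveInstanceMethod:(SEL)sel {
//       if (sel == @selector(hello)) {
//           class_addMethod(self, sel, (IMP)dyn_hello, "v@:");
//           return YES;
//       }
//       return [super resolveInstanceMethod:sel];
//   }
//   @end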
6024
6025
6026 /***********************************************************************
6027 * resolveMethod_locked
6028 * Call +resolveClassMethod or +resolveInstanceMethod.
6029 *
6030 * Called with the runtimeLock held to avoid pressure in the caller
6031 * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller
6032 **********************************************************************/
6033 static NEVER_INLINE IMP
6034 resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
6035 {
6036 runtimeLock.assertLocked();
6037 ASSERT(cls->isRealized());
6038
6039 runtimeLock.unlock();
6040
6041 if (! cls->isMetaClass()) {
6042 // try [cls resolveInstanceMethod:sel]
6043 resolveInstanceMethod(inst, sel, cls);
6044 }
6045 else {
6046 // try [nonMetaClass resolveClassMethod:sel]
6047 // and [cls resolveInstanceMethod:sel]
6048 resolveClassMethod(inst, sel, cls);
6049 if (!lookUpImpOrNil(inst, sel, cls)) {
6050 resolveInstanceMethod(inst, sel, cls);
6051 }
6052 }
6053
6054 // Chances are that calling the resolver has populated the cache,
6055 // so attempt to use it.
6056 return lookUpImpOrForward(inst, sel, cls, behavior | LOOKUP_CACHE);
6057 }
6058
6059
6060 /***********************************************************************
6061 * log_and_fill_cache
6062 * Log this method call. If the logger permits it, fill the method cache.
6063 * cls is the class whose method cache should be filled.
6064 * implementer is the class that owns the implementation in question.
6065 **********************************************************************/
6066 static void
6067 log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
6068 {
6069 #if SUPPORT_MESSAGE_LOGGING
6070 if (slowpath(objcMsgLogEnabled && implementer)) {
6071 bool cacheIt = logMessageSend(implementer->isMetaClass(),
6072 cls->nameForLogging(),
6073 implementer->nameForLogging(),
6074 sel);
6075 if (!cacheIt) return;
6076 }
6077 #endif
6078 cache_fill(cls, sel, imp, receiver);
6079 }
6080
6081
6082 /***********************************************************************
6083 * lookUpImpOrForward.
6084 * The standard IMP lookup.
6085 * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
6086 * Without LOOKUP_CACHE: skips optimistic unlocked lookup (but uses cache elsewhere)
6087 * Most callers should use LOOKUP_INITIALIZE and LOOKUP_CACHE
6088 * inst is an instance of cls or a subclass thereof, or nil if none is known.
6089 * If cls is an un-initialized metaclass then a non-nil inst is faster.
6090 * May return _objc_msgForward_impcache. IMPs destined for external use
6091 * must be converted to _objc_msgForward or _objc_msgForward_stret.
6092 * If you don't want forwarding at all, use LOOKUP_NIL.
6093 **********************************************************************/
6094 IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
6095 {
6096 const IMP forward_imp = (IMP)_objc_msgForward_impcache;
6097 IMP imp = nil;
6098 Class curClass;
6099
6100 runtimeLock.assertUnlocked();
6101
6102 // Optimistic cache lookup
6103 if (fastpath(behavior & LOOKUP_CACHE)) {
6104 imp = cache_getImp(cls, sel);
6105 if (imp) goto done_nolock;
6106 }
6107
6108 // runtimeLock is held during isRealized and isInitialized checking
6109 // to prevent races against concurrent realization.
6110
6111 // runtimeLock is held during method search to make
6112 // method-lookup + cache-fill atomic with respect to method addition.
6113 // Otherwise, a category could be added but ignored indefinitely because
6114 // the cache was re-filled with the old value after the cache flush on
6115 // behalf of the category.
6116
6117 runtimeLock.lock();
6118
6119 // We don't want people to be able to craft a binary blob that looks like
6120 // a class but really isn't one, and then use it to mount a CFI attack.
6121 //
6122 // To make these harder we want to make sure this is a class that was
6123 // either built into the binary or legitimately registered through
6124 // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
6125 //
6126 // TODO: this check is quite costly during process startup.
6127 checkIsKnownClass(cls);
6128
6129 if (slowpath(!cls->isRealized())) {
6130 cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
6131 // runtimeLock may have been dropped but is now locked again
6132 }
6133
6134 if (slowpath((behavior & LOOKUP_INITIALIZE) && !cls->isInitialized())) {
6135 cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
6136 // runtimeLock may have been dropped but is now locked again
6137
6138 // If sel == initialize, class_initialize will send +initialize and
6139 // then the messenger will send +initialize again after this
6140 // procedure finishes. Of course, if this is not being called
6141 // from the messenger then it won't happen. 2778172
6142 }
6143
6144 runtimeLock.assertLocked();
6145 curClass = cls;
6146
6147 // The code used to look up the class's cache again right after
6148 // taking the lock, but evidence shows that for the vast majority of
6149 // cases that second probe is a miss, so it was just a waste of time.
6150 //
6151 // The only codepath calling into this without having performed some
6152 // kind of cache lookup is class_getInstanceMethod().
6153
6154 for (unsigned attempts = unreasonableClassCount();;) {
6155 // curClass method list.
6156 Method meth = getMethodNoSuper_nolock(curClass, sel);
6157 if (meth) {
6158 imp = meth->imp;
6159 goto done;
6160 }
6161
6162 if (slowpath((curClass = curClass->superclass) == nil)) {
6163 // No implementation found, and method resolver didn't help.
6164 // Use forwarding.
6165 imp = forward_imp;
6166 break;
6167 }
6168
6169 // Halt if there is a cycle in the superclass chain.
6170 if (slowpath(--attempts == 0)) {
6171 _objc_fatal("Memory corruption in class list.");
6172 }
6173
6174 // Superclass cache.
6175 imp = cache_getImp(curClass, sel);
6176 if (slowpath(imp == forward_imp)) {
6177 // Found a forward:: entry in a superclass.
6178 // Stop searching, but don't cache yet; call method
6179 // resolver for this class first.
6180 break;
6181 }
6182 if (fastpath(imp)) {
6183 // Found the method in a superclass. Cache it in this class.
6184 goto done;
6185 }
6186 }
6187
6188 // No implementation found. Try method resolver once.
6189
6190 if (slowpath(behavior & LOOKUP_RESOLVER)) {
6191 behavior ^= LOOKUP_RESOLVER;
6192 return resolveMethod_locked(inst, sel, cls, behavior);
6193 }
6194
6195 done:
6196 log_and_fill_cache(cls, imp, sel, inst, curClass);
6197 runtimeLock.unlock();
6198 done_nolock:
6199 if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
6200 return nil;
6201 }
6202 return imp;
6203 }
6204
6205 /***********************************************************************
6206 * lookupMethodInClassAndLoadCache.
6207 * Like lookUpImpOrForward, but does not search superclasses.
6208 * Caches and returns objc_msgForward if the method is not found in the class.
6209 **********************************************************************/
6210 IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
6211 {
6212 Method meth;
6213 IMP imp;
6214
6215 // fixme this is incomplete - no resolver, +initialize -
6216 // but it's only used for .cxx_construct/destruct so we don't care
6217 ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);
6218
6219 // Search cache first.
6220 imp = cache_getImp(cls, sel);
6221 if (imp) return imp;
6222
6223 // Cache miss. Search method list.
6224
6225 mutex_locker_t lock(runtimeLock);
6226
6227 meth = getMethodNoSuper_nolock(cls, sel);
6228
6229 if (meth) {
6230 // Hit in method list. Cache it.
6231 cache_fill(cls, sel, meth->imp, nil);
6232 return meth->imp;
6233 } else {
6234 // Miss in method list. Cache objc_msgForward.
6235 cache_fill(cls, sel, _objc_msgForward_impcache, nil);
6236 return _objc_msgForward_impcache;
6237 }
6238 }
6239
6240
6241 /***********************************************************************
6242 * class_getProperty
6243 * fixme
6244 * Locking: read-locks runtimeLock
6245 **********************************************************************/
6246 objc_property_t class_getProperty(Class cls, const char *name)
6247 {
6248 if (!cls || !name) return nil;
6249
6250 mutex_locker_t lock(runtimeLock);
6251
6252 checkIsKnownClass(cls);
6253
6254 ASSERT(cls->isRealized());
6255
6256 for ( ; cls; cls = cls->superclass) {
6257 for (auto& prop : cls->data()->properties()) {
6258 if (0 == strcmp(name, prop.name)) {
6259 return (objc_property_t)&prop;
6260 }
6261 }
6262 }
6263
6264 return nil;
6265 }
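// Unlike class_copyPropertyList(), this walks the superclass chain.
// Illustrative usage (a sketch; "MyClass" and its "title" property are
// hypothetical):
//
//   objc_property_t prop = class_getProperty(objc_getClass("MyClass"), "title");
//   if (prop) printf("%s\n", property_getAttributes(prop));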
6266
6267
6268 /***********************************************************************
6269 * Locking: fixme
6270 **********************************************************************/
6271
6272 Class gdb_class_getClass(Class cls)
6273 {
6274 const char *className = cls->mangledName();
6275 if(!className || !strlen(className)) return Nil;
6276 Class rCls = look_up_class(className, NO, NO);
6277 return rCls;
6278 }
6279
6280 Class gdb_object_getClass(id obj)
6281 {
6282 if (!obj) return nil;
6283 return gdb_class_getClass(obj->getIsa());
6284 }
6285
6286
6287 /***********************************************************************
6288 * Locking: write-locks runtimeLock
6289 **********************************************************************/
6290 void
6291 objc_class::setInitialized()
6292 {
6293 Class metacls;
6294 Class cls;
6295
6296 ASSERT(!isMetaClass());
6297
6298 cls = (Class)this;
6299 metacls = cls->ISA();
6300
6301 mutex_locker_t lock(runtimeLock);
6302
6303 // Special cases:
6304 // - NSObject AWZ class methods are default.
6305 // - NSObject RR class and instance methods are default.
6306 // - NSObject Core class and instance methods are default.
6307 // adjustCustomFlagsForMethodChange() also knows these special cases.
6308 // attachMethodLists() also knows these special cases.
6309
6310 objc::AWZScanner::scanInitializedClass(cls, metacls);
6311 objc::RRScanner::scanInitializedClass(cls, metacls);
6312 objc::CoreScanner::scanInitializedClass(cls, metacls);
6313
6314 // Update the +initialize flags.
6315 // Do this last.
6316 metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
6317 }
6318
6319
6320 void
6321 objc_class::printInstancesRequireRawIsa(bool inherited)
6322 {
6323 ASSERT(PrintRawIsa);
6324 ASSERT(instancesRequireRawIsa());
6325 _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
6326 isMetaClass() ? " (meta)" : "",
6327 inherited ? " (inherited)" : "");
6328 }
6329
6330 /***********************************************************************
6331 * Mark this class and all of its subclasses as requiring raw isa pointers
6332 **********************************************************************/
6333 void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
6334 {
6335 Class cls = (Class)this;
6336 runtimeLock.assertLocked();
6337
6338 if (instancesRequireRawIsa()) return;
6339
6340 foreach_realized_class_and_subclass(cls, [=](Class c){
6341 if (c->instancesRequireRawIsa()) {
6342 return false;
6343 }
6344
6345 c->setInstancesRequireRawIsa();
6346
6347 if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
6348 return true;
6349 });
6350 }
6351
6352
6353 /***********************************************************************
6354 * Choose a class index.
6355 * Set instancesRequireRawIsa if no more class indexes are available.
6356 **********************************************************************/
6357 void objc_class::chooseClassArrayIndex()
6358 {
6359 #if SUPPORT_INDEXED_ISA
6360 Class cls = (Class)this;
6361 runtimeLock.assertLocked();
6362
6363 if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
6364 // No more indexes available.
6365 ASSERT(cls->classArrayIndex() == 0);
6366 cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
6367 return;
6368 }
6369
6370 unsigned index = objc_indexed_classes_count++;
6371 if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
6372 classForIndex(index) = cls;
6373 cls->setClassArrayIndex(index);
6374 #endif
6375 }
6376
6377
6378 /***********************************************************************
6379 * Update custom RR and AWZ when a method changes its IMP
6380 **********************************************************************/
6381 static void
6382 adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
6383 {
6384 objc::AWZScanner::scanChangedMethod(cls, meth);
6385 objc::RRScanner::scanChangedMethod(cls, meth);
6386 objc::CoreScanner::scanChangedMethod(cls, meth);
6387 }
6388
6389
6390 /***********************************************************************
6391 * class_getIvarLayout
6392 * Called by the garbage collector.
6393 * The class must be nil or already realized.
6394 * Locking: none
6395 **********************************************************************/
6396 const uint8_t *
6397 class_getIvarLayout(Class cls)
6398 {
6399 if (cls) return cls->data()->ro()->ivarLayout;
6400 else return nil;
6401 }
6402
6403
6404 /***********************************************************************
6405 * class_getWeakIvarLayout
6406 * Called by the garbage collector.
6407 * The class must be nil or already realized.
6408 * Locking: none
6409 **********************************************************************/
6410 const uint8_t *
6411 class_getWeakIvarLayout(Class cls)
6412 {
6413 if (cls) return cls->data()->ro()->weakIvarLayout;
6414 else return nil;
6415 }
6416
6417
6418 /***********************************************************************
6419 * class_setIvarLayout
6420 * Changes the class's ivar layout.
6421 * nil layout means no unscanned ivars
6422 * The class must be under construction.
6423 * fixme: sanity-check layout vs instance size?
6424 * fixme: sanity-check layout vs superclass?
6425 * Locking: acquires runtimeLock
6426 **********************************************************************/
6427 void
6428 class_setIvarLayout(Class cls, const uint8_t *layout)
6429 {
6430 if (!cls) return;
6431
6432 mutex_locker_t lock(runtimeLock);
6433
6434 checkIsKnownClass(cls);
6435
6436 // Can only change layout of in-construction classes.
6437 // note: if modifications to post-construction classes were
6438 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6439 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6440 _objc_inform("*** Can't set ivar layout for already-registered "
6441 "class '%s'", cls->nameForLogging());
6442 return;
6443 }
6444
6445 class_ro_t *ro_w = make_ro_writeable(cls->data());
6446
6447 try_free(ro_w->ivarLayout);
6448 ro_w->ivarLayout = ustrdupMaybeNil(layout);
6449 }
6450
6451
6452 /***********************************************************************
6453 * class_setWeakIvarLayout
6454 * Changes the class's weak ivar layout.
6455 * nil layout means no weak ivars
6456 * The class must be under construction.
6457 * fixme: sanity-check layout vs instance size?
6458 * fixme: sanity-check layout vs superclass?
6459 * Locking: acquires runtimeLock
6460 **********************************************************************/
6461 void
6462 class_setWeakIvarLayout(Class cls, const uint8_t *layout)
6463 {
6464 if (!cls) return;
6465
6466 mutex_locker_t lock(runtimeLock);
6467
6468 checkIsKnownClass(cls);
6469
6470 // Can only change layout of in-construction classes.
6471 // note: if modifications to post-construction classes were
6472 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6473 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6474 _objc_inform("*** Can't set weak ivar layout for already-registered "
6475 "class '%s'", cls->nameForLogging());
6476 return;
6477 }
6478
6479 class_ro_t *ro_w = make_ro_writeable(cls->data());
6480
6481 try_free(ro_w->weakIvarLayout);
6482 ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
6483 }
6484
6485
6486 /***********************************************************************
6487 * getIvar
6488 * Look up an ivar by name.
6489 * Locking: runtimeLock must be read- or write-locked by the caller.
6490 **********************************************************************/
6491 static ivar_t *getIvar(Class cls, const char *name)
6492 {
6493 runtimeLock.assertLocked();
6494
6495 const ivar_list_t *ivars;
6496 ASSERT(cls->isRealized());
6497 if ((ivars = cls->data()->ro()->ivars)) {
6498 for (auto& ivar : *ivars) {
6499 if (!ivar.offset) continue; // anonymous bitfield
6500
6501 // ivar.name may be nil for anonymous bitfields etc.
6502 if (ivar.name && 0 == strcmp(name, ivar.name)) {
6503 return &ivar;
6504 }
6505 }
6506 }
6507
6508 return nil;
6509 }
6510
6511
6512 /***********************************************************************
6513 * _class_getClassForIvar
6514 * Given a class and an ivar that is in it or one of its superclasses,
6515 * find the actual class that defined the ivar.
6516 **********************************************************************/
6517 Class _class_getClassForIvar(Class cls, Ivar ivar)
6518 {
6519 mutex_locker_t lock(runtimeLock);
6520
6521 for ( ; cls; cls = cls->superclass) {
6522 if (auto ivars = cls->data()->ro()->ivars) {
6523 if (ivars->containsIvar(ivar)) {
6524 return cls;
6525 }
6526 }
6527 }
6528
6529 return nil;
6530 }
6531
6532
6533 /***********************************************************************
6534 * _class_getVariable
6535 * fixme
6536 * Locking: read-locks runtimeLock
6537 **********************************************************************/
6538 Ivar
6539 _class_getVariable(Class cls, const char *name)
6540 {
6541 mutex_locker_t lock(runtimeLock);
6542
6543 for ( ; cls; cls = cls->superclass) {
6544 ivar_t *ivar = getIvar(cls, name);
6545 if (ivar) {
6546 return ivar;
6547 }
6548 }
6549
6550 return nil;
6551 }
6552
6553
6554 /***********************************************************************
6555 * class_conformsToProtocol
6556 * fixme
6557 * Locking: read-locks runtimeLock
6558 **********************************************************************/
6559 BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
6560 {
6561 protocol_t *proto = newprotocol(proto_gen);
6562
6563 if (!cls) return NO;
6564 if (!proto_gen) return NO;
6565
6566 mutex_locker_t lock(runtimeLock);
6567
6568 checkIsKnownClass(cls);
6569
6570 ASSERT(cls->isRealized());
6571
6572 for (const auto& proto_ref : cls->data()->protocols()) {
6573 protocol_t *p = remapProtocol(proto_ref);
6574 if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
6575 return YES;
6576 }
6577 }
6578
6579 return NO;
6580 }
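// Illustrative usage (a sketch; "MyClass" is hypothetical). Note that, as
// the loop above shows, only cls's own adopted protocols (and protocols
// they incorporate) are checked, not the superclass chain:
//
//   Protocol *p = objc_getProtocol("NSCopying");
//   if (p && class_conformsToProtocol(objc_getClass("MyClass"), p)) {
//       // MyClass itself declares conformance to NSCopying
//   }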
6581
6582
6583 /**********************************************************************
6584 * addMethod
6585 * fixme
6586 * Locking: runtimeLock must be held by the caller
6587 **********************************************************************/
6588 static IMP
6589 addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
6590 {
6591 IMP result = nil;
6592
6593 runtimeLock.assertLocked();
6594
6595 checkIsKnownClass(cls);
6596
6597 ASSERT(types);
6598 ASSERT(cls->isRealized());
6599
6600 method_t *m;
6601 if ((m = getMethodNoSuper_nolock(cls, name))) {
6602 // already exists
6603 if (!replace) {
6604 result = m->imp;
6605 } else {
6606 result = _method_setImplementation(cls, m, imp);
6607 }
6608 } else {
6609 auto rwe = cls->data()->extAllocIfNeeded();
6610
6611 // fixme optimize
6612 method_list_t *newlist;
6613 newlist = (method_list_t *)calloc(sizeof(*newlist), 1);
6614 newlist->entsizeAndFlags =
6615 (uint32_t)sizeof(method_t) | fixed_up_method_list;
6616 newlist->count = 1;
6617 newlist->first.name = name;
6618 newlist->first.types = strdupIfMutable(types);
6619 newlist->first.imp = imp;
6620
6621 prepareMethodLists(cls, &newlist, 1, NO, NO);
6622 rwe->methods.attachLists(&newlist, 1);
6623 flushCaches(cls);
6624
6625 result = nil;
6626 }
6627
6628 return result;
6629 }
6630
6631 /**********************************************************************
6632 * addMethods
6633 * Add the given methods to a class in bulk.
6634 * Returns the selectors which could not be added, when replace == NO and a
6635 * method already exists. The returned selectors are NULL terminated and must be
6636 * freed by the caller. They are NULL if no failures occurred.
6637 * Locking: runtimeLock must be held by the caller
6638 **********************************************************************/
6639 static SEL *
6640 addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
6641 uint32_t count, bool replace, uint32_t *outFailedCount)
6642 {
6643 runtimeLock.assertLocked();
6644
6645 ASSERT(names);
6646 ASSERT(imps);
6647 ASSERT(types);
6648 ASSERT(cls->isRealized());
6649
6650 method_list_t *newlist;
6651 size_t newlistSize = method_list_t::byteSize(sizeof(method_t), count);
6652 newlist = (method_list_t *)calloc(newlistSize, 1);
6653 newlist->entsizeAndFlags =
6654 (uint32_t)sizeof(method_t) | fixed_up_method_list;
6655 newlist->count = 0;
6656
6657 method_t *newlistMethods = &newlist->first;
6658
6659 SEL *failedNames = nil;
6660 uint32_t failedCount = 0;
6661
6662 for (uint32_t i = 0; i < count; i++) {
6663 method_t *m;
6664 if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
6665 // already exists
6666 if (!replace) {
6667 // report failure
6668 if (failedNames == nil) {
6669 // allocate an extra entry for a trailing NULL in case
6670 // every method fails
6671 failedNames = (SEL *)calloc(sizeof(*failedNames),
6672 count + 1);
6673 }
6674 failedNames[failedCount] = m->name;
6675 failedCount++;
6676 } else {
6677 _method_setImplementation(cls, m, imps[i]);
6678 }
6679 } else {
6680 method_t *newmethod = &newlistMethods[newlist->count];
6681 newmethod->name = names[i];
6682 newmethod->types = strdupIfMutable(types[i]);
6683 newmethod->imp = imps[i];
6684 newlist->count++;
6685 }
6686 }
6687
6688 if (newlist->count > 0) {
6689 auto rwe = cls->data()->extAllocIfNeeded();
6690
6691 // fixme resize newlist because it may have been over-allocated above.
6692 // Note that realloc() alone doesn't work due to ptrauth.
6693
6694 method_t::SortBySELAddress sorter;
6695 std::stable_sort(newlist->begin(), newlist->end(), sorter);
6696
6697 prepareMethodLists(cls, &newlist, 1, NO, NO);
6698 rwe->methods.attachLists(&newlist, 1);
6699 flushCaches(cls);
6700 } else {
6701 // Attaching the method list to the class consumes it. If we don't
6702 // do that, we have to free the memory ourselves.
6703 free(newlist);
6704 }
6705
6706 if (outFailedCount) *outFailedCount = failedCount;
6707
6708 return failedNames;
6709 }
6710
6711
6712 BOOL
6713 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
6714 {
6715 if (!cls) return NO;
6716
6717 mutex_locker_t lock(runtimeLock);
6718 return ! addMethod(cls, name, imp, types ?: "", NO);
6719 }
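// Illustrative usage (a sketch; my_answer, "MyClass", and the "answer"
// selector are hypothetical). "i@:" encodes an int return plus the
// implicit id self and SEL _cmd arguments:
//
//   static int my_answer(id self, SEL _cmd) { return 42; }
//
//   BOOL added = class_addMethod(objc_getClass("MyClass"),
//                                sel_registerName("answer"),
//                                (IMP)my_answer, "i@:");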
6720
6721
6722 IMP
6723 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
6724 {
6725 if (!cls) return nil;
6726
6727 mutex_locker_t lock(runtimeLock);
6728 return addMethod(cls, name, imp, types ?: "", YES);
6729 }
6730
6731
6732 SEL *
6733 class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
6734 const char **types, uint32_t count,
6735 uint32_t *outFailedCount)
6736 {
6737 if (!cls) {
6738 if (outFailedCount) *outFailedCount = count;
6739 return (SEL *)memdup(names, count * sizeof(*names));
6740 }
6741
6742 mutex_locker_t lock(runtimeLock);
6743 return addMethods(cls, names, imps, types, count, NO, outFailedCount);
6744 }
6745
6746 void
6747 class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
6748 const char **types, uint32_t count)
6749 {
6750 if (!cls) return;
6751
6752 mutex_locker_t lock(runtimeLock);
6753 addMethods(cls, names, imps, types, count, YES, nil);
6754 }
6755
6756
6757 /***********************************************************************
6758 * class_addIvar
6759 * Adds an ivar to a class.
6760 * Locking: acquires runtimeLock
6761 **********************************************************************/
6762 BOOL
6763 class_addIvar(Class cls, const char *name, size_t size,
6764 uint8_t alignment, const char *type)
6765 {
6766 if (!cls) return NO;
6767
6768 if (!type) type = "";
6769 if (name && 0 == strcmp(name, "")) name = nil;
6770
6771 mutex_locker_t lock(runtimeLock);
6772
6773 checkIsKnownClass(cls);
6774 ASSERT(cls->isRealized());
6775
6776 // No class variables
6777 if (cls->isMetaClass()) {
6778 return NO;
6779 }
6780
6781 // Can only add ivars to in-construction classes.
6782 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6783 return NO;
6784 }
6785
6786 // Check for existing ivar with this name, unless it's anonymous.
6787 // Check for too-big ivar.
6788 // fixme check for superclass ivar too?
6789 if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
6790 return NO;
6791 }
6792
6793 class_ro_t *ro_w = make_ro_writeable(cls->data());
6794
6795 // fixme allocate less memory here
6796
6797 ivar_list_t *oldlist, *newlist;
6798 if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) {
6799 size_t oldsize = oldlist->byteSize();
6800 newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
6801 memcpy(newlist, oldlist, oldsize);
6802 free(oldlist);
6803 } else {
6804 newlist = (ivar_list_t *)calloc(sizeof(ivar_list_t), 1);
6805 newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
6806 }
6807
6808 uint32_t offset = cls->unalignedInstanceSize();
6809 uint32_t alignMask = (1<<alignment)-1;
6810 offset = (offset + alignMask) & ~alignMask;
6811
6812 ivar_t& ivar = newlist->get(newlist->count++);
6813 #if __x86_64__
6814 // Deliberately over-allocate the ivar offset variable.
6815 // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
6816 ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
6817 #else
6818 ivar.offset = (int32_t *)malloc(sizeof(int32_t));
6819 #endif
6820 *ivar.offset = offset;
6821 ivar.name = name ? strdupIfMutable(name) : nil;
6822 ivar.type = strdupIfMutable(type);
6823 ivar.alignment_raw = alignment;
6824 ivar.size = (uint32_t)size;
6825
6826 ro_w->ivars = newlist;
6827 cls->setInstanceSize((uint32_t)(offset + size));
6828
6829 // Ivar layout updated in registerClass.
6830
6831 return YES;
6832 }
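// Illustrative usage (a sketch; "MyDynamic" is hypothetical). Ivars can
// only be added between objc_allocateClassPair() and
// objc_registerClassPair(); the alignment argument is log2 of the byte
// alignment:
//
//   Class cls = objc_allocateClassPair(objc_getClass("NSObject"), "MyDynamic", 0);
//   class_addIvar(cls, "_count", sizeof(int), 2 /* log2(alignof(int)) */, @encode(int));
//   objc_registerClassPair(cls);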
6833
6834
6835 /***********************************************************************
6836 * class_addProtocol
6837 * Adds a protocol to a class.
6838 * Locking: acquires runtimeLock
6839 **********************************************************************/
6840 BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
6841 {
6842 protocol_t *protocol = newprotocol(protocol_gen);
6843
6844 if (!cls) return NO;
6845 if (class_conformsToProtocol(cls, protocol_gen)) return NO;
6846
6847 mutex_locker_t lock(runtimeLock);
6848 auto rwe = cls->data()->extAllocIfNeeded();
6849
6850 ASSERT(cls->isRealized());
6851
6852 // fixme optimize
6853 protocol_list_t *protolist = (protocol_list_t *)
6854 malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
6855 protolist->count = 1;
6856 protolist->list[0] = (protocol_ref_t)protocol;
6857
6858 rwe->protocols.attachLists(&protolist, 1);
6859
6860 // fixme metaclass?
6861
6862 return YES;
6863 }
6864
6865
6866 /***********************************************************************
6867 * class_addProperty
6868 * Adds a property to a class.
6869 * Locking: acquires runtimeLock
6870 **********************************************************************/
6871 static bool
6872 _class_addProperty(Class cls, const char *name,
6873 const objc_property_attribute_t *attrs, unsigned int count,
6874 bool replace)
6875 {
6876 if (!cls) return NO;
6877 if (!name) return NO;
6878
6879 property_t *prop = class_getProperty(cls, name);
6880 if (prop && !replace) {
6881 // already exists, refuse to replace
6882 return NO;
6883 }
6884 else if (prop) {
6885 // replace existing
6886 mutex_locker_t lock(runtimeLock);
6887 try_free(prop->attributes);
6888 prop->attributes = copyPropertyAttributeString(attrs, count);
6889 return YES;
6890 }
6891 else {
6892 mutex_locker_t lock(runtimeLock);
6893 auto rwe = cls->data()->extAllocIfNeeded();
6894
6895 ASSERT(cls->isRealized());
6896
6897 property_list_t *proplist = (property_list_t *)
6898 malloc(sizeof(*proplist));
6899 proplist->count = 1;
6900 proplist->entsizeAndFlags = sizeof(proplist->first);
6901 proplist->first.name = strdupIfMutable(name);
6902 proplist->first.attributes = copyPropertyAttributeString(attrs, count);
6903
6904 rwe->properties.attachLists(&proplist, 1);
6905
6906 return YES;
6907 }
6908 }
6909
6910 BOOL
6911 class_addProperty(Class cls, const char *name,
6912 const objc_property_attribute_t *attrs, unsigned int n)
6913 {
6914 return _class_addProperty(cls, name, attrs, n, NO);
6915 }
6916
6917 void
6918 class_replaceProperty(Class cls, const char *name,
6919 const objc_property_attribute_t *attrs, unsigned int n)
6920 {
6921 _class_addProperty(cls, name, attrs, n, YES);
6922 }
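// Illustrative usage (a sketch; "MyClass", "title", and the backing ivar
// are hypothetical). Attribute codes follow the property type-encoding
// convention (T = type, C = copy, N = nonatomic, V = backing ivar):
//
//   objc_property_attribute_t attrs[] = {
//       { "T", "@\"NSString\"" },
//       { "C", "" },
//       { "N", "" },
//       { "V", "_title" },
//   };
//   class_addProperty(objc_getClass("MyClass"), "title", attrs, 4);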
6923
6924
6925 /***********************************************************************
6926 * look_up_class
6927 * Look up a class by name, and realize it.
6928 * Locking: acquires runtimeLock
6929 **********************************************************************/
6930 static BOOL empty_getClass(const char *name, Class *outClass)
6931 {
6932 *outClass = nil;
6933 return NO;
6934 }
6935
6936 static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};
6937
6938 void objc_setHook_getClass(objc_hook_getClass newValue,
6939 objc_hook_getClass *outOldValue)
6940 {
6941 GetClassHook.set(newValue, outOldValue);
6942 }
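// A hedged sketch of installing a getClass hook ("MyLazyClass" is
// hypothetical). The hook should chain to the previous hook for names it
// does not handle; the TLS guard in look_up_class() below allows it to
// call objc_allocateClassPair() without recursing forever:
//
//   static objc_hook_getClass previousHook;
//
//   static BOOL myGetClass(const char *name, Class *outClass) {
//       if (0 == strcmp(name, "MyLazyClass")) {
//           Class c = objc_allocateClassPair(objc_getClass("NSObject"),
//                                            "MyLazyClass", 0);
//           objc_registerClassPair(c);
//           *outClass = c;
//           return YES;
//       }
//       return previousHook(name, outClass);
//   }
//
//   // at startup:
//   objc_setHook_getClass(myGetClass, &previousHook);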
6943
6944 Class
6945 look_up_class(const char *name,
6946 bool includeUnconnected __attribute__((unused)),
6947 bool includeClassHandler __attribute__((unused)))
6948 {
6949 if (!name) return nil;
6950
6951 Class result;
6952 bool unrealized;
6953 {
6954 runtimeLock.lock();
6955 result = getClassExceptSomeSwift(name);
6956 unrealized = result && !result->isRealized();
6957 if (unrealized) {
6958 result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
6959 // runtimeLock is now unlocked
6960 } else {
6961 runtimeLock.unlock();
6962 }
6963 }
6964
6965 if (!result) {
6966 // Ask Swift about its un-instantiated classes.
6967
6968 // We use thread-local storage to prevent infinite recursion
6969 // if the hook function provokes another lookup of the same name
6970 // (for example, if the hook calls objc_allocateClassPair)
6971
6972 auto *tls = _objc_fetch_pthread_data(true);
6973
6974 // Stop if this thread is already looking up this name.
6975 for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
6976 if (0 == strcmp(name, tls->classNameLookups[i])) {
6977 return nil;
6978 }
6979 }
6980
6981 // Save this lookup in tls.
6982 if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
6983 tls->classNameLookupsAllocated =
6984 (tls->classNameLookupsAllocated * 2 ?: 1);
6985 size_t size = tls->classNameLookupsAllocated *
6986 sizeof(tls->classNameLookups[0]);
6987 tls->classNameLookups = (const char **)
6988 realloc(tls->classNameLookups, size);
6989 }
6990 tls->classNameLookups[tls->classNameLookupsUsed++] = name;
6991
6992 // Call the hook.
6993 Class swiftcls = nil;
6994 if (GetClassHook.get()(name, &swiftcls)) {
6995 ASSERT(swiftcls->isRealized());
6996 result = swiftcls;
6997 }
6998
6999 // Erase the name from tls.
7000 unsigned slot = --tls->classNameLookupsUsed;
7001 ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
7002 ASSERT(name == tls->classNameLookups[slot]);
7003 tls->classNameLookups[slot] = nil;
7004 }
7005
7006 return result;
7007 }
7008
7009
7010 /***********************************************************************
7011 * objc_duplicateClass
7012 * Create an exact duplicate of a class under a new name (used by Foundation's KVO).
7013 * Locking: acquires runtimeLock
7014 **********************************************************************/
7015 Class
7016 objc_duplicateClass(Class original, const char *name,
7017 size_t extraBytes)
7018 {
7019 Class duplicate;
7020
7021 mutex_locker_t lock(runtimeLock);
7022
7023 checkIsKnownClass(original);
7024
7025 auto orig_rw = original->data();
7026 auto orig_rwe = orig_rw->ext();
7027 auto orig_ro = orig_rw->ro();
7028
7029 ASSERT(original->isRealized());
7030 ASSERT(!original->isMetaClass());
7031
7032 duplicate = alloc_class_for_subclass(original, extraBytes);
7033
7034 duplicate->initClassIsa(original->ISA());
7035 duplicate->superclass = original->superclass;
7036
7037 duplicate->cache.initializeToEmpty();
7038
7039 class_rw_t *rw = objc::zalloc<class_rw_t>();
7040 rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING);
7041 rw->firstSubclass = nil;
7042 rw->nextSiblingClass = nil;
7043
7044 duplicate->bits = original->bits;
7045 duplicate->setData(rw);
7046
7047 auto ro = orig_ro->duplicate();
7048 *(char **)&ro->name = strdupIfMutable(name);
7049 rw->set_ro(ro);
7050
7051 if (orig_rwe) {
7052 auto rwe = rw->extAllocIfNeeded();
7053 rwe->version = orig_rwe->version;
7054 rwe->methods = orig_rwe->methods.duplicate();
7055
7056 // fixme dies when categories are added to the base
7057 rwe->properties = orig_rwe->properties;
7058 rwe->protocols = orig_rwe->protocols;
7059 } else if (ro->baseMethods()) {
7060 // if we have base methods, we need to make a deep copy
7061 // which requires a class_rw_ext_t to be allocated
7062 rw->deepCopy(ro);
7063 }
7064
7065 duplicate->chooseClassArrayIndex();
7066
7067 if (duplicate->superclass) {
7068 addSubclass(duplicate->superclass, duplicate);
7069 // duplicate->isa == original->isa so don't addSubclass() for it
7070 } else {
7071 addRootClass(duplicate);
7072 }
7073
7074 // Don't methodize class - construction above is correct
7075
7076 addNamedClass(duplicate, ro->name);
7077 addClassTableEntry(duplicate, /*addMeta=*/false);
7078
7079 if (PrintConnecting) {
7080 _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
7081 name, original->nameForLogging(), (void*)duplicate, ro);
7082 }
7083
7084 duplicate->clearInfo(RW_REALIZING);
7085
7086 return duplicate;
7087 }
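
/***********************************************************************
 * Usage sketch: objc_duplicateClass
 * Illustrative only; in practice this entry point is used by
 * Foundation's Key-Value Observing rather than by application code.
 * The class name below is hypothetical.
 **********************************************************************/
#if 0   // not compiled; illustration only
#include <objc/runtime.h>

static void swizzleIsaToShadow(id obj)
{
    Class original = object_getClass(obj);
    // The duplicate shares the original's isa, superclass, methods,
    // properties, and protocols (copied above) under a new name.
    Class shadow = objc_duplicateClass(original, "MyShadowClass", 0);
    object_setClass(obj, shadow);
}
#endif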
7088
7089 /***********************************************************************
7090 * objc_initializeClassPair
7091 * Locking: runtimeLock must be write-locked by the caller
7092 **********************************************************************/
7093
7094 // &UnsetLayout is the default ivar layout during class construction
7095 static const uint8_t UnsetLayout = 0;
7096
7097 static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
7098 {
7099 runtimeLock.assertLocked();
7100
7101 class_ro_t *cls_ro_w, *meta_ro_w;
7102 class_rw_t *cls_rw_w, *meta_rw_w;
7103
7104 cls_rw_w = objc::zalloc<class_rw_t>();
7105 meta_rw_w = objc::zalloc<class_rw_t>();
7106 cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
7107 meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
7108
7109 cls->setData(cls_rw_w);
7110 cls_rw_w->set_ro(cls_ro_w);
7111 meta->setData(meta_rw_w);
7112 meta_rw_w->set_ro(meta_ro_w);
7113
7114 // Set basic info
7115
7116 cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
7117 meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META;
7118
7119 cls_ro_w->flags = 0;
7120 meta_ro_w->flags = RO_META;
7121 if (superclass) {
7122 uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
7123 cls_rw_w->flags |= superclass->data()->flags & flagsToCopy;
7124 cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
7125 meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
7126 cls->setInstanceSize(cls_ro_w->instanceStart);
7127 meta->setInstanceSize(meta_ro_w->instanceStart);
7128 } else {
7129 cls_ro_w->flags |= RO_ROOT;
7130 meta_ro_w->flags |= RO_ROOT;
7131 cls_ro_w->instanceStart = 0;
7132 meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
7133 cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
7134 meta->setInstanceSize(meta_ro_w->instanceStart);
7135 }
7136
7137 cls_ro_w->name = strdupIfMutable(name);
7138 meta_ro_w->name = strdupIfMutable(name);
7139
7140 cls_ro_w->ivarLayout = &UnsetLayout;
7141 cls_ro_w->weakIvarLayout = &UnsetLayout;
7142
7143 meta->chooseClassArrayIndex();
7144 cls->chooseClassArrayIndex();
7145
7146 // This absolutely needs to be done before addSubclass
7147 // as initializeToEmpty() clobbers the FAST_CACHE bits
7148 cls->cache.initializeToEmpty();
7149 meta->cache.initializeToEmpty();
7150
7151 #if FAST_CACHE_META
7152 meta->cache.setBit(FAST_CACHE_META);
7153 #endif
7154 meta->setInstancesRequireRawIsa();
7155
7156 // Connect to superclasses and metaclasses
7157 cls->initClassIsa(meta);
7158
7159 if (superclass) {
7160 meta->initClassIsa(superclass->ISA()->ISA());
7161 cls->superclass = superclass;
7162 meta->superclass = superclass->ISA();
7163 addSubclass(superclass, cls);
7164 addSubclass(superclass->ISA(), meta);
7165 } else {
7166 meta->initClassIsa(meta);
7167 cls->superclass = Nil;
7168 meta->superclass = cls;
7169 addRootClass(cls);
7170 addSubclass(cls, meta);
7171 }
7172
7173 addClassTableEntry(cls);
7174 }
7175
7176
7177 /***********************************************************************
7178 * verifySuperclass
7179 * Sanity-check the superclass provided to
7180 * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
7181 **********************************************************************/
7182 bool
7183 verifySuperclass(Class superclass, bool rootOK)
7184 {
7185 if (!superclass) {
7186 // Superclass does not exist.
7187 // If subclass may be a root class, this is OK.
7188 // If subclass must not be a root class, this is bad.
7189 return rootOK;
7190 }
7191
7192 // Superclass must be realized.
7193 if (! superclass->isRealized()) return false;
7194
7195 // Superclass must not be under construction.
7196 if (superclass->data()->flags & RW_CONSTRUCTING) return false;
7197
7198 return true;
7199 }
7200
7201
7202 /***********************************************************************
7203 * objc_initializeClassPair
7204 **********************************************************************/
7205 Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
7206 {
7207 // Fail if the class name is in use.
7208 if (look_up_class(name, NO, NO)) return nil;
7209
7210 mutex_locker_t lock(runtimeLock);
7211
7212 // Fail if the class name is in use.
7213 // Fail if the superclass isn't kosher.
7214 if (getClassExceptSomeSwift(name) ||
7215 !verifySuperclass(superclass, true/*rootOK*/))
7216 {
7217 return nil;
7218 }
7219
7220 objc_initializeClassPair_internal(superclass, name, cls, meta);
7221
7222 return cls;
7223 }
7224
7225
7226 /***********************************************************************
7227 * objc_allocateClassPair
7228 * Allocate a new class and metaclass, to be registered later with objc_registerClassPair().
7229 * Locking: acquires runtimeLock
7230 **********************************************************************/
7231 Class objc_allocateClassPair(Class superclass, const char *name,
7232 size_t extraBytes)
7233 {
7234 Class cls, meta;
7235
7236 // Fail if the class name is in use.
7237 if (look_up_class(name, NO, NO)) return nil;
7238
7239 mutex_locker_t lock(runtimeLock);
7240
7241 // Fail if the class name is in use.
7242 // Fail if the superclass isn't kosher.
7243 if (getClassExceptSomeSwift(name) ||
7244 !verifySuperclass(superclass, true/*rootOK*/))
7245 {
7246 return nil;
7247 }
7248
7249 // Allocate new classes.
7250 cls = alloc_class_for_subclass(superclass, extraBytes);
7251 meta = alloc_class_for_subclass(superclass, extraBytes);
7252
7253 // fixme mangle the name if it looks swift-y?
7254 objc_initializeClassPair_internal(superclass, name, cls, meta);
7255
7256 return cls;
7257 }
7258
7259
7260 /***********************************************************************
7261 * objc_registerClassPair
7262 * Register a class pair previously allocated by objc_allocateClassPair().
7263 * Locking: acquires runtimeLock
7264 **********************************************************************/
7265 void objc_registerClassPair(Class cls)
7266 {
7267 mutex_locker_t lock(runtimeLock);
7268
7269 checkIsKnownClass(cls);
7270
7271 if ((cls->data()->flags & RW_CONSTRUCTED) ||
7272 (cls->ISA()->data()->flags & RW_CONSTRUCTED))
7273 {
7274 _objc_inform("objc_registerClassPair: class '%s' was already "
7275 "registered!", cls->data()->ro()->name);
7276 return;
7277 }
7278
7279 if (!(cls->data()->flags & RW_CONSTRUCTING) ||
7280 !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
7281 {
7282 _objc_inform("objc_registerClassPair: class '%s' was not "
7283 "allocated with objc_allocateClassPair!",
7284 cls->data()->ro()->name);
7285 return;
7286 }
7287
7288 // Clear "under construction" bit, set "done constructing" bit
7289 cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7290 cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7291
7292 // Add to named class table.
7293 addNamedClass(cls, cls->data()->ro()->name);
7294 }
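
/***********************************************************************
 * Usage sketch: building a class at runtime
 * Illustrative allocate/customize/register sequence for the entry
 * points above, assuming only public <objc/runtime.h> API; the class,
 * ivar, and selector names are hypothetical.
 **********************************************************************/
#if 0   // not compiled; illustration only
#include <objc/runtime.h>

static id MyGreeting(id self, SEL _cmd)
{
    return nil;   // placeholder implementation
}

static Class buildDynamicClass(void)
{
    // 1. Allocate. Returns nil if the name is taken or the superclass
    //    is itself still under construction (see verifySuperclass()).
    Class cls = objc_allocateClassPair(objc_getClass("NSObject"),
                                       "MyDynamicClass", 0);
    if (!cls) return Nil;

    // 2. Customize before registering: ivars can only be added now.
    class_addIvar(cls, "_count", sizeof(int),
                  2 /* log2(alignof(int)) */, @encode(int));
    class_addMethod(cls, sel_registerName("greeting"),
                    (IMP)MyGreeting, "@@:");

    // 3. Register: clears RW_CONSTRUCTING, sets RW_CONSTRUCTED, and
    //    adds the class to the named class table.
    objc_registerClassPair(cls);
    return cls;
}
#endif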
7295
7296
7297 /***********************************************************************
7298 * objc_readClassPair()
7299 * Read a class and metaclass as written by a compiler.
7300 * Assumes the class and metaclass are not referenced by other things
7301 * that might need to be fixed up (such as categories and subclasses).
7302 * Does not call +load.
7303 * Returns the class pointer, or nil.
7304 *
7305 * Locking: runtimeLock acquired by map_images
7306 **********************************************************************/
7307 Class objc_readClassPair(Class bits, const struct objc_image_info *info)
7308 {
7309 mutex_locker_t lock(runtimeLock);
7310
7311 // No info bits are significant yet.
7312 (void)info;
7313
7314 // Fail if the superclass isn't kosher.
7315 bool rootOK = bits->data()->flags & RO_ROOT;
7316 if (!verifySuperclass(bits->superclass, rootOK)){
7317 return nil;
7318 }
7319
7320 // Duplicate classes are allowed, just like they are for image loading.
7321 // readClass will complain about the duplicate.
7322
7323 Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
7324 if (cls != bits) {
7325 // This function isn't allowed to remap anything.
7326 _objc_fatal("objc_readClassPair for class %s changed %p to %p",
7327 cls->nameForLogging(), bits, cls);
7328 }
7329
7330 // The only client of this function is old Swift.
7331 // Stable Swift won't use it.
7332 // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
7333 cls = realizeClassWithoutSwift(cls, nil);
7334
7335 return cls;
7336 }
7337
7338
7339 /***********************************************************************
7340 * detach_class
7341 * Disconnect a class from other data structures.
7342 * Exception: does not remove the class from the +load list
7343 * Call this before free_class.
7344 * Locking: runtimeLock must be held by the caller.
7345 **********************************************************************/
7346 static void detach_class(Class cls, bool isMeta)
7347 {
7348 runtimeLock.assertLocked();
7349
7350 // categories not yet attached to this class
7351 objc::unattachedCategories.eraseClass(cls);
7352
7353 // superclass's subclass list
7354 if (cls->isRealized()) {
7355 Class supercls = cls->superclass;
7356 if (supercls) {
7357 removeSubclass(supercls, cls);
7358 } else {
7359 removeRootClass(cls);
7360 }
7361 }
7362
7363 // class tables and +load queue
7364 if (!isMeta) {
7365 removeNamedClass(cls, cls->mangledName());
7366 }
7367 objc::allocatedClasses.get().erase(cls);
7368 }
7369
7370
7371 /***********************************************************************
7372 * free_class
7373 * Frees a class's data structures.
7374 * Call this after detach_class.
7375 * Locking: runtimeLock must be held by the caller
7376 **********************************************************************/
7377 static void free_class(Class cls)
7378 {
7379 runtimeLock.assertLocked();
7380
7381 if (! cls->isRealized()) return;
7382
7383 auto rw = cls->data();
7384 auto rwe = rw->ext();
7385 auto ro = rw->ro();
7386
7387 cache_delete(cls);
7388
7389 if (rwe) {
7390 for (auto& meth : rwe->methods) {
7391 try_free(meth.types);
7392 }
7393 rwe->methods.tryFree();
7394 }
7395
7396 const ivar_list_t *ivars = ro->ivars;
7397 if (ivars) {
7398 for (auto& ivar : *ivars) {
7399 try_free(ivar.offset);
7400 try_free(ivar.name);
7401 try_free(ivar.type);
7402 }
7403 try_free(ivars);
7404 }
7405
7406 if (rwe) {
7407 for (auto& prop : rwe->properties) {
7408 try_free(prop.name);
7409 try_free(prop.attributes);
7410 }
7411 rwe->properties.tryFree();
7412
7413 rwe->protocols.tryFree();
7414 }
7415
7416 try_free(ro->ivarLayout);
7417 try_free(ro->weakIvarLayout);
7418 try_free(ro->name);
7419 try_free(ro);
7420 objc::zfree(rwe);
7421 objc::zfree(rw);
7422 try_free(cls);
7423 }
7424
7425
7426 void objc_disposeClassPair(Class cls)
7427 {
7428 mutex_locker_t lock(runtimeLock);
7429
7430 checkIsKnownClass(cls);
7431
7432 if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
7433 !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
7434 {
7435 // class not allocated with objc_allocateClassPair
7436 // disposing still-unregistered class is OK!
7437 _objc_inform("objc_disposeClassPair: class '%s' was not "
7438 "allocated with objc_allocateClassPair!",
7439 cls->data()->ro()->name);
7440 return;
7441 }
7442
7443 if (cls->isMetaClass()) {
7444 _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
7445 "not a class!", cls->data()->ro()->name);
7446 return;
7447 }
7448
7449 // Shouldn't have any live subclasses.
7450 if (cls->data()->firstSubclass) {
7451 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7452 "including '%s'!", cls->data()->ro()->name,
7453 cls->data()->firstSubclass->nameForLogging());
7454 }
7455 if (cls->ISA()->data()->firstSubclass) {
7456 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7457 "including '%s'!", cls->data()->ro()->name,
7458 cls->ISA()->data()->firstSubclass->nameForLogging());
7459 }
7460
7461 // don't remove_class_from_loadable_list()
7462 // - it's not there and we don't have the lock
7463 detach_class(cls->ISA(), YES);
7464 detach_class(cls, NO);
7465 free_class(cls->ISA());
7466 free_class(cls);
7467 }
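
/***********************************************************************
 * Usage sketch: objc_disposeClassPair
 * Illustrative teardown for a class built with objc_allocateClassPair.
 * A sketch only: all instances must already be gone, and the subclass
 * checks above merely warn, they do not stop the teardown.
 **********************************************************************/
#if 0   // not compiled; illustration only
static void destroyDynamicClass(Class cls)
{
    // Detaches and frees both the class and its metaclass
    // (see detach_class() and free_class() above).
    objc_disposeClassPair(cls);
}
#endif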
7468
7469
7470 /***********************************************************************
7471 * objc_constructInstance
7472 * Creates an instance of `cls` at the location pointed to by `bytes`.
7473 * `bytes` must point to at least class_getInstanceSize(cls) bytes of
7474 * well-aligned zero-filled memory.
7475 * The new object's isa is set. Any C++ constructors are called.
7476 * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
7477 * nil, or if C++ constructors fail.
7478 * Note: class_createInstance() and class_createInstances() preflight this.
7479 **********************************************************************/
7480 id
7481 objc_constructInstance(Class cls, void *bytes)
7482 {
7483 if (!cls || !bytes) return nil;
7484
7485 id obj = (id)bytes;
7486
7487 // Read class's info bits all at once for performance
7488 bool hasCxxCtor = cls->hasCxxCtor();
7489 bool hasCxxDtor = cls->hasCxxDtor();
7490 bool fast = cls->canAllocNonpointer();
7491
7492 if (fast) {
7493 obj->initInstanceIsa(cls, hasCxxDtor);
7494 } else {
7495 obj->initIsa(cls);
7496 }
7497
7498 if (hasCxxCtor) {
7499 return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE);
7500 } else {
7501 return obj;
7502 }
7503 }
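
/***********************************************************************
 * Usage sketch: objc_constructInstance / objc_destructInstance
 * Illustrative placement of an object into caller-provided memory,
 * following the contract documented above (zero-filled, well-aligned,
 * at least class_getInstanceSize(cls) bytes).
 **********************************************************************/
#if 0   // not compiled; illustration only
#include <objc/runtime.h>
#include <stdlib.h>

static void constructInPlaceExample(Class cls)
{
    size_t size = class_getInstanceSize(cls);
    void *bytes = calloc(1, size);               // zero-filled, malloc-aligned
    if (!bytes) return;

    id obj = objc_constructInstance(cls, bytes); // sets isa, runs C++ ctors
    if (obj) {
        // ... use obj ...
        objc_destructInstance(obj);  // C++ dtors, ARC ivars, associations
    }
    free(bytes);                     // the caller owns the memory
}
#endif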
7504
7505
7506 /***********************************************************************
7507 * class_createInstance
7508 * Create an instance of `cls` with `extraBytes` of additional indexed-ivar space.
7509 * Locking: none
7510 *
7511 * Note: this function has been carefully written so that the fastpath
7512 * takes no branch.
7513 **********************************************************************/
7514 static ALWAYS_INLINE id
7515 _class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
7516 int construct_flags = OBJECT_CONSTRUCT_NONE,
7517 bool cxxConstruct = true,
7518 size_t *outAllocatedSize = nil)
7519 {
7520 ASSERT(cls->isRealized());
7521
7522 // Read class's info bits all at once for performance
7523 bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
7524 bool hasCxxDtor = cls->hasCxxDtor();
7525 bool fast = cls->canAllocNonpointer();
7526 size_t size;
7527
7528 size = cls->instanceSize(extraBytes);
7529 if (outAllocatedSize) *outAllocatedSize = size;
7530
7531 id obj;
7532 if (zone) {
7533 obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
7534 } else {
7535 obj = (id)calloc(1, size);
7536 }
7537 if (slowpath(!obj)) {
7538 if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
7539 return _objc_callBadAllocHandler(cls);
7540 }
7541 return nil;
7542 }
7543
7544 if (!zone && fast) {
7545 obj->initInstanceIsa(cls, hasCxxDtor);
7546 } else {
7547 // Use raw pointer isa on the assumption that the caller might be
7548 // doing something unusual with the zone or retain/release (RR).
7549 obj->initIsa(cls);
7550 }
7551
7552 if (fastpath(!hasCxxCtor)) {
7553 return obj;
7554 }
7555
7556 construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
7557 return object_cxxConstructFromClass(obj, cls, construct_flags);
7558 }
7559
7560 id
7561 class_createInstance(Class cls, size_t extraBytes)
7562 {
7563 if (!cls) return nil;
7564 return _class_createInstanceFromZone(cls, extraBytes, nil);
7565 }
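
/***********************************************************************
 * Usage sketch: class_createInstance with extra bytes
 * Illustrative use of the extraBytes tail, reached through
 * object_getIndexedIvars(); a sketch, not runtime code.
 **********************************************************************/
#if 0   // not compiled; illustration only
#include <objc/runtime.h>
#include <string.h>

static id createWithTrailingBuffer(Class cls, const char *payload)
{
    size_t extra = strlen(payload) + 1;
    id obj = class_createInstance(cls, extra);
    if (obj) {
        // The extra bytes sit immediately after the declared ivars.
        char *tail = (char *)object_getIndexedIvars(obj);
        memcpy(tail, payload, extra);
    }
    return obj;
}
#endif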
7566
7567 NEVER_INLINE
7568 id
7569 _objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
7570 {
7571 // allocWithZone under __OBJC2__ ignores the zone parameter
7572 return _class_createInstanceFromZone(cls, 0, nil,
7573 OBJECT_CONSTRUCT_CALL_BADALLOC);
7574 }
7575
7576 /***********************************************************************
7577 * class_createInstances
7578 * Create up to `num_requested` instances of `cls` in one pass.
7579 * Locking: none
7580 **********************************************************************/
7581 #if SUPPORT_NONPOINTER_ISA
7582 #warning fixme optimize class_createInstances
7583 #endif
7584 unsigned
7585 class_createInstances(Class cls, size_t extraBytes,
7586 id *results, unsigned num_requested)
7587 {
7588 return _class_createInstancesFromZone(cls, extraBytes, nil,
7589 results, num_requested);
7590 }
7591
7592 /***********************************************************************
7593 * object_copyFromZone
7594 * Copy an object, allocating the copy from `zone` when one is given.
7595 * Locking: none
7596 **********************************************************************/
7597 static id
7598 _object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
7599 {
7600 if (!oldObj) return nil;
7601 if (oldObj->isTaggedPointer()) return oldObj;
7602
7603 // fixme this doesn't handle C++ ivars correctly (#4619414)
7604
7605 Class cls = oldObj->ISA();
7606 size_t size;
7607 id obj = _class_createInstanceFromZone(cls, extraBytes, zone,
7608 OBJECT_CONSTRUCT_NONE, false, &size);
7609 if (!obj) return nil;
7610
7611 // Copy everything except the isa, which was already set above.
7612 uint8_t *copyDst = (uint8_t *)obj + sizeof(Class);
7613 uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class);
7614 size_t copySize = size - sizeof(Class);
7615 memmove(copyDst, copySrc, copySize);
7616
7617 fixupCopiedIvars(obj, oldObj);
7618
7619 return obj;
7620 }
7621
7622
7623 /***********************************************************************
7624 * object_copy
7625 * Copy an object into the default malloc zone.
7626 * Locking: none
7627 **********************************************************************/
7628 id
7629 object_copy(id oldObj, size_t extraBytes)
7630 {
7631 return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
7632 }
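
/***********************************************************************
 * Usage sketch: object_copy
 * Illustrative shallow copy. As the fixme above notes, C++ ivars are
 * not copy-constructed; the bits after the isa are copied and
 * fixupCopiedIvars() then adjusts the copied ivars.
 **********************************************************************/
#if 0   // not compiled; illustration only
#include <objc/runtime.h>

static id shallowClone(id original)
{
    // The second argument matches the extraBytes the original was
    // allocated with (0 if none), so any trailing space is copied too.
    return object_copy(original, 0);
}
#endif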
7633
7634
7635 #if SUPPORT_ZONES
7636
7637 /***********************************************************************
7638 * class_createInstanceFromZone
7639 * Create an instance of `cls` allocated from the given zone.
7640 * Locking: none
7641 **********************************************************************/
7642 id
7643 class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
7644 {
7645 if (!cls) return nil;
7646 return _class_createInstanceFromZone(cls, extraBytes, zone);
7647 }
7648
7649 /***********************************************************************
7650 * object_copyFromZone
7651 * Copy an object, allocating the copy from the given zone.
7652 * Locking: none
7653 **********************************************************************/
7654 id
7655 object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
7656 {
7657 return _object_copyFromZone(oldObj, extraBytes, zone);
7658 }
7659
7660 #endif
7661
7662
7663 /***********************************************************************
7664 * objc_destructInstance
7665 * Destroys an instance without freeing memory.
7666 * Calls C++ destructors.
7667 * Calls ARC ivar cleanup.
7668 * Removes associative references.
7669 * Returns `obj`. Does nothing if `obj` is nil.
7670 **********************************************************************/
7671 void *objc_destructInstance(id obj)
7672 {
7673 if (obj) {
7674 // Read all of the flags at once for performance.
7675 bool cxx = obj->hasCxxDtor();
7676 bool assoc = obj->hasAssociatedObjects();
7677
7678 // This order is important.
7679 if (cxx) object_cxxDestruct(obj);
7680 if (assoc) _object_remove_assocations(obj);
7681 obj->clearDeallocating();
7682 }
7683
7684 return obj;
7685 }
7686
7687
7688 /***********************************************************************
7689 * object_dispose
7690 * Destroy an instance and free its memory.
7691 * Locking: none
7692 **********************************************************************/
7693 id
7694 object_dispose(id obj)
7695 {
7696 if (!obj) return nil;
7697
7698 objc_destructInstance(obj);
7699 free(obj);
7700
7701 return nil;
7702 }
7703
7704
7705 /***********************************************************************
7706 * _objc_getFreedObjectClass
7707 * Return the class assigned to freed objects; always nil in the modern runtime.
7708 * Locking: none
7709 **********************************************************************/
7710 Class _objc_getFreedObjectClass (void)
7711 {
7712 return nil;
7713 }
7714
7715
7716
7717 /***********************************************************************
7718 * Tagged pointer objects.
7719 *
7720 * Tagged pointer objects store the class and the object value in the
7721 * object pointer; the "pointer" does not actually point to anything.
7722 *
7723 * Tagged pointer objects currently use this representation:
7724 * (LSB)
7725 * 1 bit set if tagged, clear if ordinary object pointer
7726 * 3 bits tag index
7727 * 60 bits payload
7728 * (MSB)
7729 * The tag index defines the object's class.
7730 * The payload format is defined by the object's class.
7731 *
7732 * If the tag index is 0b111, the tagged pointer object uses an
7733 * "extended" representation, allowing more classes but with smaller payloads:
7734 * (LSB)
7735 * 1 bit set if tagged, clear if ordinary object pointer
7736 * 3 bits 0b111
7737 * 8 bits extended tag index
7738 * 52 bits payload
7739 * (MSB)
7740 *
7741 * Some architectures reverse the MSB and LSB in these representations.
7742 *
7743 * This representation is subject to change. Representation-agnostic SPI is:
7744 * objc-internal.h for class implementers.
7745 * objc-gdb.h for debuggers.
7746 **********************************************************************/
7747 #if !SUPPORT_TAGGED_POINTERS
7748
7749 // These variables are always provided for debuggers.
7750 uintptr_t objc_debug_taggedpointer_obfuscator = 0;
7751 uintptr_t objc_debug_taggedpointer_mask = 0;
7752 unsigned objc_debug_taggedpointer_slot_shift = 0;
7753 uintptr_t objc_debug_taggedpointer_slot_mask = 0;
7754 unsigned objc_debug_taggedpointer_payload_lshift = 0;
7755 unsigned objc_debug_taggedpointer_payload_rshift = 0;
7756 Class objc_debug_taggedpointer_classes[1] = { nil };
7757
7758 uintptr_t objc_debug_taggedpointer_ext_mask = 0;
7759 unsigned objc_debug_taggedpointer_ext_slot_shift = 0;
7760 uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
7761 unsigned objc_debug_taggedpointer_ext_payload_lshift = 0;
7762 unsigned objc_debug_taggedpointer_ext_payload_rshift = 0;
7763 Class objc_debug_taggedpointer_ext_classes[1] = { nil };
7764
7765 static void
7766 disableTaggedPointers() { }
7767
7768 static void
7769 initializeTaggedPointerObfuscator(void) { }
7770
7771 #else
7772
7773 // The "slot" used in the class table and given to the debugger
7774 // includes the is-tagged bit. This makes objc_msgSend faster.
7775 // The "ext" representation doesn't do that.
7776
7777 uintptr_t objc_debug_taggedpointer_obfuscator;
7778 uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
7779 unsigned objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
7780 uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
7781 unsigned objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
7782 unsigned objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
7783 // objc_debug_taggedpointer_classes is defined in objc-msg-*.s
7784
7785 uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
7786 unsigned objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
7787 uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
7788 unsigned objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
7789 unsigned objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
7790 // objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s
7791
7792 static void
7793 disableTaggedPointers()
7794 {
7795 objc_debug_taggedpointer_mask = 0;
7796 objc_debug_taggedpointer_slot_shift = 0;
7797 objc_debug_taggedpointer_slot_mask = 0;
7798 objc_debug_taggedpointer_payload_lshift = 0;
7799 objc_debug_taggedpointer_payload_rshift = 0;
7800
7801 objc_debug_taggedpointer_ext_mask = 0;
7802 objc_debug_taggedpointer_ext_slot_shift = 0;
7803 objc_debug_taggedpointer_ext_slot_mask = 0;
7804 objc_debug_taggedpointer_ext_payload_lshift = 0;
7805 objc_debug_taggedpointer_ext_payload_rshift = 0;
7806 }
7807
7808
7809 // Returns a pointer to the class's storage in the tagged class arrays.
7810 // Assumes the tag is a valid basic tag.
7811 static Class *
7812 classSlotForBasicTagIndex(objc_tag_index_t tag)
7813 {
7814 uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
7815 >> _OBJC_TAG_INDEX_SHIFT)
7816 & _OBJC_TAG_INDEX_MASK);
7817 uintptr_t obfuscatedTag = tag ^ tagObfuscator;
7818 // Array index in objc_tag_classes includes the tagged bit itself
7819 #if SUPPORT_MSB_TAGGED_POINTERS
7820 return &objc_tag_classes[0x8 | obfuscatedTag];
7821 #else
7822 return &objc_tag_classes[(obfuscatedTag << 1) | 1];
7823 #endif
7824 }
7825
7826
7827 // Returns a pointer to the class's storage in the tagged class arrays,
7828 // or nil if the tag is out of range.
7829 static Class *
7830 classSlotForTagIndex(objc_tag_index_t tag)
7831 {
7832 if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
7833 return classSlotForBasicTagIndex(tag);
7834 }
7835
7836 if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
7837 int index = tag - OBJC_TAG_First52BitPayload;
7838 uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
7839 >> _OBJC_TAG_EXT_INDEX_SHIFT)
7840 & _OBJC_TAG_EXT_INDEX_MASK);
7841 return &objc_tag_ext_classes[index ^ tagObfuscator];
7842 }
7843
7844 return nil;
7845 }
7846
7847 /***********************************************************************
7848 * initializeTaggedPointerObfuscator
7849 * Initialize objc_debug_taggedpointer_obfuscator with randomness.
7850 *
7851 * The tagged pointer obfuscator is intended to make it more difficult
7852 * for an attacker to construct a particular object as a tagged pointer,
7853 * in the presence of a buffer overflow or other write control over some
7854 * memory. The obfuscator is XORed with the tagged pointers when setting
7855 * or retrieving payload values. It is filled with randomness on first
7856 * use.
7857 **********************************************************************/
7858 static void
7859 initializeTaggedPointerObfuscator(void)
7860 {
7861 if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) ||
7862 // Set the obfuscator to zero for apps linked against older SDKs,
7863 // in case they're relying on the tagged pointer representation.
7864 DisableTaggedPointerObfuscation) {
7865 objc_debug_taggedpointer_obfuscator = 0;
7866 } else {
7867 // Pull random data into the variable, then shift away all non-payload bits.
7868 arc4random_buf(&objc_debug_taggedpointer_obfuscator,
7869 sizeof(objc_debug_taggedpointer_obfuscator));
7870 objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;
7871 }
7872 }
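
/***********************************************************************
 * Decoding sketch (debugger-style)
 * A minimal sketch of how a tool can combine the exported
 * objc_debug_taggedpointer_* variables above: deobfuscate with XOR,
 * then shift away the non-payload bits. Basic (60-bit payload) layout
 * only; the extended layout uses the _ext_ variants, and signed
 * payloads cast to intptr_t before the right shift.
 **********************************************************************/
#if 0   // not compiled; illustration only
static bool isTaggedPointerBits(uintptr_t ptr)
{
    return (ptr & objc_debug_taggedpointer_mask)
            == objc_debug_taggedpointer_mask;
}

static uintptr_t taggedPointerPayload(uintptr_t ptr)
{
    uintptr_t value = ptr ^ objc_debug_taggedpointer_obfuscator;
    return (value << objc_debug_taggedpointer_payload_lshift)
                  >> objc_debug_taggedpointer_payload_rshift;
}
#endif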
7873
7874
7875 /***********************************************************************
7876 * _objc_registerTaggedPointerClass
7877 * Set the class to use for the given tagged pointer index.
7878 * Aborts if the tag is out of range, or if the tag is already
7879 * used by some other class.
7880 **********************************************************************/
7881 void
7882 _objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
7883 {
7884 if (objc_debug_taggedpointer_mask == 0) {
7885 _objc_fatal("tagged pointers are disabled");
7886 }
7887
7888 Class *slot = classSlotForTagIndex(tag);
7889 if (!slot) {
7890 _objc_fatal("tag index %u is invalid", (unsigned int)tag);
7891 }
7892
7893 Class oldCls = *slot;
7894
7895 if (cls && oldCls && cls != oldCls) {
7896 _objc_fatal("tag index %u used for two different classes "
7897 "(was %p %s, now %p %s)", tag,
7898 oldCls, oldCls->nameForLogging(),
7899 cls, cls->nameForLogging());
7900 }
7901
7902 *slot = cls;
7903
7904 // Store a placeholder class in the basic tag slot that is
7905 // reserved for the extended tag space, if it isn't set already.
7906 // Do this lazily when the first extended tag is registered so
7907 // that old debuggers characterize bogus pointers correctly more often.
7908 if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
7909 Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
7910 if (*extSlot == nil) {
7911 extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7912 *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7913 }
7914 }
7915 }
7916
7917
7918 /***********************************************************************
7919 * _objc_getClassForTag
7920 * Returns the class that is using the given tagged pointer tag.
7921 * Returns nil if no class is using that tag or the tag is out of range.
7922 **********************************************************************/
7923 Class
7924 _objc_getClassForTag(objc_tag_index_t tag)
7925 {
7926 Class *slot = classSlotForTagIndex(tag);
7927 if (slot) return *slot;
7928 else return nil;
7929 }
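
/***********************************************************************
 * Usage sketch: registering a tagged pointer class
 * Illustrative pairing of the two entry points above. The tag
 * constants and declarations live in the private <objc/objc-internal.h>
 * header; the tag choice and class name here are hypothetical, and a
 * tag may only ever map to one class.
 **********************************************************************/
#if 0   // not compiled; illustration only
static void registerMyTaggedClass(void)
{
    objc_tag_index_t tag = OBJC_TAG_First52BitPayload;  // extended-tag space
    Class cls = objc_getClass("MyTaggedValue");         // hypothetical class

    if (_objc_getClassForTag(tag) == nil) {
        _objc_registerTaggedPointerClass(tag, cls);
    }
}
#endif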
7930
7931 #endif
7932
7933
7934 #if SUPPORT_FIXUP
7935
7936 OBJC_EXTERN void objc_msgSend_fixup(void);
7937 OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
7938 OBJC_EXTERN void objc_msgSend_stret_fixup(void);
7939 OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
7940 #if defined(__i386__) || defined(__x86_64__)
7941 OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
7942 #endif
7943 #if defined(__x86_64__)
7944 OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
7945 #endif
7946
7947 OBJC_EXTERN void objc_msgSend_fixedup(void);
7948 OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
7949 OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
7950 OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
7951 #if defined(__i386__) || defined(__x86_64__)
7952 OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
7953 #endif
7954 #if defined(__x86_64__)
7955 OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
7956 #endif
7957
7958 /***********************************************************************
7959 * fixupMessageRef
7960 * Repairs an old vtable dispatch call site.
7961 * vtable dispatch itself is not supported.
7962 **********************************************************************/
7963 static void
7964 fixupMessageRef(message_ref_t *msg)
7965 {
7966 msg->sel = sel_registerName((const char *)msg->sel);
7967
7968 if (msg->imp == &objc_msgSend_fixup) {
7969 if (msg->sel == @selector(alloc)) {
7970 msg->imp = (IMP)&objc_alloc;
7971 } else if (msg->sel == @selector(allocWithZone:)) {
7972 msg->imp = (IMP)&objc_allocWithZone;
7973 } else if (msg->sel == @selector(retain)) {
7974 msg->imp = (IMP)&objc_retain;
7975 } else if (msg->sel == @selector(release)) {
7976 msg->imp = (IMP)&objc_release;
7977 } else if (msg->sel == @selector(autorelease)) {
7978 msg->imp = (IMP)&objc_autorelease;
7979 } else {
7980 msg->imp = &objc_msgSend_fixedup;
7981 }
7982 }
7983 else if (msg->imp == &objc_msgSendSuper2_fixup) {
7984 msg->imp = &objc_msgSendSuper2_fixedup;
7985 }
7986 else if (msg->imp == &objc_msgSend_stret_fixup) {
7987 msg->imp = &objc_msgSend_stret_fixedup;
7988 }
7989 else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
7990 msg->imp = &objc_msgSendSuper2_stret_fixedup;
7991 }
7992 #if defined(__i386__) || defined(__x86_64__)
7993 else if (msg->imp == &objc_msgSend_fpret_fixup) {
7994 msg->imp = &objc_msgSend_fpret_fixedup;
7995 }
7996 #endif
7997 #if defined(__x86_64__)
7998 else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
7999 msg->imp = &objc_msgSend_fp2ret_fixedup;
8000 }
8001 #endif
8002 }
8003
8004 // SUPPORT_FIXUP
8005 #endif
8006
8007
8008 // ProKit SPI
8009 static Class setSuperclass(Class cls, Class newSuper)
8010 {
8011 Class oldSuper;
8012
8013 runtimeLock.assertLocked();
8014
8015 ASSERT(cls->isRealized());
8016 ASSERT(newSuper->isRealized());
8017
8018 oldSuper = cls->superclass;
8019 removeSubclass(oldSuper, cls);
8020 removeSubclass(oldSuper->ISA(), cls->ISA());
8021
8022 cls->superclass = newSuper;
8023 cls->ISA()->superclass = newSuper->ISA();
8024 addSubclass(newSuper, cls);
8025 addSubclass(newSuper->ISA(), cls->ISA());
8026
8027 // Flush subclass's method caches.
8028 flushCaches(cls);
8029 flushCaches(cls->ISA());
8030
8031 return oldSuper;
8032 }
8033
8034
8035 Class class_setSuperclass(Class cls, Class newSuper)
8036 {
8037 mutex_locker_t lock(runtimeLock);
8038 return setSuperclass(cls, newSuper);
8039 }
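
/***********************************************************************
 * Usage sketch: class_setSuperclass
 * Illustrative only; the public declaration is deprecated because
 * reparenting does not adjust ivar layouts, it only rewires the class
 * hierarchy and flushes the affected method caches.
 **********************************************************************/
#if 0   // not compiled; illustration only
static void reparent(Class cls, Class newSuper)
{
    Class oldSuper = class_setSuperclass(cls, newSuper);
    (void)oldSuper;   // the previous superclass is returned
}
#endif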
8040
8041 void runtime_init(void)
8042 {
8043 objc::unattachedCategories.init(32);
8044 objc::allocatedClasses.init();
8045 }
8046
8047 // __OBJC2__
8048 #endif