/*
 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/***********************************************************************
* objc-runtime-new.m
* Support for new-ABI classes and images.
**********************************************************************/

#if __OBJC2__

#include "DenseMapExtras.h"
#include "objc-private.h"
#include "objc-runtime-new.h"
#include "objc-file.h"
#include "objc-zalloc.h"
#include <Block.h>
#include <objc/message.h>
#include <mach/shared_region.h>

#define newprotocol(p) ((protocol_t *)p)

static void disableTaggedPointers();
static void detach_class(Class cls, bool isMeta);
static void free_class(Class cls);
static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
template<typename T> static bool method_lists_contains_any(T *mlists, T *end,
        SEL sels[], size_t selcount);
static void flushCaches(Class cls, const char *func, bool (^predicate)(Class c));
static void initializeTaggedPointerObfuscator(void);
#if SUPPORT_FIXUP
static void fixupMessageRef(message_ref_t *msg);
#endif
static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);

struct locstamped_category_t {
    category_t *cat;
    struct header_info *hi;
};
enum {
    ATTACH_CLASS               = 1 << 0,
    ATTACH_METACLASS           = 1 << 1,
    ATTACH_CLASS_AND_METACLASS = 1 << 2,
    ATTACH_EXISTING            = 1 << 3,
};
static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);


/***********************************************************************
* Lock management
**********************************************************************/
mutex_t runtimeLock;
mutex_t selLock;
#if CONFIG_USE_CACHE_LOCK
mutex_t cacheUpdateLock;
#endif
recursive_mutex_t loadMethodLock;

/***********************************************************************
* Class structure decoding
**********************************************************************/

const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;


/***********************************************************************
* Non-pointer isa decoding
**********************************************************************/
#if SUPPORT_INDEXED_ISA

// Indexed non-pointer isa.

// These are used to mask the ISA and see if it's got an index or not.
const uintptr_t objc_debug_indexed_isa_magic_mask  = ISA_INDEX_MAGIC_MASK;
const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);

// Then these are used to extract the index from the ISA.
const uintptr_t objc_debug_indexed_isa_index_mask  = ISA_INDEX_MASK;
const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;

asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
    "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
    "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
    "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
    "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));


// And then we can use that index to get the class from this array.  Note
// the size is provided so that clients can ensure the index they get is in
// bounds and not read off the end of the array.
// Defined in the objc-msg-*.s files
// const Class objc_indexed_classes[]

// When we don't have enough bits to store a class*, we can instead store an
// index into this array.  Classes are added here when they are realized.
// Note, an index of 0 is illegal.
uintptr_t objc_indexed_classes_count = 0;

// SUPPORT_INDEXED_ISA
#else
// not SUPPORT_INDEXED_ISA

// These variables exist but are all set to 0 so that they are ignored.
const uintptr_t objc_debug_indexed_isa_magic_mask  = 0;
const uintptr_t objc_debug_indexed_isa_magic_value = 0;
const uintptr_t objc_debug_indexed_isa_index_mask  = 0;
const uintptr_t objc_debug_indexed_isa_index_shift = 0;
Class objc_indexed_classes[1] = { nil };
uintptr_t objc_indexed_classes_count = 0;

// not SUPPORT_INDEXED_ISA
#endif
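// Illustrative sketch (not part of the original source): an out-of-process
// client such as a debugger could decode an indexed non-pointer isa with the
// objc_debug_* values exported above. decodeIndexedIsa() is a hypothetical
// helper; the two array parameters stand in for objc_indexed_classes and
// objc_indexed_classes_count read out of the target process.
//
//     static Class decodeIndexedIsa(uintptr_t isaBits,
//                                   const Class *indexedClasses,
//                                   uintptr_t indexedClassesCount)
//     {
//         if ((isaBits & objc_debug_indexed_isa_magic_mask) !=
//             objc_debug_indexed_isa_magic_value)
//             return nil;  // not an indexed isa
//         uintptr_t index = (isaBits & objc_debug_indexed_isa_index_mask)
//                           >> objc_debug_indexed_isa_index_shift;
//         if (index == 0 || index >= indexedClassesCount)
//             return nil;  // index 0 is illegal; bounds-check against the count
//         return indexedClasses[index];
//     }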


#if SUPPORT_PACKED_ISA

// Packed non-pointer isa.

asm("\n .globl _objc_absolute_packed_isa_class_mask" \
    "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));

// a better definition is
//     (uintptr_t)ptrauth_strip((void *)ISA_MASK, ISA_SIGNING_KEY)
// however we know that PAC uses bits outside of MACH_VM_MAX_ADDRESS
// so approximate the definition here to be constant
template <typename T>
static constexpr T coveringMask(T n) {
    for (T mask = 0; mask != ~T{0}; mask = (mask << 1) | 1) {
        if ((n & mask) == n) return mask;
    }
    return ~T{0};
}
const uintptr_t objc_debug_isa_class_mask  = ISA_MASK & coveringMask(MACH_VM_MAX_ADDRESS - 1);
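// Worked example (illustrative only): coveringMask() returns the smallest
// mask of the form 2^k - 1 that covers every set bit of its argument, so
//     coveringMask<uintptr_t>(0x0FFF) == 0x0FFF
//     coveringMask<uintptr_t>(0x1000) == 0x1FFF
// With MACH_VM_MAX_ADDRESS - 1 as the argument the result is an all-ones
// mask over the usable virtual-address bits, which strips any PAC bits out
// of ISA_MASK in the definition above.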

const uintptr_t objc_debug_isa_magic_mask  = ISA_MAGIC_MASK;
const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);

// die if virtual address space bound goes up
STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0  ||
              ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);

// SUPPORT_PACKED_ISA
#else
// not SUPPORT_PACKED_ISA

// These variables exist but enforce pointer alignment only.
const uintptr_t objc_debug_isa_class_mask  = (~WORD_MASK);
const uintptr_t objc_debug_isa_magic_mask  = WORD_MASK;
const uintptr_t objc_debug_isa_magic_value = 0;

// not SUPPORT_PACKED_ISA
#endif
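// Illustrative sketch (classFromRawIsa() is a hypothetical helper, not part
// of this file): introspection tools use the three objc_debug_isa_* values
// above to recover a class pointer from a raw isa word, roughly like this:
//
//     static Class classFromRawIsa(uintptr_t isaBits)
//     {
//         if ((isaBits & objc_debug_isa_magic_mask) == objc_debug_isa_magic_value) {
//             // packed non-pointer isa: the class pointer sits under the class mask
//             return (Class)(isaBits & objc_debug_isa_class_mask);
//         }
//         // otherwise treat the word as a plain, word-aligned class pointer
//         return (Class)isaBits;
//     }
//
// With the non-SUPPORT_PACKED_ISA definitions above, the same code degrades
// to a simple pointer-alignment check.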


/***********************************************************************
* Swift marker bits
**********************************************************************/
const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;


/***********************************************************************
* allocatedClasses
* A table of all classes (and metaclasses) which have been allocated
* with objc_allocateClassPair.
**********************************************************************/
namespace objc {
static ExplicitInitDenseSet<Class> allocatedClasses;
}

/***********************************************************************
* _firstRealizedClass
* The root of all realized classes
**********************************************************************/
static Class _firstRealizedClass = nil;

/***********************************************************************
* didInitialAttachCategories
* Whether the initial attachment of categories present at startup has
* been done.
**********************************************************************/
static bool didInitialAttachCategories = false;

/***********************************************************************
* didCallDyldNotifyRegister
* Whether the call to _dyld_objc_notify_register has completed.
**********************************************************************/
bool didCallDyldNotifyRegister = false;


/***********************************************************************
* smallMethodIMPMap
* The map from small method pointers to replacement IMPs.
*
* Locking: runtimeLock must be held when accessing this map.
**********************************************************************/
namespace objc {
static objc::LazyInitDenseMap<const method_t *, IMP> smallMethodIMPMap;
}

static IMP method_t_remappedImp_nolock(const method_t *m) {
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(false);
    if (!map)
        return nullptr;
    auto iter = map->find(m);
    if (iter == map->end())
        return nullptr;
    return iter->second;
}

IMP method_t::remappedImp(bool needsLock) const {
    ASSERT(isSmall());
    if (needsLock) {
        mutex_locker_t guard(runtimeLock);
        return method_t_remappedImp_nolock(this);
    } else {
        return method_t_remappedImp_nolock(this);
    }
}

void method_t::remapImp(IMP imp) {
    ASSERT(isSmall());
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(true);
    (*map)[this] = imp;
}

objc_method_description *method_t::getSmallDescription() const {
    static objc::LazyInitDenseMap<const method_t *, objc_method_description *> map;

    mutex_locker_t guard(runtimeLock);

    auto &ptr = (*map.get(true))[this];
    if (!ptr) {
        ptr = (objc_method_description *)malloc(sizeof *ptr);
        ptr->name = name();
        ptr->types = (char *)types();
    }
    return ptr;
}
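// Usage sketch (illustrative, simplified from the real callers): small
// (relative) methods are immutable, so an API that swaps an IMP cannot write
// the new IMP into the method itself. Instead the override is recorded in
// smallMethodIMPMap via remapImp(), and readers consult the map first:
//
//     // writer, with runtimeLock held:
//     meth->remapImp(newImp);
//
//     // reader:
//     IMP imp = meth->remappedImp(true /*needsLock*/);
//     if (!imp) imp = meth->imp(false);   // fall back to the stored IMP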

/*
  Low two bits of mlist->entsize are used as the fixed-up marker.
    Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
    (Protocol method lists are not sorted because of their extra parallel data)
    Runtime fixed-up method lists get 3.

  High two bits of protocol->flags are used as the fixed-up marker.
  PREOPTIMIZED VERSION:
    Protocols from shared cache are 1<<30.
    Runtime fixed-up protocols get 1<<30.
  UN-PREOPTIMIZED VERSION:
    Protocols from shared cache are 1<<30.
    Shared cache's fixups are not trusted.
    Runtime fixed-up protocols get 3<<30.
*/

static const uint32_t fixed_up_method_list = 3;
static const uint32_t uniqued_method_list = 1;
static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;

void
disableSharedCacheOptimizations(void)
{
    fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
    // It's safe to just set canonical protocol to 0 as we'll never call
    // clearIsCanonical() unless isCanonical() returned true, which can't happen
    // with a 0 mask
    canonical_protocol = 0;
}

bool method_list_t::isUniqued() const {
    return (flags() & uniqued_method_list) != 0;
}

bool method_list_t::isFixedUp() const {
    // Ignore any flags in the top bits, just look at the bottom two.
    return (flags() & 0x3) == fixed_up_method_list;
}

void method_list_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    entsizeAndFlags = entsize() | fixed_up_method_list;
}
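// Illustrative example of the encoding described above: for a big (non-small)
// method list, the entry size and the marker bits share one word, e.g.
//
//     entsizeAndFlags == entsize() | 1    // shared cache list: uniqued
//     entsizeAndFlags == entsize() | 3    // runtime fixed-up: uniqued + sorted
//
// so isUniqued() tests bit 0 and isFixedUp() compares the low two bits
// against fixed_up_method_list (3). The protocol markers work the same way
// in the top two bits of protocol_t::flags.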


bool protocol_t::isFixedUp() const {
    return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
}

void protocol_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}

bool protocol_t::isCanonical() const {
    return (flags & canonical_protocol) != 0;
}

void protocol_t::clearIsCanonical() {
    runtimeLock.assertLocked();
    ASSERT(isCanonical());
    flags = flags & ~canonical_protocol;
}


const method_list_t_authed_ptr<method_list_t> *method_array_t::endCategoryMethodLists(Class cls) const
{
    auto mlists = beginLists();
    auto mlistsEnd = endLists();

    if (mlists == mlistsEnd  ||  !cls->data()->ro()->baseMethods())
    {
        // No methods, or no base methods.
        // Everything here is a category method.
        return mlistsEnd;
    }

    // Have base methods. Category methods are
    // everything except the last method list.
    return mlistsEnd - 1;
}

static const char *sel_cname(SEL sel)
{
    return (const char *)(void *)sel;
}


static size_t protocol_list_size(const protocol_list_t *plist)
{
    return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
}


static void try_free(const void *p)
{
    if (p && malloc_size(p)) free((void *)p);
}


using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
                                       Class _Nonnull newClass);
// Normally there's only one handler registered.
static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;

void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
    (Class _Nonnull oldClass, Class _Nonnull newClass)) {
    mutex_locker_t lock(runtimeLock);

    classCopyFixupHandlers.append(newFixupHandler);
}

static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
    if (!supercls  ||  !supercls->isAnySwift()) {
        return _calloc_class(sizeof(objc_class) + extraBytes);
    }

    // Superclass is a Swift class. New subclass must duplicate its extra bits.

    // Allocate the new class, with space for super's prefix and suffix
    // and self's extraBytes.
    swift_class_t *swiftSupercls = (swift_class_t *)supercls;
    size_t superSize = swiftSupercls->classSize;
    void *superBits = swiftSupercls->baseAddress();
    void *bits = malloc(superSize + extraBytes);

    // Copy all of the superclass's data to the new class.
    memcpy(bits, superBits, superSize);

    // Erase the objc data and the Swift description in the new class.
    swift_class_t *swcls = (swift_class_t *)
        ((uint8_t *)bits + swiftSupercls->classAddressOffset);
    bzero(swcls, sizeof(objc_class));
    swcls->description = nil;

    for (auto handler : classCopyFixupHandlers) {
        handler(supercls, (Class)swcls);
    }

    // Mark this class as Swift-enhanced.
    if (supercls->isSwiftStable()) {
        swcls->bits.setIsSwiftStable();
    }
    if (supercls->isSwiftLegacy()) {
        swcls->bits.setIsSwiftLegacy();
    }

    return (Class)swcls;
}
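// Layout sketch of what the copy above produces (illustrative; the actual
// offsets come from the Swift metadata, not fixed constants):
//
//     superBits                          bits (malloc'd copy)
//     +------------------------+         +------------------------+
//     | Swift prefix           | memcpy  | copy of Swift prefix   |
//     +------------------------+ ------> +------------------------+
//     | objc_class             |         | zeroed objc_class      | <- returned Class
//     +------------------------+         +------------------------+
//     | Swift suffix           |         | copy of Swift suffix   |
//     +------------------------+         +------------------------+
//                                        | extraBytes             |
//                                        +------------------------+
//
// classAddressOffset is the distance from the allocation's base address to
// the embedded objc_class, which is why the returned pointer is
// bits + classAddressOffset.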


/***********************************************************************
* object_getIndexedIvars.
**********************************************************************/
void *object_getIndexedIvars(id obj)
{
    uint8_t *base = (uint8_t *)obj;

    if (obj->isTaggedPointerOrNil()) return nil;

    if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();

    Class cls = (Class)obj;
    if (!cls->isAnySwift()) return base + sizeof(objc_class);

    swift_class_t *swcls = (swift_class_t *)cls;
    return base - swcls->classAddressOffset + word_align(swcls->classSize);
}
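// Usage sketch (illustrative; the class name "MyBoxedValue" is made up):
// the "indexed ivars" are the extraBytes a caller requested, and this call
// returns a pointer to that trailing storage past the declared ivars.
//
//     Class cls = objc_allocateClassPair([NSObject class], "MyBoxedValue", 0);
//     objc_registerClassPair(cls);
//     id obj = class_createInstance(cls, sizeof(int));  // request extra bytes
//     int *slot = (int *)object_getIndexedIvars(obj);   // -> the extra storage
//     *slot = 42;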


/***********************************************************************
* make_ro_writeable
* Reallocates rw->ro if necessary to make it writeable.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    runtimeLock.assertLocked();

    if (rw->flags & RW_COPIED_RO) {
        // already writeable, do nothing
    } else {
        rw->set_ro(rw->ro()->duplicate());
        rw->flags |= RW_COPIED_RO;
    }
    return const_cast<class_ro_t *>(rw->ro());
}


/***********************************************************************
* dataSegmentsContain
* Returns true if the given address lies within a data segment in any
* loaded image.
**********************************************************************/
NEVER_INLINE
static bool
dataSegmentsContain(Class cls)
{
    uint32_t index;
    if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
        // if the class is realized (hence has a class_rw_t),
        // memorize where we found the range
        if (cls->isRealized()) {
            cls->data()->witness = (uint16_t)index;
        }
        return true;
    }
    return false;
}


/***********************************************************************
* isKnownClass
* Return true if the class is known to the runtime (located within the
* shared cache, within the data segment of a loaded image, or has been
* allocated with objc_allocateClassPair).
*
* The result of this operation is cached on the class in a "witness"
* value that is cheaply checked in the fastpath.
**********************************************************************/
ALWAYS_INLINE
static bool
isKnownClass(Class cls)
{
    if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
        return true;
    }
    auto &set = objc::allocatedClasses.get();
    return set.find(cls) != set.end() || dataSegmentsContain(cls);
}
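// Fast-path sketch (illustrative; the exact dataSegmentsRanges API is not
// spelled out here): the first successful dataSegmentsContain() lookup stores
// the index of the matching segment range into cls->data()->witness, so a
// later isKnownClass() only needs to re-validate that one cached range.
//
//     isKnownClass(cls):
//         ranges.contains(cls->data()->witness, cls)   // cheap cached check
//         || allocatedClasses contains cls             // dynamically allocated
//         || dataSegmentsContain(cls)                  // slow search; caches witness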


/***********************************************************************
* addClassTableEntry
* Add a class to the table of all classes. If addMeta is true,
* automatically adds the metaclass of the class as well.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
addClassTableEntry(Class cls, bool addMeta = true)
{
    runtimeLock.assertLocked();

    // This class is allowed to be a known class via the shared cache or via
    // data segments, but it is not allowed to be in the dynamic table already.
    auto &set = objc::allocatedClasses.get();

    ASSERT(set.find(cls) == set.end());

    if (!isKnownClass(cls))
        set.insert(cls);
    if (addMeta)
        addClassTableEntry(cls->ISA(), false);
}


/***********************************************************************
* checkIsKnownClass
* Checks the given class against the list of all known classes. Dies
* with a fatal error if the class is not known.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
ALWAYS_INLINE
static void
checkIsKnownClass(Class cls)
{
    if (slowpath(!isKnownClass(cls))) {
        _objc_fatal("Attempt to use unknown class %p.", cls);
    }
}

/***********************************************************************
* classNSObject
* Returns class NSObject.
* Locking: none
**********************************************************************/
static Class classNSObject(void)
{
    extern objc_class OBJC_CLASS_$_NSObject;
    return (Class)&OBJC_CLASS_$_NSObject;
}

static Class metaclassNSObject(void)
{
    extern objc_class OBJC_METACLASS_$_NSObject;
    return (Class)&OBJC_METACLASS_$_NSObject;
}

/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
__attribute__((cold, noinline))
static void
printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
{
    uint32_t c;
    bool isMeta = cls->isMetaClass();

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats_count; c++) {
        category_t *cat = cats_list[c].cat;

        method_list_t *mlist = cat->methodsForMeta(isMeta);
        if (!mlist) continue;

        for (const auto& meth : *mlist) {
            SEL s = sel_registerName(sel_cname(meth.name()));

            // Search for replaced methods in method lookup order.
            // Complain about the first duplicate only.

            // Look for method in earlier categories
            for (uint32_t c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats_list[c2].cat;

                const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
                if (!mlist2) continue;

                for (const auto& meth2 : *mlist2) {
                    SEL s2 = sel_registerName(sel_cname(meth2.name()));
                    if (s == s2) {
                        logReplacedMethod(cls->nameForLogging(), s,
                                          cls->isMetaClass(), cat->name,
                                          meth2.imp(false), meth.imp(false));
                        goto complained;
                    }
                }
            }

            // Look for method in cls
            for (const auto& meth2 : cls->data()->methods()) {
                SEL s2 = sel_registerName(sel_cname(meth2.name()));
                if (s == s2) {
                    logReplacedMethod(cls->nameForLogging(), s,
                                      cls->isMetaClass(), cat->name,
                                      meth2.imp(false), meth.imp(false));
                    goto complained;
                }
            }

        complained:
            ;
        }
    }
}


/***********************************************************************
* unreasonableClassCount
* Provides an upper bound for any iteration of classes,
* to prevent spins when runtime metadata is corrupted.
**********************************************************************/
static unsigned unreasonableClassCount()
{
    runtimeLock.assertLocked();

    int base = NXCountMapTable(gdb_objc_realized_classes) +
        getPreoptimizedClassUnreasonableCount();

    // Provide lots of slack here. Some iterations touch metaclasses too.
    // Some iterations backtrack (like realized class iteration).
    // We don't need an efficient bound, merely one that prevents spins.
    return (base + 1) * 16;
}
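// Worked example (numbers made up for illustration): with 10,000 entries in
// gdb_objc_realized_classes and no preoptimized classes, the bound is
//
//     (10000 + 0 + 1) * 16 == 160,016
//
// iterations before an enumerator declares the class list corrupted, which
// is far more than any legitimate walk of classes plus metaclasses needs.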


/***********************************************************************
* Class enumerators
* The passed-in block returns `false` if subclasses can be skipped.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static inline void
foreach_realized_class_and_subclass_2(Class top, unsigned &count,
                                      bool skip_metaclass,
                                      bool (^code)(Class) __attribute((noescape)))
{
    Class cls = top;

    runtimeLock.assertLocked();
    ASSERT(top);

    while (1) {
        if (--count == 0) {
            _objc_fatal("Memory corruption in class list.");
        }

        bool skip_subclasses;

        if (skip_metaclass && cls->isMetaClass()) {
            skip_subclasses = true;
        } else {
            skip_subclasses = !code(cls);
        }

        if (!skip_subclasses && cls->data()->firstSubclass) {
            cls = cls->data()->firstSubclass;
        } else {
            while (!cls->data()->nextSiblingClass  &&  cls != top) {
                cls = cls->getSuperclass();
                if (--count == 0) {
                    _objc_fatal("Memory corruption in class list.");
                }
            }
            if (cls == top) break;
            cls = cls->data()->nextSiblingClass;
        }
    }
}

// Enumerates a class and all of its realized subclasses.
static void
foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    foreach_realized_class_and_subclass_2(top, count, false, code);
}

// Enumerates all realized classes and metaclasses.
static void
foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, false, code);
    }
}

// Enumerates all realized classes (ignoring metaclasses).
static void
foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, true, code);
    }
}
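// Usage sketch (illustrative; shouldVisit() and visit() are hypothetical
// helpers): callers hold runtimeLock and pass a block whose return value
// controls whether the walk descends into the current class's subclasses.
//
//     foreach_realized_class(^(Class c) {
//         if (!shouldVisit(c)) return false;   // false prunes c's subtree
//         visit(c);
//         return true;                         // true descends into subclasses
//     });
//
// The prune-on-false contract of the block is the point of the example.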


/***********************************************************************
* Method Scanners / Optimization tracking
* Implementation of scanning for various implementations of methods.
**********************************************************************/

namespace objc {

enum SelectorBundle {
    AWZ,
    RR,
    Core,
};

namespace scanner {

// The current state of NSObject swizzling for every scanner
//
// It allows for cheap checks of global swizzles, and also lets
// things like IMP Swizzling before NSObject has been initialized
// to be remembered, as setInitialized() would miss these.
//
// Every pair of bits describes a SelectorBundle.
// even bits: is NSObject class swizzled for this bundle
// odd bits:  is NSObject meta class swizzled for this bundle
static uintptr_t NSObjectSwizzledMask;

static ALWAYS_INLINE uintptr_t
swizzlingBit(SelectorBundle bundle, bool isMeta)
{
    return 1UL << (2 * bundle + isMeta);
}
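// Bit layout example (follows directly from the formula above):
//
//     bundle   isMeta   bit         meaning
//     AWZ(0)   false    1UL << 0    NSObject class swizzled for AWZ
//     AWZ(0)   true     1UL << 1    NSObject metaclass swizzled for AWZ
//     RR(1)    false    1UL << 2    NSObject class swizzled for RR
//     RR(1)    true     1UL << 3    NSObject metaclass swizzled for RR
//     Core(2)  false    1UL << 4    NSObject class swizzled for Core
//     Core(2)  true     1UL << 5    NSObject metaclass swizzled for Core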

static void __attribute__((cold, noinline))
printCustom(Class cls, SelectorBundle bundle, bool inherited)
{
    static char const * const SelectorBundleName[] = {
        [AWZ]  = "CUSTOM AWZ",
        [RR]   = "CUSTOM RR",
        [Core] = "CUSTOM Core",
    };

    _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
                 cls->nameForLogging(),
                 cls->isMetaClass() ? " (meta)" : "",
                 inherited ? " (inherited)" : "");
}

enum class Scope { Instances, Classes, Both };

template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
class Mixin {

    // work around compiler being broken with templates using Class/objc_class,
    // probably some weird confusion with Class being builtin
    ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
        return (objc_class *)cls;
    }

    static void
    setCustomRecursively(Class cls, bool inherited = false)
    {
        foreach_realized_class_and_subclass(cls, [=](Class c){
            if (c != cls && !as_objc_class(c)->isInitialized()) {
                // Subclass not yet initialized. Wait for setInitialized() to do it
                return false;
            }
            if (Traits::isCustom(c)) {
                return false;
            }
            Traits::setCustom(c);
            if (ShouldPrint) {
                printCustom(cls, Bundle, inherited || c != cls);
            }
            return true;
        });
    }

    static bool
    isNSObjectSwizzled(bool isMeta)
    {
        return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
    }

    static void
    setNSObjectSwizzled(Class NSOClass, bool isMeta)
    {
        NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
        if (as_objc_class(NSOClass)->isInitialized()) {
            setCustomRecursively(NSOClass);
        }
    }

    static void
    scanChangedMethodForUnknownClass(const method_t *meth)
    {
        Class cls;

        cls = classNSObject();
        if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, NO);
                    break;
                }
            }
        }

        cls = metaclassNSObject();
        if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, YES);
                    break;
                }
            }
        }
    }

    static void
    scanAddedClassImpl(Class cls, bool isMeta)
    {
        bool setCustom = NO, inherited = NO;

        if (isNSObjectSwizzled(isMeta)) {
            setCustom = YES;
        } else if (Traits::knownClassHasDefaultImpl(cls, isMeta)) {
            // This class is known to have the default implementations,
            // but we need to check categories.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
                                                methods.endCategoryMethodLists(cls));
        } else if (!isMeta && !as_objc_class(cls)->getSuperclass()) {
            // Custom Root class
            setCustom = YES;
        } else if (Traits::isCustom(as_objc_class(cls)->getSuperclass())) {
            // Superclass is custom, therefore we are too.
            setCustom = YES;
            inherited = YES;
        } else {
            // Not NSObject.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginLists(),
                                                methods.endLists());
        }
        if (slowpath(setCustom)) {
            if (ShouldPrint) printCustom(cls, Bundle, inherited);
        } else {
            Traits::setDefault(cls);
        }
    }

public:
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        // Typically only NSObject has default implementations.
        // Allow this to be extended by overriding (to allow
        // SwiftObject, for example).
        Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
        return cls == NSOClass;
    }

    // Scan a class that is about to be marked Initialized for particular
    // bundles of selectors, and mark the class and its children
    // accordingly.
    //
    // This also handles inheriting properties from its superclass.
    //
    // Caller: objc_class::setInitialized()
    static void
    scanInitializedClass(Class cls, Class metacls)
    {
        if (Domain != Scope::Classes) {
            scanAddedClassImpl(cls, false);
        }
        if (Domain != Scope::Instances) {
            scanAddedClassImpl(metacls, true);
        }
    }

    // Inherit various properties from the superclass when a class
    // is being added to the graph.
    //
    // Caller: addSubclass()
    static void
    scanAddedSubClass(Class subcls, Class supercls)
    {
        if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
            setCustomRecursively(subcls, true);
        }
    }

    // Scan method lists for selectors that would override things
    // in a Bundle.
    //
    // This is used to detect when categories that override problematic
    // selectors are injected into a class after it has been initialized.
    //
    // Caller: prepareMethodLists()
    static void
    scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
    {
        if (slowpath(Traits::isCustom(cls))) {
            return;
        }
        if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
            setCustomRecursively(cls);
        }
    }

    // Handle IMP Swizzling (the IMP for an existing method being changed).
    //
    // In almost all cases, IMP swizzling does not affect custom bits.
    // Custom search will already find the method whether or not
    // it is swizzled, so it does not transition from non-custom to custom.
    //
    // The only cases where IMP swizzling can affect the custom bits are
    // if the swizzled method is one of the methods that is assumed to be
    // non-custom. These special cases are listed in setInitialized().
    // We look for such cases here.
    //
    // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
    static void
    scanChangedMethod(Class cls, const method_t *meth)
    {
        if (fastpath(!Traits::isInterestingSelector(meth->name()))) {
            return;
        }

        if (cls) {
            bool isMeta = as_objc_class(cls)->isMetaClass();
            if (isMeta && Domain != Scope::Instances) {
                if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
            if (!isMeta && Domain != Scope::Classes) {
                if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
        } else {
            // We're called from method_exchangeImplementations; only the
            // NSObject class and metaclass may be problematic (exchanging the
            // default builtin IMP of an interesting selector is a swizzle that
            // may flip our scanned property; for other classes, the previous
            // value had already flipped the property).
            //
            // However, as we don't know the class, we need to scan all of
            // NSObject class and metaclass methods (this is SLOW).
            scanChangedMethodForUnknownClass(meth);
        }
    }
};

} // namespace scanner
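// Summary sketch of what a Traits type plugged into scanner::Mixin provides
// (derived from the scanners below; SomeScanner, SomeBundle, and
// SomePrintFlag are hypothetical placeholders, not real names):
//
//     struct SomeScanner : scanner::Mixin<SomeScanner, SomeBundle, SomePrintFlag> {
//         static bool isCustom(Class cls);                // read the per-class bit
//         static void setCustom(Class cls);               // mark "has custom impl"
//         static void setDefault(Class cls);              // mark "has default impl"
//         static bool isInterestingSelector(SEL sel);     // selector is in the bundle
//         template <typename T>
//         static bool scanMethodLists(T *mlists, T *end); // any interesting sel present?
//     };
//
// plus an optional knownClassHasDefaultImpl() override (see CoreScanner).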

// AWZ methods: +alloc / +allocWithZone:
struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
    static bool isCustom(Class cls) {
        return cls->hasCustomAWZ();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomAWZ();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultAWZ();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(alloc) || sel == @selector(allocWithZone:);
    }
    template<typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
        return method_lists_contains_any(mlists, end, sels, 2);
    }
};

// Retain/Release methods that are extremely rarely overridden
//
// retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
#if !SUPPORT_NONPOINTER_ISA
, scanner::Scope::Instances
#endif
> {
    static bool isCustom(Class cls) {
        return cls->hasCustomRR();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomRR();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultRR();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(retain) ||
               sel == @selector(release) ||
               sel == @selector(autorelease) ||
               sel == @selector(_tryRetain) ||
               sel == @selector(_isDeallocating) ||
               sel == @selector(retainCount) ||
               sel == @selector(allowsWeakReference) ||
               sel == @selector(retainWeakReference);
    }
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[8] = {
            @selector(retain),
            @selector(release),
            @selector(autorelease),
            @selector(_tryRetain),
            @selector(_isDeallocating),
            @selector(retainCount),
            @selector(allowsWeakReference),
            @selector(retainWeakReference),
        };
        return method_lists_contains_any(mlists, end, sels, 8);
    }
};

// Core NSObject methods that are extremely rarely overridden
//
// +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector
struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        if (scanner::Mixin<CoreScanner, Core, PrintCustomCore>::knownClassHasDefaultImpl(cls, isMeta))
            return true;
        if ((cls->isRootClass() || cls->isRootMetaclass())
            && strcmp(cls->mangledName(), "_TtCs12_SwiftObject") == 0)
            return true;

        return false;
    }

    static bool isCustom(Class cls) {
        return cls->hasCustomCore();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomCore();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultCore();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(new) ||
               sel == @selector(self) ||
               sel == @selector(class) ||
               sel == @selector(isKindOfClass:) ||
               sel == @selector(respondsToSelector:);
    }
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[5] = {
            @selector(new),
            @selector(self),
            @selector(class),
            @selector(isKindOfClass:),
            @selector(respondsToSelector:)
        };
        return method_lists_contains_any(mlists, end, sels, 5);
    }
};

class category_list : nocopy_t {
    union {
        locstamped_category_t lc;
        struct {
            locstamped_category_t *array;
            // this aliases with locstamped_category_t::hi
            // which is an aliased pointer
            uint32_t is_array :  1;
            uint32_t count    : 31;
            uint32_t size     : 32;
        };
    } _u;

public:
    category_list() : _u{{nullptr, nullptr}} { }
    category_list(locstamped_category_t lc) : _u{{lc}} { }
    category_list(category_list &&other) : category_list() {
        std::swap(_u, other._u);
    }
    ~category_list()
    {
        if (_u.is_array) {
            free(_u.array);
        }
    }

    uint32_t count() const
    {
        if (_u.is_array) return _u.count;
        return _u.lc.cat ? 1 : 0;
    }

    uint32_t arrayByteSize(uint32_t size) const
    {
        return sizeof(locstamped_category_t) * size;
    }

    const locstamped_category_t *array() const
    {
        return _u.is_array ? _u.array : &_u.lc;
    }

    void append(locstamped_category_t lc)
    {
        if (_u.is_array) {
            if (_u.count == _u.size) {
                // Have a typical malloc growth:
                // - size <= 8: grow by 2
                // - size <= 16: grow by 4
                // - size <= 32: grow by 8
                // ... etc
                _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
                _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
            }
            _u.array[_u.count++] = lc;
        } else if (_u.lc.cat == NULL) {
            _u.lc = lc;
        } else {
            locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
            arr[0] = _u.lc;
            arr[1] = lc;

            _u.array = arr;
            _u.is_array = true;
            _u.count = 2;
            _u.size = 2;
        }
    }
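    // Growth-sequence example (computed from the expression above, assuming
    // the usual fls() semantics of returning the 1-based index of the highest
    // set bit): starting from the inline-to-array promotion at size 2, the
    // capacity grows
    //
    //     2 -> 4 -> 6 -> 8 -> 12 -> 16 -> 24 -> 32 -> 48 -> 64 -> 96 -> ...
    //
    // i.e. roughly +33-50% per step once the list is no longer tiny, which
    // keeps reallocf() traffic low without over-allocating for the common
    // case of a class with only a couple of unattached categories.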

    void erase(category_t *cat)
    {
        if (_u.is_array) {
            for (int i = 0; i < _u.count; i++) {
                if (_u.array[i].cat == cat) {
                    // shift entries to preserve list order
                    memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
                    return;
                }
            }
        } else if (_u.lc.cat == cat) {
            _u.lc.cat = NULL;
            _u.lc.hi = NULL;
        }
    }
};

class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
{
public:
    void addForClass(locstamped_category_t lc, Class cls)
    {
        runtimeLock.assertLocked();

        if (slowpath(PrintConnecting)) {
            _objc_inform("CLASS: found category %c%s(%s)",
                         cls->isMetaClassMaybeUnrealized() ? '+' : '-',
                         cls->nameForLogging(), lc.cat->name);
        }

        auto result = get().try_emplace(cls, lc);
        if (!result.second) {
            result.first->second.append(lc);
        }
    }

    void attachToClass(Class cls, Class previously, int flags)
    {
        runtimeLock.assertLocked();
        ASSERT((flags & ATTACH_CLASS) ||
               (flags & ATTACH_METACLASS) ||
               (flags & ATTACH_CLASS_AND_METACLASS));

        auto &map = get();
        auto it = map.find(previously);

        if (it != map.end()) {
            category_list &list = it->second;
            if (flags & ATTACH_CLASS_AND_METACLASS) {
                int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
                attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
                attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
            } else {
                attachCategories(cls, list.array(), list.count(), flags);
            }
            map.erase(it);
        }
    }

    void eraseCategoryForClass(category_t *cat, Class cls)
    {
        runtimeLock.assertLocked();

        auto &map = get();
        auto it = map.find(cls);
        if (it != map.end()) {
            category_list &list = it->second;
            list.erase(cat);
            if (list.count() == 0) {
                map.erase(it);
            }
        }
    }

    void eraseClass(Class cls)
    {
        runtimeLock.assertLocked();

        get().erase(cls);
    }
};

static UnattachedCategories unattachedCategories;

} // namespace objc

static bool isBundleClass(Class cls)
{
    return cls->data()->ro()->flags & RO_FROM_BUNDLE;
}


static void
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
    runtimeLock.assertLocked();
    ASSERT(!mlist->isFixedUp());

    // fixme lock less in attachMethodLists ?
    // dyld3 may have already uniqued, but not sorted, the list
    if (!mlist->isUniqued()) {
        mutex_locker_t lock(selLock);

        // Unique selectors in list.
        for (auto& meth : *mlist) {
            const char *name = sel_cname(meth.name());
            meth.setName(sel_registerNameNoLock(name, bundleCopy));
        }
    }

    // Sort by selector address.
    // Don't try to sort small lists, as they're immutable.
    // Don't try to sort big lists of nonstandard size, as stable_sort
    // won't copy the entries properly.
    if (sort && !mlist->isSmallList() && mlist->entsize() == method_t::bigSize) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(&mlist->begin()->big(), &mlist->end()->big(), sorter);
    }

    // Mark method list as uniqued and sorted.
    // Can't mark small lists, since they're immutable.
    if (!mlist->isSmallList()) {
        mlist->setFixedUp();
    }
}
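// End-state sketch (illustrative): after this runs, a big method list has
// every name() registered with the selector table (so selector comparison is
// pointer comparison) and, when sort was requested, its entries ordered by
// ascending SEL address, which is what lets the lookup code binary-search
// fixed-up lists instead of scanning them linearly. Small (relative) method
// lists are never sorted or marked here because they are immutable.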
1291 | ||
8972963c | 1292 | |
7af964d1 | 1293 | static void |
1807f628 | 1294 | prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount, |
34d5b5e8 | 1295 | bool baseMethods, bool methodsFromBundle, const char *why) |
7af964d1 | 1296 | { |
66799735 | 1297 | runtimeLock.assertLocked(); |
31875a97 A |
1298 | |
1299 | if (addedCount == 0) return; | |
7af964d1 | 1300 | |
1807f628 A |
1301 | // There exist RR/AWZ/Core special cases for some class's base methods. |
1302 | // But this code should never need to scan base methods for RR/AWZ/Core: | |
1303 | // default RR/AWZ/Core cannot be set before setInitialized(). | |
8070259c A |
1304 | // Therefore we need not handle any special cases here. |
1305 | if (baseMethods) { | |
1807f628 | 1306 | ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore()); |
34d5b5e8 A |
1307 | } else if (cls->cache.isConstantOptimizedCache()) { |
1308 | cls->setDisallowPreoptCachesRecursively(why); | |
1309 | } else if (cls->allowsPreoptInlinedSels()) { | |
1310 | #if CONFIG_USE_PREOPT_CACHES | |
1311 | SEL *sels = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_START]; | |
1312 | SEL *sels_end = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_END]; | |
1313 | if (method_lists_contains_any(addedLists, addedLists + addedCount, sels, sels_end - sels)) { | |
1314 | cls->setDisallowPreoptInlinedSelsRecursively(why); | |
1315 | } | |
1316 | #endif | |
cd5f04f5 | 1317 | } |
7af964d1 | 1318 | |
7af964d1 A |
1319 | // Add method lists to array. |
1320 | // Reallocate un-fixed method lists. | |
8972963c | 1321 | // The new methods are PREPENDED to the method list array. |
7af964d1 | 1322 | |
31875a97 | 1323 | for (int i = 0; i < addedCount; i++) { |
8972963c | 1324 | method_list_t *mlist = addedLists[i]; |
1807f628 | 1325 | ASSERT(mlist); |
7af964d1 A |
1326 | |
1327 | // Fixup selectors if necessary | |
31875a97 A |
1328 | if (!mlist->isFixedUp()) { |
1329 | fixupMethodList(mlist, methodsFromBundle, true/*sort*/); | |
7af964d1 | 1330 | } |
1807f628 | 1331 | } |
7af964d1 | 1332 | |
1807f628 A |
1333 | // If the class is initialized, then scan for method implementations |
1334 | // tracked by the class's flags. If it's not initialized yet, | |
1335 | // then objc_class::setInitialized() will take care of it. | |
1336 | if (cls->isInitialized()) { | |
1337 | objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount); | |
1338 | objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount); | |
1339 | objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount); | |
cd5f04f5 | 1340 | } |
7af964d1 A |
1341 | } |
1342 | ||
f192a3e2 A |
1343 | class_rw_ext_t * |
1344 | class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy) | |
1345 | { | |
1346 | runtimeLock.assertLocked(); | |
1347 | ||
1348 | auto rwe = objc::zalloc<class_rw_ext_t>(); | |
1349 | ||
1350 | rwe->version = (ro->flags & RO_META) ? 7 : 0; | |
1351 | ||
1352 | method_list_t *list = ro->baseMethods(); | |
1353 | if (list) { | |
1354 | if (deepCopy) list = list->duplicate(); | |
1355 | rwe->methods.attachLists(&list, 1); | |
1356 | } | |
1357 | ||
1358 | // See comments in objc_duplicateClass | |
1359 | // property lists and protocol lists historically | |
1360 | // have not been deep-copied | |
1361 | // | |
1362 | // This is probably wrong and ought to be fixed some day | |
1363 | property_list_t *proplist = ro->baseProperties; | |
1364 | if (proplist) { | |
1365 | rwe->properties.attachLists(&proplist, 1); | |
1366 | } | |
1367 | ||
1368 | protocol_list_t *protolist = ro->baseProtocols; | |
1369 | if (protolist) { | |
1370 | rwe->protocols.attachLists(&protolist, 1); | |
1371 | } | |
1372 | ||
1373 | set_ro_or_rwe(rwe, ro); | |
1374 | return rwe; | |
1375 | } | |
31875a97 A |
1376 | |
1377 | // Attach method lists and properties and protocols from categories to a class. | |
1378 | // Assumes the categories in cats are all loaded and sorted by load order, | |
1379 | // oldest categories first. | |
1807f628 A |
1380 | static void |
1381 | attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count, | |
1382 | int flags) | |
1383 | { | |
1384 | if (slowpath(PrintReplacedMethods)) { | |
1385 | printReplacements(cls, cats_list, cats_count); | |
1386 | } | |
1387 | if (slowpath(PrintConnecting)) { | |
1388 | _objc_inform("CLASS: attaching %d categories to%s class '%s'%s", | |
1389 | cats_count, (flags & ATTACH_EXISTING) ? " existing" : "", | |
1390 | cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : ""); | |
1391 | } | |
1392 | ||
1393 | /* | |
1394 | * Only a few classes have more than 64 categories during launch. | |
1395 | * This uses a little stack, and avoids malloc. | |
1396 | * | |
1397 | * Categories must be added in the proper order, which is back | |
1398 | * to front. To do that with the chunking, we iterate cats_list | |
1399 | * from front to back, build up the local buffers backwards, | |
1400 | * and call attachLists on the chunks. attachLists prepends the | |
1401 | * lists, so the final result is in the expected order. | |
1402 | */ | |
1403 | constexpr uint32_t ATTACH_BUFSIZ = 64; | |
1404 | method_list_t *mlists[ATTACH_BUFSIZ]; | |
1405 | property_list_t *proplists[ATTACH_BUFSIZ]; | |
1406 | protocol_list_t *protolists[ATTACH_BUFSIZ]; | |
1407 | ||
1408 | uint32_t mcount = 0; | |
1409 | uint32_t propcount = 0; | |
1410 | uint32_t protocount = 0; | |
31875a97 | 1411 | bool fromBundle = NO; |
1807f628 | 1412 | bool isMeta = (flags & ATTACH_METACLASS); |
f192a3e2 | 1413 | auto rwe = cls->data()->extAllocIfNeeded(); |
1807f628 A |
1414 | |
1415 | for (uint32_t i = 0; i < cats_count; i++) { | |
1416 | auto& entry = cats_list[i]; | |
31875a97 A |
1417 | |
1418 | method_list_t *mlist = entry.cat->methodsForMeta(isMeta); | |
7af964d1 | 1419 | if (mlist) { |
1807f628 | 1420 | if (mcount == ATTACH_BUFSIZ) { |
34d5b5e8 | 1421 | prepareMethodLists(cls, mlists, mcount, NO, fromBundle, __func__); |
f192a3e2 | 1422 | rwe->methods.attachLists(mlists, mcount); |
1807f628 A |
1423 | mcount = 0; |
1424 | } | |
1425 | mlists[ATTACH_BUFSIZ - ++mcount] = mlist; | |
31875a97 | 1426 | fromBundle |= entry.hi->isBundle(); |
b3962a83 | 1427 | } |
b3962a83 | 1428 | |
1807f628 | 1429 | property_list_t *proplist = |
c1e772c4 | 1430 | entry.cat->propertiesForMeta(isMeta, entry.hi); |
31875a97 | 1431 | if (proplist) { |
1807f628 | 1432 | if (propcount == ATTACH_BUFSIZ) { |
f192a3e2 | 1433 | rwe->properties.attachLists(proplists, propcount); |
1807f628 A |
1434 | propcount = 0; |
1435 | } | |
1436 | proplists[ATTACH_BUFSIZ - ++propcount] = proplist; | |
b3962a83 | 1437 | } |
b3962a83 | 1438 | |
1807f628 | 1439 | protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta); |
31875a97 | 1440 | if (protolist) { |
1807f628 | 1441 | if (protocount == ATTACH_BUFSIZ) { |
f192a3e2 | 1442 | rwe->protocols.attachLists(protolists, protocount); |
1807f628 A |
1443 | protocount = 0; |
1444 | } | |
1445 | protolists[ATTACH_BUFSIZ - ++protocount] = protolist; | |
31875a97 | 1446 | } |
b3962a83 A |
1447 | } |
1448 | ||
1807f628 | 1449 | if (mcount > 0) { |
34d5b5e8 A |
1450 | prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, |
1451 | NO, fromBundle, __func__); | |
f192a3e2 | 1452 | rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount); |
34d5b5e8 A |
1453 | if (flags & ATTACH_EXISTING) { |
1454 | flushCaches(cls, __func__, [](Class c){ | |
1455 | // constant caches have been dealt with in prepareMethodLists; |
1456 | // if the class is still constant here, it's fine to keep it |
1457 | return !c->cache.isConstantOptimizedCache(); | |
1458 | }); | |
1459 | } | |
1807f628 | 1460 | } |
b3962a83 | 1461 | |
f192a3e2 | 1462 | rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount); |
b3962a83 | 1463 | |
f192a3e2 | 1464 | rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount); |
b3962a83 A |
1465 | } |
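/*
 * Illustration only, not part of the runtime (wrapped in #if 0 so it is never
 * compiled): the chunked, back-to-front buffering used by attachCategories
 * above, reduced to plain ints. attachLists() prepends its argument, so
 * filling each fixed-size chunk from its far end while walking the input
 * front-to-back yields "newest first" overall. The names BUFSIZ_DEMO,
 * prependChunk, and attachDemo are hypothetical.
 */
#if 0
#include <cstdint>
#include <vector>

static void prependChunk(std::vector<int> &dst, const int *chunk, uint32_t count)
{
    // Stand-in for list_array_tt::attachLists(): prepend, preserving chunk order.
    dst.insert(dst.begin(), chunk, chunk + count);
}

static std::vector<int> attachDemo(const int *items, uint32_t n)
{
    constexpr uint32_t BUFSIZ_DEMO = 4;   // stands in for ATTACH_BUFSIZ
    int buf[BUFSIZ_DEMO];
    uint32_t count = 0;
    std::vector<int> result;

    for (uint32_t i = 0; i < n; i++) {
        if (count == BUFSIZ_DEMO) {       // flush a full chunk before adding more
            prependChunk(result, buf, count);
            count = 0;
        }
        buf[BUFSIZ_DEMO - ++count] = items[i];   // fill the chunk backwards
    }
    prependChunk(result, buf + BUFSIZ_DEMO - count, count);  // flush the remainder

    // items = {1, 2, ..., n} (oldest first) comes back as {n, ..., 2, 1}.
    return result;
}
#endif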
1466 | ||
1467 | ||
1468 | /*********************************************************************** | |
1469 | * methodizeClass | |
1470 | * Fixes up cls's method list, protocol list, and property list. | |
1471 | * Attaches any outstanding categories. | |
1472 | * Locking: runtimeLock must be held by the caller | |
1473 | **********************************************************************/ | |
1807f628 | 1474 | static void methodizeClass(Class cls, Class previously) |
b3962a83 | 1475 | { |
66799735 | 1476 | runtimeLock.assertLocked(); |
b3962a83 | 1477 | |
31875a97 A |
1478 | bool isMeta = cls->isMetaClass(); |
1479 | auto rw = cls->data(); | |
f192a3e2 A |
1480 | auto ro = rw->ro(); |
1481 | auto rwe = rw->ext(); | |
b3962a83 | 1482 | |
7af964d1 A |
1483 | // Methodizing for the first time |
1484 | if (PrintConnecting) { | |
1485 | _objc_inform("CLASS: methodizing class '%s' %s", | |
8070259c | 1486 | cls->nameForLogging(), isMeta ? "(meta)" : ""); |
7af964d1 | 1487 | } |
b3962a83 | 1488 | |
31875a97 A |
1489 | // Install methods and properties that the class implements itself. |
1490 | method_list_t *list = ro->baseMethods(); | |
1491 | if (list) { | |
34d5b5e8 | 1492 | prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls), nullptr); |
f192a3e2 | 1493 | if (rwe) rwe->methods.attachLists(&list, 1); |
31875a97 A |
1494 | } |
1495 | ||
1496 | property_list_t *proplist = ro->baseProperties; | |
f192a3e2 A |
1497 | if (rwe && proplist) { |
1498 | rwe->properties.attachLists(&proplist, 1); | |
31875a97 A |
1499 | } |
1500 | ||
1501 | protocol_list_t *protolist = ro->baseProtocols; | |
f192a3e2 A |
1502 | if (rwe && protolist) { |
1503 | rwe->protocols.attachLists(&protolist, 1); | |
31875a97 | 1504 | } |
b3962a83 | 1505 | |
8972963c A |
1506 | // Root classes get bonus method implementations if they don't have |
1507 | // them already. These apply before category replacements. | |
cd5f04f5 | 1508 | if (cls->isRootMetaclass()) { |
8972963c | 1509 | // root metaclass |
1807f628 | 1510 | addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO); |
8972963c A |
1511 | } |
1512 | ||
31875a97 | 1513 | // Attach categories. |
1807f628 A |
1514 | if (previously) { |
1515 | if (isMeta) { | |
1516 | objc::unattachedCategories.attachToClass(cls, previously, | |
1517 | ATTACH_METACLASS); | |
1518 | } else { | |
1519 | // When a class relocates, categories with class methods | |
1520 | // may be registered on the class itself rather than on | |
1521 | // the metaclass. Tell attachToClass to look for those. | |
1522 | objc::unattachedCategories.attachToClass(cls, previously, | |
1523 | ATTACH_CLASS_AND_METACLASS); | |
b3962a83 | 1524 | } |
7af964d1 | 1525 | } |
1807f628 A |
1526 | objc::unattachedCategories.attachToClass(cls, cls, |
1527 | isMeta ? ATTACH_METACLASS : ATTACH_CLASS); | |
b3962a83 | 1528 | |
31875a97 | 1529 | #if DEBUG |
8972963c | 1530 | // Debug: sanity-check all SELs; log method list contents |
f192a3e2 | 1531 | for (const auto& meth : rw->methods()) { |
31875a97 A |
1532 | if (PrintConnecting) { |
1533 | _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-', | |
bc4fafce | 1534 | cls->nameForLogging(), sel_getName(meth.name())); |
8972963c | 1535 | } |
bc4fafce | 1536 | ASSERT(sel_registerName(sel_getName(meth.name())) == meth.name()); |
31875a97 | 1537 | } |
8972963c | 1538 | #endif |
7af964d1 | 1539 | } |
b3962a83 | 1540 | |
b3962a83 | 1541 | |
8070259c A |
1542 | /*********************************************************************** |
1543 | * nonMetaClasses | |
1544 | * Returns the secondary metaclass => class map | |
1545 | * Used for some cases of +initialize and +resolveClassMethod:. | |
1546 | * This map does not contain all class and metaclass pairs. It only | |
1547 | * contains metaclasses whose classes would be in the runtime-allocated | |
1548 | * named-class table, but are not because some other class with the same name | |
1549 | * is in that table. | |
1550 | * Classes with no duplicates are not included. | |
1551 | * Classes in the preoptimized named-class table are not included. | |
1552 | * Classes whose duplicates are in the preoptimized table are not included. | |
13ba007e A |
1553 | * Most code should use getMaybeUnrealizedNonMetaClass() |
1554 | * instead of reading this table. | |
8070259c A |
1555 | * Locking: runtimeLock must be read- or write-locked by the caller |
1556 | **********************************************************************/ | |
1557 | static NXMapTable *nonmeta_class_map = nil; | |
1558 | static NXMapTable *nonMetaClasses(void) | |
1559 | { | |
31875a97 | 1560 | runtimeLock.assertLocked(); |
8070259c A |
1561 | |
1562 | if (nonmeta_class_map) return nonmeta_class_map; | |
1563 | ||
1564 | // nonmeta_class_map is typically small | |
1565 | INIT_ONCE_PTR(nonmeta_class_map, | |
31875a97 | 1566 | NXCreateMapTable(NXPtrValueMapPrototype, 32), |
8070259c A |
1567 | NXFreeMapTable(v)); |
1568 | ||
1569 | return nonmeta_class_map; | |
1570 | } | |
1571 | ||
1572 | ||
1573 | /*********************************************************************** | |
1574 | * addNonMetaClass | |
1575 | * Adds metacls => cls to the secondary metaclass map | |
1576 | * Locking: runtimeLock must be held by the caller | |
1577 | **********************************************************************/ | |
1578 | static void addNonMetaClass(Class cls) | |
1579 | { | |
66799735 | 1580 | runtimeLock.assertLocked(); |
8070259c A |
1581 | void *old; |
1582 | old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls); | |
1583 | ||
1807f628 A |
1584 | ASSERT(!cls->isMetaClassMaybeUnrealized()); |
1585 | ASSERT(cls->ISA()->isMetaClassMaybeUnrealized()); | |
1586 | ASSERT(!old); | |
8070259c A |
1587 | } |
1588 | ||
1589 | ||
1590 | static void removeNonMetaClass(Class cls) | |
1591 | { | |
66799735 | 1592 | runtimeLock.assertLocked(); |
8070259c A |
1593 | NXMapRemove(nonMetaClasses(), cls->ISA()); |
1594 | } | |
1595 | ||
1596 | ||
1597 | static bool scanMangledField(const char *&string, const char *end, | |
1598 | const char *&field, int& length) | |
1599 | { | |
1600 | // Leading zero not allowed. | |
1601 | if (*string == '0') return false; | |
1602 | ||
1603 | length = 0; | |
1604 | field = string; | |
1605 | while (field < end) { | |
1606 | char c = *field; | |
1607 | if (!isdigit(c)) break; | |
1608 | field++; | |
1609 | if (__builtin_smul_overflow(length, 10, &length)) return false; | |
1610 | if (__builtin_sadd_overflow(length, c - '0', &length)) return false; | |
1611 | } | |
1612 | ||
1613 | string = field + length; | |
1614 | return length > 0 && string <= end; | |
1615 | } | |
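// Worked example for scanMangledField() above (values are illustrative, not
// runtime state): with string = "5Hello3Foo" and end one past the final 'o',
// the first call leaves field pointing at "Hello3Foo", length == 5, and
// advances string to "3Foo"; a second call leaves field at "Foo", length == 3,
// and string == end. Callers then read exactly `length` bytes of `field`.
// A leading '0', or no digits at all, makes the scan return false.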
1616 | ||
1617 | ||
1618 | /*********************************************************************** | |
1619 | * copySwiftV1DemangledName | |
1620 | * Returns the pretty form of the given Swift-v1-mangled class or protocol name. | |
1621 | * Returns nil if the string doesn't look like a mangled Swift v1 name. | |
1622 | * The result must be freed with free(). | |
1623 | **********************************************************************/ | |
1624 | static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false) | |
1625 | { | |
1626 | if (!string) return nil; | |
1627 | ||
1628 | // Swift mangling prefix. | |
1629 | if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil; | |
1630 | string += 4; | |
1631 | ||
1632 | const char *end = string + strlen(string); | |
1633 | ||
1634 | // Module name. | |
1635 | const char *prefix; | |
1636 | int prefixLength; | |
c1e772c4 A |
1637 | if (string[0] == 's') { |
1638 | // "s" is the Swift module. | |
8070259c A |
1639 | prefix = "Swift"; |
1640 | prefixLength = 5; | |
c1e772c4 | 1641 | string += 1; |
8070259c A |
1642 | } else { |
1643 | if (! scanMangledField(string, end, prefix, prefixLength)) return nil; | |
1644 | } | |
1645 | ||
1646 | // Class or protocol name. | |
1647 | const char *suffix; | |
1648 | int suffixLength; | |
1649 | if (! scanMangledField(string, end, suffix, suffixLength)) return nil; | |
1650 | ||
1651 | if (isProtocol) { | |
1652 | // Remainder must be "_". | |
1653 | if (strcmp(string, "_") != 0) return nil; | |
1654 | } else { | |
1655 | // Remainder must be empty. | |
1656 | if (string != end) return nil; | |
1657 | } | |
1658 | ||
1659 | char *result; | |
1660 | asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix); | |
1661 | return result; | |
1662 | } | |
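// Examples for copySwiftV1DemangledName() (class and module names are
// illustrative):
//   "_TtC5Hello5World"  -> "Hello.World"   (class World in module Hello)
//   "_TtCs6Object"      -> "Swift.Object"  ('s' abbreviates the Swift module)
//   "_TtP5Hello5Proto_" -> "Hello.Proto"   (with isProtocol == true)
// Anything without the _TtC/_TtP prefix, or with malformed length fields or
// trailing characters, comes back as nil.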
1663 | ||
1664 | ||
1665 | /*********************************************************************** | |
1666 | * copySwiftV1MangledName | |
1667 | * Returns the Swift 1.0 mangled form of the given class or protocol name. | |
1668 | * Returns nil if the string doesn't look like an unmangled Swift name. | |
1669 | * The result must be freed with free(). | |
1670 | **********************************************************************/ | |
1671 | static char *copySwiftV1MangledName(const char *string, bool isProtocol = false) | |
1672 | { | |
1673 | if (!string) return nil; | |
1674 | ||
1675 | size_t dotCount = 0; | |
1676 | size_t dotIndex; | |
1677 | const char *s; | |
1678 | for (s = string; *s; s++) { | |
1679 | if (*s == '.') { | |
1680 | dotCount++; | |
1681 | dotIndex = s - string; | |
1682 | } | |
1683 | } | |
1684 | size_t stringLength = s - string; | |
1685 | ||
1686 | if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) { | |
1687 | return nil; | |
1688 | } | |
1689 | ||
1690 | const char *prefix = string; | |
1691 | size_t prefixLength = dotIndex; | |
1692 | const char *suffix = string + dotIndex + 1; | |
1693 | size_t suffixLength = stringLength - (dotIndex + 1); | |
1694 | ||
1695 | char *name; | |
1696 | ||
31875a97 | 1697 | if (prefixLength == 5 && memcmp(prefix, "Swift", 5) == 0) { |
c1e772c4 | 1698 | asprintf(&name, "_Tt%cs%zu%.*s%s", |
8070259c A |
1699 | isProtocol ? 'P' : 'C', |
1700 | suffixLength, (int)suffixLength, suffix, | |
1701 | isProtocol ? "_" : ""); | |
1702 | } else { | |
1703 | asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s", | |
1704 | isProtocol ? 'P' : 'C', | |
1705 | prefixLength, (int)prefixLength, prefix, | |
1706 | suffixLength, (int)suffixLength, suffix, | |
1707 | isProtocol ? "_" : ""); | |
1708 | } | |
1709 | return name; | |
1710 | } | |
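// Examples for copySwiftV1MangledName(), the inverse of the demangler above
// (names are illustrative):
//   "Hello.World"  -> "_TtC5Hello5World"
//   "Swift.Object" -> "_TtCs6Object"      (the module "Swift" uses the 's' shorthand)
//   "Hello.Proto"  -> "_TtP5Hello5Proto_" (with isProtocol == true)
// A name with no dot, more than one dot, or a dot at either end comes back
// as nil because it does not look like an unmangled Swift name.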
1711 | ||
1712 | ||
b3962a83 | 1713 | /*********************************************************************** |
13ba007e | 1714 | * getClassExceptSomeSwift |
cd5f04f5 | 1715 | * Looks up a class by name. The class MIGHT NOT be realized. |
8070259c | 1716 | * Demangled Swift names are recognized. |
13ba007e A |
1717 | * Classes known to the Swift runtime but not yet used are NOT recognized. |
1718 | * (such as subclasses of un-instantiated generics) | |
1719 | * Use look_up_class() to find them as well. | |
cd5f04f5 | 1720 | * Locking: runtimeLock must be read- or write-locked by the caller. |
b3962a83 | 1721 | **********************************************************************/ |
7af964d1 | 1722 | |
cd5f04f5 A |
1723 | // This is a misnomer: gdb_objc_realized_classes is actually a list of |
1724 | // named classes not in the dyld shared cache, whether realized or not. | |
34d5b5e8 A |
1725 | // This list excludes lazily named classes, which have to be looked up |
1726 | // using a getClass hook. | |
7af964d1 | 1727 | NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h |
1807f628 | 1728 | uintptr_t objc_debug_realized_class_generation_count; |
7af964d1 | 1729 | |
8070259c | 1730 | static Class getClass_impl(const char *name) |
b3962a83 | 1731 | { |
31875a97 | 1732 | runtimeLock.assertLocked(); |
b3962a83 | 1733 | |
8972963c | 1734 | // allocated in _read_images |
1807f628 | 1735 | ASSERT(gdb_objc_realized_classes); |
b3962a83 | 1736 | |
cd5f04f5 | 1737 | // Try runtime-allocated table |
7257e56c | 1738 | Class result = (Class)NXMapGet(gdb_objc_realized_classes, name); |
cd5f04f5 A |
1739 | if (result) return result; |
1740 | ||
1807f628 A |
1741 | // Try table from dyld shared cache. |
1742 | // Note we do this last to handle the case where we dlopen'ed a shared cache | |
1743 | // dylib with duplicates of classes already present in the main executable. | |
1744 | // In that case, we put the class from the main executable in | |
1745 | // gdb_objc_realized_classes and want to check that before considering any | |
1746 | // newly loaded shared cache binaries. | |
cd5f04f5 | 1747 | return getPreoptimizedClass(name); |
7af964d1 | 1748 | } |
b3962a83 | 1749 | |
13ba007e | 1750 | static Class getClassExceptSomeSwift(const char *name) |
8070259c | 1751 | { |
31875a97 | 1752 | runtimeLock.assertLocked(); |
8070259c A |
1753 | |
1754 | // Try name as-is | |
1755 | Class result = getClass_impl(name); | |
1756 | if (result) return result; | |
1757 | ||
1758 | // Try Swift-mangled equivalent of the given name. | |
1759 | if (char *swName = copySwiftV1MangledName(name)) { | |
1760 | result = getClass_impl(swName); | |
1761 | free(swName); | |
1762 | return result; | |
1763 | } | |
1764 | ||
1765 | return nil; | |
1766 | } | |
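// Example lookup flow (class name is illustrative): getClassExceptSomeSwift
// called with "Hello.World" first tries the literal string against
// gdb_objc_realized_classes and then the shared cache table; if that fails it
// retries both with the Swift v1 mangling "_TtC5Hello5World" before giving up.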
1767 | ||
7af964d1 A |
1768 | |
1769 | /*********************************************************************** | |
1770 | * addNamedClass | |
1771 | * Adds name => cls to the named non-meta class map. | |
1772 | * Warns about duplicate class names and keeps the old mapping. | |
1773 | * Locking: runtimeLock must be held by the caller | |
1774 | **********************************************************************/ | |
31875a97 | 1775 | static void addNamedClass(Class cls, const char *name, Class replacing = nil) |
7af964d1 | 1776 | { |
66799735 | 1777 | runtimeLock.assertLocked(); |
7257e56c | 1778 | Class old; |
13ba007e | 1779 | if ((old = getClassExceptSomeSwift(name)) && old != replacing) { |
7257e56c | 1780 | inform_duplicate(name, old, cls); |
8070259c | 1781 | |
13ba007e A |
1782 | // getMaybeUnrealizedNonMetaClass uses name lookups. |
1783 | // Classes not found by name lookup must be in the | |
1784 | // secondary meta->nonmeta table. | |
8070259c | 1785 | addNonMetaClass(cls); |
7af964d1 | 1786 | } else { |
cd5f04f5 | 1787 | NXMapInsert(gdb_objc_realized_classes, name, cls); |
7af964d1 | 1788 | } |
1807f628 | 1789 | ASSERT(!(cls->data()->flags & RO_META)); |
7af964d1 A |
1790 | |
1791 | // wrong: constructed classes are already realized when they get here | |
1807f628 | 1792 | // ASSERT(!cls->isRealized()); |
b3962a83 A |
1793 | } |
1794 | ||
1795 | ||
1796 | /*********************************************************************** | |
7af964d1 A |
1797 | * removeNamedClass |
1798 | * Removes cls from the name => cls map. | |
b3962a83 A |
1799 | * Locking: runtimeLock must be held by the caller |
1800 | **********************************************************************/ | |
7257e56c | 1801 | static void removeNamedClass(Class cls, const char *name) |
b3962a83 | 1802 | { |
66799735 | 1803 | runtimeLock.assertLocked(); |
1807f628 | 1804 | ASSERT(!(cls->data()->flags & RO_META)); |
cd5f04f5 A |
1805 | if (cls == NXMapGet(gdb_objc_realized_classes, name)) { |
1806 | NXMapRemove(gdb_objc_realized_classes, name); | |
7af964d1 A |
1807 | } else { |
1808 | // cls has a name collision with another class - don't remove the other | |
8070259c A |
1809 | // but do remove cls from the secondary metaclass->class map. |
1810 | removeNonMetaClass(cls); | |
7af964d1 A |
1811 | } |
1812 | } | |
1813 | ||
1814 | ||
b3962a83 | 1815 | /*********************************************************************** |
cd5f04f5 | 1816 | * futureNamedClasses |
b3962a83 A |
1817 | * Returns the classname => future class map for unrealized future classes. |
1818 | * Locking: runtimeLock must be held by the caller | |
1819 | **********************************************************************/ | |
31875a97 A |
1820 | static NXMapTable *future_named_class_map = nil; |
1821 | static NXMapTable *futureNamedClasses() | |
b3962a83 | 1822 | { |
66799735 | 1823 | runtimeLock.assertLocked(); |
b3962a83 | 1824 | |
cd5f04f5 | 1825 | if (future_named_class_map) return future_named_class_map; |
b3962a83 | 1826 | |
cd5f04f5 A |
1827 | // future_named_class_map is big enough for CF's classes and a few others |
1828 | future_named_class_map = | |
31875a97 | 1829 | NXCreateMapTable(NXStrValueMapPrototype, 32); |
b3962a83 | 1830 | |
cd5f04f5 | 1831 | return future_named_class_map; |
b3962a83 A |
1832 | } |
1833 | ||
1834 | ||
c1e772c4 A |
1835 | static bool haveFutureNamedClasses() { |
1836 | return future_named_class_map && NXCountMapTable(future_named_class_map); | |
1837 | } | |
1838 | ||
1839 | ||
b3962a83 | 1840 | /*********************************************************************** |
cd5f04f5 | 1841 | * addFutureNamedClass |
b3962a83 A |
1842 | * Installs cls as the class structure to use for the named class if it appears. |
1843 | * Locking: runtimeLock must be held by the caller | |
1844 | **********************************************************************/ | |
7257e56c | 1845 | static void addFutureNamedClass(const char *name, Class cls) |
b3962a83 | 1846 | { |
7af964d1 A |
1847 | void *old; |
1848 | ||
66799735 | 1849 | runtimeLock.assertLocked(); |
b3962a83 A |
1850 | |
1851 | if (PrintFuture) { | |
7257e56c | 1852 | _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name); |
b3962a83 A |
1853 | } |
1854 | ||
f192a3e2 | 1855 | class_rw_t *rw = objc::zalloc<class_rw_t>(); |
31875a97 | 1856 | class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1); |
34d5b5e8 | 1857 | ro->name.store(strdupIfMutable(name), std::memory_order_relaxed); |
f192a3e2 | 1858 | rw->set_ro(ro); |
7257e56c | 1859 | cls->setData(rw); |
8972963c | 1860 | cls->data()->flags = RO_FUTURE; |
7af964d1 | 1861 | |
cd5f04f5 | 1862 | old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls); |
1807f628 | 1863 | ASSERT(!old); |
b3962a83 A |
1864 | } |
1865 | ||
1866 | ||
1867 | /*********************************************************************** | |
31875a97 | 1868 | * popFutureNamedClass |
b3962a83 A |
1869 | * Removes the named class from the unrealized future class list, |
1870 | * because it has been realized. | |
31875a97 | 1871 | * Returns nil if the name is not used by a future class. |
b3962a83 A |
1872 | * Locking: runtimeLock must be held by the caller |
1873 | **********************************************************************/ | |
31875a97 | 1874 | static Class popFutureNamedClass(const char *name) |
b3962a83 | 1875 | { |
66799735 | 1876 | runtimeLock.assertLocked(); |
b3962a83 | 1877 | |
31875a97 A |
1878 | Class cls = nil; |
1879 | ||
1880 | if (future_named_class_map) { | |
1881 | cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name); | |
1882 | if (cls && NXCountMapTable(future_named_class_map) == 0) { | |
1883 | NXFreeMapTable(future_named_class_map); | |
1884 | future_named_class_map = nil; | |
1885 | } | |
1886 | } | |
1887 | ||
1888 | return cls; | |
b3962a83 A |
1889 | } |
1890 | ||
1891 | ||
1892 | /*********************************************************************** | |
1893 | * remappedClasses | |
1894 | * Returns the oldClass => newClass map for realized future classes. | |
7257e56c | 1895 | * Returns the oldClass => nil map for ignored weak-linked classes. |
7af964d1 | 1896 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 1897 | **********************************************************************/ |
1807f628 | 1898 | static objc::DenseMap<Class, Class> *remappedClasses(bool create) |
b3962a83 | 1899 | { |
1807f628 | 1900 | static objc::LazyInitDenseMap<Class, Class> remapped_class_map; |
b3962a83 | 1901 | |
31875a97 | 1902 | runtimeLock.assertLocked(); |
b3962a83 | 1903 | |
1807f628 A |
1904 | // start big enough to hold CF's classes and a few others |
1905 | return remapped_class_map.get(create, 32); | |
b3962a83 A |
1906 | } |
1907 | ||
1908 | ||
1909 | /*********************************************************************** | |
1910 | * noClassesRemapped | |
1911 | * Returns YES if no classes have been remapped | |
7af964d1 | 1912 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 1913 | **********************************************************************/ |
31875a97 | 1914 | static bool noClassesRemapped(void) |
b3962a83 | 1915 | { |
31875a97 | 1916 | runtimeLock.assertLocked(); |
7af964d1 | 1917 | |
31875a97 A |
1918 | bool result = (remappedClasses(NO) == nil); |
1919 | #if DEBUG | |
1920 | // Catch construction of an empty table, which defeats optimization. | |
1807f628 A |
1921 | auto *map = remappedClasses(NO); |
1922 | if (map) ASSERT(map->size() > 0); | |
31875a97 | 1923 | #endif |
b3962a83 A |
1924 | return result; |
1925 | } | |
1926 | ||
1927 | ||
1928 | /*********************************************************************** | |
1929 | * addRemappedClass | |
1930 | * newcls is a realized future class, replacing oldcls. | |
7257e56c | 1931 | * OR newcls is nil, replacing ignored weak-linked class oldcls. |
7af964d1 | 1932 | * Locking: runtimeLock must be write-locked by the caller |
b3962a83 | 1933 | **********************************************************************/ |
7257e56c | 1934 | static void addRemappedClass(Class oldcls, Class newcls) |
b3962a83 | 1935 | { |
66799735 | 1936 | runtimeLock.assertLocked(); |
b3962a83 A |
1937 | |
1938 | if (PrintFuture) { | |
1939 | _objc_inform("FUTURE: using %p instead of %p for %s", | |
31875a97 | 1940 | (void*)newcls, (void*)oldcls, oldcls->nameForLogging()); |
b3962a83 A |
1941 | } |
1942 | ||
1807f628 A |
1943 | auto result = remappedClasses(YES)->insert({ oldcls, newcls }); |
1944 | #if DEBUG | |
1945 | if (!std::get<1>(result)) { | |
1946 | // An existing mapping was overwritten. This is not allowed | |
1947 | // unless it was to nil. | |
1948 | auto iterator = std::get<0>(result); | |
1949 | auto value = std::get<1>(*iterator); | |
1950 | ASSERT(value == nil); | |
1951 | } | |
1952 | #else | |
1953 | (void)result; | |
1954 | #endif | |
b3962a83 A |
1955 | } |
1956 | ||
1957 | ||
1958 | /*********************************************************************** | |
1959 | * remapClass | |
1960 | * Returns the live class pointer for cls, which may be pointing to | |
1961 | * a class struct that has been reallocated. | |
7257e56c | 1962 | * Returns nil if cls is ignored because of weak linking. |
7af964d1 | 1963 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 1964 | **********************************************************************/ |
7257e56c | 1965 | static Class remapClass(Class cls) |
b3962a83 | 1966 | { |
31875a97 | 1967 | runtimeLock.assertLocked(); |
7af964d1 | 1968 | |
7257e56c | 1969 | if (!cls) return nil; |
ee974f79 | 1970 | |
1807f628 A |
1971 | auto *map = remappedClasses(NO); |
1972 | if (!map) | |
ee974f79 | 1973 | return cls; |
1807f628 A |
1974 | |
1975 | auto iterator = map->find(cls); | |
1976 | if (iterator == map->end()) | |
1977 | return cls; | |
1978 | return std::get<1>(*iterator); | |
b3962a83 A |
1979 | } |
1980 | ||
7257e56c | 1981 | static Class remapClass(classref_t cls) |
cd5f04f5 | 1982 | { |
7257e56c | 1983 | return remapClass((Class)cls); |
cd5f04f5 A |
1984 | } |
1985 | ||
7257e56c | 1986 | Class _class_remap(Class cls) |
cd5f04f5 | 1987 | { |
66799735 | 1988 | mutex_locker_t lock(runtimeLock); |
31875a97 | 1989 | return remapClass(cls); |
cd5f04f5 | 1990 | } |
b3962a83 A |
1991 | |
1992 | /*********************************************************************** | |
1993 | * remapClassRef | |
ee974f79 A |
1994 | * Fix up a class ref, in case the class referenced has been reallocated |
1995 | * or is an ignored weak-linked class. | |
7af964d1 | 1996 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 1997 | **********************************************************************/ |
7257e56c | 1998 | static void remapClassRef(Class *clsref) |
b3962a83 | 1999 | { |
31875a97 | 2000 | runtimeLock.assertLocked(); |
7af964d1 | 2001 | |
7257e56c | 2002 | Class newcls = remapClass(*clsref); |
b3962a83 A |
2003 | if (*clsref != newcls) *clsref = newcls; |
2004 | } | |
2005 | ||
2006 | ||
1807f628 A |
2007 | _Nullable Class |
2008 | objc_loadClassref(_Nullable Class * _Nonnull clsref) | |
2009 | { | |
2010 | auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref); | |
2011 | ||
2012 | uintptr_t cls = atomicClsref->load(std::memory_order_relaxed); | |
2013 | if (fastpath((cls & 1) == 0)) | |
2014 | return (Class)cls; | |
2015 | ||
2016 | auto stub = (stub_class_t *)(cls & ~1ULL); | |
2017 | Class initialized = stub->initializer((Class)stub, nil); | |
2018 | atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed); | |
2019 | return initialized; | |
2020 | } | |
2021 | ||
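// Hedged usage sketch for objc_loadClassref(); the classref variable below is
// hypothetical, only the tag-bit protocol is taken from the code above. A
// Swift stub class reference stores (stub address | 1); the first load runs
// the stub's initializer and caches the realized class, so later loads take
// the untagged fastpath.
//
//   extern Class demoClassRef;                       // hypothetical __objc_classrefs slot
//   Class cls   = objc_loadClassref(&demoClassRef);  // may call stub->initializer()
//   Class again = objc_loadClassref(&demoClassRef);  // now just a plain load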
2022 | ||
cd5f04f5 | 2023 | /*********************************************************************** |
13ba007e | 2024 | * getMaybeUnrealizedNonMetaClass |
cd5f04f5 A |
2025 | * Return the ordinary class for this class or metaclass. |
2026 | * `inst` is an instance of `cls` or a subclass thereof, or nil. | |
2027 | * Non-nil inst is faster. | |
13ba007e | 2028 | * The result may be unrealized. |
cd5f04f5 A |
2029 | * Used by +initialize. |
2030 | * Locking: runtimeLock must be read- or write-locked by the caller | |
2031 | **********************************************************************/ | |
13ba007e | 2032 | static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst) |
cd5f04f5 | 2033 | { |
1807f628 | 2034 | static int total, named, secondary, sharedcache, dyld3; |
31875a97 | 2035 | runtimeLock.assertLocked(); |
1807f628 | 2036 | ASSERT(metacls->isRealized()); |
cd5f04f5 A |
2037 | |
2038 | total++; | |
2039 | ||
2040 | // return cls itself if it's already a non-meta class | |
7257e56c | 2041 | if (!metacls->isMetaClass()) return metacls; |
cd5f04f5 A |
2042 | |
2043 | // metacls really is a metaclass | |
13ba007e | 2044 | // which means inst (if any) is a class |
cd5f04f5 A |
2045 | |
2046 | // special case for root metaclass | |
7257e56c A |
2047 | // where inst == inst->ISA() == metacls is possible |
2048 | if (metacls->ISA() == metacls) { | |
34d5b5e8 | 2049 | Class cls = metacls->getSuperclass(); |
1807f628 A |
2050 | ASSERT(cls->isRealized()); |
2051 | ASSERT(!cls->isMetaClass()); | |
2052 | ASSERT(cls->ISA() == metacls); | |
7257e56c | 2053 | if (cls->ISA() == metacls) return cls; |
cd5f04f5 A |
2054 | } |
2055 | ||
2056 | // use inst if available | |
2057 | if (inst) { | |
13ba007e | 2058 | Class cls = remapClass((Class)inst); |
cd5f04f5 | 2059 | // cls may be a subclass - find the real class for metacls |
13ba007e A |
2060 | // fixme this probably stops working once Swift starts |
2061 | // reallocating classes if cls is unrealized. | |
2062 | while (cls) { | |
2063 | if (cls->ISA() == metacls) { | |
1807f628 | 2064 | ASSERT(!cls->isMetaClassMaybeUnrealized()); |
13ba007e A |
2065 | return cls; |
2066 | } | |
34d5b5e8 | 2067 | cls = cls->getSuperclass(); |
cd5f04f5 | 2068 | } |
31875a97 | 2069 | #if DEBUG |
cd5f04f5 A |
2070 | _objc_fatal("cls is not an instance of metacls"); |
2071 | #else | |
2072 | // release build: be forgiving and fall through to slow lookups | |
2073 | #endif | |
2074 | } | |
2075 | ||
34d5b5e8 A |
2076 | // See if the metaclass has a pointer to its nonmetaclass. |
2077 | if (Class cls = metacls->bits.safe_ro()->getNonMetaclass()) | |
2078 | return cls; | |
2079 | ||
8070259c A |
2080 | // try name lookup |
2081 | { | |
13ba007e | 2082 | Class cls = getClassExceptSomeSwift(metacls->mangledName()); |
1807f628 | 2083 | if (cls && cls->ISA() == metacls) { |
8070259c A |
2084 | named++; |
2085 | if (PrintInitializing) { | |
2086 | _objc_inform("INITIALIZE: %d/%d (%g%%) " | |
2087 | "successful by-name metaclass lookups", | |
2088 | named, total, named*100.0/total); | |
2089 | } | |
8070259c A |
2090 | return cls; |
2091 | } | |
cd5f04f5 A |
2092 | } |
2093 | ||
8070259c A |
2094 | // try secondary table |
2095 | { | |
2096 | Class cls = (Class)NXMapGet(nonMetaClasses(), metacls); | |
2097 | if (cls) { | |
2098 | secondary++; | |
2099 | if (PrintInitializing) { | |
2100 | _objc_inform("INITIALIZE: %d/%d (%g%%) " | |
2101 | "successful secondary metaclass lookups", | |
2102 | secondary, total, secondary*100.0/total); | |
2103 | } | |
2104 | ||
1807f628 A |
2105 | ASSERT(cls->ISA() == metacls); |
2106 | return cls; | |
2107 | } | |
2108 | } | |
2109 | ||
2110 | // try the dyld closure table | |
2111 | if (isPreoptimized()) | |
2112 | { | |
2113 | // Try table from dyld closure first. It was built to ignore the dupes it | |
2114 | // knows will come from the cache, so anything left in here was there when | |
2115 | // we launched | |
2116 | Class cls = nil; | |
2117 | // Note: we have to pass the lambda directly here; otherwise we would try |
2118 | // to send it copy and autorelease messages. |
2119 | _dyld_for_each_objc_class(metacls->mangledName(), | |
2120 | [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) { | |
2121 | // Skip images which aren't loaded. This supports the case where dyld | |
2122 | // might soft link an image from the main binary so it's possibly not |
2123 | // loaded yet. | |
2124 | if (!isLoaded) | |
2125 | return; | |
2126 | ||
2127 | // Found a loaded image with this class name, so check if it's the right one |
2128 | Class result = (Class)classPtr; | |
2129 | if (result->ISA() == metacls) { | |
2130 | cls = result; | |
2131 | *stop = true; | |
2132 | } | |
2133 | }); | |
2134 | ||
2135 | if (cls) { | |
2136 | dyld3++; | |
2137 | if (PrintInitializing) { | |
2138 | _objc_inform("INITIALIZE: %d/%d (%g%%) " | |
2139 | "successful dyld closure metaclass lookups", | |
2140 | dyld3, total, dyld3*100.0/total); | |
2141 | } | |
2142 | ||
8070259c A |
2143 | return cls; |
2144 | } | |
cd5f04f5 A |
2145 | } |
2146 | ||
8070259c A |
2147 | // try any duplicates in the dyld shared cache |
2148 | { | |
2149 | Class cls = nil; | |
2150 | ||
2151 | int count; | |
2152 | Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count); | |
2153 | if (classes) { | |
2154 | for (int i = 0; i < count; i++) { | |
2155 | if (classes[i]->ISA() == metacls) { | |
2156 | cls = classes[i]; | |
2157 | break; | |
2158 | } | |
cd5f04f5 | 2159 | } |
8070259c A |
2160 | free(classes); |
2161 | } | |
2162 | ||
2163 | if (cls) { | |
2164 | sharedcache++; | |
2165 | if (PrintInitializing) { | |
2166 | _objc_inform("INITIALIZE: %d/%d (%g%%) " | |
2167 | "successful shared cache metaclass lookups", | |
2168 | sharedcache, total, sharedcache*100.0/total); | |
2169 | } | |
2170 | ||
8070259c | 2171 | return cls; |
cd5f04f5 A |
2172 | } |
2173 | } | |
2174 | ||
7257e56c | 2175 | _objc_fatal("no class for metaclass %p", (void*)metacls); |
cd5f04f5 A |
2176 | } |
2177 | ||
2178 | ||
2179 | /*********************************************************************** | |
13ba007e A |
2180 | * class_initialize. Send the '+initialize' message on demand to any |
2181 | * uninitialized class. Force initialization of superclasses first. | |
2182 | * inst is an instance of cls, or nil. Non-nil is better for performance. | |
2183 | * Returns the class pointer. If the class was unrealized then | |
2184 | * it may be reallocated. | |
2185 | * Locking: | |
2186 | * runtimeLock must be held by the caller | |
2187 | * This function may drop the lock. | |
2188 | * On exit the lock is re-acquired or dropped as requested by leaveLocked. | |
cd5f04f5 | 2189 | **********************************************************************/ |
13ba007e A |
2190 | static Class initializeAndMaybeRelock(Class cls, id inst, |
2191 | mutex_t& lock, bool leaveLocked) | |
cd5f04f5 | 2192 | { |
13ba007e | 2193 | lock.assertLocked(); |
1807f628 | 2194 | ASSERT(cls->isRealized()); |
13ba007e A |
2195 | |
2196 | if (cls->isInitialized()) { | |
2197 | if (!leaveLocked) lock.unlock(); | |
2198 | return cls; | |
2199 | } | |
2200 | ||
2201 | // Find the non-meta class for cls, if it is not already one. | |
2202 | // The +initialize message is sent to the non-meta class object. | |
2203 | Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst); | |
2204 | ||
2205 | // Realize the non-meta class if necessary. | |
2206 | if (nonmeta->isRealized()) { | |
2207 | // nonmeta is cls, which was already realized | |
2208 | // OR nonmeta is distinct, but is already realized | |
2209 | // - nothing else to do | |
2210 | lock.unlock(); | |
2211 | } else { | |
2212 | nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock); | |
2213 | // runtimeLock is now unlocked | |
2214 | // fixme Swift can't relocate the class today, | |
2215 | // but someday it will: | |
2216 | cls = object_getClass(nonmeta); | |
2217 | } | |
2218 | ||
2219 | // runtimeLock is now unlocked, for +initialize dispatch | |
1807f628 | 2220 | ASSERT(nonmeta->isRealized()); |
13ba007e A |
2221 | initializeNonMetaClass(nonmeta); |
2222 | ||
2223 | if (leaveLocked) runtimeLock.lock(); | |
7257e56c | 2224 | return cls; |
cd5f04f5 A |
2225 | } |
2226 | ||
13ba007e A |
2227 | // Locking: acquires runtimeLock |
2228 | Class class_initialize(Class cls, id obj) | |
2229 | { | |
2230 | runtimeLock.lock(); | |
2231 | return initializeAndMaybeRelock(cls, obj, runtimeLock, false); | |
2232 | } | |
2233 | ||
2234 | // Locking: caller must hold runtimeLock; this may drop and re-acquire it | |
2235 | static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock) | |
2236 | { | |
2237 | return initializeAndMaybeRelock(cls, obj, lock, true); | |
2238 | } | |
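// Hedged usage sketch (the surrounding caller is hypothetical): a code path
// that already holds runtimeLock and needs cls to be +initialized before
// continuing would use the "leave locked" variant so the lock state is
// restored on return:
//
//   runtimeLock.lock();
//   if (slowpath(!cls->isInitialized())) {
//       cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
//       // the lock may have been dropped and re-acquired in there,
//       // and cls may have been remapped by realization
//   }
//   // still holding runtimeLock here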
2239 | ||
cd5f04f5 | 2240 | |
c1e772c4 A |
2241 | /*********************************************************************** |
2242 | * addRootClass | |
2243 | * Adds cls as a new realized root class. | |
2244 | * Locking: runtimeLock must be held by the caller. | |
2245 | **********************************************************************/ | |
c1e772c4 A |
2246 | static void addRootClass(Class cls) |
2247 | { | |
66799735 | 2248 | runtimeLock.assertLocked(); |
c1e772c4 | 2249 | |
1807f628 A |
2250 | ASSERT(cls->isRealized()); |
2251 | ||
2252 | objc_debug_realized_class_generation_count++; | |
2253 | ||
c1e772c4 A |
2254 | cls->data()->nextSiblingClass = _firstRealizedClass; |
2255 | _firstRealizedClass = cls; | |
2256 | } | |
2257 | ||
2258 | static void removeRootClass(Class cls) | |
2259 | { | |
66799735 | 2260 | runtimeLock.assertLocked(); |
c1e772c4 | 2261 | |
1807f628 A |
2262 | objc_debug_realized_class_generation_count++; |
2263 | ||
c1e772c4 A |
2264 | Class *classp; |
2265 | for (classp = &_firstRealizedClass; | |
2266 | *classp != cls; | |
2267 | classp = &(*classp)->data()->nextSiblingClass) | |
2268 | { } | |
2269 | ||
2270 | *classp = (*classp)->data()->nextSiblingClass; | |
2271 | } | |
2272 | ||
2273 | ||
b3962a83 A |
2274 | /*********************************************************************** |
2275 | * addSubclass | |
2276 | * Adds subcls as a subclass of supercls. | |
2277 | * Locking: runtimeLock must be held by the caller. | |
2278 | **********************************************************************/ | |
7257e56c | 2279 | static void addSubclass(Class supercls, Class subcls) |
b3962a83 | 2280 | { |
66799735 | 2281 | runtimeLock.assertLocked(); |
b3962a83 A |
2282 | |
2283 | if (supercls && subcls) { | |
1807f628 A |
2284 | ASSERT(supercls->isRealized()); |
2285 | ASSERT(subcls->isRealized()); | |
2286 | ||
2287 | objc_debug_realized_class_generation_count++; | |
2288 | ||
8972963c A |
2289 | subcls->data()->nextSiblingClass = supercls->data()->firstSubclass; |
2290 | supercls->data()->firstSubclass = subcls; | |
2291 | ||
8070259c A |
2292 | if (supercls->hasCxxCtor()) { |
2293 | subcls->setHasCxxCtor(); | |
7257e56c A |
2294 | } |
2295 | ||
8070259c A |
2296 | if (supercls->hasCxxDtor()) { |
2297 | subcls->setHasCxxDtor(); | |
8972963c A |
2298 | } |
2299 | ||
1807f628 A |
2300 | objc::AWZScanner::scanAddedSubClass(subcls, supercls); |
2301 | objc::RRScanner::scanAddedSubClass(subcls, supercls); | |
2302 | objc::CoreScanner::scanAddedSubClass(subcls, supercls); | |
8070259c | 2303 | |
34d5b5e8 A |
2304 | if (!supercls->allowsPreoptCaches()) { |
2305 | subcls->setDisallowPreoptCachesRecursively(__func__); | |
2306 | } else if (!supercls->allowsPreoptInlinedSels()) { | |
2307 | subcls->setDisallowPreoptInlinedSelsRecursively(__func__); | |
2308 | } | |
2309 | ||
1807f628 | 2310 | // Special case: instancesRequireRawIsa does not propagate |
c1e772c4 | 2311 | // from root class to root metaclass |
34d5b5e8 | 2312 | if (supercls->instancesRequireRawIsa() && supercls->getSuperclass()) { |
1807f628 | 2313 | subcls->setInstancesRequireRawIsaRecursively(true); |
8070259c | 2314 | } |
b3962a83 A |
2315 | } |
2316 | } | |
2317 | ||
2318 | ||
2319 | /*********************************************************************** | |
2320 | * removeSubclass | |
2321 | * Removes subcls as a subclass of supercls. | |
2322 | * Locking: runtimeLock must be held by the caller. | |
2323 | **********************************************************************/ | |
7257e56c | 2324 | static void removeSubclass(Class supercls, Class subcls) |
b3962a83 | 2325 | { |
66799735 | 2326 | runtimeLock.assertLocked(); |
1807f628 A |
2327 | ASSERT(supercls->isRealized()); |
2328 | ASSERT(subcls->isRealized()); | |
34d5b5e8 | 2329 | ASSERT(subcls->getSuperclass() == supercls); |
b3962a83 | 2330 | |
1807f628 A |
2331 | objc_debug_realized_class_generation_count++; |
2332 | ||
7257e56c | 2333 | Class *cp; |
8972963c | 2334 | for (cp = &supercls->data()->firstSubclass; |
b3962a83 | 2335 | *cp && *cp != subcls; |
8972963c | 2336 | cp = &(*cp)->data()->nextSiblingClass) |
b3962a83 | 2337 | ; |
1807f628 | 2338 | ASSERT(*cp == subcls); |
8972963c | 2339 | *cp = subcls->data()->nextSiblingClass; |
b3962a83 A |
2340 | } |
2341 | ||
2342 | ||
2343 | ||
2344 | /*********************************************************************** | |
2345 | * protocols | |
2346 | * Returns the protocol name => protocol map for protocols. | |
7af964d1 | 2347 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 A |
2348 | **********************************************************************/ |
2349 | static NXMapTable *protocols(void) | |
2350 | { | |
7257e56c | 2351 | static NXMapTable *protocol_map = nil; |
b3962a83 | 2352 | |
31875a97 | 2353 | runtimeLock.assertLocked(); |
b3962a83 | 2354 | |
7af964d1 | 2355 | INIT_ONCE_PTR(protocol_map, |
31875a97 | 2356 | NXCreateMapTable(NXStrValueMapPrototype, 16), |
7af964d1 | 2357 | NXFreeMapTable(v) ); |
b3962a83 A |
2358 | |
2359 | return protocol_map; | |
2360 | } | |
2361 | ||
2362 | ||
8070259c A |
2363 | /*********************************************************************** |
2364 | * getProtocol | |
2365 | * Looks up a protocol by name. Demangled Swift names are recognized. | |
2366 | * Locking: runtimeLock must be read- or write-locked by the caller. | |
2367 | **********************************************************************/ | |
1807f628 | 2368 | static NEVER_INLINE Protocol *getProtocol(const char *name) |
8070259c | 2369 | { |
31875a97 | 2370 | runtimeLock.assertLocked(); |
8070259c A |
2371 | |
2372 | // Try name as-is. | |
31875a97 | 2373 | Protocol *result = (Protocol *)NXMapGet(protocols(), name); |
8070259c A |
2374 | if (result) return result; |
2375 | ||
34d5b5e8 A |
2376 | // Try table from dyld3 closure and dyld shared cache |
2377 | result = getPreoptimizedProtocol(name); | |
2378 | if (result) return result; | |
2379 | ||
8070259c A |
2380 | // Try Swift-mangled equivalent of the given name. |
2381 | if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) { | |
31875a97 | 2382 | result = (Protocol *)NXMapGet(protocols(), swName); |
34d5b5e8 A |
2383 | |
2384 | // Try table from dyld3 closure and dyld shared cache | |
2385 | if (!result) | |
2386 | result = getPreoptimizedProtocol(swName); | |
2387 | ||
8070259c | 2388 | free(swName); |
34d5b5e8 | 2389 | return result; |
1807f628 A |
2390 | } |
2391 | ||
34d5b5e8 | 2392 | return nullptr; |
8070259c A |
2393 | } |
2394 | ||
2395 | ||
b3962a83 A |
2396 | /*********************************************************************** |
2397 | * remapProtocol | |
2398 | * Returns the live protocol pointer for proto, which may be pointing to | |
2399 | * a protocol struct that has been reallocated. | |
7af964d1 | 2400 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 2401 | **********************************************************************/ |
1807f628 | 2402 | static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto) |
b3962a83 | 2403 | { |
31875a97 | 2404 | runtimeLock.assertLocked(); |
7af964d1 | 2405 | |
1807f628 A |
2406 | // Protocols in shared cache images have a canonical bit to mark that they |
2407 | // are the definition we should use | |
2408 | if (((protocol_t *)proto)->isCanonical()) | |
2409 | return (protocol_t *)proto; | |
2410 | ||
8972963c | 2411 | protocol_t *newproto = (protocol_t *) |
8070259c | 2412 | getProtocol(((protocol_t *)proto)->mangledName); |
7af964d1 | 2413 | return newproto ? newproto : (protocol_t *)proto; |
b3962a83 A |
2414 | } |
2415 | ||
2416 | ||
2417 | /*********************************************************************** | |
2418 | * remapProtocolRef | |
2419 | * Fix up a protocol ref, in case the protocol referenced has been reallocated. | |
7af964d1 | 2420 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 2421 | **********************************************************************/ |
31875a97 | 2422 | static size_t UnfixedProtocolReferences; |
b3962a83 A |
2423 | static void remapProtocolRef(protocol_t **protoref) |
2424 | { | |
31875a97 | 2425 | runtimeLock.assertLocked(); |
7af964d1 A |
2426 | |
2427 | protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref); | |
31875a97 A |
2428 | if (*protoref != newproto) { |
2429 | *protoref = newproto; | |
2430 | UnfixedProtocolReferences++; | |
2431 | } | |
b3962a83 A |
2432 | } |
2433 | ||
2434 | ||
2435 | /*********************************************************************** | |
2436 | * moveIvars | |
2437 | * Slides a class's ivars to accommodate the given superclass size. | |
b3962a83 A |
2438 | * Ivars are NOT compacted to compensate for a superclass that shrunk. |
2439 | * Locking: runtimeLock must be held by the caller. | |
2440 | **********************************************************************/ | |
c1e772c4 | 2441 | static void moveIvars(class_ro_t *ro, uint32_t superSize) |
b3962a83 | 2442 | { |
66799735 | 2443 | runtimeLock.assertLocked(); |
b3962a83 A |
2444 | |
2445 | uint32_t diff; | |
b3962a83 | 2446 | |
1807f628 | 2447 | ASSERT(superSize > ro->instanceStart); |
b3962a83 | 2448 | diff = superSize - ro->instanceStart; |
b3962a83 A |
2449 | |
2450 | if (ro->ivars) { | |
7af964d1 A |
2451 | // Find maximum alignment in this class's ivars |
2452 | uint32_t maxAlignment = 1; | |
31875a97 A |
2453 | for (const auto& ivar : *ro->ivars) { |
2454 | if (!ivar.offset) continue; // anonymous bitfield | |
7af964d1 | 2455 | |
31875a97 | 2456 | uint32_t alignment = ivar.alignment(); |
7af964d1 A |
2457 | if (alignment > maxAlignment) maxAlignment = alignment; |
2458 | } | |
2459 | ||
2460 | // Compute a slide value that preserves that alignment | |
2461 | uint32_t alignMask = maxAlignment - 1; | |
c1e772c4 | 2462 | diff = (diff + alignMask) & ~alignMask; |
7af964d1 A |
2463 | |
2464 | // Slide all of this class's ivars en masse | |
31875a97 A |
2465 | for (const auto& ivar : *ro->ivars) { |
2466 | if (!ivar.offset) continue; // anonymous bitfield | |
7af964d1 | 2467 | |
31875a97 | 2468 | uint32_t oldOffset = (uint32_t)*ivar.offset; |
b3962a83 | 2469 | uint32_t newOffset = oldOffset + diff; |
31875a97 | 2470 | *ivar.offset = newOffset; |
b3962a83 A |
2471 | |
2472 | if (PrintIvars) { | |
31875a97 A |
2473 | _objc_inform("IVARS: offset %u -> %u for %s " |
2474 | "(size %u, align %u)", | |
2475 | oldOffset, newOffset, ivar.name, | |
2476 | ivar.size, ivar.alignment()); | |
b3962a83 A |
2477 | } |
2478 | } | |
2479 | } | |
2480 | ||
7af964d1 A |
2481 | *(uint32_t *)&ro->instanceStart += diff; |
2482 | *(uint32_t *)&ro->instanceSize += diff; | |
b3962a83 A |
2483 | } |
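// Worked example for moveIvars() (numbers are illustrative): if the
// superclass grew from 16 to 24 bytes while this class's instanceStart is
// still 16, diff starts at 8. With a maximum ivar alignment of 8, diff stays
// 8, so an ivar at offset 16 moves to 24 and instanceStart/instanceSize both
// grow by 8. If the largest ivar alignment were 16, diff would first be
// rounded up from 8 to 16.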
2484 | ||
7257e56c | 2485 | |
8070259c | 2486 | static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro) |
7257e56c | 2487 | { |
8972963c | 2488 | class_rw_t *rw = cls->data(); |
7257e56c | 2489 | |
1807f628 A |
2490 | ASSERT(supercls); |
2491 | ASSERT(!cls->isMetaClass()); | |
7257e56c | 2492 | |
8070259c A |
2493 | /* debug: print them all before sliding |
2494 | if (ro->ivars) { | |
31875a97 A |
2495 | for (const auto& ivar : *ro->ivars) { |
2496 | if (!ivar.offset) continue; // anonymous bitfield | |
8070259c A |
2497 | |
2498 | _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)", | |
31875a97 A |
2499 | ro->name, ivar.name, |
2500 | *ivar.offset, ivar.size, ivar.alignment()); | |
8070259c A |
2501 | } |
2502 | } | |
2503 | */ | |
2504 | ||
7257e56c | 2505 | // Non-fragile ivars - reconcile this class with its superclass |
f192a3e2 | 2506 | const class_ro_t *super_ro = supercls->data()->ro(); |
8972963c | 2507 | |
7257e56c A |
2508 | if (DebugNonFragileIvars) { |
2509 | // Debugging: Force non-fragile ivars to slide. | |
2510 | // Intended to find compiler, runtime, and program bugs. | |
2511 | // If it fails with this and works without, you have a problem. | |
2512 | ||
2513 | // Operation: Reset everything to 0 + misalignment. | |
2514 | // Then force the normal sliding logic to push everything back. | |
2515 | ||
2516 | // Exceptions: root classes, metaclasses, *NSCF* classes, | |
2517 | // __CF* classes, NSConstantString, NSSimpleCString | |
8972963c | 2518 | |
7257e56c | 2519 | // (already know it's not root because supercls != nil) |
8070259c A |
2520 | const char *clsname = cls->mangledName(); |
2521 | if (!strstr(clsname, "NSCF") && | |
2522 | 0 != strncmp(clsname, "__CF", 4) && | |
2523 | 0 != strcmp(clsname, "NSConstantString") && | |
2524 | 0 != strcmp(clsname, "NSSimpleCString")) | |
7257e56c A |
2525 | { |
2526 | uint32_t oldStart = ro->instanceStart; | |
7257e56c | 2527 | class_ro_t *ro_w = make_ro_writeable(rw); |
f192a3e2 | 2528 | ro = rw->ro(); |
8972963c | 2529 | |
7257e56c A |
2530 | // Find max ivar alignment in class. |
2531 | // default to word size to simplify ivar update | |
2532 | uint32_t alignment = 1<<WORD_SHIFT; | |
2533 | if (ro->ivars) { | |
31875a97 A |
2534 | for (const auto& ivar : *ro->ivars) { |
2535 | if (ivar.alignment() > alignment) { | |
2536 | alignment = ivar.alignment(); | |
7257e56c A |
2537 | } |
2538 | } | |
2539 | } | |
2540 | uint32_t misalignment = ro->instanceStart % alignment; | |
2541 | uint32_t delta = ro->instanceStart - misalignment; | |
2542 | ro_w->instanceStart = misalignment; | |
2543 | ro_w->instanceSize -= delta; | |
8972963c | 2544 | |
7257e56c A |
2545 | if (PrintIvars) { |
2546 | _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' " | |
2547 | "to slide (instanceStart %zu -> %zu)", | |
8070259c | 2548 | cls->nameForLogging(), (size_t)oldStart, |
7257e56c A |
2549 | (size_t)ro->instanceStart); |
2550 | } | |
8972963c | 2551 | |
7257e56c | 2552 | if (ro->ivars) { |
31875a97 A |
2553 | for (const auto& ivar : *ro->ivars) { |
2554 | if (!ivar.offset) continue; // anonymous bitfield | |
2555 | *ivar.offset -= delta; | |
8972963c | 2556 | } |
7257e56c | 2557 | } |
8972963c | 2558 | } |
7257e56c A |
2559 | } |
2560 | ||
c1e772c4 A |
2561 | if (ro->instanceStart >= super_ro->instanceSize) { |
2562 | // Superclass has not overgrown its space. We're done here. | |
7257e56c A |
2563 | return; |
2564 | } | |
2565 | // fixme can optimize for "class has no new ivars", etc | |
2566 | ||
7257e56c A |
2567 | if (ro->instanceStart < super_ro->instanceSize) { |
2568 | // Superclass has changed size. This class's ivars must move. | |
2569 | // Also slide layout bits in parallel. | |
2570 | // This code is incapable of compacting the subclass to | |
2571 | // compensate for a superclass that shrunk, so don't do that. | |
2572 | if (PrintIvars) { | |
2573 | _objc_inform("IVARS: sliding ivars for class %s " | |
2574 | "(superclass was %u bytes, now %u)", | |
8070259c | 2575 | cls->nameForLogging(), ro->instanceStart, |
7257e56c | 2576 | super_ro->instanceSize); |
8972963c | 2577 | } |
7257e56c | 2578 | class_ro_t *ro_w = make_ro_writeable(rw); |
f192a3e2 | 2579 | ro = rw->ro(); |
c1e772c4 | 2580 | moveIvars(ro_w, super_ro->instanceSize); |
34d5b5e8 | 2581 | gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->getName()); |
7257e56c | 2582 | } |
8972963c | 2583 | } |
b3962a83 | 2584 | |
34d5b5e8 A |
2585 | static void validateAlreadyRealizedClass(Class cls) { |
2586 | ASSERT(cls->isRealized()); | |
2587 | #if TARGET_OS_OSX | |
2588 | class_rw_t *rw = cls->data(); | |
2589 | size_t rwSize = malloc_size(rw); | |
2590 | ||
2591 | // Note: this check will need some adjustment if class_rw_t's | |
2592 | // size changes to not match the malloc bucket. | |
2593 | if (rwSize != sizeof(class_rw_t)) | |
2594 | _objc_fatal("realized class %p has corrupt data pointer %p", cls, rw); | |
2595 | #endif | |
2596 | } | |
8070259c | 2597 | |
b3962a83 | 2598 | /*********************************************************************** |
13ba007e | 2599 | * realizeClassWithoutSwift |
b3962a83 A |
2600 | * Performs first-time initialization on class cls, |
2601 | * including allocating its read-write data. | |
13ba007e | 2602 | * Does not perform any Swift-side initialization. |
b3962a83 | 2603 | * Returns the real class structure for the class. |
7af964d1 | 2604 | * Locking: runtimeLock must be write-locked by the caller |
b3962a83 | 2605 | **********************************************************************/ |
1807f628 | 2606 | static Class realizeClassWithoutSwift(Class cls, Class previously) |
b3962a83 | 2607 | { |
66799735 | 2608 | runtimeLock.assertLocked(); |
b3962a83 | 2609 | |
b3962a83 | 2610 | class_rw_t *rw; |
7257e56c A |
2611 | Class supercls; |
2612 | Class metacls; | |
b3962a83 | 2613 | |
7257e56c | 2614 | if (!cls) return nil; |
34d5b5e8 A |
2615 | if (cls->isRealized()) { |
2616 | validateAlreadyRealizedClass(cls); | |
2617 | return cls; | |
2618 | } | |
1807f628 | 2619 | ASSERT(cls == remapClass(cls)); |
b3962a83 | 2620 | |
8070259c A |
2621 | // fixme verify class is not in an un-dlopened part of the shared cache? |
2622 | ||
f192a3e2 A |
2623 | auto ro = (const class_ro_t *)cls->data(); |
2624 | auto isMeta = ro->flags & RO_META; | |
7af964d1 A |
2625 | if (ro->flags & RO_FUTURE) { |
2626 | // This was a future class. rw data is already allocated. | |
8972963c | 2627 | rw = cls->data(); |
f192a3e2 A |
2628 | ro = cls->data()->ro(); |
2629 | ASSERT(!isMeta); | |
8070259c | 2630 | cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE); |
7af964d1 A |
2631 | } else { |
2632 | // Normal class. Allocate writeable class data. | |
f192a3e2 A |
2633 | rw = objc::zalloc<class_rw_t>(); |
2634 | rw->set_ro(ro); | |
2635 | rw->flags = RW_REALIZED|RW_REALIZING|isMeta; | |
8972963c | 2636 | cls->setData(rw); |
7af964d1 | 2637 | } |
b3962a83 | 2638 | |
34d5b5e8 A |
2639 | cls->cache.initializeToEmptyOrPreoptimizedInDisguise(); |
2640 | ||
1807f628 A |
2641 | #if FAST_CACHE_META |
2642 | if (isMeta) cls->cache.setBit(FAST_CACHE_META); | |
2643 | #endif | |
c1e772c4 A |
2644 | |
2645 | // Choose an index for this class. | |
2646 | // Sets cls->instancesRequireRawIsa if no more indexes are available. |
2647 | cls->chooseClassArrayIndex(); | |
2648 | ||
b3962a83 | 2649 | if (PrintConnecting) { |
13ba007e | 2650 | _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s", |
c1e772c4 | 2651 | cls->nameForLogging(), isMeta ? " (meta)" : "", |
13ba007e A |
2652 | (void*)cls, ro, cls->classArrayIndex(), |
2653 | cls->isSwiftStable() ? "(swift)" : "", | |
2654 | cls->isSwiftLegacy() ? "(pre-stable swift)" : ""); | |
b3962a83 A |
2655 | } |
2656 | ||
b3962a83 A |
2657 | // Realize superclass and metaclass, if they aren't already. |
2658 | // This needs to be done after RW_REALIZED is set above, for root classes. | |
c1e772c4 | 2659 | // This needs to be done after class index is chosen, for root metaclasses. |
13ba007e A |
2660 | // This assumes that none of those classes have Swift contents, |
2661 | // or that Swift's initializers have already been called. | |
2662 | // fixme that assumption will be wrong if we add support | |
2663 | // for ObjC subclasses of Swift classes. | |
34d5b5e8 | 2664 | supercls = realizeClassWithoutSwift(remapClass(cls->getSuperclass()), nil); |
1807f628 | 2665 | metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil); |
b3962a83 | 2666 | |
c1e772c4 | 2667 | #if SUPPORT_NONPOINTER_ISA |
1807f628 A |
2668 | if (isMeta) { |
2669 | // Metaclasses do not need any features from non-pointer ISA. |
2670 | // This allows for a fastpath for classes in objc_retain/objc_release. |
2671 | cls->setInstancesRequireRawIsa(); | |
2672 | } else { | |
2673 | // Disable non-pointer isa for some classes and/or platforms. | |
2674 | // Set instancesRequireRawIsa. | |
2675 | bool instancesRequireRawIsa = cls->instancesRequireRawIsa(); | |
2676 | bool rawIsaIsInherited = false; | |
2677 | static bool hackedDispatch = false; | |
2678 | ||
2679 | if (DisableNonpointerIsa) { | |
2680 | // Non-pointer isa disabled by environment or app SDK version | |
2681 | instancesRequireRawIsa = true; | |
2682 | } | |
34d5b5e8 | 2683 | else if (!hackedDispatch && 0 == strcmp(ro->getName(), "OS_object")) |
1807f628 A |
2684 | { |
2685 | // hack for libdispatch et al - isa also acts as vtable pointer | |
2686 | hackedDispatch = true; | |
2687 | instancesRequireRawIsa = true; | |
2688 | } | |
34d5b5e8 | 2689 | else if (supercls && supercls->getSuperclass() && |
1807f628 A |
2690 | supercls->instancesRequireRawIsa()) |
2691 | { | |
2692 | // This is also propagated by addSubclass() | |
2693 | // but nonpointer isa setup needs it earlier. | |
2694 | // Special case: instancesRequireRawIsa does not propagate | |
2695 | // from root class to root metaclass | |
2696 | instancesRequireRawIsa = true; | |
2697 | rawIsaIsInherited = true; | |
2698 | } | |
2699 | ||
2700 | if (instancesRequireRawIsa) { | |
2701 | cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited); | |
2702 | } | |
c1e772c4 A |
2703 | } |
2704 | // SUPPORT_NONPOINTER_ISA | |
2705 | #endif | |
2706 | ||
8070259c | 2707 | // Update superclass and metaclass in case of remapping |
34d5b5e8 | 2708 | cls->setSuperclass(supercls); |
8070259c | 2709 | cls->initClassIsa(metacls); |
b3962a83 | 2710 | |
8972963c | 2711 | // Reconcile instance variable offsets / layout. |
8070259c A |
2712 | // This may reallocate class_ro_t, updating our ro variable. |
2713 | if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro); | |
2714 | ||
2715 | // Set fastInstanceSize if it wasn't set already. | |
2716 | cls->setInstanceSize(ro->instanceSize); | |
8972963c A |
2717 | |
2718 | // Copy some flags from ro to rw | |
7257e56c | 2719 | if (ro->flags & RO_HAS_CXX_STRUCTORS) { |
8070259c | 2720 | cls->setHasCxxDtor(); |
7257e56c | 2721 | if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) { |
8070259c | 2722 | cls->setHasCxxCtor(); |
7257e56c A |
2723 | } |
2724 | } | |
13ba007e A |
2725 | |
2726 | // Propagate the associated objects forbidden flag from ro or from | |
2727 | // the superclass. | |
2728 | if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) || | |
2729 | (supercls && supercls->forbidsAssociatedObjects())) | |
2730 | { | |
2731 | rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS; | |
2732 | } | |
b3962a83 | 2733 | |
8972963c | 2734 | // Connect this class to its superclass's subclass lists |
b3962a83 | 2735 | if (supercls) { |
8972963c | 2736 | addSubclass(supercls, cls); |
c1e772c4 A |
2737 | } else { |
2738 | addRootClass(cls); | |
8972963c | 2739 | } |
b3962a83 | 2740 | |
8972963c | 2741 | // Attach categories |
1807f628 | 2742 | methodizeClass(cls, previously); |
7af964d1 | 2743 | |
b3962a83 A |
2744 | return cls; |
2745 | } | |
2746 | ||
2747 | ||
13ba007e A |
2748 | /*********************************************************************** |
2749 | * _objc_realizeClassFromSwift | |
2750 | * Called by Swift when it needs the ObjC part of a class to be realized. | |
2751 | * There are four cases: | |
2752 | * 1. cls != nil; previously == cls | |
2753 | * Class cls is being realized in place | |
2754 | * 2. cls != nil; previously == nil | |
2755 | * Class cls is being constructed at runtime | |
2756 | * 3. cls != nil; previously != cls | |
2757 | * The class that was at previously has been reallocated to cls | |
2758 | * 4. cls == nil; previously != nil | 
2759 | * The class at previously is hereby disavowed | |
2760 | * | |
2761 | * Only variants #1 and #2 are supported today. | |
2762 | * | |
2763 | * Locking: acquires runtimeLock | |
2764 | **********************************************************************/ | |
2765 | Class _objc_realizeClassFromSwift(Class cls, void *previously) | |
2766 | { | |
2767 | if (cls) { | |
2768 | if (previously && previously != (void*)cls) { | |
2769 | // #3: relocation | |
1807f628 A |
2770 | mutex_locker_t lock(runtimeLock); |
2771 | addRemappedClass((Class)previously, cls); | |
2772 | addClassTableEntry(cls); | |
2773 | addNamedClass(cls, cls->mangledName(), /*replacing*/nil); | |
2774 | return realizeClassWithoutSwift(cls, (Class)previously); | |
13ba007e A |
2775 | } else { |
2776 | // #1 and #2: realization in place, or new class | |
2777 | mutex_locker_t lock(runtimeLock); | |
2778 | ||
2779 | if (!previously) { | |
2780 | // #2: new class | |
2781 | cls = readClass(cls, false/*bundle*/, false/*shared cache*/); | |
2782 | } | |
2783 | ||
2784 | // #1 and #2: realization in place, or new class | |
2785 | // We ignore the Swift metadata initializer callback. | |
2786 | // We assume that's all handled since we're being called from Swift. | |
1807f628 | 2787 | return realizeClassWithoutSwift(cls, nil); |
13ba007e A |
2788 | } |
2789 | } | |
2790 | else { | |
2791 | // #4: disavowal | |
2792 | // In the future this will mean remapping the old address to nil | |
2793 | // and if necessary removing the old address from any other tables. | |
2794 | _objc_fatal("Swift requested that class %p be ignored, " | |
2795 | "but libobjc does not support that.", previously); | |
2796 | } | |
2797 | } | |
2798 | ||
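// Illustrative sketch (not part of the runtime, not compiled): the shape of
// a Swift-side metadata initializer driving case #1 above. The function name
// is hypothetical; the only real entry point used is
// _objc_realizeClassFromSwift(), and runtimeLock must not be held when it is
// called.
#if 0
static Class
exampleSwiftMetadataInitializer(Class cls, void *arg __unused)
{
    // ... the Swift runtime sets up its own metadata for cls here ...

    // Case #1: realization in place, so previously == cls.
    return _objc_realizeClassFromSwift(cls, (void *)cls);
}
#endif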
2799 | /*********************************************************************** | |
2800 | * realizeSwiftClass | |
2801 | * Performs first-time initialization on class cls, | |
2802 | * including allocating its read-write data, | |
2803 | * and any Swift-side initialization. | |
2804 | * Returns the real class structure for the class. | |
2805 | * Locking: acquires runtimeLock indirectly | |
2806 | **********************************************************************/ | |
2807 | static Class realizeSwiftClass(Class cls) | |
2808 | { | |
2809 | runtimeLock.assertUnlocked(); | |
2810 | ||
2811 | // Some assumptions: | |
2812 | // * Metaclasses never have a Swift initializer. | |
2813 | // * Root classes never have a Swift initializer. | |
2814 | // (These two together avoid initialization order problems at the root.) | |
2815 | // * Unrealized non-Swift classes have no Swift ancestry. | |
2816 | // * Unrealized Swift classes with no initializer have no ancestry that | |
2817 | // does have the initializer. | |
2818 | // (These two together mean we don't need to scan superclasses here | |
2819 | // and we don't need to worry about Swift superclasses inside | |
2820 | // realizeClassWithoutSwift()). | |
2821 | ||
2822 | // fixme some of these assumptions will be wrong | |
2823 | // if we add support for ObjC subclasses of Swift classes. | 
2824 | ||
2825 | #if DEBUG | |
2826 | runtimeLock.lock(); | |
1807f628 A |
2827 | ASSERT(remapClass(cls) == cls); |
2828 | ASSERT(cls->isSwiftStable_ButAllowLegacyForNow()); | |
2829 | ASSERT(!cls->isMetaClassMaybeUnrealized()); | |
34d5b5e8 | 2830 | ASSERT(cls->getSuperclass()); |
13ba007e A |
2831 | runtimeLock.unlock(); |
2832 | #endif | |
2833 | ||
2834 | // Look for a Swift metadata initialization function | |
2835 | // installed on the class. If it is present we call it. | |
2836 | // That function in turn initializes the Swift metadata, | |
2837 | // prepares the "compiler-generated" ObjC metadata if not | |
2838 | // already present, and calls _objc_realizeClassFromSwift() to finish | 
2839 | // our own initialization. | |
2840 | ||
2841 | if (auto init = cls->swiftMetadataInitializer()) { | |
2842 | if (PrintConnecting) { | |
2843 | _objc_inform("CLASS: calling Swift metadata initializer " | |
2844 | "for class '%s' (%p)", cls->nameForLogging(), cls); | |
2845 | } | |
2846 | ||
2847 | Class newcls = init(cls, nil); | |
2848 | ||
2849 | // fixme someday Swift will need to relocate classes at this point, | |
2850 | // but we don't accept that yet. | |
2851 | if (cls != newcls) { | |
1807f628 A |
2852 | mutex_locker_t lock(runtimeLock); |
2853 | addRemappedClass(cls, newcls); | |
13ba007e A |
2854 | } |
2855 | ||
2856 | return newcls; | |
2857 | } | |
2858 | else { | |
2859 | // No Swift-side initialization callback. | |
2860 | // Perform our own realization directly. | |
2861 | mutex_locker_t lock(runtimeLock); | |
1807f628 | 2862 | return realizeClassWithoutSwift(cls, nil); |
13ba007e A |
2863 | } |
2864 | } | |
2865 | ||
2866 | ||
2867 | /*********************************************************************** | |
2868 | * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked) | |
2869 | * Realize a class that might be a Swift class. | |
2870 | * Returns the real class structure for the class. | |
2871 | * Locking: | |
2872 | * runtimeLock must be held on entry | |
2873 | * runtimeLock may be dropped during execution | |
2874 | * ...AndUnlock function leaves runtimeLock unlocked on exit | |
2875 | * ...AndLeaveLocked re-acquires runtimeLock if it was dropped | |
2876 | * This complication avoids repeated lock transitions in some cases. | |
2877 | **********************************************************************/ | |
2878 | static Class | |
2879 | realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked) | |
2880 | { | |
2881 | lock.assertLocked(); | |
2882 | ||
2883 | if (!cls->isSwiftStable_ButAllowLegacyForNow()) { | |
2884 | // Non-Swift class. Realize it now with the lock still held. | |
2885 | // fixme wrong in the future for objc subclasses of swift classes | |
1807f628 | 2886 | realizeClassWithoutSwift(cls, nil); |
13ba007e A |
2887 | if (!leaveLocked) lock.unlock(); |
2888 | } else { | |
2889 | // Swift class. We need to drop locks and call the Swift | |
2890 | // runtime to initialize it. | |
2891 | lock.unlock(); | |
2892 | cls = realizeSwiftClass(cls); | |
1807f628 | 2893 | ASSERT(cls->isRealized()); // callback must have provoked realization |
13ba007e A |
2894 | if (leaveLocked) lock.lock(); |
2895 | } | |
2896 | ||
2897 | return cls; | |
2898 | } | |
2899 | ||
2900 | static Class | |
2901 | realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock) | |
2902 | { | |
2903 | return realizeClassMaybeSwiftMaybeRelock(cls, lock, false); | |
2904 | } | |
2905 | ||
2906 | static Class | |
2907 | realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock) | |
2908 | { | |
2909 | return realizeClassMaybeSwiftMaybeRelock(cls, lock, true); | |
2910 | } | |
2911 | ||
2912 | ||
ee974f79 A |
2913 | /*********************************************************************** |
2914 | * missingWeakSuperclass | |
2915 | * Return YES if some superclass of cls was weak-linked and is missing. | |
2916 | **********************************************************************/ | |
31875a97 | 2917 | static bool |
7257e56c | 2918 | missingWeakSuperclass(Class cls) |
ee974f79 | 2919 | { |
1807f628 | 2920 | ASSERT(!cls->isRealized()); |
ee974f79 | 2921 | |
34d5b5e8 | 2922 | if (!cls->getSuperclass()) { |
7257e56c | 2923 | // superclass nil. This is normal for root classes only. |
8972963c | 2924 | return (!(cls->data()->flags & RO_ROOT)); |
ee974f79 | 2925 | } else { |
7257e56c | 2926 | // superclass not nil. Check if a higher superclass is missing. |
34d5b5e8 A |
2927 | Class supercls = remapClass(cls->getSuperclass()); |
2928 | ASSERT(cls != cls->getSuperclass()); | |
1807f628 | 2929 | ASSERT(cls != supercls); |
ee974f79 | 2930 | if (!supercls) return YES; |
7257e56c | 2931 | if (supercls->isRealized()) return NO; |
ee974f79 A |
2932 | return missingWeakSuperclass(supercls); |
2933 | } | |
2934 | } | |
2935 | ||
2936 | ||
b3962a83 A |
2937 | /*********************************************************************** |
2938 | * realizeAllClassesInImage | |
2939 | * Non-lazily realizes all unrealized classes in the given image. | |
2940 | * Locking: runtimeLock must be held by the caller. | |
13ba007e | 2941 | * Locking: this function may drop and re-acquire the lock. |
b3962a83 A |
2942 | **********************************************************************/ |
2943 | static void realizeAllClassesInImage(header_info *hi) | |
2944 | { | |
66799735 | 2945 | runtimeLock.assertLocked(); |
b3962a83 A |
2946 | |
2947 | size_t count, i; | |
1807f628 | 2948 | classref_t const *classlist; |
b3962a83 | 2949 | |
c1e772c4 | 2950 | if (hi->areAllClassesRealized()) return; |
b3962a83 A |
2951 | |
2952 | classlist = _getObjc2ClassList(hi, &count); | |
2953 | ||
2954 | for (i = 0; i < count; i++) { | |
13ba007e A |
2955 | Class cls = remapClass(classlist[i]); |
2956 | if (cls) { | |
2957 | realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock); | |
2958 | } | |
b3962a83 A |
2959 | } |
2960 | ||
c1e772c4 | 2961 | hi->setAllClassesRealized(YES); |
b3962a83 A |
2962 | } |
2963 | ||
2964 | ||
2965 | /*********************************************************************** | |
2966 | * realizeAllClasses | |
2967 | * Non-lazily realizes all unrealized classes in all known images. | |
2968 | * Locking: runtimeLock must be held by the caller. | |
13ba007e A |
2969 | * Locking: this function may drop and re-acquire the lock. |
2970 | * Dropping the lock makes this function thread-unsafe with respect | |
2971 | * to concurrent image unload, but the callers of this function | |
2972 | * already ultimately do something that is also thread-unsafe with | |
2973 | * respect to image unload (such as using the list of all classes). | |
b3962a83 A |
2974 | **********************************************************************/ |
2975 | static void realizeAllClasses(void) | |
2976 | { | |
66799735 | 2977 | runtimeLock.assertLocked(); |
b3962a83 A |
2978 | |
2979 | header_info *hi; | |
c1e772c4 | 2980 | for (hi = FirstHeader; hi; hi = hi->getNext()) { |
13ba007e | 2981 | realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock |
b3962a83 A |
2982 | } |
2983 | } | |
2984 | ||
2985 | ||
2986 | /*********************************************************************** | |
2987 | * _objc_allocateFutureClass | |
2988 | * Allocate an unresolved future class for the given class name. | |
2989 | * Returns any existing allocation if one was already made. | |
2990 | * Assumes the named class doesn't exist yet. | |
2991 | * Locking: acquires runtimeLock | |
2992 | **********************************************************************/ | |
cd5f04f5 | 2993 | Class _objc_allocateFutureClass(const char *name) |
b3962a83 | 2994 | { |
66799735 | 2995 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 2996 | |
7257e56c | 2997 | Class cls; |
31875a97 | 2998 | NXMapTable *map = futureNamedClasses(); |
b3962a83 | 2999 | |
31875a97 | 3000 | if ((cls = (Class)NXMapGet(map, name))) { |
b3962a83 | 3001 | // Already have a future class for this name. |
7257e56c | 3002 | return cls; |
b3962a83 A |
3003 | } |
3004 | ||
7257e56c | 3005 | cls = _calloc_class(sizeof(objc_class)); |
cd5f04f5 | 3006 | addFutureNamedClass(name, cls); |
b3962a83 | 3007 | |
7257e56c | 3008 | return cls; |
b3962a83 A |
3009 | } |
3010 | ||
3011 | ||
8070259c A |
3012 | /*********************************************************************** |
3013 | * objc_getFutureClass. Return the id of the named class. | |
3014 | * If the class does not exist, return an uninitialized class | |
3015 | * structure that will be used for the class when and if it | |
3016 | * does get loaded. | |
3017 | * Not thread safe. | |
3018 | **********************************************************************/ | |
3019 | Class objc_getFutureClass(const char *name) | |
3020 | { | |
3021 | Class cls; | |
3022 | ||
3023 | // YES unconnected, NO class handler | |
3024 | // (unconnected is OK because it will someday be the real class) | |
3025 | cls = look_up_class(name, YES, NO); | |
3026 | if (cls) { | |
3027 | if (PrintFuture) { | |
3028 | _objc_inform("FUTURE: found %p already in use for %s", | |
3029 | (void*)cls, name); | |
3030 | } | |
3031 | ||
3032 | return cls; | |
3033 | } | |
3034 | ||
3035 | // No class or future class with that name yet. Make one. | |
3036 | // fixme not thread-safe with respect to | |
3037 | // simultaneous library load or getFutureClass. | |
3038 | return _objc_allocateFutureClass(name); | |
3039 | } | |
3040 | ||
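// Illustrative sketch (not compiled): the intended use of the future-class
// machinery. A framework can ask for a class structure by name before that
// class has been loaded, cache the pointer, and the runtime will later fill
// in the same structure when the real class arrives. "NSExampleBridge" is a
// made-up class name.
#if 0
static Class exampleBridgeClass(void)
{
    static Class cached;
    if (!cached) {
        // Real class if already registered, otherwise an uninitialized
        // placeholder that becomes the class when and if it is loaded.
        cached = objc_getFutureClass("NSExampleBridge");
    }
    return cached;
}
#endif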
3041 | ||
7257e56c A |
3042 | BOOL _class_isFutureClass(Class cls) |
3043 | { | |
3044 | return cls && cls->isFuture(); | |
3045 | } | |
3046 | ||
34d5b5e8 A |
3047 | BOOL _class_isSwift(Class _Nullable cls) |
3048 | { | |
3049 | return cls && cls->isSwiftStable(); | |
3050 | } | |
7257e56c | 3051 | |
7af964d1 | 3052 | /*********************************************************************** |
7257e56c A |
3053 | * _objc_flush_caches |
3054 | * Flushes all caches. | |
3055 | * (Historical behavior: flush caches for cls, its metaclass, | |
3056 | * and subclasses thereof. Nil flushes all classes.) | |
3057 | * Locking: acquires runtimeLock | |
7af964d1 | 3058 | **********************************************************************/ |
34d5b5e8 | 3059 | static void flushCaches(Class cls, const char *func, bool (^predicate)(Class)) |
b3962a83 | 3060 | { |
66799735 | 3061 | runtimeLock.assertLocked(); |
1807f628 | 3062 | #if CONFIG_USE_CACHE_LOCK |
31875a97 | 3063 | mutex_locker_t lock(cacheUpdateLock); |
1807f628 | 3064 | #endif |
7257e56c | 3065 | |
34d5b5e8 A |
3066 | const auto handler = ^(Class c) { |
3067 | if (predicate(c)) { | |
3068 | c->cache.eraseNolock(func); | |
3069 | } | |
3070 | ||
3071 | return true; | |
3072 | }; | |
3073 | ||
7257e56c | 3074 | if (cls) { |
34d5b5e8 A |
3075 | foreach_realized_class_and_subclass(cls, handler); |
3076 | } else { | |
3077 | foreach_realized_class_and_metaclass(handler); | |
b3962a83 | 3078 | } |
b3962a83 A |
3079 | } |
3080 | ||
3081 | ||
7257e56c | 3082 | void _objc_flush_caches(Class cls) |
7af964d1 | 3083 | { |
31875a97 | 3084 | { |
66799735 | 3085 | mutex_locker_t lock(runtimeLock); |
34d5b5e8 A |
3086 | flushCaches(cls, __func__, [](Class c){ |
3087 | return !c->cache.isConstantOptimizedCache(); | |
3088 | }); | |
3089 | if (cls && !cls->isMetaClass() && !cls->isRootClass()) { | |
3090 | flushCaches(cls->ISA(), __func__, [](Class c){ | |
3091 | return !c->cache.isConstantOptimizedCache(); | |
3092 | }); | |
c1e772c4 A |
3093 | } else { |
3094 | // cls is a root class or root metaclass. Its metaclass is itself | |
3095 | // or a subclass so the metaclass caches were already flushed. | |
3096 | } | |
31875a97 | 3097 | } |
7257e56c A |
3098 | |
3099 | if (!cls) { | |
3100 | // collectALot if cls==nil | |
1807f628 | 3101 | #if CONFIG_USE_CACHE_LOCK |
31875a97 | 3102 | mutex_locker_t lock(cacheUpdateLock); |
1807f628 A |
3103 | #else |
3104 | mutex_locker_t lock(runtimeLock); | |
3105 | #endif | |
34d5b5e8 | 3106 | cache_t::collectNolock(true); |
7257e56c | 3107 | } |
7af964d1 A |
3108 | } |
3109 | ||
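// Illustrative sketch (not compiled): a caller with access to this SPI that
// has edited method lists behind the runtime's back can ask for the caches
// to be flushed. The wrapper function below is hypothetical.
#if 0
static void exampleFlush(Class cls)
{
    _objc_flush_caches(cls);   // flush cls, its metaclass, and subclasses
    _objc_flush_caches(nil);   // flush everything and collect old caches
}
#endif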
3110 | ||
3111 | /*********************************************************************** | |
3112 | * map_images | |
3113 | * Process the given images which are being mapped in by dyld. | |
3114 | * Calls ABI-agnostic code after taking ABI-specific locks. | |
3115 | * | |
3116 | * Locking: write-locks runtimeLock | |
3117 | **********************************************************************/ | |
c1e772c4 | 3118 | void |
bd8dfcfc A |
3119 | map_images(unsigned count, const char * const paths[], |
3120 | const struct mach_header * const mhdrs[]) | |
7af964d1 | 3121 | { |
66799735 | 3122 | mutex_locker_t lock(runtimeLock); |
c1e772c4 | 3123 | return map_images_nolock(count, paths, mhdrs); |
7af964d1 A |
3124 | } |
3125 | ||
3126 | ||
f192a3e2 A |
3127 | static void load_categories_nolock(header_info *hi) { |
3128 | bool hasClassProperties = hi->info()->hasCategoryClassProperties(); | |
3129 | ||
3130 | size_t count; | |
3131 | auto processCatlist = [&](category_t * const *catlist) { | |
3132 | for (unsigned i = 0; i < count; i++) { | |
3133 | category_t *cat = catlist[i]; | |
3134 | Class cls = remapClass(cat->cls); | |
3135 | locstamped_category_t lc{cat, hi}; | |
3136 | ||
3137 | if (!cls) { | |
3138 | // Category's target class is missing (probably weak-linked). | |
3139 | // Ignore the category. | |
3140 | if (PrintConnecting) { | |
3141 | _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with " | |
3142 | "missing weak-linked target class", | |
3143 | cat->name, cat); | |
3144 | } | |
3145 | continue; | |
3146 | } | |
3147 | ||
3148 | // Process this category. | |
3149 | if (cls->isStubClass()) { | |
3150 | // Stub classes are never realized. Stub classes | |
3151 | // don't know their metaclass until they're | |
3152 | // initialized, so we have to add categories with | |
3153 | // class methods or properties to the stub itself. | |
3154 | // methodizeClass() will find them and add them to | |
3155 | // the metaclass as appropriate. | |
3156 | if (cat->instanceMethods || | |
3157 | cat->protocols || | |
3158 | cat->instanceProperties || | |
3159 | cat->classMethods || | |
3160 | cat->protocols || | |
3161 | (hasClassProperties && cat->_classProperties)) | |
3162 | { | |
3163 | objc::unattachedCategories.addForClass(lc, cls); | |
3164 | } | |
3165 | } else { | |
3166 | // First, register the category with its target class. | |
3167 | // Then, rebuild the class's method lists (etc) if | |
3168 | // the class is realized. | |
3169 | if (cat->instanceMethods || cat->protocols | |
3170 | || cat->instanceProperties) | |
3171 | { | |
3172 | if (cls->isRealized()) { | |
3173 | attachCategories(cls, &lc, 1, ATTACH_EXISTING); | |
3174 | } else { | |
3175 | objc::unattachedCategories.addForClass(lc, cls); | |
3176 | } | |
3177 | } | |
3178 | ||
3179 | if (cat->classMethods || cat->protocols | |
3180 | || (hasClassProperties && cat->_classProperties)) | |
3181 | { | |
3182 | if (cls->ISA()->isRealized()) { | |
3183 | attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS); | |
3184 | } else { | |
3185 | objc::unattachedCategories.addForClass(lc, cls->ISA()); | |
3186 | } | |
3187 | } | |
3188 | } | |
3189 | } | |
3190 | }; | |
3191 | ||
bc4fafce A |
3192 | processCatlist(hi->catlist(&count)); |
3193 | processCatlist(hi->catlist2(&count)); | |
f192a3e2 A |
3194 | } |
3195 | ||
3196 | static void loadAllCategories() { | |
3197 | mutex_locker_t lock(runtimeLock); | |
3198 | ||
3199 | for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) { | |
3200 | load_categories_nolock(hi); | |
3201 | } | |
3202 | } | |
3203 | ||
7af964d1 A |
3204 | /*********************************************************************** |
3205 | * load_images | |
3206 | * Process +load in the given images which are being mapped in by dyld. | |
7af964d1 A |
3207 | * |
3208 | * Locking: write-locks runtimeLock and loadMethodLock | |
3209 | **********************************************************************/ | |
c1e772c4 A |
3210 | extern bool hasLoadMethods(const headerType *mhdr); |
3211 | extern void prepare_load_methods(const headerType *mhdr); | |
31875a97 | 3212 | |
c1e772c4 A |
3213 | void |
3214 | load_images(const char *path __unused, const struct mach_header *mh) | |
3215 | { | |
f192a3e2 A |
3216 | if (!didInitialAttachCategories && didCallDyldNotifyRegister) { |
3217 | didInitialAttachCategories = true; | |
3218 | loadAllCategories(); | |
3219 | } | |
3220 | ||
31875a97 | 3221 | // Return without taking locks if there are no +load methods here. |
c1e772c4 | 3222 | if (!hasLoadMethods((const headerType *)mh)) return; |
7af964d1 | 3223 | |
31875a97 | 3224 | recursive_mutex_locker_t lock(loadMethodLock); |
7af964d1 A |
3225 | |
3226 | // Discover load methods | |
31875a97 | 3227 | { |
66799735 | 3228 | mutex_locker_t lock2(runtimeLock); |
c1e772c4 | 3229 | prepare_load_methods((const headerType *)mh); |
31875a97 | 3230 | } |
7af964d1 A |
3231 | |
3232 | // Call +load methods (without runtimeLock - re-entrant) | |
c1e772c4 | 3233 | call_load_methods(); |
7af964d1 A |
3234 | } |
3235 | ||
3236 | ||
3237 | /*********************************************************************** | |
3238 | * unmap_image | |
3239 | * Process the given image which is about to be unmapped by dyld. | |
7af964d1 A |
3240 | * |
3241 | * Locking: write-locks runtimeLock and loadMethodLock | |
3242 | **********************************************************************/ | |
cd5f04f5 | 3243 | void |
c1e772c4 | 3244 | unmap_image(const char *path __unused, const struct mach_header *mh) |
b3962a83 | 3245 | { |
31875a97 | 3246 | recursive_mutex_locker_t lock(loadMethodLock); |
66799735 | 3247 | mutex_locker_t lock2(runtimeLock); |
8972963c | 3248 | unmap_image_nolock(mh); |
b3962a83 A |
3249 | } |
3250 | ||
3251 | ||
c1e772c4 A |
3252 | /*********************************************************************** |
3253 | * mustReadClasses | |
3254 | * Preflight check in advance of readClass() from an image. | |
3255 | **********************************************************************/ | |
1807f628 | 3256 | bool mustReadClasses(header_info *hi, bool hasDyldRoots) |
c1e772c4 A |
3257 | { |
3258 | const char *reason; | |
3259 | ||
3260 | // If the image is not preoptimized then we must read classes. | |
1807f628 | 3261 | if (!hi->hasPreoptimizedClasses()) { |
c1e772c4 A |
3262 | reason = nil; // Don't log this one because it is noisy. |
3263 | goto readthem; | |
3264 | } | |
3265 | ||
3266 | // If iOS simulator then we must read classes. | |
3267 | #if TARGET_OS_SIMULATOR | |
3268 | reason = "the image is for iOS simulator"; | |
3269 | goto readthem; | |
3270 | #endif | |
3271 | ||
1807f628 | 3272 | ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache |
c1e772c4 A |
3273 | |
3274 | // If the image may have missing weak superclasses then we must read classes | |
1807f628 | 3275 | if (!noMissingWeakSuperclasses() || hasDyldRoots) { |
c1e772c4 A |
3276 | reason = "the image may contain classes with missing weak superclasses"; |
3277 | goto readthem; | |
3278 | } | |
3279 | ||
3280 | // If there are unresolved future classes then we must read classes. | |
3281 | if (haveFutureNamedClasses()) { | |
3282 | reason = "there are unresolved future classes pending"; | |
3283 | goto readthem; | |
3284 | } | |
3285 | ||
13ba007e A |
3286 | // readClass() rewrites bits in backward-deploying Swift stable ABI code. |
3287 | // The assumption here is that there are no such classes | 
3288 | // in the dyld shared cache. | |
3289 | #if DEBUG | |
3290 | { | |
3291 | size_t count; | |
1807f628 | 3292 | classref_t const *classlist = _getObjc2ClassList(hi, &count); |
13ba007e A |
3293 | for (size_t i = 0; i < count; i++) { |
3294 | Class cls = remapClass(classlist[i]); | |
1807f628 | 3295 | ASSERT(!cls->isUnfixedBackwardDeployingStableSwift()); |
13ba007e A |
3296 | } |
3297 | } | |
3298 | #endif | |
3299 | ||
c1e772c4 A |
3300 | // readClass() does not need to do anything. |
3301 | return NO; | |
3302 | ||
3303 | readthem: | |
3304 | if (PrintPreopt && reason) { | |
3305 | _objc_inform("PREOPTIMIZATION: reading classes manually from %s " | |
3306 | "because %s", hi->fname(), reason); | |
3307 | } | |
3308 | return YES; | |
3309 | } | |
3310 | ||
3311 | ||
8070259c A |
3312 | /*********************************************************************** |
3313 | * readClass | |
3314 | * Read a class and metaclass as written by a compiler. | |
3315 | * Returns the new class pointer. This could be: | |
3316 | * - cls | |
3317 | * - nil (cls has a missing weak-linked superclass) | |
3318 | * - something else (space for this class was reserved by a future class) | |
3319 | * | |
c1e772c4 A |
3320 | * Note that all work performed by this function is preflighted by |
3321 | * mustReadClasses(). Do not change this function without updating that one. | |
3322 | * | |
8070259c A |
3323 | * Locking: runtimeLock acquired by map_images or objc_readClassPair |
3324 | **********************************************************************/ | |
31875a97 | 3325 | Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized) |
8070259c | 3326 | { |
34d5b5e8 | 3327 | const char *mangledName = cls->nonlazyMangledName(); |
8070259c A |
3328 | |
3329 | if (missingWeakSuperclass(cls)) { | |
3330 | // No superclass (probably weak-linked). | |
3331 | // Disavow any knowledge of this subclass. | |
3332 | if (PrintConnecting) { | |
3333 | _objc_inform("CLASS: IGNORING class '%s' with " | |
3334 | "missing weak-linked superclass", | |
3335 | cls->nameForLogging()); | |
3336 | } | |
3337 | addRemappedClass(cls, nil); | |
34d5b5e8 | 3338 | cls->setSuperclass(nil); |
8070259c A |
3339 | return nil; |
3340 | } | |
3341 | ||
13ba007e A |
3342 | cls->fixupBackwardDeployingStableSwift(); |
3343 | ||
31875a97 | 3344 | Class replacing = nil; |
34d5b5e8 A |
3345 | if (mangledName != nullptr) { |
3346 | if (Class newCls = popFutureNamedClass(mangledName)) { | |
3347 | // This name was previously allocated as a future class. | |
3348 | // Copy objc_class to future class's struct. | |
3349 | // Preserve future's rw data block. | |
3350 | ||
3351 | if (newCls->isAnySwift()) { | |
3352 | _objc_fatal("Can't complete future class request for '%s' " | |
3353 | "because the real class is too big.", | |
3354 | cls->nameForLogging()); | |
3355 | } | |
3356 | ||
3357 | class_rw_t *rw = newCls->data(); | |
3358 | const class_ro_t *old_ro = rw->ro(); | |
3359 | memcpy(newCls, cls, sizeof(objc_class)); | |
3360 | ||
3361 | // Manually set address-discriminated ptrauthed fields | |
3362 | // so that newCls gets the correct signatures. | |
3363 | newCls->setSuperclass(cls->getSuperclass()); | |
3364 | newCls->initIsa(cls->getIsa()); | |
3365 | ||
3366 | rw->set_ro((class_ro_t *)newCls->data()); | |
3367 | newCls->setData(rw); | |
3368 | freeIfMutable((char *)old_ro->getName()); | |
3369 | free((void *)old_ro); | |
3370 | ||
3371 | addRemappedClass(cls, newCls); | |
3372 | ||
3373 | replacing = cls; | |
3374 | cls = newCls; | |
8070259c A |
3375 | } |
3376 | } | |
3377 | ||
31875a97 | 3378 | if (headerIsPreoptimized && !replacing) { |
8070259c A |
3379 | // class list built in shared cache |
3380 | // fixme strict assert doesn't work because of duplicates | |
1807f628 | 3381 | // ASSERT(cls == getClass(name)); |
34d5b5e8 | 3382 | ASSERT(mangledName == nullptr || getClassExceptSomeSwift(mangledName)); |
8070259c | 3383 | } else { |
34d5b5e8 A |
3384 | if (mangledName) { //some Swift generic classes can lazily generate their names |
3385 | addNamedClass(cls, mangledName, replacing); | |
3386 | } else { | |
3387 | Class meta = cls->ISA(); | |
3388 | const class_ro_t *metaRO = meta->bits.safe_ro(); | |
3389 | ASSERT(metaRO->getNonMetaclass() && "Metaclass with lazy name must have a pointer to the corresponding nonmetaclass."); | |
3390 | ASSERT(metaRO->getNonMetaclass() == cls && "Metaclass nonmetaclass pointer must equal the original class."); | |
3391 | } | |
66799735 | 3392 | addClassTableEntry(cls); |
8070259c | 3393 | } |
66799735 | 3394 | |
8070259c A |
3395 | // for future reference: shared cache never contains MH_BUNDLEs |
3396 | if (headerIsBundle) { | |
3397 | cls->data()->flags |= RO_FROM_BUNDLE; | |
3398 | cls->ISA()->data()->flags |= RO_FROM_BUNDLE; | |
3399 | } | |
3400 | ||
31875a97 A |
3401 | return cls; |
3402 | } | |
3403 | ||
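// Illustrative sketch (not compiled): how a caller is expected to handle the
// three results documented above. (_read_images() below is the real caller;
// the wrapper function here is hypothetical.)
#if 0
static void exampleReadOneClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
{
    Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
    if (!newCls) {
        // Missing weak-linked superclass: the class is ignored entirely.
    } else if (newCls != cls) {
        // A future class reserved this name; newCls should be realized
        // non-lazily later (see resolvedFutureClasses in _read_images).
    }
}
#endif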
3404 | ||
3405 | /*********************************************************************** | |
3406 | * readProtocol | |
3407 | * Read a protocol as written by a compiler. | |
3408 | **********************************************************************/ | |
3409 | static void | |
3410 | readProtocol(protocol_t *newproto, Class protocol_class, | |
3411 | NXMapTable *protocol_map, | |
3412 | bool headerIsPreoptimized, bool headerIsBundle) | |
3413 | { | |
3414 | // This is not enough to make protocols in unloaded bundles safe, | |
3415 | // but it does prevent crashes when looking up unrelated protocols. | |
3416 | auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert; | |
3417 | ||
3418 | protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName); | |
3419 | ||
3420 | if (oldproto) { | |
1807f628 A |
3421 | if (oldproto != newproto) { |
3422 | // Some other definition already won. | |
3423 | if (PrintProtocols) { | |
3424 | _objc_inform("PROTOCOLS: protocol at %p is %s " | |
3425 | "(duplicate of %p)", | |
3426 | newproto, oldproto->nameForLogging(), oldproto); | |
3427 | } | |
3428 | ||
3429 | // If we are a shared cache binary then we have a definition of this | |
3430 | // protocol, but if another one was chosen then we need to clear our | |
3431 | // isCanonical bit so that no-one trusts it. | |
3432 | // Note, if getProtocol returned a shared cache protocol then the | |
3433 | // canonical definition is already in the shared cache and we don't | |
3434 | // need to do anything. | |
3435 | if (headerIsPreoptimized && !oldproto->isCanonical()) { | |
3436 | // Note newproto is an entry in our __objc_protolist section which | |
3437 | // for shared cache binaries points to the original protocol in | |
3438 | // that binary, not the shared cache uniqued one. | |
3439 | auto cacheproto = (protocol_t *) | |
3440 | getSharedCachePreoptimizedProtocol(newproto->mangledName); | |
3441 | if (cacheproto && cacheproto->isCanonical()) | |
3442 | cacheproto->clearIsCanonical(); | |
3443 | } | |
8070259c A |
3444 | } |
3445 | } | |
31875a97 A |
3446 | else if (headerIsPreoptimized) { |
3447 | // Shared cache initialized the protocol object itself, | |
3448 | // but in order to allow out-of-cache replacement we need | |
3449 | // to add it to the protocol table now. | |
8070259c | 3450 | |
31875a97 A |
3451 | protocol_t *cacheproto = (protocol_t *) |
3452 | getPreoptimizedProtocol(newproto->mangledName); | |
3453 | protocol_t *installedproto; | |
3454 | if (cacheproto && cacheproto != newproto) { | |
3455 | // Another definition in the shared cache wins (because | |
3456 | // everything in the cache was fixed up to point to it). | |
3457 | installedproto = cacheproto; | |
3458 | } | |
3459 | else { | |
3460 | // This definition wins. | |
3461 | installedproto = newproto; | |
3462 | } | |
3463 | ||
1807f628 A |
3464 | ASSERT(installedproto->getIsa() == protocol_class); |
3465 | ASSERT(installedproto->size >= sizeof(protocol_t)); | |
31875a97 A |
3466 | insertFn(protocol_map, installedproto->mangledName, |
3467 | installedproto); | |
3468 | ||
3469 | if (PrintProtocols) { | |
3470 | _objc_inform("PROTOCOLS: protocol at %p is %s", | |
3471 | installedproto, installedproto->nameForLogging()); | |
3472 | if (newproto != installedproto) { | |
3473 | _objc_inform("PROTOCOLS: protocol at %p is %s " | |
3474 | "(duplicate of %p)", | |
3475 | newproto, installedproto->nameForLogging(), | |
3476 | installedproto); | |
3477 | } | |
3478 | } | |
3479 | } | |
34d5b5e8 A |
3480 | else { |
3481 | // New protocol from an un-preoptimized image. Fix it up in place. | |
31875a97 A |
3482 | // fixme duplicate protocols from unloadable bundle |
3483 | newproto->initIsa(protocol_class); // fixme pinned | |
3484 | insertFn(protocol_map, newproto->mangledName, newproto); | |
3485 | if (PrintProtocols) { | |
3486 | _objc_inform("PROTOCOLS: protocol at %p is %s", | |
3487 | newproto, newproto->nameForLogging()); | |
3488 | } | |
3489 | } | |
8070259c A |
3490 | } |
3491 | ||
b3962a83 A |
3492 | /*********************************************************************** |
3493 | * _read_images | |
3494 | * Perform initial processing of the headers in the linked | |
3495 | * list beginning with headerList. | |
3496 | * | |
7af964d1 | 3497 | * Called by: map_images_nolock |
b3962a83 | 3498 | * |
7af964d1 | 3499 | * Locking: runtimeLock acquired by map_images |
b3962a83 | 3500 | **********************************************************************/ |
c1e772c4 | 3501 | void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses) |
b3962a83 A |
3502 | { |
3503 | header_info *hi; | |
3504 | uint32_t hIndex; | |
3505 | size_t count; | |
7af964d1 | 3506 | size_t i; |
7257e56c | 3507 | Class *resolvedFutureClasses = nil; |
b3962a83 | 3508 | size_t resolvedFutureClassCount = 0; |
31875a97 | 3509 | static bool doneOnce; |
1807f628 | 3510 | bool launchTime = NO; |
31875a97 | 3511 | TimeLogger ts(PrintImageTimes); |
7af964d1 | 3512 | |
66799735 | 3513 | runtimeLock.assertLocked(); |
7af964d1 | 3514 | |
b3962a83 | 3515 | #define EACH_HEADER \ |
8972963c | 3516 | hIndex = 0; \ |
c1e772c4 | 3517 | hIndex < hCount && (hi = hList[hIndex]); \ |
b3962a83 A |
3518 | hIndex++ |
3519 | ||
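// For illustration: a loop written as `for (EACH_HEADER) { ... }` expands to
//   for (hIndex = 0; hIndex < hCount && (hi = hList[hIndex]); hIndex++) { ... }
// i.e. it visits hList[0..hCount) and leaves the current header in `hi`.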
8972963c A |
3520 | if (!doneOnce) { |
3521 | doneOnce = YES; | |
1807f628 | 3522 | launchTime = YES; |
7257e56c | 3523 | |
8070259c | 3524 | #if SUPPORT_NONPOINTER_ISA |
c1e772c4 A |
3525 | // Disable non-pointer isa under some conditions. |
3526 | ||
3527 | # if SUPPORT_INDEXED_ISA | |
3528 | // Disable nonpointer isa if any image contains old Swift code | |
3529 | for (EACH_HEADER) { | |
3530 | if (hi->info()->containsSwift() && | |
13ba007e | 3531 | hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3) |
c1e772c4 A |
3532 | { |
3533 | DisableNonpointerIsa = true; | |
3534 | if (PrintRawIsa) { | |
3535 | _objc_inform("RAW ISA: disabling non-pointer isa because " | |
3536 | "the app or a framework contains Swift code " | |
3537 | "older than Swift 3.0"); | |
3538 | } | |
3539 | break; | |
3540 | } | |
3541 | } | |
3542 | # endif | |
8070259c | 3543 | |
c1e772c4 | 3544 | # if TARGET_OS_OSX |
31875a97 A |
3545 | // Disable non-pointer isa if the app is too old |
3546 | // (linked before OS X 10.11) | |
34d5b5e8 | 3547 | if (!dyld_program_sdk_at_least(dyld_platform_version_macOS_10_11)) { |
c1e772c4 | 3548 | DisableNonpointerIsa = true; |
8070259c A |
3549 | if (PrintRawIsa) { |
3550 | _objc_inform("RAW ISA: disabling non-pointer isa because " | |
34d5b5e8 | 3551 | "the app is too old."); |
8070259c A |
3552 | } |
3553 | } | |
31875a97 A |
3554 | |
3555 | // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section | |
3556 | // New apps that load old extensions may need this. | |
3557 | for (EACH_HEADER) { | |
c1e772c4 | 3558 | if (hi->mhdr()->filetype != MH_EXECUTE) continue; |
31875a97 | 3559 | unsigned long size; |
c1e772c4 A |
3560 | if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) { |
3561 | DisableNonpointerIsa = true; | |
31875a97 A |
3562 | if (PrintRawIsa) { |
3563 | _objc_inform("RAW ISA: disabling non-pointer isa because " | |
3564 | "the app has a __DATA,__objc_rawisa section"); | |
3565 | } | |
3566 | } | |
3567 | break; // assume only one MH_EXECUTE image | |
3568 | } | |
8070259c A |
3569 | # endif |
3570 | ||
8070259c A |
3571 | #endif |
3572 | ||
7257e56c A |
3573 | if (DisableTaggedPointers) { |
3574 | disableTaggedPointers(); | |
3575 | } | |
8972963c | 3576 | |
66799735 A |
3577 | initializeTaggedPointerObfuscator(); |
3578 | ||
8972963c | 3579 | if (PrintConnecting) { |
c1e772c4 | 3580 | _objc_inform("CLASS: found %d classes during launch", totalClasses); |
8972963c A |
3581 | } |
3582 | ||
c1e772c4 | 3583 | // namedClasses |
cd5f04f5 | 3584 | // Preoptimized classes don't go in this table. |
8972963c | 3585 | // 4/3 is NXMapTable's load factor |
7257e56c | 3586 | int namedClassesSize = |
c1e772c4 | 3587 | (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3; |
8972963c | 3588 | gdb_objc_realized_classes = |
31875a97 | 3589 | NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize); |
1807f628 | 3590 | |
31875a97 | 3591 | ts.log("IMAGE TIMES: first time tasks"); |
b3962a83 A |
3592 | } |
3593 | ||
1807f628 A |
3594 | // Fix up @selector references |
3595 | static size_t UnfixedSelectors; | |
3596 | { | |
3597 | mutex_locker_t lock(selLock); | |
3598 | for (EACH_HEADER) { | |
3599 | if (hi->hasPreoptimizedSelectors()) continue; | |
3600 | ||
3601 | bool isBundle = hi->isBundle(); | |
3602 | SEL *sels = _getObjc2SelectorRefs(hi, &count); | |
3603 | UnfixedSelectors += count; | |
3604 | for (i = 0; i < count; i++) { | |
3605 | const char *name = sel_cname(sels[i]); | |
3606 | SEL sel = sel_registerNameNoLock(name, isBundle); | |
3607 | if (sels[i] != sel) { | |
3608 | sels[i] = sel; | |
3609 | } | |
3610 | } | |
3611 | } | |
3612 | } | |
3613 | ||
3614 | ts.log("IMAGE TIMES: fix up selector references"); | |
8972963c | 3615 | |
7af964d1 | 3616 | // Discover classes. Fix up unresolved future classes. Mark bundle classes. |
1807f628 | 3617 | bool hasDyldRoots = dyld_shared_cache_some_image_overridden(); |
cd5f04f5 | 3618 | |
b3962a83 | 3619 | for (EACH_HEADER) { |
1807f628 | 3620 | if (! mustReadClasses(hi, hasDyldRoots)) { |
c1e772c4 A |
3621 | // Image is sufficiently optimized that we need not call readClass() |
3622 | continue; | |
3623 | } | |
3624 | ||
1807f628 A |
3625 | classref_t const *classlist = _getObjc2ClassList(hi, &count); |
3626 | ||
31875a97 | 3627 | bool headerIsBundle = hi->isBundle(); |
1807f628 | 3628 | bool headerIsPreoptimized = hi->hasPreoptimizedClasses(); |
cd5f04f5 | 3629 | |
b3962a83 | 3630 | for (i = 0; i < count; i++) { |
7257e56c | 3631 | Class cls = (Class)classlist[i]; |
31875a97 | 3632 | Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized); |
cd5f04f5 | 3633 | |
8070259c A |
3634 | if (newCls != cls && newCls) { |
3635 | // Class was moved but not deleted. Currently this occurs | |
3636 | // only when the new class resolved a future class. | |
cd5f04f5 | 3637 | // Non-lazily realize the class below. |
7257e56c | 3638 | resolvedFutureClasses = (Class *) |
31875a97 | 3639 | realloc(resolvedFutureClasses, |
c1e772c4 | 3640 | (resolvedFutureClassCount+1) * sizeof(Class)); |
cd5f04f5 | 3641 | resolvedFutureClasses[resolvedFutureClassCount++] = newCls; |
b3962a83 | 3642 | } |
b3962a83 A |
3643 | } |
3644 | } | |
3645 | ||
31875a97 | 3646 | ts.log("IMAGE TIMES: discover classes"); |
cd5f04f5 | 3647 | |
b3962a83 | 3648 | // Fix up remapped classes |
cd5f04f5 A |
3649 | // Class list and nonlazy class list remain unremapped. |
3650 | // Class refs and super refs are remapped for message dispatching. | |
b3962a83 A |
3651 | |
3652 | if (!noClassesRemapped()) { | |
3653 | for (EACH_HEADER) { | |
7257e56c | 3654 | Class *classrefs = _getObjc2ClassRefs(hi, &count); |
b3962a83 A |
3655 | for (i = 0; i < count; i++) { |
3656 | remapClassRef(&classrefs[i]); | |
3657 | } | |
3658 | // fixme why doesn't test future1 catch the absence of this? | |
3659 | classrefs = _getObjc2SuperRefs(hi, &count); | |
3660 | for (i = 0; i < count; i++) { | |
3661 | remapClassRef(&classrefs[i]); | |
3662 | } | |
3663 | } | |
3664 | } | |
3665 | ||
31875a97 | 3666 | ts.log("IMAGE TIMES: remap classes"); |
b3962a83 | 3667 | |
7257e56c A |
3668 | #if SUPPORT_FIXUP |
3669 | // Fix up old objc_msgSend_fixup call sites | |
3670 | for (EACH_HEADER) { | |
3671 | message_ref_t *refs = _getObjc2MessageRefs(hi, &count); | |
3672 | if (count == 0) continue; | |
3673 | ||
3674 | if (PrintVtables) { | |
3675 | _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch " | |
c1e772c4 | 3676 | "call sites in %s", count, hi->fname()); |
7257e56c A |
3677 | } |
3678 | for (i = 0; i < count; i++) { | |
3679 | fixupMessageRef(refs+i); | |
3680 | } | |
3681 | } | |
31875a97 A |
3682 | |
3683 | ts.log("IMAGE TIMES: fix up objc_msgSend_fixup"); | |
7257e56c A |
3684 | #endif |
3685 | ||
1807f628 | 3686 | |
b3962a83 | 3687 | // Discover protocols. Fix up protocol refs. |
b3962a83 | 3688 | for (EACH_HEADER) { |
7257e56c | 3689 | extern objc_class OBJC_CLASS_$_Protocol; |
b3962a83 | 3690 | Class cls = (Class)&OBJC_CLASS_$_Protocol; |
1807f628 | 3691 | ASSERT(cls); |
8070259c | 3692 | NXMapTable *protocol_map = protocols(); |
1807f628 A |
3693 | bool isPreoptimized = hi->hasPreoptimizedProtocols(); |
3694 | ||
3695 | // Skip reading protocols if this is an image from the shared cache | |
3696 | // and we support roots | |
3697 | // Note, after launch we do need to walk the protocol as the protocol | |
3698 | // in the shared cache is marked with isCanonical() and that may not | |
3699 | // be true if some non-shared cache binary was chosen as the canonical | |
3700 | // definition | |
bc4fafce | 3701 | if (launchTime && isPreoptimized) { |
1807f628 A |
3702 | if (PrintProtocols) { |
3703 | _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s", | |
3704 | hi->fname()); | |
3705 | } | |
3706 | continue; | |
3707 | } | |
3708 | ||
31875a97 A |
3709 | bool isBundle = hi->isBundle(); |
3710 | ||
1807f628 | 3711 | protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count); |
b3962a83 | 3712 | for (i = 0; i < count; i++) { |
31875a97 A |
3713 | readProtocol(protolist[i], cls, protocol_map, |
3714 | isPreoptimized, isBundle); | |
b3962a83 A |
3715 | } |
3716 | } | |
31875a97 A |
3717 | |
3718 | ts.log("IMAGE TIMES: discover protocols"); | |
3719 | ||
3720 | // Fix up @protocol references | |
3721 | // Preoptimized images may have the right | |
3722 | // answer already but we don't know for sure. | |
b3962a83 | 3723 | for (EACH_HEADER) { |
1807f628 A |
3724 | // At launch time, we know preoptimized image refs are pointing at the |
3725 | // shared cache definition of a protocol. We can skip the check on | |
3726 | // launch, but have to visit @protocol refs for shared cache images | |
3727 | // loaded later. | |
bc4fafce | 3728 | if (launchTime && hi->isPreoptimized()) |
1807f628 | 3729 | continue; |
31875a97 | 3730 | protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count); |
b3962a83 | 3731 | for (i = 0; i < count; i++) { |
8070259c | 3732 | remapProtocolRef(&protolist[i]); |
b3962a83 | 3733 | } |
7af964d1 | 3734 | } |
b3962a83 | 3735 | |
31875a97 A |
3736 | ts.log("IMAGE TIMES: fix up @protocol references"); |
3737 | ||
f192a3e2 A |
3738 | // Discover categories. Only do this after the initial category |
3739 | // attachment has been done. For categories present at startup, | |
3740 | // discovery is deferred until the first load_images call after | |
3741 | // the call to _dyld_objc_notify_register completes. rdar://problem/53119145 | |
3742 | if (didInitialAttachCategories) { | |
3743 | for (EACH_HEADER) { | |
3744 | load_categories_nolock(hi); | |
3745 | } | |
1807f628 A |
3746 | } |
3747 | ||
3748 | ts.log("IMAGE TIMES: discover categories"); | |
3749 | ||
3750 | // Category discovery MUST BE Late to avoid potential races | |
3751 | // when other threads call the new category code before | |
3752 | // this thread finishes its fixups. | |
3753 | ||
3754 | // +load handled by prepare_load_methods() | |
3755 | ||
7af964d1 A |
3756 | // Realize non-lazy classes (for +load methods and static instances) |
3757 | for (EACH_HEADER) { | |
bc4fafce | 3758 | classref_t const *classlist = hi->nlclslist(&count); |
b3962a83 | 3759 | for (i = 0; i < count; i++) { |
7257e56c A |
3760 | Class cls = remapClass(classlist[i]); |
3761 | if (!cls) continue; | |
3762 | ||
66799735 | 3763 | addClassTableEntry(cls); |
13ba007e A |
3764 | |
3765 | if (cls->isSwiftStable()) { | |
3766 | if (cls->swiftMetadataInitializer()) { | |
3767 | _objc_fatal("Swift class %s with a metadata initializer " | |
3768 | "is not allowed to be non-lazy", | |
3769 | cls->nameForLogging()); | |
3770 | } | |
3771 | // fixme also disallow relocatable classes | |
3772 | // We can't disallow all Swift classes because of | |
3773 | // classes like Swift.__EmptyArrayStorage | |
3774 | } | |
1807f628 | 3775 | realizeClassWithoutSwift(cls, nil); |
b3962a83 | 3776 | } |
31875a97 A |
3777 | } |
3778 | ||
3779 | ts.log("IMAGE TIMES: realize non-lazy classes"); | |
7af964d1 A |
3780 | |
3781 | // Realize newly-resolved future classes, in case CF manipulates them | |
3782 | if (resolvedFutureClasses) { | |
3783 | for (i = 0; i < resolvedFutureClassCount; i++) { | |
13ba007e A |
3784 | Class cls = resolvedFutureClasses[i]; |
3785 | if (cls->isSwiftStable()) { | |
3786 | _objc_fatal("Swift class is not allowed to be future"); | |
3787 | } | |
1807f628 A |
3788 | realizeClassWithoutSwift(cls, nil); |
3789 | cls->setInstancesRequireRawIsaRecursively(false/*inherited*/); | |
7af964d1 | 3790 | } |
31875a97 | 3791 | free(resolvedFutureClasses); |
b3962a83 A |
3792 | } |
3793 | ||
1807f628 | 3794 | ts.log("IMAGE TIMES: realize future classes"); |
b3962a83 | 3795 | |
8972963c A |
3796 | if (DebugNonFragileIvars) { |
3797 | realizeAllClasses(); | |
3798 | } | |
3799 | ||
31875a97 A |
3800 | |
3801 | // Print preoptimization statistics | |
3802 | if (PrintPreopt) { | |
3803 | static unsigned int PreoptTotalMethodLists; | |
3804 | static unsigned int PreoptOptimizedMethodLists; | |
3805 | static unsigned int PreoptTotalClasses; | |
3806 | static unsigned int PreoptOptimizedClasses; | |
3807 | ||
3808 | for (EACH_HEADER) { | |
1807f628 | 3809 | if (hi->hasPreoptimizedSelectors()) { |
31875a97 | 3810 | _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors " |
c1e772c4 | 3811 | "in %s", hi->fname()); |
31875a97 | 3812 | } |
c1e772c4 | 3813 | else if (hi->info()->optimizedByDyld()) { |
31875a97 | 3814 | _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors " |
c1e772c4 | 3815 | "in %s", hi->fname()); |
31875a97 A |
3816 | } |
3817 | ||
1807f628 | 3818 | classref_t const *classlist = _getObjc2ClassList(hi, &count); |
31875a97 A |
3819 | for (i = 0; i < count; i++) { |
3820 | Class cls = remapClass(classlist[i]); | |
3821 | if (!cls) continue; | |
3822 | ||
3823 | PreoptTotalClasses++; | |
1807f628 | 3824 | if (hi->hasPreoptimizedClasses()) { |
31875a97 A |
3825 | PreoptOptimizedClasses++; |
3826 | } | |
3827 | ||
3828 | const method_list_t *mlist; | |
34d5b5e8 | 3829 | if ((mlist = cls->bits.safe_ro()->baseMethods())) { |
31875a97 A |
3830 | PreoptTotalMethodLists++; |
3831 | if (mlist->isFixedUp()) { | |
3832 | PreoptOptimizedMethodLists++; | |
3833 | } | |
3834 | } | |
34d5b5e8 | 3835 | if ((mlist = cls->ISA()->bits.safe_ro()->baseMethods())) { |
31875a97 A |
3836 | PreoptTotalMethodLists++; |
3837 | if (mlist->isFixedUp()) { | |
3838 | PreoptOptimizedMethodLists++; | |
3839 | } | |
3840 | } | |
3841 | } | |
3842 | } | |
3843 | ||
3844 | _objc_inform("PREOPTIMIZATION: %zu selector references not " | |
3845 | "pre-optimized", UnfixedSelectors); | |
3846 | _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted", | |
3847 | PreoptOptimizedMethodLists, PreoptTotalMethodLists, | |
3848 | PreoptTotalMethodLists | |
3849 | ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists | |
3850 | : 0.0); | |
3851 | _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered", | |
3852 | PreoptOptimizedClasses, PreoptTotalClasses, | |
3853 | PreoptTotalClasses | |
3854 | ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses | |
3855 | : 0.0); | |
3856 | _objc_inform("PREOPTIMIZATION: %zu protocol references not " | |
3857 | "pre-optimized", UnfixedProtocolReferences); | |
3858 | } | |
3859 | ||
b3962a83 A |
3860 | #undef EACH_HEADER |
3861 | } | |
3862 | ||
3863 | ||
3864 | /*********************************************************************** | |
3865 | * prepare_load_methods | |
3866 | * Schedule +load for classes in this image, any un-+load-ed | |
3867 | * superclasses in other images, and any categories in this image. | |
3868 | **********************************************************************/ | |
3869 | // Recursively schedule +load for cls and any un-+load-ed superclasses. | |
3870 | // cls must already be connected. | |
7257e56c | 3871 | static void schedule_class_load(Class cls) |
b3962a83 | 3872 | { |
ee974f79 | 3873 | if (!cls) return; |
1807f628 | 3874 | ASSERT(cls->isRealized()); // _read_images should realize |
b3962a83 | 3875 | |
8972963c | 3876 | if (cls->data()->flags & RW_LOADED) return; |
b3962a83 | 3877 | |
ee974f79 | 3878 | // Ensure superclass-first ordering |
34d5b5e8 | 3879 | schedule_class_load(cls->getSuperclass()); |
b3962a83 | 3880 | |
7257e56c A |
3881 | add_class_to_loadable_list(cls); |
3882 | cls->setInfo(RW_LOADED); | |
b3962a83 A |
3883 | } |
3884 | ||
31875a97 A |
3885 | // Quick scan for +load methods that doesn't take a lock. |
3886 | bool hasLoadMethods(const headerType *mhdr) | |
3887 | { | |
3888 | size_t count; | |
3889 | if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true; | |
3890 | if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true; | |
3891 | return false; | |
3892 | } | |
3893 | ||
3894 | void prepare_load_methods(const headerType *mhdr) | |
b3962a83 A |
3895 | { |
3896 | size_t count, i; | |
3897 | ||
66799735 | 3898 | runtimeLock.assertLocked(); |
b3962a83 | 3899 | |
1807f628 | 3900 | classref_t const *classlist = |
31875a97 | 3901 | _getObjc2NonlazyClassList(mhdr, &count); |
b3962a83 | 3902 | for (i = 0; i < count; i++) { |
ee974f79 | 3903 | schedule_class_load(remapClass(classlist[i])); |
b3962a83 A |
3904 | } |
3905 | ||
1807f628 | 3906 | category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count); |
b3962a83 A |
3907 | for (i = 0; i < count; i++) { |
3908 | category_t *cat = categorylist[i]; | |
7257e56c | 3909 | Class cls = remapClass(cat->cls); |
ee974f79 | 3910 | if (!cls) continue; // category for ignored weak-linked class |
13ba007e A |
3911 | if (cls->isSwiftStable()) { |
3912 | _objc_fatal("Swift class extensions and categories on Swift " | |
3913 | "classes are not allowed to have +load methods"); | |
3914 | } | |
1807f628 A |
3915 | realizeClassWithoutSwift(cls, nil); |
3916 | ASSERT(cls->ISA()->isRealized()); | |
7257e56c | 3917 | add_category_to_loadable_list(cat); |
b3962a83 | 3918 | } |
b3962a83 A |
3919 | } |
3920 | ||
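// Illustrative sketch (not compiled): the superclass-first ordering that
// schedule_class_load() guarantees. With the hypothetical classes below,
// +[ExampleBase load] always runs before +[ExampleDerived load], regardless
// of the order the classes appear in the image's nonlazy class list.
#if 0
#import <objc/NSObject.h>

@interface ExampleBase : NSObject @end
@implementation ExampleBase
+ (void)load { /* scheduled first */ }
@end

@interface ExampleDerived : ExampleBase @end
@implementation ExampleDerived
+ (void)load { /* scheduled after ExampleBase's +load */ }
@end
#endif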
3921 | ||
3922 | /*********************************************************************** | |
3923 | * _unload_image | |
3924 | * Only handles MH_BUNDLE for now. | |
7af964d1 | 3925 | * Locking: write-lock and loadMethodLock acquired by unmap_image |
b3962a83 | 3926 | **********************************************************************/ |
cd5f04f5 | 3927 | void _unload_image(header_info *hi) |
b3962a83 A |
3928 | { |
3929 | size_t count, i; | |
3930 | ||
31875a97 | 3931 | loadMethodLock.assertLocked(); |
66799735 | 3932 | runtimeLock.assertLocked(); |
b3962a83 A |
3933 | |
3934 | // Unload unattached categories and categories waiting for +load. | |
3935 | ||
1807f628 A |
3936 | // Ignore __objc_catlist2. We don't support unloading Swift |
3937 | // and we never will. | |
bc4fafce | 3938 | category_t * const *catlist = hi->catlist(&count); |
b3962a83 A |
3939 | for (i = 0; i < count; i++) { |
3940 | category_t *cat = catlist[i]; | |
7257e56c | 3941 | Class cls = remapClass(cat->cls); |
1807f628 | 3942 | if (!cls) continue; // category for ignored weak-linked class |
ee974f79 | 3943 | |
b3962a83 A |
3944 | // fixme for MH_DYLIB cat's class may have been unloaded already |
3945 | ||
3946 | // unattached list | |
1807f628 | 3947 | objc::unattachedCategories.eraseCategoryForClass(cat, cls); |
b3962a83 A |
3948 | |
3949 | // +load queue | |
7257e56c | 3950 | remove_category_from_loadable_list(cat); |
b3962a83 A |
3951 | } |
3952 | ||
3953 | // Unload classes. | |
3954 | ||
c1e772c4 A |
3955 | // Gather classes from both __DATA,__objc_clslist |
3956 | // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter | |
3957 | // only, and we need to unload that class if we unload an arclite image. | |
cd5f04f5 | 3958 | |
1807f628 A |
3959 | objc::DenseSet<Class> classes{}; |
3960 | classref_t const *classlist; | |
cd5f04f5 | 3961 | |
c1e772c4 | 3962 | classlist = _getObjc2ClassList(hi, &count); |
b3962a83 | 3963 | for (i = 0; i < count; i++) { |
7257e56c | 3964 | Class cls = remapClass(classlist[i]); |
1807f628 | 3965 | if (cls) classes.insert(cls); |
cd5f04f5 | 3966 | } |
c1e772c4 | 3967 | |
bc4fafce | 3968 | classlist = hi->nlclslist(&count); |
cd5f04f5 | 3969 | for (i = 0; i < count; i++) { |
7257e56c | 3970 | Class cls = remapClass(classlist[i]); |
1807f628 | 3971 | if (cls) classes.insert(cls); |
b3962a83 | 3972 | } |
c1e772c4 A |
3973 | |
3974 | // First detach classes from each other. Then free each class. | |
3975 | // This avoids bugs where this loop unloads a subclass before its superclass | 
3976 | ||
1807f628 | 3977 | for (Class cls: classes) { |
c1e772c4 A |
3978 | remove_class_from_loadable_list(cls); |
3979 | detach_class(cls->ISA(), YES); | |
3980 | detach_class(cls, NO); | |
3981 | } | |
1807f628 | 3982 | for (Class cls: classes) { |
c1e772c4 A |
3983 | free_class(cls->ISA()); |
3984 | free_class(cls); | |
3985 | } | |
3986 | ||
8972963c A |
3987 | // XXX FIXME -- Clean up protocols: |
3988 | // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time | |
b3962a83 A |
3989 | |
3990 | // fixme DebugUnload | |
b3962a83 A |
3991 | } |
3992 | ||
3993 | ||
3994 | /*********************************************************************** | |
3995 | * method_getDescription | |
3996 | * Returns a pointer to this method's objc_method_description. | |
3997 | * Locking: none | |
3998 | **********************************************************************/ | |
3999 | struct objc_method_description * | |
4000 | method_getDescription(Method m) | |
4001 | { | |
7257e56c | 4002 | if (!m) return nil; |
bc4fafce | 4003 | return m->getDescription(); |
b3962a83 A |
4004 | } |
4005 | ||
4006 | ||
b3962a83 A |
4007 | IMP |
4008 | method_getImplementation(Method m) | |
4009 | { | |
bc4fafce A |
4010 | return m ? m->imp(true) : nil; |
4011 | } | |
4012 | ||
4013 | IMPAndSEL _method_getImplementationAndName(Method m) | |
4014 | { | |
4015 | return { m->imp(true), m->name() }; | |
b3962a83 A |
4016 | } |
4017 | ||
4018 | ||
4019 | /*********************************************************************** | |
4020 | * method_getName | |
4021 | * Returns this method's selector. | |
7257e56c | 4022 | * The method must not be nil. |
b3962a83 A |
4023 | * The method must already have been fixed-up. |
4024 | * Locking: none | |
4025 | **********************************************************************/ | |
4026 | SEL | |
7257e56c | 4027 | method_getName(Method m) |
b3962a83 | 4028 | { |
7257e56c | 4029 | if (!m) return nil; |
7af964d1 | 4030 | |
bc4fafce A |
4031 | ASSERT(m->name() == sel_registerName(sel_getName(m->name()))); |
4032 | return m->name(); | |
b3962a83 A |
4033 | } |
4034 | ||
4035 | ||
4036 | /*********************************************************************** | |
4037 | * method_getTypeEncoding | |
4038 | * Returns this method's old-style type encoding string. | |
7257e56c | 4039 | * The method must not be nil. |
b3962a83 A |
4040 | * Locking: none |
4041 | **********************************************************************/ | |
4042 | const char * | |
4043 | method_getTypeEncoding(Method m) | |
4044 | { | |
7257e56c | 4045 | if (!m) return nil; |
bc4fafce | 4046 | return m->types(); |
b3962a83 A |
4047 | } |
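
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): the public
* Method accessors above, applied to a hypothetical class/selector pair.
* Assumes <objc/runtime.h>; NSObject and -description are just examples.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>

static void dumpInstanceMethod(Class cls, SEL sel)
{
    Method m = class_getInstanceMethod(cls, sel);
    if (!m) return;

    printf("%s  types=%s  imp=%p\n",
           sel_getName(method_getName(m)),        // registered selector
           method_getTypeEncoding(m),             // old-style @encode string
           (void *)method_getImplementation(m));  // current IMP

    // method_getDescription exposes the same name/types as one struct.
    struct objc_method_description *d = method_getDescription(m);
    printf("description: %s %s\n", sel_getName(d->name), d->types);
}
// e.g. dumpInstanceMethod(objc_getClass("NSObject"), sel_registerName("description"));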
4048 | ||
4049 | ||
4050 | /*********************************************************************** | |
4051 | * method_setImplementation | |
4052 | * Sets this method's implementation to imp. | |
4053 | * The previous implementation is returned. | |
4054 | **********************************************************************/ | |
7af964d1 | 4055 | static IMP |
7257e56c | 4056 | _method_setImplementation(Class cls, method_t *m, IMP imp) |
b3962a83 | 4057 | { |
66799735 | 4058 | runtimeLock.assertLocked(); |
b3962a83 | 4059 | |
7257e56c A |
4060 | if (!m) return nil; |
4061 | if (!imp) return nil; | |
7af964d1 | 4062 | |
bc4fafce | 4063 | IMP old = m->imp(false); |
34d5b5e8 A |
4064 | SEL sel = m->name(); |
4065 | ||
bc4fafce | 4066 | m->setImp(imp); |
7af964d1 | 4067 | |
31875a97 | 4068 | // Cache updates are slow if cls is nil (i.e. unknown) |
7257e56c | 4069 | // RR/AWZ updates are slow if cls is nil (i.e. unknown) |
cd5f04f5 | 4070 | // fixme build list of classes whose Methods are known externally? |
7af964d1 | 4071 | |
34d5b5e8 A |
4072 | flushCaches(cls, __func__, [sel, old](Class c){ |
4073 | return c->cache.shouldFlush(sel, old); | |
4074 | }); | |
b3962a83 | 4075 | |
1807f628 | 4076 | adjustCustomFlagsForMethodChange(cls, m); |
8972963c | 4077 | |
b3962a83 A |
4078 | return old; |
4079 | } | |
4080 | ||
7af964d1 A |
4081 | IMP |
4082 | method_setImplementation(Method m, IMP imp) | |
b3962a83 | 4083 | { |
7257e56c | 4084 | // Don't know the class - will be slow if RR/AWZ are affected |
7af964d1 | 4085 | // fixme build list of classes whose Methods are known externally? |
66799735 | 4086 | mutex_locker_t lock(runtimeLock); |
31875a97 | 4087 | return _method_setImplementation(Nil, m, imp); |
b3962a83 A |
4088 | } |
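
/***********************************************************************
* Usage sketch (illustrative, not part of the original file):
* method_setImplementation returns the previous IMP, so a replacement can
* chain to it. The class, selector, and function names here are
* hypothetical; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>

static IMP previousDescriptionIMP;

static id replacementDescription(id self, SEL _cmd)
{
    // Forward to whatever implementation was installed before the swap.
    return ((id (*)(id, SEL))previousDescriptionIMP)(self, _cmd);
}

static void installReplacement(void)
{
    Method m = class_getInstanceMethod(objc_getClass("NSObject"),
                                       sel_registerName("description"));
    if (m) previousDescriptionIMP =
        method_setImplementation(m, (IMP)replacementDescription);
}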
4089 | ||
34d5b5e8 A |
4090 | extern void _method_setImplementationRawUnsafe(Method m, IMP imp) |
4091 | { | |
4092 | mutex_locker_t lock(runtimeLock); | |
4093 | m->setImp(imp); | |
4094 | } | |
4095 | ||
b3962a83 | 4096 | |
7257e56c | 4097 | void method_exchangeImplementations(Method m1, Method m2) |
7af964d1 | 4098 | { |
7af964d1 A |
4099 | if (!m1 || !m2) return; |
4100 | ||
66799735 | 4101 | mutex_locker_t lock(runtimeLock); |
7af964d1 | 4102 | |
34d5b5e8 A |
4103 | IMP imp1 = m1->imp(false); |
4104 | IMP imp2 = m2->imp(false); | |
4105 | SEL sel1 = m1->name(); | |
4106 | SEL sel2 = m2->name(); | |
4107 | ||
4108 | m1->setImp(imp2); | |
4109 | m2->setImp(imp1); | |
7af964d1 | 4110 | |
7257e56c A |
4111 | |
4112 | // RR/AWZ updates are slow because class is unknown | |
31875a97 | 4113 | // Cache updates are slow because class is unknown |
cd5f04f5 A |
4114 | // fixme build list of classes whose Methods are known externally? |
4115 | ||
34d5b5e8 A |
4116 | flushCaches(nil, __func__, [sel1, sel2, imp1, imp2](Class c){ |
4117 | return c->cache.shouldFlush(sel1, imp1) || c->cache.shouldFlush(sel2, imp2); | |
4118 | }); | |
7af964d1 | 4119 | |
1807f628 A |
4120 | adjustCustomFlagsForMethodChange(nil, m1); |
4121 | adjustCustomFlagsForMethodChange(nil, m2); | |
7af964d1 A |
4122 | } |
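
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): the classic
* swizzle built on method_exchangeImplementations. The class and the two
* selectors are hypothetical; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>

static void swizzleInstanceMethods(Class cls, SEL a, SEL b)
{
    Method m1 = class_getInstanceMethod(cls, a);
    Method m2 = class_getInstanceMethod(cls, b);
    // Both methods must exist; the runtime flushes the affected caches and
    // re-evaluates custom method flags (see flushCaches and
    // adjustCustomFlagsForMethodChange above).
    if (m1 && m2) method_exchangeImplementations(m1, m2);
}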
4123 | ||
b3962a83 A |
4124 | |
4125 | /*********************************************************************** | |
4126 | * ivar_getOffset | |
4127 | * fixme | |
4128 | * Locking: none | |
4129 | **********************************************************************/ | |
4130 | ptrdiff_t | |
4131 | ivar_getOffset(Ivar ivar) | |
4132 | { | |
4133 | if (!ivar) return 0; | |
7257e56c | 4134 | return *ivar->offset; |
b3962a83 A |
4135 | } |
4136 | ||
4137 | ||
4138 | /*********************************************************************** | |
4139 | * ivar_getName | |
4140 | * fixme | |
4141 | * Locking: none | |
4142 | **********************************************************************/ | |
4143 | const char * | |
4144 | ivar_getName(Ivar ivar) | |
4145 | { | |
7257e56c A |
4146 | if (!ivar) return nil; |
4147 | return ivar->name; | |
b3962a83 A |
4148 | } |
4149 | ||
4150 | ||
4151 | /*********************************************************************** | |
4152 | * ivar_getTypeEncoding | |
4153 | * fixme | |
4154 | * Locking: none | |
4155 | **********************************************************************/ | |
4156 | const char * | |
4157 | ivar_getTypeEncoding(Ivar ivar) | |
4158 | { | |
7257e56c A |
4159 | if (!ivar) return nil; |
4160 | return ivar->type; | |
b3962a83 A |
4161 | } |
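
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): reading ivar
* metadata with the accessors above. The ivar name passed in is
* hypothetical; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>

static void dumpIvar(Class cls, const char *ivarName)
{
    Ivar iv = class_getInstanceVariable(cls, ivarName);
    if (!iv) return;
    printf("%s @ offset %td, type %s\n",
           ivar_getName(iv),            // e.g. "_name"
           ivar_getOffset(iv),          // byte offset within an instance
           ivar_getTypeEncoding(iv));   // @encode string, e.g. "@\"NSString\""
}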
4162 | ||
4163 | ||
8972963c A |
4164 | |
4165 | const char *property_getName(objc_property_t prop) | |
4166 | { | |
7257e56c | 4167 | return prop->name; |
8972963c A |
4168 | } |
4169 | ||
4170 | const char *property_getAttributes(objc_property_t prop) | |
4171 | { | |
7257e56c | 4172 | return prop->attributes; |
8972963c A |
4173 | } |
4174 | ||
4175 | objc_property_attribute_t *property_copyAttributeList(objc_property_t prop, | |
4176 | unsigned int *outCount) | |
4177 | { | |
4178 | if (!prop) { | |
4179 | if (outCount) *outCount = 0; | |
7257e56c | 4180 | return nil; |
8972963c A |
4181 | } |
4182 | ||
66799735 | 4183 | mutex_locker_t lock(runtimeLock); |
31875a97 | 4184 | return copyPropertyAttributeList(prop->attributes,outCount); |
8972963c A |
4185 | } |
4186 | ||
4187 | char * property_copyAttributeValue(objc_property_t prop, const char *name) | |
4188 | { | |
7257e56c | 4189 | if (!prop || !name || *name == '\0') return nil; |
8972963c | 4190 | |
66799735 | 4191 | mutex_locker_t lock(runtimeLock); |
31875a97 | 4192 | return copyPropertyAttributeValue(prop->attributes, name); |
8972963c A |
4193 | } |
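
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): the
* objc_property_t accessors above. The class and property name passed in
* are hypothetical; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpProperty(Class cls, const char *propName)
{
    objc_property_t prop = class_getProperty(cls, propName);
    if (!prop) return;

    printf("%s : %s\n", property_getName(prop), property_getAttributes(prop));

    // One attribute at a time: "T" = type encoding, "V" = backing ivar, ...
    char *type = property_copyAttributeValue(prop, "T");
    if (type) { printf("  type = %s\n", type); free(type); }

    // Or the whole parsed list at once.
    unsigned int n = 0;
    objc_property_attribute_t *attrs = property_copyAttributeList(prop, &n);
    for (unsigned int i = 0; i < n; i++)
        printf("  %s = %s\n", attrs[i].name, attrs[i].value);
    free(attrs);
}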
4194 | ||
4195 | ||
cd5f04f5 A |
4196 | /*********************************************************************** |
4197 | * getExtendedTypesIndexesForMethod | |
4198 | * Returns: | |
4199 | * a is the count of methods in all method lists before m's method list | |
4200 | * b is the index of m in m's method list | |
4201 | * a+b is the index of m's extended types in the extended types array | |
4202 | **********************************************************************/ | |
7257e56c | 4203 | static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b) |
cd5f04f5 A |
4204 | { |
4205 | a = 0; | |
4206 | ||
31875a97 A |
4207 | if (proto->instanceMethods) { |
4208 | if (isRequiredMethod && isInstanceMethod) { | |
4209 | b = proto->instanceMethods->indexOfMethod(m); | |
4210 | return; | |
4211 | } | |
4212 | a += proto->instanceMethods->count; | |
cd5f04f5 | 4213 | } |
cd5f04f5 | 4214 | |
31875a97 A |
4215 | if (proto->classMethods) { |
4216 | if (isRequiredMethod && !isInstanceMethod) { | |
4217 | b = proto->classMethods->indexOfMethod(m); | |
4218 | return; | |
4219 | } | |
4220 | a += proto->classMethods->count; | |
cd5f04f5 | 4221 | } |
cd5f04f5 | 4222 | |
31875a97 A |
4223 | if (proto->optionalInstanceMethods) { |
4224 | if (!isRequiredMethod && isInstanceMethod) { | |
4225 | b = proto->optionalInstanceMethods->indexOfMethod(m); | |
4226 | return; | |
4227 | } | |
4228 | a += proto->optionalInstanceMethods->count; | |
cd5f04f5 | 4229 | } |
cd5f04f5 | 4230 | |
31875a97 A |
4231 | if (proto->optionalClassMethods) { |
4232 | if (!isRequiredMethod && !isInstanceMethod) { | |
4233 | b = proto->optionalClassMethods->indexOfMethod(m); | |
4234 | return; | |
4235 | } | |
4236 | a += proto->optionalClassMethods->count; | |
cd5f04f5 | 4237 | } |
7257e56c A |
4238 | } |
4239 | ||
4240 | ||
4241 | /*********************************************************************** | |
4242 | * getExtendedTypesIndexForMethod | |
4243 | * Returns the index of m's extended types in proto's extended types array. | |
4244 | **********************************************************************/ | |
4245 | static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod) | |
4246 | { | |
4247 | uint32_t a; | |
4248 | uint32_t b; | |
4249 | getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod, | |
4250 | isInstanceMethod, a, b); | |
4251 | return a + b; | |
4252 | } | |
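
/***********************************************************************
* Worked example for the two helpers above (hypothetical protocol):
*   required instance methods:   -a, -b        (2 entries)
*   required class methods:      +c            (1 entry)
*   optional instance methods:   -d, -e
* For the optional instance method -e:
*   a = 2 + 1 = 3   (methods in every list that precedes -e's list)
*   b = 1           (index of -e within the optional instance list)
* so its extended @encode string is extendedMethodTypes()[4].
**********************************************************************/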
4253 | ||
4254 | ||
4255 | /*********************************************************************** | |
4256 | * fixupProtocolMethodList | |
4257 | * Fixes up a single method list in a protocol. | |
4258 | **********************************************************************/ | |
4259 | static void | |
31875a97 | 4260 | fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist, |
7257e56c A |
4261 | bool required, bool instance) |
4262 | { | |
66799735 | 4263 | runtimeLock.assertLocked(); |
7257e56c | 4264 | |
31875a97 A |
4265 | if (!mlist) return; |
4266 | if (mlist->isFixedUp()) return; | |
7257e56c | 4267 | |
c1e772c4 | 4268 | const char **extTypes = proto->extendedMethodTypes(); |
31875a97 | 4269 | fixupMethodList(mlist, true/*always copy for simplicity*/, |
c1e772c4 | 4270 | !extTypes/*sort if no extended method types*/); |
7257e56c | 4271 | |
bc4fafce | 4272 | if (extTypes && !mlist->isSmallList()) { |
7257e56c A |
4273 | // Sort method list and extended method types together. |
4274 | // fixupMethodList() can't do this. | |
4275 | // fixme COW stomp | |
31875a97 | 4276 | uint32_t count = mlist->count; |
7257e56c A |
4277 | uint32_t prefix; |
4278 | uint32_t junk; | |
31875a97 | 4279 | getExtendedTypesIndexesForMethod(proto, &mlist->get(0), |
7257e56c | 4280 | required, instance, prefix, junk); |
7257e56c A |
4281 | for (uint32_t i = 0; i < count; i++) { |
4282 | for (uint32_t j = i+1; j < count; j++) { | |
bc4fafce A |
4283 | auto& mi = mlist->get(i).big(); |
4284 | auto& mj = mlist->get(j).big(); | |
31875a97 A |
4285 | if (mi.name > mj.name) { |
4286 | std::swap(mi, mj); | |
c1e772c4 | 4287 | std::swap(extTypes[prefix+i], extTypes[prefix+j]); |
7257e56c A |
4288 | } |
4289 | } | |
4290 | } | |
4291 | } | |
4292 | } | |
4293 | ||
4294 | ||
4295 | /*********************************************************************** | |
4296 | * fixupProtocol | |
4297 | * Fixes up all of a protocol's method lists. | |
4298 | **********************************************************************/ | |
4299 | static void | |
4300 | fixupProtocol(protocol_t *proto) | |
4301 | { | |
66799735 | 4302 | runtimeLock.assertLocked(); |
7257e56c A |
4303 | |
4304 | if (proto->protocols) { | |
4305 | for (uintptr_t i = 0; i < proto->protocols->count; i++) { | |
4306 | protocol_t *sub = remapProtocol(proto->protocols->list[i]); | |
4307 | if (!sub->isFixedUp()) fixupProtocol(sub); | |
4308 | } | |
4309 | } | |
cd5f04f5 | 4310 | |
31875a97 A |
4311 | fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES); |
4312 | fixupProtocolMethodList(proto, proto->classMethods, YES, NO); | |
4313 | fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES); | |
4314 | fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO); | |
cd5f04f5 | 4315 | |
7257e56c | 4316 | // fixme memory barrier so we can check this with no lock |
31875a97 | 4317 | proto->setFixedUp(); |
cd5f04f5 A |
4318 | } |
4319 | ||
4320 | ||
b3962a83 | 4321 | /*********************************************************************** |
7257e56c A |
4322 | * fixupProtocolIfNeeded |
4323 | * Fixes up all of a protocol's method lists if they aren't fixed up already. | |
4324 | * Locking: write-locks runtimeLock. | |
b3962a83 | 4325 | **********************************************************************/ |
7257e56c A |
4326 | static void |
4327 | fixupProtocolIfNeeded(protocol_t *proto) | |
b3962a83 | 4328 | { |
31875a97 | 4329 | runtimeLock.assertUnlocked(); |
1807f628 | 4330 | ASSERT(proto); |
b3962a83 | 4331 | |
7257e56c | 4332 | if (!proto->isFixedUp()) { |
66799735 | 4333 | mutex_locker_t lock(runtimeLock); |
7257e56c | 4334 | fixupProtocol(proto); |
7257e56c A |
4335 | } |
4336 | } | |
b3962a83 | 4337 | |
b3962a83 | 4338 | |
7257e56c A |
4339 | static method_list_t * |
4340 | getProtocolMethodList(protocol_t *proto, bool required, bool instance) | |
4341 | { | |
4342 | method_list_t **mlistp = nil; | |
4343 | if (required) { | |
4344 | if (instance) { | |
7af964d1 | 4345 | mlistp = &proto->instanceMethods; |
b3962a83 | 4346 | } else { |
7af964d1 | 4347 | mlistp = &proto->classMethods; |
b3962a83 A |
4348 | } |
4349 | } else { | |
7257e56c | 4350 | if (instance) { |
7af964d1 | 4351 | mlistp = &proto->optionalInstanceMethods; |
b3962a83 | 4352 | } else { |
7af964d1 | 4353 | mlistp = &proto->optionalClassMethods; |
b3962a83 A |
4354 | } |
4355 | } | |
4356 | ||
7257e56c A |
4357 | return *mlistp; |
4358 | } | |
4359 | ||
4360 | ||
4361 | /*********************************************************************** | |
4362 | * protocol_getMethod_nolock | |
4363 | * Locking: runtimeLock must be held by the caller | |
4364 | **********************************************************************/ | |
4365 | static method_t * | |
4366 | protocol_getMethod_nolock(protocol_t *proto, SEL sel, | |
4367 | bool isRequiredMethod, bool isInstanceMethod, | |
4368 | bool recursive) | |
4369 | { | |
31875a97 | 4370 | runtimeLock.assertLocked(); |
7257e56c A |
4371 | |
4372 | if (!proto || !sel) return nil; | |
cd5f04f5 | 4373 | |
1807f628 | 4374 | ASSERT(proto->isFixedUp()); |
7257e56c A |
4375 | |
4376 | method_list_t *mlist = | |
4377 | getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod); | |
4378 | if (mlist) { | |
cd5f04f5 A |
4379 | method_t *m = search_method_list(mlist, sel); |
4380 | if (m) return m; | |
b3962a83 A |
4381 | } |
4382 | ||
cd5f04f5 A |
4383 | if (recursive && proto->protocols) { |
4384 | method_t *m; | |
4385 | for (uint32_t i = 0; i < proto->protocols->count; i++) { | |
b3962a83 | 4386 | protocol_t *realProto = remapProtocol(proto->protocols->list[i]); |
7257e56c A |
4387 | m = protocol_getMethod_nolock(realProto, sel, |
4388 | isRequiredMethod, isInstanceMethod, | |
4389 | true); | |
b3962a83 A |
4390 | if (m) return m; |
4391 | } | |
4392 | } | |
4393 | ||
7257e56c | 4394 | return nil; |
b3962a83 A |
4395 | } |
4396 | ||
4397 | ||
4398 | /*********************************************************************** | |
7257e56c | 4399 | * protocol_getMethod |
b3962a83 | 4400 | * fixme |
7257e56c | 4401 | * Locking: acquires runtimeLock |
b3962a83 | 4402 | **********************************************************************/ |
cd5f04f5 | 4403 | Method |
7257e56c | 4404 | protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive) |
b3962a83 | 4405 | { |
7257e56c A |
4406 | if (!proto) return nil; |
4407 | fixupProtocolIfNeeded(proto); | |
4408 | ||
66799735 | 4409 | mutex_locker_t lock(runtimeLock); |
31875a97 A |
4410 | return protocol_getMethod_nolock(proto, sel, isRequiredMethod, |
4411 | isInstanceMethod, recursive); | |
cd5f04f5 A |
4412 | } |
4413 | ||
4414 | ||
4415 | /*********************************************************************** | |
7257e56c | 4416 | * protocol_getMethodTypeEncoding_nolock |
cd5f04f5 | 4417 | * Return the @encode string for the requested protocol method. |
7257e56c | 4418 | * Returns nil if the compiler did not emit any extended @encode data. |
66799735 | 4419 | * Locking: runtimeLock must be held by the caller |
cd5f04f5 A |
4420 | **********************************************************************/ |
4421 | const char * | |
7257e56c A |
4422 | protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel, |
4423 | bool isRequiredMethod, | |
4424 | bool isInstanceMethod) | |
cd5f04f5 | 4425 | { |
31875a97 | 4426 | runtimeLock.assertLocked(); |
7257e56c A |
4427 | |
4428 | if (!proto) return nil; | |
c1e772c4 | 4429 | if (!proto->extendedMethodTypes()) return nil; |
cd5f04f5 | 4430 | |
1807f628 | 4431 | ASSERT(proto->isFixedUp()); |
cd5f04f5 A |
4432 | |
4433 | method_t *m = | |
7257e56c A |
4434 | protocol_getMethod_nolock(proto, sel, |
4435 | isRequiredMethod, isInstanceMethod, false); | |
cd5f04f5 A |
4436 | if (m) { |
4437 | uint32_t i = getExtendedTypesIndexForMethod(proto, m, | |
4438 | isRequiredMethod, | |
4439 | isInstanceMethod); | |
c1e772c4 | 4440 | return proto->extendedMethodTypes()[i]; |
cd5f04f5 A |
4441 | } |
4442 | ||
4443 | // No method with that name. Search incorporated protocols. | |
4444 | if (proto->protocols) { | |
4445 | for (uintptr_t i = 0; i < proto->protocols->count; i++) { | |
4446 | const char *enc = | |
7257e56c | 4447 | protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod); |
cd5f04f5 A |
4448 | if (enc) return enc; |
4449 | } | |
4450 | } | |
4451 | ||
7257e56c | 4452 | return nil; |
b3962a83 A |
4453 | } |
4454 | ||
cd5f04f5 A |
4455 | /*********************************************************************** |
4456 | * _protocol_getMethodTypeEncoding | |
4457 | * Return the @encode string for the requested protocol method. | |
7257e56c A |
4458 | * Returns nil if the compiler did not emit any extended @encode data. |
4459 | * Locking: acquires runtimeLock | |
cd5f04f5 A |
4460 | **********************************************************************/ |
4461 | const char * | |
4462 | _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel, | |
4463 | BOOL isRequiredMethod, BOOL isInstanceMethod) | |
4464 | { | |
7257e56c A |
4465 | protocol_t *proto = newprotocol(proto_gen); |
4466 | ||
4467 | if (!proto) return nil; | |
4468 | fixupProtocolIfNeeded(proto); | |
4469 | ||
66799735 | 4470 | mutex_locker_t lock(runtimeLock); |
31875a97 A |
4471 | return protocol_getMethodTypeEncoding_nolock(proto, sel, |
4472 | isRequiredMethod, | |
4473 | isInstanceMethod); | |
cd5f04f5 | 4474 | } |
b3962a83 | 4475 | |
8070259c A |
4476 | |
4477 | /*********************************************************************** | |
4478 | * protocol_t::demangledName | |
4479 | * Returns the (Swift-demangled) name of the given protocol. | |
4480 | * Locking: none | |
4481 | **********************************************************************/ | |
4482 | const char * | |
4483 | protocol_t::demangledName() | |
4484 | { | |
34d5b5e8 A |
4485 | if (!hasDemangledNameField()) |
4486 | return mangledName; | |
8070259c A |
4487 | |
4488 | if (! _demangledName) { | |
4489 | char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/); | |
4490 | if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName), | |
4491 | (void**)&_demangledName)) | |
4492 | { | |
4493 | if (de) free(de); | |
4494 | } | |
4495 | } | |
4496 | return _demangledName; | |
4497 | } | |
4498 | ||
b3962a83 A |
4499 | /*********************************************************************** |
4500 | * protocol_getName | |
8070259c | 4501 | * Returns the (Swift-demangled) name of the given protocol. |
b3962a83 A |
4502 | * Locking: runtimeLock must not be held by the caller |
4503 | **********************************************************************/ | |
4504 | const char * | |
4505 | protocol_getName(Protocol *proto) | |
4506 | { | |
8070259c A |
4507 | if (!proto) return "nil"; |
4508 | else return newprotocol(proto)->demangledName(); | |
b3962a83 A |
4509 | } |
4510 | ||
4511 | ||
4512 | /*********************************************************************** | |
4513 | * protocol_getMethodDescription
4514 | * Returns the description of a named method (required or optional, instance or class).
4515 | * Locking: runtimeLock must not be held by the caller | |
4516 | **********************************************************************/ | |
4517 | struct objc_method_description | |
4518 | protocol_getMethodDescription(Protocol *p, SEL aSel, | |
4519 | BOOL isRequiredMethod, BOOL isInstanceMethod) | |
4520 | { | |
4521 | Method m = | |
7257e56c A |
4522 | protocol_getMethod(newprotocol(p), aSel, |
4523 | isRequiredMethod, isInstanceMethod, true); | |
bc4fafce A |
4524 | // method_getDescription is inefficient for small methods. Don't bother |
4525 | // trying to use it, just make our own. | |
4526 | if (m) return (struct objc_method_description){m->name(), (char *)m->types()}; | |
7257e56c | 4527 | else return (struct objc_method_description){nil, nil}; |
b3962a83 A |
4528 | } |
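
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): looking up a
* required instance method's description on a protocol. The NSObject
* protocol and -description are just convenient examples; assumes
* <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>

static void dumpProtocolMethod(void)
{
    Protocol *p = objc_getProtocol("NSObject");
    struct objc_method_description d =
        protocol_getMethodDescription(p, sel_registerName("description"),
                                      YES /*required*/, YES /*instance*/);
    // d.name is nil if neither the protocol nor its incorporated protocols
    // declare the selector.
    if (d.name) printf("-%s  %s\n", sel_getName(d.name), d.types);
}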
4529 | ||
4530 | ||
4531 | /*********************************************************************** | |
7257e56c | 4532 | * protocol_conformsToProtocol_nolock |
b3962a83 | 4533 | * Returns YES if self conforms to other. |
7af964d1 | 4534 | * Locking: runtimeLock must be held by the caller. |
b3962a83 | 4535 | **********************************************************************/ |
7257e56c A |
4536 | static bool |
4537 | protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other) | |
b3962a83 | 4538 | { |
31875a97 | 4539 | runtimeLock.assertLocked(); |
7257e56c | 4540 | |
b3962a83 A |
4541 | if (!self || !other) { |
4542 | return NO; | |
4543 | } | |
4544 | ||
7257e56c A |
4545 | // protocols need not be fixed up |
4546 | ||
8070259c | 4547 | if (0 == strcmp(self->mangledName, other->mangledName)) { |
b3962a83 A |
4548 | return YES; |
4549 | } | |
4550 | ||
4551 | if (self->protocols) { | |
8972963c | 4552 | uintptr_t i; |
b3962a83 | 4553 | for (i = 0; i < self->protocols->count; i++) { |
7af964d1 | 4554 | protocol_t *proto = remapProtocol(self->protocols->list[i]); |
1807f628 A |
4555 | if (other == proto) { |
4556 | return YES; | |
4557 | } | |
8070259c | 4558 | if (0 == strcmp(other->mangledName, proto->mangledName)) { |
b3962a83 A |
4559 | return YES; |
4560 | } | |
7257e56c | 4561 | if (protocol_conformsToProtocol_nolock(proto, other)) { |
b3962a83 A |
4562 | return YES; |
4563 | } | |
4564 | } | |
4565 | } | |
4566 | ||
4567 | return NO; | |
4568 | } | |
4569 | ||
4570 | ||
7af964d1 A |
4571 | /*********************************************************************** |
4572 | * protocol_conformsToProtocol | |
4573 | * Returns YES if self conforms to other. | |
4574 | * Locking: acquires runtimeLock | |
4575 | **********************************************************************/ | |
4576 | BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other) | |
4577 | { | |
66799735 | 4578 | mutex_locker_t lock(runtimeLock); |
31875a97 A |
4579 | return protocol_conformsToProtocol_nolock(newprotocol(self), |
4580 | newprotocol(other)); | |
7af964d1 A |
4581 | } |
4582 | ||
4583 | ||
b3962a83 A |
4584 | /*********************************************************************** |
4585 | * protocol_isEqual | |
4586 | * Return YES if two protocols are equal (i.e. conform to each other) | |
4587 | * Locking: acquires runtimeLock | |
4588 | **********************************************************************/ | |
4589 | BOOL protocol_isEqual(Protocol *self, Protocol *other) | |
4590 | { | |
4591 | if (self == other) return YES; | |
4592 | if (!self || !other) return NO; | |
4593 | ||
4594 | if (!protocol_conformsToProtocol(self, other)) return NO; | |
4595 | if (!protocol_conformsToProtocol(other, self)) return NO; | |
4596 | ||
4597 | return YES; | |
4598 | } | |
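
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): conformance
* and equality. NSSecureCoding/NSCoding are familiar examples and are only
* registered if Foundation is loaded; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>

static void compareProtocols(void)
{
    Protocol *secure = objc_getProtocol("NSSecureCoding");
    Protocol *coding = objc_getProtocol("NSCoding");
    if (!secure || !coding) return;

    // Conformance follows incorporated protocols recursively.
    printf("NSSecureCoding conforms to NSCoding: %d\n",
           protocol_conformsToProtocol(secure, coding));
    // Equality is mutual conformance, so a protocol is equal to itself.
    printf("equal to itself: %d\n", protocol_isEqual(secure, secure));
}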
4599 | ||
4600 | ||
4601 | /*********************************************************************** | |
4602 | * protocol_copyMethodDescriptionList | |
4603 | * Returns descriptions of a protocol's methods. | |
4604 | * Locking: acquires runtimeLock | |
4605 | **********************************************************************/ | |
4606 | struct objc_method_description * | |
4607 | protocol_copyMethodDescriptionList(Protocol *p, | |
4608 | BOOL isRequiredMethod,BOOL isInstanceMethod, | |
4609 | unsigned int *outCount) | |
4610 | { | |
8972963c | 4611 | protocol_t *proto = newprotocol(p); |
7257e56c | 4612 | struct objc_method_description *result = nil; |
b3962a83 A |
4613 | unsigned int count = 0; |
4614 | ||
4615 | if (!proto) { | |
4616 | if (outCount) *outCount = 0; | |
7257e56c | 4617 | return nil; |
b3962a83 A |
4618 | } |
4619 | ||
7257e56c | 4620 | fixupProtocolIfNeeded(proto); |
b3962a83 | 4621 | |
66799735 | 4622 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 4623 | |
7257e56c A |
4624 | method_list_t *mlist = |
4625 | getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod); | |
b3962a83 A |
4626 | |
4627 | if (mlist) { | |
8972963c | 4628 | result = (struct objc_method_description *) |
31875a97 A |
4629 | calloc(mlist->count + 1, sizeof(struct objc_method_description)); |
4630 | for (const auto& meth : *mlist) { | |
bc4fafce A |
4631 | result[count].name = meth.name(); |
4632 | result[count].types = (char *)meth.types(); | |
31875a97 | 4633 | count++; |
b3962a83 A |
4634 | } |
4635 | } | |
4636 | ||
b3962a83 A |
4637 | if (outCount) *outCount = count; |
4638 | return result; | |
4639 | } | |
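
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): enumerating a
* protocol's required instance methods. The returned block is malloc'd and
* must be freed; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void listRequiredInstanceMethods(Protocol *p)
{
    unsigned int count = 0;
    struct objc_method_description *list =
        protocol_copyMethodDescriptionList(p, YES /*required*/,
                                           YES /*instance*/, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("-%s  %s\n", sel_getName(list[i].name), list[i].types);
    free(list);
}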
4640 | ||
4641 | ||
4642 | /*********************************************************************** | |
4643 | * protocol_getProperty | |
4644 | * fixme | |
7257e56c | 4645 | * Locking: runtimeLock must be held by the caller |
b3962a83 | 4646 | **********************************************************************/ |
8972963c | 4647 | static property_t * |
7257e56c A |
4648 | protocol_getProperty_nolock(protocol_t *proto, const char *name, |
4649 | bool isRequiredProperty, bool isInstanceProperty) | |
b3962a83 | 4650 | { |
31875a97 | 4651 | runtimeLock.assertLocked(); |
7257e56c | 4652 | |
c1e772c4 A |
4653 | if (!isRequiredProperty) { |
4654 | // Only required properties are currently supported. | |
7257e56c | 4655 | return nil; |
b3962a83 A |
4656 | } |
4657 | ||
c1e772c4 A |
4658 | property_list_t *plist = isInstanceProperty ? |
4659 | proto->instanceProperties : proto->classProperties(); | |
4660 | if (plist) { | |
31875a97 A |
4661 | for (auto& prop : *plist) { |
4662 | if (0 == strcmp(name, prop.name)) { | |
4663 | return &prop;
b3962a83 A |
4664 | } |
4665 | } | |
4666 | } | |
4667 | ||
4668 | if (proto->protocols) { | |
4669 | uintptr_t i; | |
4670 | for (i = 0; i < proto->protocols->count; i++) { | |
7af964d1 | 4671 | protocol_t *p = remapProtocol(proto->protocols->list[i]); |
8972963c | 4672 | property_t *prop = |
7257e56c A |
4673 | protocol_getProperty_nolock(p, name, |
4674 | isRequiredProperty, | |
4675 | isInstanceProperty); | |
b3962a83 A |
4676 | if (prop) return prop; |
4677 | } | |
4678 | } | |
4679 | ||
7257e56c | 4680 | return nil; |
b3962a83 A |
4681 | } |
4682 | ||
8972963c | 4683 | objc_property_t protocol_getProperty(Protocol *p, const char *name, |
b3962a83 A |
4684 | BOOL isRequiredProperty, BOOL isInstanceProperty) |
4685 | { | |
7257e56c | 4686 | if (!p || !name) return nil; |
b3962a83 | 4687 | |
66799735 | 4688 | mutex_locker_t lock(runtimeLock); |
31875a97 A |
4689 | return (objc_property_t) |
4690 | protocol_getProperty_nolock(newprotocol(p), name, | |
4691 | isRequiredProperty, isInstanceProperty); | |
b3962a83 A |
4692 | } |
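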
4693 | ||
4694 | ||
4695 | /*********************************************************************** | |
4696 | * protocol_copyPropertyList | |
c1e772c4 | 4697 | * protocol_copyPropertyList2 |
b3962a83 A |
4698 | * fixme |
4699 | * Locking: acquires runtimeLock | |
4700 | **********************************************************************/ | |
8972963c A |
4701 | static property_t ** |
4702 | copyPropertyList(property_list_t *plist, unsigned int *outCount) | |
4703 | { | |
7257e56c | 4704 | property_t **result = nil; |
8972963c A |
4705 | unsigned int count = 0; |
4706 | ||
4707 | if (plist) { | |
4708 | count = plist->count; | |
4709 | } | |
4710 | ||
4711 | if (count > 0) { | |
8972963c | 4712 | result = (property_t **)malloc((count+1) * sizeof(property_t *)); |
31875a97 A |
4713 | |
4714 | count = 0; | |
4715 | for (auto& prop : *plist) { | |
4716 | result[count++] = &prop;
8972963c | 4717 | } |
31875a97 | 4718 | result[count] = nil; |
8972963c A |
4719 | } |
4720 | ||
4721 | if (outCount) *outCount = count; | |
4722 | return result; | |
4723 | } | |
4724 | ||
c1e772c4 A |
4725 | objc_property_t * |
4726 | protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount, | |
4727 | BOOL isRequiredProperty, BOOL isInstanceProperty) | |
b3962a83 | 4728 | { |
c1e772c4 A |
4729 | if (!proto || !isRequiredProperty) { |
4730 | // Optional properties are not currently supported. | |
b3962a83 | 4731 | if (outCount) *outCount = 0; |
7257e56c | 4732 | return nil; |
b3962a83 A |
4733 | } |
4734 | ||
66799735 | 4735 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 4736 | |
c1e772c4 A |
4737 | property_list_t *plist = isInstanceProperty |
4738 | ? newprotocol(proto)->instanceProperties | |
4739 | : newprotocol(proto)->classProperties(); | |
31875a97 | 4740 | return (objc_property_t *)copyPropertyList(plist, outCount); |
b3962a83 A |
4741 | } |
4742 | ||
c1e772c4 A |
4743 | objc_property_t * |
4744 | protocol_copyPropertyList(Protocol *proto, unsigned int *outCount) | |
4745 | { | |
4746 | return protocol_copyPropertyList2(proto, outCount, | |
4747 | YES/*required*/, YES/*instance*/); | |
4748 | } | |
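
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): the protocol
* property APIs above. Only required properties are supported, as noted;
* the property name "name" is hypothetical. Assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpProtocolProperties(Protocol *p)
{
    // Single lookup: a required instance property.
    objc_property_t one = protocol_getProperty(p, "name", YES, YES);
    if (one) printf("%s : %s\n",
                    property_getName(one), property_getAttributes(one));

    // All required instance properties.
    unsigned int count = 0;
    objc_property_t *props = protocol_copyPropertyList(p, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("%s\n", property_getName(props[i]));
    free(props);
}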
4749 | ||
b3962a83 A |
4750 | |
4751 | /*********************************************************************** | |
4752 | * protocol_copyProtocolList | |
4753 | * Copies this protocol's incorporated protocols. | |
4754 | * Does not copy those protocol's incorporated protocols in turn. | |
4755 | * Locking: acquires runtimeLock | |
4756 | **********************************************************************/ | |
8972963c A |
4757 | Protocol * __unsafe_unretained * |
4758 | protocol_copyProtocolList(Protocol *p, unsigned int *outCount) | |
b3962a83 A |
4759 | { |
4760 | unsigned int count = 0; | |
7257e56c | 4761 | Protocol **result = nil; |
b3962a83 A |
4762 | protocol_t *proto = newprotocol(p); |
4763 | ||
4764 | if (!proto) { | |
4765 | if (outCount) *outCount = 0; | |
7257e56c | 4766 | return nil; |
b3962a83 A |
4767 | } |
4768 | ||
66799735 | 4769 | mutex_locker_t lock(runtimeLock); |
b3962a83 A |
4770 | |
4771 | if (proto->protocols) { | |
4772 | count = (unsigned int)proto->protocols->count; | |
4773 | } | |
4774 | if (count > 0) { | |
8972963c | 4775 | result = (Protocol **)malloc((count+1) * sizeof(Protocol *)); |
b3962a83 A |
4776 | |
4777 | unsigned int i; | |
4778 | for (i = 0; i < count; i++) { | |
4779 | result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]); | |
4780 | } | |
7257e56c | 4781 | result[i] = nil; |
b3962a83 A |
4782 | } |
4783 | ||
b3962a83 A |
4784 | if (outCount) *outCount = count; |
4785 | return result; | |
4786 | } | |
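
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): listing the
* protocols a protocol directly incorporates (one level only, as documented
* above). Assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void listIncorporatedProtocols(Protocol *p)
{
    unsigned int count = 0;
    Protocol * __unsafe_unretained *list = protocol_copyProtocolList(p, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("<%s>\n", protocol_getName(list[i]));
    free(list);
}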
4787 | ||
4788 | ||
8972963c A |
4789 | /*********************************************************************** |
4790 | * objc_allocateProtocol | |
4791 | * Creates a new protocol. The protocol may not be used until | |
4792 | * objc_registerProtocol() is called. | |
7257e56c | 4793 | * Returns nil if a protocol with the same name already exists. |
8972963c A |
4794 | * Locking: acquires runtimeLock |
4795 | **********************************************************************/ | |
4796 | Protocol * | |
4797 | objc_allocateProtocol(const char *name) | |
4798 | { | |
66799735 | 4799 | mutex_locker_t lock(runtimeLock); |
8972963c | 4800 | |
8070259c | 4801 | if (getProtocol(name)) { |
7257e56c | 4802 | return nil; |
8972963c A |
4803 | } |
4804 | ||
31875a97 | 4805 | protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1); |
8972963c | 4806 | |
7257e56c | 4807 | extern objc_class OBJC_CLASS_$___IncompleteProtocol; |
8972963c | 4808 | Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol; |
8070259c A |
4809 | result->initProtocolIsa(cls); |
4810 | result->size = sizeof(protocol_t); | |
4811 | // fixme mangle the name if it looks swift-y? | |
c1e772c4 | 4812 | result->mangledName = strdupIfMutable(name); |
8972963c A |
4813 | |
4814 | // fixme reserve name without installing | |
4815 | ||
8972963c A |
4816 | return (Protocol *)result; |
4817 | } | |
4818 | ||
4819 | ||
4820 | /*********************************************************************** | |
4821 | * objc_registerProtocol | |
4822 | * Registers a newly-constructed protocol. The protocol is now | |
4823 | * ready for use and immutable. | |
4824 | * Locking: acquires runtimeLock | |
4825 | **********************************************************************/ | |
4826 | void objc_registerProtocol(Protocol *proto_gen) | |
4827 | { | |
4828 | protocol_t *proto = newprotocol(proto_gen); | |
4829 | ||
66799735 | 4830 | mutex_locker_t lock(runtimeLock); |
8972963c | 4831 | |
7257e56c | 4832 | extern objc_class OBJC_CLASS_$___IncompleteProtocol; |
8972963c | 4833 | Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol; |
7257e56c | 4834 | extern objc_class OBJC_CLASS_$_Protocol; |
8972963c A |
4835 | Class cls = (Class)&OBJC_CLASS_$_Protocol; |
4836 | ||
7257e56c | 4837 | if (proto->ISA() == cls) { |
8972963c | 4838 | _objc_inform("objc_registerProtocol: protocol '%s' was already " |
8070259c | 4839 | "registered!", proto->nameForLogging()); |
8972963c A |
4840 | return; |
4841 | } | |
7257e56c | 4842 | if (proto->ISA() != oldcls) { |
8972963c | 4843 | _objc_inform("objc_registerProtocol: protocol '%s' was not allocated " |
8070259c | 4844 | "with objc_allocateProtocol!", proto->nameForLogging()); |
8972963c A |
4845 | return; |
4846 | } | |
4847 | ||
31875a97 A |
4848 | // NOT initProtocolIsa(). The protocol object may already |
4849 | // have been retained and we must preserve that count. | |
4850 | proto->changeIsa(cls); | |
8972963c | 4851 | |
1807f628 A |
4852 | // Don't add this protocol if we already have it. |
4853 | // Should we warn on duplicates? | |
4854 | if (getProtocol(proto->mangledName) == nil) { | |
4855 | NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto); | |
4856 | } | |
8972963c A |
4857 | } |
4858 | ||
4859 | ||
4860 | /*********************************************************************** | |
4861 | * protocol_addProtocol | |
4862 | * Adds an incorporated protocol to another protocol. | |
4863 | * No method enforcement is performed. | |
4864 | * `proto` must be under construction. `addition` must not. | |
4865 | * Locking: acquires runtimeLock | |
4866 | **********************************************************************/ | |
4867 | void | |
4868 | protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen) | |
4869 | { | |
4870 | protocol_t *proto = newprotocol(proto_gen); | |
4871 | protocol_t *addition = newprotocol(addition_gen); | |
4872 | ||
7257e56c | 4873 | extern objc_class OBJC_CLASS_$___IncompleteProtocol; |
8972963c A |
4874 | Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol; |
4875 | ||
4876 | if (!proto_gen) return; | |
4877 | if (!addition_gen) return; | |
4878 | ||
66799735 | 4879 | mutex_locker_t lock(runtimeLock); |
8972963c | 4880 | |
7257e56c | 4881 | if (proto->ISA() != cls) { |
8972963c | 4882 | _objc_inform("protocol_addProtocol: modified protocol '%s' is not " |
8070259c | 4883 | "under construction!", proto->nameForLogging()); |
8972963c A |
4884 | return; |
4885 | } | |
7257e56c | 4886 | if (addition->ISA() == cls) { |
8972963c | 4887 | _objc_inform("protocol_addProtocol: added protocol '%s' is still " |
8070259c | 4888 | "under construction!", addition->nameForLogging()); |
8972963c A |
4889 | return; |
4890 | } | |
4891 | ||
4892 | protocol_list_t *protolist = proto->protocols; | |
4893 | if (!protolist) { | |
4894 | protolist = (protocol_list_t *) | |
31875a97 | 4895 | calloc(1, sizeof(protocol_list_t) |
8972963c A |
4896 | + sizeof(protolist->list[0])); |
4897 | } else { | |
4898 | protolist = (protocol_list_t *) | |
31875a97 | 4899 | realloc(protolist, protocol_list_size(protolist) |
8972963c A |
4900 | + sizeof(protolist->list[0])); |
4901 | } | |
4902 | ||
4903 | protolist->list[protolist->count++] = (protocol_ref_t)addition; | |
4904 | proto->protocols = protolist; | |
8972963c A |
4905 | } |
4906 | ||
4907 | ||
4908 | /*********************************************************************** | |
4909 | * protocol_addMethodDescription | |
4910 | * Adds a method to a protocol. The protocol must be under construction. | |
4911 | * Locking: acquires runtimeLock | |
4912 | **********************************************************************/ | |
4913 | static void | |
31875a97 | 4914 | protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types) |
8972963c | 4915 | { |
31875a97 | 4916 | if (!list) { |
bc4fafce A |
4917 | list = (method_list_t *)calloc(method_list_t::byteSize(sizeof(struct method_t::big), 1), 1); |
4918 | list->entsizeAndFlags = sizeof(struct method_t::big); | |
31875a97 | 4919 | list->setFixedUp(); |
8972963c | 4920 | } else { |
31875a97 A |
4921 | size_t size = list->byteSize() + list->entsize(); |
4922 | list = (method_list_t *)realloc(list, size); | |
8972963c A |
4923 | } |
4924 | ||
bc4fafce | 4925 | auto &meth = list->get(list->count++).big(); |
31875a97 | 4926 | meth.name = name; |
c1e772c4 | 4927 | meth.types = types ? strdupIfMutable(types) : ""; |
31875a97 | 4928 | meth.imp = nil; |
8972963c A |
4929 | } |
4930 | ||
4931 | void | |
4932 | protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types, | |
4933 | BOOL isRequiredMethod, BOOL isInstanceMethod) | |
4934 | { | |
4935 | protocol_t *proto = newprotocol(proto_gen); | |
4936 | ||
7257e56c | 4937 | extern objc_class OBJC_CLASS_$___IncompleteProtocol; |
8972963c A |
4938 | Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol; |
4939 | ||
4940 | if (!proto_gen) return; | |
4941 | ||
66799735 | 4942 | mutex_locker_t lock(runtimeLock); |
8972963c | 4943 | |
7257e56c | 4944 | if (proto->ISA() != cls) { |
8972963c | 4945 | _objc_inform("protocol_addMethodDescription: protocol '%s' is not " |
8070259c | 4946 | "under construction!", proto->nameForLogging()); |
8972963c A |
4947 | return; |
4948 | } | |
4949 | ||
4950 | if (isRequiredMethod && isInstanceMethod) { | |
31875a97 | 4951 | protocol_addMethod_nolock(proto->instanceMethods, name, types); |
8972963c | 4952 | } else if (isRequiredMethod && !isInstanceMethod) { |
31875a97 | 4953 | protocol_addMethod_nolock(proto->classMethods, name, types); |
8972963c | 4954 | } else if (!isRequiredMethod && isInstanceMethod) { |
31875a97 | 4955 | protocol_addMethod_nolock(proto->optionalInstanceMethods, name,types); |
8972963c | 4956 | } else /* !isRequiredMethod && !isInstanceMethod) */ { |
31875a97 | 4957 | protocol_addMethod_nolock(proto->optionalClassMethods, name, types); |
8972963c | 4958 | } |
8972963c A |
4959 | } |
4960 | ||
4961 | ||
4962 | /*********************************************************************** | |
4963 | * protocol_addProperty | |
4964 | * Adds a property to a protocol. The protocol must be under construction. | |
4965 | * Locking: acquires runtimeLock | |
4966 | **********************************************************************/ | |
4967 | static void | |
31875a97 | 4968 | protocol_addProperty_nolock(property_list_t *&plist, const char *name, |
7257e56c A |
4969 | const objc_property_attribute_t *attrs, |
4970 | unsigned int count) | |
8972963c | 4971 | { |
31875a97 | 4972 | if (!plist) { |
bc4fafce | 4973 | plist = (property_list_t *)calloc(property_list_t::byteSize(sizeof(property_t), 1), 1); |
31875a97 | 4974 | plist->entsizeAndFlags = sizeof(property_t); |
bc4fafce | 4975 | plist->count = 1; |
8972963c | 4976 | } else { |
bc4fafce A |
4977 | plist->count++; |
4978 | plist = (property_list_t *)realloc(plist, plist->byteSize()); | |
8972963c A |
4979 | } |
4980 | ||
bc4fafce | 4981 | property_t& prop = plist->get(plist->count - 1); |
c1e772c4 | 4982 | prop.name = strdupIfMutable(name); |
31875a97 | 4983 | prop.attributes = copyPropertyAttributeString(attrs, count); |
8972963c A |
4984 | } |
4985 | ||
4986 | void | |
4987 | protocol_addProperty(Protocol *proto_gen, const char *name, | |
4988 | const objc_property_attribute_t *attrs, | |
4989 | unsigned int count, | |
4990 | BOOL isRequiredProperty, BOOL isInstanceProperty) | |
4991 | { | |
4992 | protocol_t *proto = newprotocol(proto_gen); | |
4993 | ||
7257e56c | 4994 | extern objc_class OBJC_CLASS_$___IncompleteProtocol; |
8972963c A |
4995 | Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol; |
4996 | ||
4997 | if (!proto) return; | |
4998 | if (!name) return; | |
4999 | ||
66799735 | 5000 | mutex_locker_t lock(runtimeLock); |
8972963c | 5001 | |
7257e56c | 5002 | if (proto->ISA() != cls) { |
8972963c | 5003 | _objc_inform("protocol_addProperty: protocol '%s' is not " |
8070259c | 5004 | "under construction!", proto->nameForLogging()); |
8972963c A |
5005 | return; |
5006 | } | |
5007 | ||
5008 | if (isRequiredProperty && isInstanceProperty) { | |
31875a97 | 5009 | protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count); |
8972963c | 5010 | } |
c1e772c4 A |
5011 | else if (isRequiredProperty && !isInstanceProperty) { |
5012 | protocol_addProperty_nolock(proto->_classProperties, name, attrs, count); | |
5013 | } | |
5014 | //else if (!isRequiredProperty && isInstanceProperty) { | |
31875a97 | 5015 | // protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count); |
c1e772c4 A |
5016 | //} |
5017 | //else /* !isRequiredProperty && !isInstanceProperty) */ { | |
31875a97 | 5018 | // protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count); |
8972963c | 5019 | //} |
8972963c A |
5020 | } |
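
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): building and
* registering a protocol at runtime with the constructors above. Every name
* here ("MyDynamicProto", -doSomething, the "title" property) is
* hypothetical; assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>

static Protocol *makeDynamicProtocol(void)
{
    Protocol *proto = objc_allocateProtocol("MyDynamicProto");
    if (!proto) return objc_getProtocol("MyDynamicProto");  // name already taken

    // All mutation must happen before objc_registerProtocol().
    protocol_addProtocol(proto, objc_getProtocol("NSObject"));
    protocol_addMethodDescription(proto, sel_registerName("doSomething"),
                                  "v@:", YES /*required*/, YES /*instance*/);

    objc_property_attribute_t attrs[] = {
        { "T", "@\"NSString\"" },   // type
        { "C", "" },                // copy
        { "N", "" },                // nonatomic
    };
    protocol_addProperty(proto, "title", attrs, 3,
                         YES /*required*/, YES /*instance*/);

    objc_registerProtocol(proto);   // now immutable and visible to objc_getProtocol
    return proto;
}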
5021 | ||
1807f628 A |
5022 | static int |
5023 | objc_getRealizedClassList_nolock(Class *buffer, int bufferLen) | |
5024 | { | |
5025 | int count = 0; | |
5026 | ||
5027 | if (buffer) { | |
5028 | int c = 0; | |
5029 | foreach_realized_class([=, &count, &c](Class cls) { | |
5030 | count++; | |
5031 | if (c < bufferLen) { | |
5032 | buffer[c++] = cls; | |
5033 | } | |
5034 | return true; | |
5035 | }); | |
5036 | } else { | |
5037 | foreach_realized_class([&count](Class cls) { | |
5038 | count++; | |
5039 | return true; | |
5040 | }); | |
5041 | } | |
5042 | ||
5043 | return count; | |
5044 | } | |
5045 | ||
5046 | static Class * | |
5047 | objc_copyRealizedClassList_nolock(unsigned int *outCount) | |
5048 | { | |
5049 | Class *result = nil; | |
5050 | unsigned int count = 0; | |
5051 | ||
5052 | foreach_realized_class([&count](Class cls) { | |
5053 | count++; | |
5054 | return true; | |
5055 | }); | |
5056 | ||
5057 | if (count > 0) { | |
5058 | unsigned int c = 0; | |
5059 | ||
5060 | result = (Class *)malloc((1+count) * sizeof(Class)); | |
5061 | foreach_realized_class([=, &c](Class cls) { | |
5062 | result[c++] = cls; | |
5063 | return true; | |
5064 | }); | |
5065 | result[c] = nil; | |
5066 | } | |
5067 | ||
5068 | if (outCount) *outCount = count; | |
5069 | return result; | |
5070 | } | |
5071 | ||
b3962a83 | 5072 | /*********************************************************************** |
1807f628 A |
5073 | * objc_getClassList |
5074 | * Returns pointers to all classes. | |
5075 | * This requires all classes be realized, which is regretfully non-lazy. | |
5076 | * Locking: acquires runtimeLock | |
5077 | **********************************************************************/ | |
5078 | int | |
5079 | objc_getClassList(Class *buffer, int bufferLen) | |
b3962a83 | 5080 | { |
66799735 | 5081 | mutex_locker_t lock(runtimeLock); |
7af964d1 A |
5082 | |
5083 | realizeAllClasses(); | |
b3962a83 | 5084 | |
1807f628 A |
5085 | return objc_getRealizedClassList_nolock(buffer, bufferLen); |
5086 | } | |
b3962a83 | 5087 | |
1807f628 A |
5088 | /*********************************************************************** |
5089 | * objc_copyRealizedClassList
5090 | * Returns pointers to all currently realized classes.
5091 | * | |
5092 | * outCount may be nil. *outCount is the number of classes returned. | |
5093 | * If the returned array is not nil, it is nil-terminated and must be | |
5094 | * freed with free(). | |
5095 | * Locking: write-locks runtimeLock | |
5096 | **********************************************************************/ | |
5097 | Class * | |
5098 | objc_copyRealizedClassList(unsigned int *outCount) | |
5099 | { | |
5100 | mutex_locker_t lock(runtimeLock); | |
b3962a83 | 5101 | |
1807f628 | 5102 | return objc_copyRealizedClassList_nolock(outCount); |
b3962a83 A |
5103 | } |
5104 | ||
5105 | ||
8972963c A |
5106 | /*********************************************************************** |
5107 | * objc_copyClassList | |
5108 | * Returns pointers to all classes. | |
5109 | * This requires all classes be realized, which is regretfully non-lazy. | |
5110 | * | |
7257e56c A |
5111 | * outCount may be nil. *outCount is the number of classes returned. |
5112 | * If the returned array is not nil, it is nil-terminated and must be | |
8972963c A |
5113 | * freed with free(). |
5114 | * Locking: write-locks runtimeLock | |
5115 | **********************************************************************/ | |
5116 | Class * | |
5117 | objc_copyClassList(unsigned int *outCount) | |
5118 | { | |
66799735 | 5119 | mutex_locker_t lock(runtimeLock); |
8972963c A |
5120 | |
5121 | realizeAllClasses(); | |
5122 | ||
1807f628 A |
5123 | return objc_copyRealizedClassList_nolock(outCount); |
5124 | } | |
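
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): the two
* public class-list APIs above. objc_getClassList fills a caller-supplied
* buffer (two-pass pattern); objc_copyClassList returns a malloc'd,
* nil-terminated array. Assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void walkAllClasses(void)
{
    // Two-pass: ask for the count, allocate, then fill.
    int capacity = objc_getClassList(NULL, 0);
    Class *buffer = (Class *)calloc(capacity, sizeof(Class));
    int count = objc_getClassList(buffer, capacity);
    if (count > capacity) count = capacity;   // classes may have been added since
    for (int i = 0; i < count; i++)
        printf("%s\n", class_getName(buffer[i]));
    free(buffer);

    // Or let the runtime size the array.
    unsigned int n = 0;
    Class *all = objc_copyClassList(&n);
    printf("%u classes\n", n);
    free(all);
}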
c1e772c4 | 5125 | |
1807f628 A |
5126 | /*********************************************************************** |
5127 | * class_copyImpCache | |
5128 | * Returns the current content of the Class IMP Cache | |
5129 | * | |
5130 | * outCount may be nil. *outCount is the number of entries returned. | |
5131 | * If the returned array is not nil, it is nil-terminated and must be | |
5132 | * freed with free(). | |
5133 | * Locking: write-locks cacheUpdateLock | |
5134 | **********************************************************************/ | |
5135 | objc_imp_cache_entry * | |
5136 | class_copyImpCache(Class cls, int *outCount) | |
5137 | { | |
5138 | objc_imp_cache_entry *buffer = nullptr; | |
8972963c | 5139 | |
1807f628 A |
5140 | #if CONFIG_USE_CACHE_LOCK |
5141 | mutex_locker_t lock(cacheUpdateLock); | |
5142 | #else | |
5143 | mutex_locker_t lock(runtimeLock); | |
5144 | #endif | |
5145 | ||
5146 | cache_t &cache = cls->cache; | |
5147 | int count = (int)cache.occupied(); | |
5148 | ||
5149 | if (count) { | |
5150 | buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry)); | |
34d5b5e8 | 5151 | cache.copyCacheNolock(buffer, count); |
8972963c | 5152 | } |
c1e772c4 | 5153 | |
8972963c | 5154 | if (outCount) *outCount = count; |
1807f628 | 5155 | return buffer; |
8972963c A |
5156 | } |
5157 | ||
5158 | ||
b3962a83 A |
5159 | /*********************************************************************** |
5160 | * objc_copyProtocolList | |
5161 | * Returns pointers to all protocols. | |
7af964d1 | 5162 | * Locking: read-locks runtimeLock |
b3962a83 | 5163 | **********************************************************************/ |
8972963c | 5164 | Protocol * __unsafe_unretained * |
b3962a83 A |
5165 | objc_copyProtocolList(unsigned int *outCount) |
5166 | { | |
66799735 | 5167 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 5168 | |
b3962a83 | 5169 | NXMapTable *protocol_map = protocols(); |
b3962a83 | 5170 | |
1807f628 A |
5171 | // Find all the protocols from the pre-optimized images. These protocols |
5172 | // won't be in the protocol map. | |
5173 | objc::DenseMap<const char*, Protocol*> preoptimizedProtocols; | |
bc4fafce | 5174 | { |
1807f628 A |
5175 | header_info *hi; |
5176 | for (hi = FirstHeader; hi; hi = hi->getNext()) { | |
5177 | if (!hi->hasPreoptimizedProtocols()) | |
5178 | continue; | |
5179 | ||
5180 | size_t count, i; | |
5181 | const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count); | |
5182 | for (i = 0; i < count; i++) { | |
5183 | const protocol_t* protocol = protolist[i]; | |
5184 | ||
5185 | // Skip protocols we have in the runtime map. These likely
5186 | // correspond to protocols added dynamically which have the same | |
5187 | // name as a protocol found later in a dlopen'ed shared cache image. | |
5188 | if (NXMapGet(protocol_map, protocol->mangledName) != nil) | |
5189 | continue; | |
5190 | ||
5191 | // The protocols in the shared cache protolist point to their | |
5192 | // original on-disk object, not the optimized one. We can use the name | |
5193 | // to find the optimized one. | |
5194 | Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName); | |
5195 | preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto }); | |
5196 | } | |
5197 | } | |
5198 | } | |
5199 | ||
5200 | unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size(); | |
b3962a83 | 5201 | if (count == 0) { |
b3962a83 | 5202 | if (outCount) *outCount = 0; |
7257e56c | 5203 | return nil; |
b3962a83 A |
5204 | } |
5205 | ||
31875a97 | 5206 | Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*)); |
b3962a83 | 5207 | |
31875a97 A |
5208 | unsigned int i = 0; |
5209 | Protocol *proto; | |
5210 | const char *name; | |
5211 | NXMapState state = NXInitMapState(protocol_map); | |
b3962a83 A |
5212 | while (NXNextMapState(protocol_map, &state, |
5213 | (const void **)&name, (const void **)&proto)) | |
5214 | { | |
5215 | result[i++] = proto; | |
5216 | } | |
1807f628 A |
5217 | |
5218 | // Add any protocols found in the pre-optimized table | |
5219 | for (auto it : preoptimizedProtocols) { | |
5220 | result[i++] = it.second; | |
5221 | } | |
b3962a83 | 5222 | |
7257e56c | 5223 | result[i++] = nil; |
1807f628 | 5224 | ASSERT(i == count+1); |
b3962a83 | 5225 | |
b3962a83 A |
5226 | if (outCount) *outCount = count; |
5227 | return result; | |
5228 | } | |
5229 | ||
5230 | ||
5231 | /*********************************************************************** | |
5232 | * objc_getProtocol | |
7257e56c | 5233 | * Get a protocol by name, or return nil |
7af964d1 | 5234 | * Locking: read-locks runtimeLock |
b3962a83 A |
5235 | **********************************************************************/ |
5236 | Protocol *objc_getProtocol(const char *name) | |
5237 | { | |
66799735 | 5238 | mutex_locker_t lock(runtimeLock); |
31875a97 | 5239 | return getProtocol(name); |
b3962a83 A |
5240 | } |
5241 | ||
5242 | ||
5243 | /*********************************************************************** | |
5244 | * class_copyMethodList | |
5245 | * fixme | |
7af964d1 | 5246 | * Locking: read-locks runtimeLock |
b3962a83 A |
5247 | **********************************************************************/ |
5248 | Method * | |
7257e56c | 5249 | class_copyMethodList(Class cls, unsigned int *outCount) |
b3962a83 | 5250 | { |
b3962a83 | 5251 | unsigned int count = 0; |
7257e56c | 5252 | Method *result = nil; |
b3962a83 A |
5253 | |
5254 | if (!cls) { | |
5255 | if (outCount) *outCount = 0; | |
7257e56c | 5256 | return nil; |
b3962a83 A |
5257 | } |
5258 | ||
66799735 | 5259 | mutex_locker_t lock(runtimeLock); |
f192a3e2 | 5260 | const auto methods = cls->data()->methods(); |
b3962a83 | 5261 | |
1807f628 | 5262 | ASSERT(cls->isRealized()); |
b3962a83 | 5263 | |
f192a3e2 | 5264 | count = methods.count(); |
b3962a83 A |
5265 | |
5266 | if (count > 0) { | |
8972963c | 5267 | result = (Method *)malloc((count + 1) * sizeof(Method)); |
b3962a83 | 5268 | |
31875a97 | 5269 | count = 0; |
f192a3e2 | 5270 | for (auto& meth : methods) { |
c1e772c4 | 5271 | result[count++] = &meth; |
31875a97 A |
5272 | } |
5273 | result[count] = nil; | |
b3962a83 A |
5274 | } |
5275 | ||
b3962a83 A |
5276 | if (outCount) *outCount = count; |
5277 | return result; | |
5278 | } | |
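
/***********************************************************************
* Usage sketch (illustrative, not part of the original file): dumping a
* class's own methods (superclasses are not included) with
* class_copyMethodList. Assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpOwnMethods(Class cls)
{
    unsigned int count = 0;
    Method *methods = class_copyMethodList(cls, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("-[%s %s]  %s\n",
               class_getName(cls),
               sel_getName(method_getName(methods[i])),
               method_getTypeEncoding(methods[i]));
    free(methods);
}
// Pass object_getClass(cls) instead of cls to list the class (meta) methods.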
5279 | ||
5280 | ||
5281 | /*********************************************************************** | |
5282 | * class_copyIvarList | |
5283 | * fixme | |
7af964d1 | 5284 | * Locking: read-locks runtimeLock |
b3962a83 A |
5285 | **********************************************************************/ |
5286 | Ivar * | |
7257e56c | 5287 | class_copyIvarList(Class cls, unsigned int *outCount) |
b3962a83 | 5288 | { |
b3962a83 | 5289 | const ivar_list_t *ivars; |
7257e56c | 5290 | Ivar *result = nil; |
b3962a83 | 5291 | unsigned int count = 0; |
b3962a83 A |
5292 | |
5293 | if (!cls) { | |
5294 | if (outCount) *outCount = 0; | |
7257e56c | 5295 | return nil; |
b3962a83 A |
5296 | } |
5297 | ||
66799735 | 5298 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 5299 | |
1807f628 | 5300 | ASSERT(cls->isRealized()); |
b3962a83 | 5301 | |
f192a3e2 | 5302 | if ((ivars = cls->data()->ro()->ivars) && ivars->count) { |
8972963c | 5303 | result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar)); |
b3962a83 | 5304 | |
31875a97 A |
5305 | for (auto& ivar : *ivars) { |
5306 | if (!ivar.offset) continue; // anonymous bitfield | |
5307 | result[count++] = &ivar; | |
b3962a83 | 5308 | } |
7257e56c | 5309 | result[count] = nil; |
b3962a83 | 5310 | } |
b3962a83 A |
5311 | |
5312 | if (outCount) *outCount = count; | |
5313 | return result; | |
5314 | } | |
5315 | ||
5316 | ||
5317 | /*********************************************************************** | |
5318 | * class_copyPropertyList. Returns a heap block containing the | |
7257e56c | 5319 | * properties declared in the class, or nil if the class |
b3962a83 A |
5320 | * declares no properties. Caller must free the block. |
5321 | * Does not copy any superclass's properties. | |
7af964d1 | 5322 | * Locking: read-locks runtimeLock |
b3962a83 | 5323 | **********************************************************************/ |
8972963c | 5324 | objc_property_t * |
7257e56c | 5325 | class_copyPropertyList(Class cls, unsigned int *outCount) |
b3962a83 | 5326 | { |
b3962a83 A |
5327 | if (!cls) { |
5328 | if (outCount) *outCount = 0; | |
7257e56c | 5329 | return nil; |
b3962a83 A |
5330 | } |
5331 | ||
66799735 | 5332 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 5333 | |
66799735 | 5334 | checkIsKnownClass(cls); |
1807f628 | 5335 | ASSERT(cls->isRealized()); |
66799735 | 5336 | |
31875a97 | 5337 | auto rw = cls->data(); |
b3962a83 | 5338 | |
31875a97 | 5339 | property_t **result = nil; |
f192a3e2 A |
5340 | auto const properties = rw->properties(); |
5341 | unsigned int count = properties.count(); | |
b3962a83 | 5342 | if (count > 0) { |
8972963c | 5343 | result = (property_t **)malloc((count + 1) * sizeof(property_t *)); |
31875a97 A |
5344 | |
5345 | count = 0; | |
f192a3e2 | 5346 | for (auto& prop : properties) { |
31875a97 | 5347 | result[count++] = &prop;
b3962a83 | 5348 | } |
31875a97 | 5349 | result[count] = nil; |
b3962a83 A |
5350 | } |
5351 | ||
b3962a83 | 5352 | if (outCount) *outCount = count; |
8972963c | 5353 | return (objc_property_t *)result; |
b3962a83 A |
5354 | } |
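
/***********************************************************************
* Usage sketch (illustrative, not part of the original file):
* class_copyIvarList and class_copyPropertyList follow the same
* copy-then-free pattern as class_copyMethodList above, and likewise skip
* superclasses. Assumes <objc/runtime.h>.
**********************************************************************/
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpLayoutAndProperties(Class cls)
{
    unsigned int count = 0;

    Ivar *ivars = class_copyIvarList(cls, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("ivar %s @ %td\n",
               ivar_getName(ivars[i]), ivar_getOffset(ivars[i]));
    free(ivars);

    objc_property_t *props = class_copyPropertyList(cls, &count);
    for (unsigned int i = 0; i < count; i++)
        printf("prop %s : %s\n",
               property_getName(props[i]), property_getAttributes(props[i]));
    free(props);
}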
5355 | ||
5356 | ||
5357 | /*********************************************************************** | |
7257e56c | 5358 | * objc_class::getLoadMethod |
b3962a83 A |
5359 | * fixme |
5360 | * Called only from add_class_to_loadable_list. | |
7af964d1 | 5361 | * Locking: runtimeLock must be read- or write-locked by the caller. |
b3962a83 | 5362 | **********************************************************************/ |
cd5f04f5 | 5363 | IMP |
7257e56c | 5364 | objc_class::getLoadMethod() |
b3962a83 | 5365 | { |
31875a97 | 5366 | runtimeLock.assertLocked(); |
b3962a83 | 5367 | |
b3962a83 | 5368 | const method_list_t *mlist; |
b3962a83 | 5369 | |
1807f628 A |
5370 | ASSERT(isRealized()); |
5371 | ASSERT(ISA()->isRealized()); | |
5372 | ASSERT(!isMetaClass()); | |
5373 | ASSERT(ISA()->isMetaClass()); | |
b3962a83 | 5374 | |
f192a3e2 | 5375 | mlist = ISA()->data()->ro()->baseMethods(); |
8070259c | 5376 | if (mlist) { |
31875a97 | 5377 | for (const auto& meth : *mlist) { |
bc4fafce | 5378 | const char *name = sel_cname(meth.name()); |
8070259c | 5379 | if (0 == strcmp(name, "load")) { |
bc4fafce | 5380 | return meth.imp(false); |
8070259c | 5381 | } |
b3962a83 A |
5382 | } |
5383 | } | |
5384 | ||
7257e56c | 5385 | return nil; |
b3962a83 A |
5386 | } |
5387 | ||
5388 | ||
5389 | /*********************************************************************** | |
5390 | * _category_getName | |
5391 | * Returns a category's name. | |
5392 | * Locking: none | |
5393 | **********************************************************************/ | |
cd5f04f5 | 5394 | const char * |
b3962a83 A |
5395 | _category_getName(Category cat) |
5396 | { | |
7257e56c | 5397 | return cat->name; |
b3962a83 A |
5398 | } |
5399 | ||
5400 | ||
5401 | /*********************************************************************** | |
5402 | * _category_getClassName | |
5403 | * Returns a category's class's name | |
5404 | * Called only from add_category_to_loadable_list and | |
8070259c | 5405 | * remove_category_from_loadable_list for logging purposes. |
7af964d1 | 5406 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 5407 | **********************************************************************/ |
cd5f04f5 | 5408 | const char * |
b3962a83 A |
5409 | _category_getClassName(Category cat) |
5410 | { | |
31875a97 | 5411 | runtimeLock.assertLocked(); |
8070259c | 5412 | return remapClass(cat->cls)->nameForLogging(); |
b3962a83 A |
5413 | } |
5414 | ||
5415 | ||
5416 | /*********************************************************************** | |
5417 | * _category_getClass | |
5418 | * Returns a category's class | |
5419 | * Called only by call_category_loads. | |
7af964d1 | 5420 | * Locking: read-locks runtimeLock |
b3962a83 | 5421 | **********************************************************************/ |
cd5f04f5 | 5422 | Class |
b3962a83 A |
5423 | _category_getClass(Category cat) |
5424 | { | |
66799735 | 5425 | mutex_locker_t lock(runtimeLock); |
7257e56c | 5426 | Class result = remapClass(cat->cls); |
1807f628 | 5427 | ASSERT(result->isRealized()); // ok for call_category_loads' usage |
7257e56c | 5428 | return result; |
b3962a83 A |
5429 | } |
5430 | ||
5431 | ||
5432 | /*********************************************************************** | |
5433 | * _category_getLoadMethod | |
5434 | * fixme | |
5435 | * Called only from add_category_to_loadable_list | |
7af964d1 | 5436 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 5437 | **********************************************************************/ |
cd5f04f5 | 5438 | IMP |
b3962a83 A |
5439 | _category_getLoadMethod(Category cat) |
5440 | { | |
31875a97 | 5441 | runtimeLock.assertLocked(); |
b3962a83 A |
5442 | |
5443 | const method_list_t *mlist; | |
b3962a83 | 5444 | |
7257e56c | 5445 | mlist = cat->classMethods; |
8070259c | 5446 | if (mlist) { |
31875a97 | 5447 | for (const auto& meth : *mlist) { |
bc4fafce | 5448 | const char *name = sel_cname(meth.name()); |
8070259c | 5449 | if (0 == strcmp(name, "load")) { |
bc4fafce | 5450 | return meth.imp(false); |
8070259c | 5451 | } |
b3962a83 A |
5452 | } |
5453 | } | |
5454 | ||
7257e56c | 5455 | return nil; |
b3962a83 A |
5456 | } |
5457 | ||
5458 | ||
c1e772c4 A |
5459 | /*********************************************************************** |
5460 | * category_t::propertiesForMeta | |
5461 | * Return a category's instance or class properties. | |
5462 | * hi is the image containing the category. | |
5463 | **********************************************************************/ | |
5464 | property_list_t * | |
5465 | category_t::propertiesForMeta(bool isMeta, struct header_info *hi) | |
5466 | { | |
5467 | if (!isMeta) return instanceProperties; | |
5468 | else if (hi->info()->hasCategoryClassProperties()) return _classProperties; | |
5469 | else return nil; | |
5470 | } | |
5471 | ||
5472 | ||
b3962a83 A |
5473 | /*********************************************************************** |
5474 | * class_copyProtocolList | |
5475 | * fixme | |
7af964d1 | 5476 | * Locking: read-locks runtimeLock |
b3962a83 | 5477 | **********************************************************************/ |
8972963c | 5478 | Protocol * __unsafe_unretained * |
7257e56c | 5479 | class_copyProtocolList(Class cls, unsigned int *outCount) |
b3962a83 | 5480 | { |
b3962a83 | 5481 | unsigned int count = 0; |
7257e56c | 5482 | Protocol **result = nil; |
b3962a83 A |
5483 | |
5484 | if (!cls) { | |
5485 | if (outCount) *outCount = 0; | |
7257e56c | 5486 | return nil; |
b3962a83 A |
5487 | } |
5488 | ||
66799735 | 5489 | mutex_locker_t lock(runtimeLock); |
f192a3e2 | 5490 | const auto protocols = cls->data()->protocols(); |
66799735 A |
5491 | |
5492 | checkIsKnownClass(cls); | |
b3962a83 | 5493 | |
1807f628 | 5494 | ASSERT(cls->isRealized()); |
b3962a83 | 5495 | |
f192a3e2 | 5496 | count = protocols.count(); |
b3962a83 | 5497 | |
31875a97 | 5498 | if (count > 0) { |
8972963c | 5499 | result = (Protocol **)malloc((count+1) * sizeof(Protocol *)); |
31875a97 A |
5500 | |
5501 | count = 0; | |
f192a3e2 | 5502 | for (const auto& proto : protocols) { |
31875a97 | 5503 | result[count++] = (Protocol *)remapProtocol(proto); |
b3962a83 | 5504 | } |
31875a97 | 5505 | result[count] = nil; |
b3962a83 A |
5506 | } |
5507 | ||
b3962a83 A |
5508 | if (outCount) *outCount = count; |
5509 | return result; | |
5510 | } | |
5511 | ||
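/* Usage sketch (illustrative): listing the protocols a class itself adopts.
 * As documented above, superclass adoptions are not included. The helper
 * name is ours. */
#import <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpAdoptedProtocols(Class cls)
{
    unsigned int count = 0;
    Protocol * __unsafe_unretained *protos = class_copyProtocolList(cls, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s adopts <%s>\n", class_getName(cls), protocol_getName(protos[i]));
    }
    free(protos);
}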
5512 | ||
5513 | /*********************************************************************** | |
66799735 A |
5514 | * objc_copyImageNames |
5515 | * Copies names of loaded images with ObjC contents. | |
5516 | * | |
5517 | * Locking: acquires runtimeLock | |
b3962a83 | 5518 | **********************************************************************/ |
66799735 | 5519 | const char **objc_copyImageNames(unsigned int *outCount) |
b3962a83 | 5520 | { |
66799735 | 5521 | mutex_locker_t lock(runtimeLock); |
1807f628 A |
5522 | |
5523 | int HeaderCount = 0; | |
5524 | for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) { | |
5525 | HeaderCount++; | |
5526 | } | |
5527 | ||
66799735 A |
5528 | #if TARGET_OS_WIN32 |
5529 | const TCHAR **names = (const TCHAR **) | |
5530 | malloc((HeaderCount+1) * sizeof(TCHAR *)); | |
5531 | #else | |
5532 | const char **names = (const char **) | |
5533 | malloc((HeaderCount+1) * sizeof(char *)); | |
5534 | #endif | |
5535 | ||
5536 | unsigned int count = 0; | |
5537 | for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) { | |
5538 | #if TARGET_OS_WIN32 | |
5539 | if (hi->moduleName) { | |
5540 | names[count++] = hi->moduleName; | |
5541 | } | |
5542 | #else | |
5543 | const char *fname = hi->fname(); | |
5544 | if (fname) { | |
5545 | names[count++] = fname; | |
5546 | } | |
5547 | #endif | |
5548 | } | |
5549 | names[count] = nil; | |
b3962a83 | 5550 | |
66799735 A |
5551 | if (count == 0) { |
5552 | // Return nil instead of empty list if there are no images | |
5553 | free((void *)names); | |
5554 | names = nil; | |
5555 | } | |
5556 | ||
5557 | if (outCount) *outCount = count; | |
5558 | return names; | |
5559 | } | |
5560 | ||
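/* Usage sketch (illustrative): printing every loaded image that carries
 * Objective-C metadata. The returned array is malloc'd and owned by the
 * caller; the strings inside it are not. */
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpObjCImages(void)
{
    unsigned int count = 0;
    const char **names = objc_copyImageNames(&count);
    for (unsigned int i = 0; i < count; i++) {
        printf("objc image: %s\n", names[i]);
    }
    free(names);
}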
5561 | ||
5562 | /*********************************************************************** | |
5563 | * copyClassNamesForImage_nolock | |
5564 | * Copies class names from the given image. | |
5565 | * Missing weak-import classes are omitted. | |
5566 | * Swift class names are demangled. | |
5567 | * | |
5568 | * Locking: runtimeLock must be held by the caller | |
5569 | **********************************************************************/ | |
5570 | const char ** | |
5571 | copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount) | |
5572 | { | |
5573 | runtimeLock.assertLocked(); | |
1807f628 | 5574 | ASSERT(hi); |
66799735 A |
5575 | |
5576 | size_t count; | |
1807f628 | 5577 | classref_t const *classlist = _getObjc2ClassList(hi, &count); |
66799735 A |
5578 | const char **names = (const char **) |
5579 | malloc((count+1) * sizeof(const char *)); | |
5580 | ||
5581 | size_t shift = 0; | |
5582 | for (size_t i = 0; i < count; i++) { | |
7257e56c | 5583 | Class cls = remapClass(classlist[i]); |
ee974f79 | 5584 | if (cls) { |
f192a3e2 | 5585 | names[i-shift] = cls->demangledName(/* needs lock */false); |
ee974f79 A |
5586 | } else { |
5587 | shift++; // ignored weak-linked class | |
5588 | } | |
b3962a83 | 5589 | } |
ee974f79 | 5590 | count -= shift; |
7257e56c | 5591 | names[count] = nil; |
b3962a83 | 5592 | |
b3962a83 A |
5593 | if (outCount) *outCount = (unsigned int)count; |
5594 | return names; | |
5595 | } | |
5596 | ||
34d5b5e8 A |
5597 | Class * |
5598 | copyClassesForImage_nolock(header_info *hi, unsigned int *outCount) | |
5599 | { | |
5600 | runtimeLock.assertLocked(); | |
5601 | ASSERT(hi); | |
5602 | ||
5603 | size_t count; | |
5604 | classref_t const *classlist = _getObjc2ClassList(hi, &count); | |
5605 | Class *classes = (Class *) | |
5606 | malloc((count+1) * sizeof(Class)); | |
5607 | ||
5608 | size_t shift = 0; | |
5609 | for (size_t i = 0; i < count; i++) { | |
5610 | Class cls = remapClass(classlist[i]); | |
5611 | if (cls) { | |
5612 | classes[i-shift] = cls; | |
5613 | } else { | |
5614 | shift++; // ignored weak-linked class | |
5615 | } | |
5616 | } | |
5617 | count -= shift; | |
5618 | classes[count] = nil; | |
5619 | ||
5620 | if (outCount) *outCount = (unsigned int)count; | |
5621 | return classes; | |
5622 | } | |
b3962a83 | 5623 | |
66799735 A |
5624 | |
5625 | /*********************************************************************** | |
5626 | * objc_copyClassNamesForImage | |
5627 | * Copies class names from the named image. | |
5628 | * The image name must be identical to dladdr's dli_fname value. | |
5629 | * Missing weak-import classes are omitted. | |
5630 | * Swift class names are demangled. | |
5631 | * | |
5632 | * Locking: acquires runtimeLock | |
5633 | **********************************************************************/ | |
5634 | const char ** | |
5635 | objc_copyClassNamesForImage(const char *image, unsigned int *outCount) | |
5636 | { | |
5637 | if (!image) { | |
5638 | if (outCount) *outCount = 0; | |
5639 | return nil; | |
5640 | } | |
5641 | ||
5642 | mutex_locker_t lock(runtimeLock); | |
5643 | ||
5644 | // Find the image. | |
5645 | header_info *hi; | |
5646 | for (hi = FirstHeader; hi != nil; hi = hi->getNext()) { | |
5647 | #if TARGET_OS_WIN32 | |
5648 | if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break; | |
5649 | #else | |
5650 | if (0 == strcmp(image, hi->fname())) break; | |
5651 | #endif | |
5652 | } | |
5653 | ||
5654 | if (!hi) { | |
5655 | if (outCount) *outCount = 0; | |
5656 | return nil; | |
5657 | } | |
5658 | ||
5659 | return copyClassNamesForImage_nolock(hi, outCount); | |
5660 | } | |
5661 | ||
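/* Usage sketch (illustrative): listing the class names defined in one image.
 * Because the image string must match dladdr's dli_fname exactly, it is
 * taken from class_getImageName() here instead of being typed by hand. */
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpClassesSharingImageWith(Class cls)
{
    const char *image = class_getImageName(cls);
    if (!image) return;  // e.g. a dynamically created class

    unsigned int count = 0;
    const char **names = objc_copyClassNamesForImage(image, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s defines %s\n", image, names[i]);
    }
    free(names);
}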
34d5b5e8 A |
5662 | Class * |
5663 | objc_copyClassesForImage(const char *image, unsigned int *outCount) | |
5664 | { | |
5665 | if (!image) { | |
5666 | if (outCount) *outCount = 0; | |
5667 | return nil; | |
5668 | } | |
5669 | ||
5670 | mutex_locker_t lock(runtimeLock); | |
5671 | ||
5672 | // Find the image. | |
5673 | header_info *hi; | |
5674 | for (hi = FirstHeader; hi != nil; hi = hi->getNext()) { | |
5675 | if (0 == strcmp(image, hi->fname())) break; | |
5676 | } | |
5677 | ||
5678 | if (!hi) { | |
5679 | if (outCount) *outCount = 0; | |
5680 | return nil; | |
5681 | } | |
5682 | ||
5683 | return copyClassesForImage_nolock(hi, outCount); | |
5684 | } | |
66799735 A |
5685 | |
5686 | /*********************************************************************** | |
5687 | * objc_copyClassNamesForImageHeader | |
5688 | * Copies class names from the given image. | |
5689 | * Missing weak-import classes are omitted. | |
5690 | * Swift class names are demangled. | |
5691 | * | |
5692 | * Locking: acquires runtimeLock | |
5693 | **********************************************************************/ | |
5694 | const char ** | |
5695 | objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount) | |
5696 | { | |
5697 | if (!mh) { | |
5698 | if (outCount) *outCount = 0; | |
5699 | return nil; | |
5700 | } | |
5701 | ||
5702 | mutex_locker_t lock(runtimeLock); | |
5703 | ||
5704 | // Find the image. | |
5705 | header_info *hi; | |
5706 | for (hi = FirstHeader; hi != nil; hi = hi->getNext()) { | |
5707 | if (hi->mhdr() == (const headerType *)mh) break; | |
5708 | } | |
5709 | ||
5710 | if (!hi) { | |
5711 | if (outCount) *outCount = 0; | |
5712 | return nil; | |
5713 | } | |
5714 | ||
5715 | return copyClassNamesForImage_nolock(hi, outCount); | |
5716 | } | |
5717 | ||
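/* Usage sketch (illustrative): the mach_header variant skips the path
 * comparison done by objc_copyClassNamesForImage. Taking dyld image index 0
 * as the main executable is an assumption of this example. */
#include <objc/runtime.h>
#include <mach-o/dyld.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpMainExecutableClassNames(void)
{
    const struct mach_header *mh = _dyld_get_image_header(0);
    unsigned int count = 0;
    const char **names = objc_copyClassNamesForImageHeader(mh, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("main image class: %s\n", names[i]);
    }
    free(names);
}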
5718 | ||
8070259c A |
5719 | /*********************************************************************** |
5720 | * saveTemporaryString | |
5721 | * Save a string in a thread-local FIFO buffer. | |
5722 | * This is suitable for temporary strings generated for logging purposes. | |
5723 | **********************************************************************/ | |
5724 | static void | |
5725 | saveTemporaryString(char *str) | |
5726 | { | |
5727 | // Fixed-size FIFO. We free the first string, shift | |
5728 | // the rest, and add the new string to the end. | |
5729 | _objc_pthread_data *data = _objc_fetch_pthread_data(true); | |
5730 | if (data->printableNames[0]) { | |
5731 | free(data->printableNames[0]); | |
5732 | } | |
5733 | int last = countof(data->printableNames) - 1; | |
5734 | for (int i = 0; i < last; i++) { | |
5735 | data->printableNames[i] = data->printableNames[i+1]; | |
5736 | } | |
5737 | data->printableNames[last] = str; | |
5738 | } | |
5739 | ||
5740 | ||
5741 | /*********************************************************************** | |
5742 | * objc_class::nameForLogging | |
5743 | * Returns the class's name, suitable for display. | |
5744 | * The returned memory is TEMPORARY. Print it or copy it immediately. | |
5745 | * Locking: none | |
5746 | **********************************************************************/ | |
5747 | const char * | |
5748 | objc_class::nameForLogging() | |
5749 | { | |
5750 | // Handle the easy case directly. | |
5751 | if (isRealized() || isFuture()) { | |
f192a3e2 | 5752 | if (!isAnySwift()) { |
34d5b5e8 | 5753 | return data()->ro()->getName(); |
f192a3e2 A |
5754 | } |
5755 | auto rwe = data()->ext(); | |
5756 | if (rwe && rwe->demangledName) { | |
5757 | return rwe->demangledName; | |
5758 | } | |
8070259c A |
5759 | } |
5760 | ||
5761 | char *result; | |
5762 | ||
34d5b5e8 A |
5763 | if (isStubClass()) { |
5764 | asprintf(&result, "<stub class %p>", this); | |
5765 | } else if (const char *name = nonlazyMangledName()) { | |
5766 | char *de = copySwiftV1DemangledName(name); | |
5767 | if (de) result = de; | |
5768 | else result = strdup(name); | |
5769 | } else { | |
5770 | asprintf(&result, "<lazily named class %p>", this); | |
5771 | } | |
8070259c A |
5772 | saveTemporaryString(result); |
5773 | return result; | |
5774 | } | |
5775 | ||
5776 | ||
5777 | /*********************************************************************** | |
5778 | * objc_class::demangledName | |
5779 | * If needsLock is false, the caller must already hold runtimeLock. |
13ba007e | 5780 | * Locking: runtimeLock may or may not be held by the caller. |
8070259c | 5781 | **********************************************************************/ |
bd8dfcfc | 5782 | mutex_t DemangleCacheLock; |
1807f628 | 5783 | static objc::DenseSet<const char *> *DemangleCache; |
8070259c | 5784 | const char * |
f192a3e2 | 5785 | objc_class::demangledName(bool needsLock) |
8070259c | 5786 | { |
f192a3e2 A |
5787 | if (!needsLock) { |
5788 | runtimeLock.assertLocked(); | |
5789 | } | |
5790 | ||
8070259c A |
5791 | // Return previously demangled name if available. |
5792 | if (isRealized() || isFuture()) { | |
04bf5ced A |
5793 | // Swift metaclasses don't have the is-Swift bit. |
5794 | // We can't take this shortcut for them. | |
34d5b5e8 A |
5795 | if (isFuture() || (!isMetaClass() && !isAnySwift())) { |
5796 | return data()->ro()->getName(); | |
f192a3e2 A |
5797 | } |
5798 | auto rwe = data()->ext(); | |
5799 | if (rwe && rwe->demangledName) { | |
5800 | return rwe->demangledName; | |
5801 | } | |
8070259c A |
5802 | } |
5803 | ||
5804 | // Try demangling the mangled name. | |
5805 | const char *mangled = mangledName(); | |
5806 | char *de = copySwiftV1DemangledName(mangled); | |
f192a3e2 A |
5807 | class_rw_ext_t *rwe; |
5808 | ||
8070259c | 5809 | if (isRealized() || isFuture()) { |
f192a3e2 A |
5810 | if (needsLock) { |
5811 | mutex_locker_t lock(runtimeLock); | |
5812 | rwe = data()->extAllocIfNeeded(); | |
5813 | } else { | |
5814 | rwe = data()->extAllocIfNeeded(); | |
5815 | } | |
5816 | // Class is already realized or future. | |
8070259c | 5817 | // Save demangling result in rw data. |
66799735 | 5818 | // We may not own runtimeLock so use an atomic operation instead. |
8070259c | 5819 | if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled), |
f192a3e2 | 5820 | (void**)&rwe->demangledName)) |
8070259c A |
5821 | { |
5822 | if (de) free(de); | |
5823 | } | |
f192a3e2 | 5824 | return rwe->demangledName; |
8070259c A |
5825 | } |
5826 | ||
5827 | // Class is not yet realized. | |
5828 | if (!de) { | |
5829 | // Name is not mangled. Return it without caching. | |
5830 | return mangled; | |
5831 | } | |
5832 | ||
13ba007e A |
5833 | // Class is not yet realized and name is mangled. |
5834 | // Allocate the name but don't save it in the class. | |
5835 | // Save the name in a side cache instead to prevent leaks. | |
5836 | // When the class is actually realized we may allocate a second | |
5837 | // copy of the name, but we don't care. | |
5838 | // (Previously we would try to realize the class now and save the | |
5839 | // name there, but realization is more complicated for Swift classes.) | |
5840 | ||
8070259c | 5841 | // Only objc_copyClassNamesForImage() should get here. |
c1e772c4 A |
5842 | // fixme lldb's calls to class_getName() can also get here when |
5843 | // interrogating the dyld shared cache. (rdar://27258517) | |
1807f628 | 5844 | // fixme ASSERT(realize); |
13ba007e | 5845 | |
1807f628 | 5846 | const char *cached; |
13ba007e A |
5847 | { |
5848 | mutex_locker_t lock(DemangleCacheLock); | |
5849 | if (!DemangleCache) { | |
1807f628 | 5850 | DemangleCache = new objc::DenseSet<const char *>{}; |
c1e772c4 | 5851 | } |
1807f628 | 5852 | cached = *DemangleCache->insert(de).first; |
8070259c | 5853 | } |
13ba007e A |
5854 | if (cached != de) free(de); |
5855 | return cached; | |
8070259c A |
5856 | } |
5857 | ||
5858 | ||
5859 | /*********************************************************************** | |
5860 | * class_getName | |
5861 | * fixme | |
1807f628 | 5862 | * Locking: may acquire DemangleCacheLock |
8070259c A |
5863 | **********************************************************************/ |
5864 | const char *class_getName(Class cls) | |
5865 | { | |
5866 | if (!cls) return "nil"; | |
c1e772c4 | 5867 | // fixme lldb calls class_getName() on unrealized classes (rdar://27258517) |
1807f628 | 5868 | // ASSERT(cls->isRealized() || cls->isFuture()); |
f192a3e2 | 5869 | return cls->demangledName(/* needs lock */true); |
8070259c A |
5870 | } |
5871 | ||
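/* Usage sketch (illustrative): class_getName() returns the display name,
 * demangled for Swift classes per the logic above, and never nil. */
#include <objc/runtime.h>
#include <stdio.h>

static void logClassOf(id obj)
{
    Class cls = object_getClass(obj);
    printf("instance of %s\n", class_getName(cls));  // class_getName(Nil) is "nil"
}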
1807f628 A |
5872 | /*********************************************************************** |
5873 | * objc_debug_class_getNameRaw | |
5874 | * fixme | |
5875 | * Locking: none | |
5876 | **********************************************************************/ | |
5877 | const char *objc_debug_class_getNameRaw(Class cls) | |
5878 | { | |
5879 | if (!cls) return "nil"; | |
5880 | return cls->mangledName(); | |
5881 | } | |
5882 | ||
8070259c | 5883 | |
b3962a83 A |
5884 | /*********************************************************************** |
5885 | * class_getVersion | |
5886 | * fixme | |
5887 | * Locking: none | |
5888 | **********************************************************************/ | |
5889 | int | |
5890 | class_getVersion(Class cls) | |
5891 | { | |
5892 | if (!cls) return 0; | |
1807f628 | 5893 | ASSERT(cls->isRealized()); |
f192a3e2 A |
5894 | auto rwe = cls->data()->ext(); |
5895 | if (rwe) { | |
5896 | return rwe->version; | |
5897 | } | |
5898 | return cls->isMetaClass() ? 7 : 0; | |
b3962a83 A |
5899 | } |
5900 | ||
5901 | ||
5902 | /*********************************************************************** | |
5903 | * class_setVersion | |
5904 | * fixme | |
5905 | * Locking: none | |
5906 | **********************************************************************/ | |
5907 | void | |
5908 | class_setVersion(Class cls, int version) | |
5909 | { | |
5910 | if (!cls) return; | |
1807f628 | 5911 | ASSERT(cls->isRealized()); |
f192a3e2 A |
5912 | auto rwe = cls->data()->ext(); |
5913 | if (!rwe) { | |
5914 | mutex_locker_t lock(runtimeLock); | |
5915 | rwe = cls->data()->extAllocIfNeeded(); | |
5916 | } | |
5917 | ||
5918 | rwe->version = version; | |
b3962a83 A |
5919 | } |
5920 | ||
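/* Usage sketch (illustrative): class versions are plain ints kept in the
 * class_rw_ext_t; archivers historically compared them to detect layout
 * changes. The helper name is ours. */
#include <objc/runtime.h>
#include <assert.h>

static void bumpClassVersion(Class cls)
{
    int v = class_getVersion(cls);
    class_setVersion(cls, v + 1);
    assert(class_getVersion(cls) == v + 1);
}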
1807f628 A |
5921 | /*********************************************************************** |
5922 | * search_method_list_inline | |
5923 | **********************************************************************/ | |
34d5b5e8 | 5924 | template<class getNameFunc> |
1807f628 | 5925 | ALWAYS_INLINE static method_t * |
34d5b5e8 | 5926 | findMethodInSortedMethodList(SEL key, const method_list_t *list, const getNameFunc &getName) |
8972963c | 5927 | { |
1807f628 | 5928 | ASSERT(list); |
31875a97 | 5929 | |
bc4fafce A |
5930 | auto first = list->begin(); |
5931 | auto base = first; | |
5932 | decltype(first) probe; | |
5933 | ||
8972963c A |
5934 | uintptr_t keyValue = (uintptr_t)key; |
5935 | uint32_t count; | |
5936 | ||
5937 | for (count = list->count; count != 0; count >>= 1) { | |
5938 | probe = base + (count >> 1); | |
5939 | ||
34d5b5e8 | 5940 | uintptr_t probeValue = (uintptr_t)getName(probe); |
8972963c A |
5941 | |
5942 | if (keyValue == probeValue) { | |
5943 | // `probe` is a match. | |
5944 | // Rewind looking for the *first* occurrence of this value. | |
5945 | // This is required for correct category overrides. | |
34d5b5e8 | 5946 | while (probe > first && keyValue == (uintptr_t)getName((probe - 1))) { |
8972963c A |
5947 | probe--; |
5948 | } | |
bc4fafce | 5949 | return &*probe; |
8972963c A |
5950 | } |
5951 | ||
5952 | if (keyValue > probeValue) { | |
5953 | base = probe + 1; | |
5954 | count--; | |
5955 | } | |
5956 | } | |
5957 | ||
7257e56c | 5958 | return nil; |
8972963c A |
5959 | } |
5960 | ||
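/* Standalone sketch (illustrative) of the search used above: binary search
 * over selector values sorted by address that, on a hit, rewinds to the
 * *first* equal entry, so the occurrence that should win (e.g. a category
 * override earlier in the list) is the one returned. */
#include <stdint.h>
#include <stddef.h>

static ptrdiff_t firstMatchInSortedKeys(const uintptr_t *keys, uint32_t count,
                                        uintptr_t key)
{
    const uintptr_t *base = keys;
    const uintptr_t *probe;

    for (; count != 0; count >>= 1) {
        probe = base + (count >> 1);
        if (key == *probe) {
            while (probe > keys && key == probe[-1]) probe--;   // rewind
            return probe - keys;
        }
        if (key > *probe) {
            base = probe + 1;
            count--;
        }
    }
    return -1;  // not found
}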
34d5b5e8 A |
5961 | ALWAYS_INLINE static method_t * |
5962 | findMethodInSortedMethodList(SEL key, const method_list_t *list) | |
5963 | { | |
5964 | if (list->isSmallList()) { | |
5965 | if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) { | |
5966 | return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); }); | |
5967 | } else { | |
5968 | return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); }); | |
5969 | } | |
5970 | } else { | |
5971 | return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.big().name; }); | |
5972 | } | |
5973 | } | |
5974 | ||
5975 | template<class getNameFunc> | |
5976 | ALWAYS_INLINE static method_t * | |
5977 | findMethodInUnsortedMethodList(SEL sel, const method_list_t *list, const getNameFunc &getName) | |
5978 | { | |
5979 | for (auto& meth : *list) { | |
5980 | if (getName(meth) == sel) return &meth; | |
5981 | } | |
5982 | return nil; | |
5983 | } | |
5984 | ||
5985 | ALWAYS_INLINE static method_t * | |
5986 | findMethodInUnsortedMethodList(SEL key, const method_list_t *list) | |
5987 | { | |
5988 | if (list->isSmallList()) { | |
5989 | if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) { | |
5990 | return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); }); | |
5991 | } else { | |
5992 | return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); }); | |
5993 | } | |
5994 | } else { | |
5995 | return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.big().name; }); | |
5996 | } | |
5997 | } | |
5998 | ||
1807f628 A |
5999 | ALWAYS_INLINE static method_t * |
6000 | search_method_list_inline(const method_list_t *mlist, SEL sel) | |
8972963c | 6001 | { |
31875a97 | 6002 | int methodListIsFixedUp = mlist->isFixedUp(); |
bc4fafce | 6003 | int methodListHasExpectedSize = mlist->isExpectedSize(); |
8972963c | 6004 | |
1807f628 | 6005 | if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) { |
8972963c A |
6006 | return findMethodInSortedMethodList(sel, mlist); |
6007 | } else { | |
6008 | // Linear search of unsorted method list | |
34d5b5e8 A |
6009 | if (auto *m = findMethodInUnsortedMethodList(sel, mlist)) |
6010 | return m; | |
8972963c A |
6011 | } |
6012 | ||
31875a97 | 6013 | #if DEBUG |
8972963c | 6014 | // sanity-check negative results |
31875a97 A |
6015 | if (mlist->isFixedUp()) { |
6016 | for (auto& meth : *mlist) { | |
bc4fafce | 6017 | if (meth.name() == sel) { |
8972963c A |
6018 | _objc_fatal("linear search worked when binary search did not"); |
6019 | } | |
6020 | } | |
7af964d1 | 6021 | } |
8972963c | 6022 | #endif |
7af964d1 | 6023 | |
7257e56c | 6024 | return nil; |
8972963c | 6025 | } |
7af964d1 | 6026 | |
1807f628 A |
6027 | NEVER_INLINE static method_t * |
6028 | search_method_list(const method_list_t *mlist, SEL sel) | |
6029 | { | |
6030 | return search_method_list_inline(mlist, sel); | |
6031 | } | |
6032 | ||
6033 | /*********************************************************************** | |
6034 | * method_lists_contains_any | |
6035 | **********************************************************************/ | |
bc4fafce | 6036 | template<typename T> |
1807f628 | 6037 | static NEVER_INLINE bool |
bc4fafce | 6038 | method_lists_contains_any(T *mlists, T *end, |
1807f628 A |
6039 | SEL sels[], size_t selcount) |
6040 | { | |
6041 | while (mlists < end) { | |
6042 | const method_list_t *mlist = *mlists++; | |
6043 | int methodListIsFixedUp = mlist->isFixedUp(); | |
bc4fafce | 6044 | int methodListHasExpectedSize = mlist->entsize() == sizeof(struct method_t::big); |
1807f628 A |
6045 | |
6046 | if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) { | |
6047 | for (size_t i = 0; i < selcount; i++) { | |
6048 | if (findMethodInSortedMethodList(sels[i], mlist)) { | |
6049 | return true; | |
6050 | } | |
6051 | } | |
6052 | } else { | |
34d5b5e8 A |
6053 | for (size_t i = 0; i < selcount; i++) { |
6054 | if (findMethodInUnsortedMethodList(sels[i], mlist)) { | |
6055 | return true; | |
1807f628 A |
6056 | } |
6057 | } | |
6058 | } | |
6059 | } | |
6060 | return false; | |
6061 | } | |
6062 | ||
34d5b5e8 | 6063 | |
1807f628 A |
6064 | /*********************************************************************** |
6065 | * getMethodNoSuper_nolock | |
6066 | * fixme | |
6067 | * Locking: runtimeLock must be read- or write-locked by the caller | |
6068 | **********************************************************************/ | |
7af964d1 | 6069 | static method_t * |
7257e56c | 6070 | getMethodNoSuper_nolock(Class cls, SEL sel) |
7af964d1 | 6071 | { |
31875a97 | 6072 | runtimeLock.assertLocked(); |
7af964d1 | 6073 | |
1807f628 | 6074 | ASSERT(cls->isRealized()); |
7af964d1 | 6075 | // fixme nil cls? |
7257e56c | 6076 | // fixme nil sel? |
7af964d1 | 6077 | |
f192a3e2 A |
6078 | auto const methods = cls->data()->methods(); |
6079 | for (auto mlists = methods.beginLists(), | |
6080 | end = methods.endLists(); | |
31875a97 A |
6081 | mlists != end; |
6082 | ++mlists) | |
6083 | { | |
1807f628 A |
6084 | // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest |
6085 | // caller of search_method_list, inlining it turns | |
6086 | // getMethodNoSuper_nolock into a frame-less function and eliminates | |
6087 | // any store from this codepath. | |
6088 | method_t *m = search_method_list_inline(*mlists, sel); | |
8972963c | 6089 | if (m) return m; |
31875a97 | 6090 | } |
7af964d1 | 6091 | |
7257e56c | 6092 | return nil; |
b3962a83 A |
6093 | } |
6094 | ||
6095 | ||
6096 | /*********************************************************************** | |
7af964d1 | 6097 | * getMethod_nolock |
b3962a83 | 6098 | * fixme |
7af964d1 | 6099 | * Locking: runtimeLock must be read- or write-locked by the caller |
b3962a83 | 6100 | **********************************************************************/ |
7af964d1 | 6101 | static method_t * |
7257e56c | 6102 | getMethod_nolock(Class cls, SEL sel) |
b3962a83 | 6103 | { |
7257e56c | 6104 | method_t *m = nil; |
b3962a83 | 6105 | |
31875a97 | 6106 | runtimeLock.assertLocked(); |
b3962a83 | 6107 | |
7af964d1 | 6108 | // fixme nil cls? |
7257e56c | 6109 | // fixme nil sel? |
b3962a83 | 6110 | |
1807f628 | 6111 | ASSERT(cls->isRealized()); |
b3962a83 | 6112 | |
7257e56c | 6113 | while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) { |
34d5b5e8 | 6114 | cls = cls->getSuperclass(); |
b3962a83 A |
6115 | } |
6116 | ||
7af964d1 | 6117 | return m; |
b3962a83 A |
6118 | } |
6119 | ||
6120 | ||
6121 | /*********************************************************************** | |
7af964d1 | 6122 | * _class_getMethod |
b3962a83 | 6123 | * fixme |
7af964d1 | 6124 | * Locking: read-locks runtimeLock |
b3962a83 | 6125 | **********************************************************************/ |
7257e56c | 6126 | static Method _class_getMethod(Class cls, SEL sel) |
8972963c | 6127 | { |
66799735 | 6128 | mutex_locker_t lock(runtimeLock); |
31875a97 | 6129 | return getMethod_nolock(cls, sel); |
7af964d1 | 6130 | } |
b3962a83 | 6131 | |
7257e56c | 6132 | |
b3962a83 | 6133 | /*********************************************************************** |
7257e56c A |
6134 | * class_getInstanceMethod. Return the instance method for the |
6135 | * specified class and selector. | |
b3962a83 | 6136 | **********************************************************************/ |
7257e56c | 6137 | Method class_getInstanceMethod(Class cls, SEL sel) |
b3962a83 | 6138 | { |
7257e56c A |
6139 | if (!cls || !sel) return nil; |
6140 | ||
6141 | // This deliberately avoids +initialize because it historically did so. | |
6142 | ||
6143 | // This implementation is a bit weird because it's the only place that | |
6144 | // wants a Method instead of an IMP. | |
6145 | ||
6146 | #warning fixme build and search caches | |
6147 | ||
6148 | // Search method lists, try method resolver, etc. | |
1807f628 | 6149 | lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER); |
7257e56c A |
6150 | |
6151 | #warning fixme build and search caches | |
6152 | ||
6153 | return _class_getMethod(cls, sel); | |
b3962a83 A |
6154 | } |
6155 | ||
6156 | ||
13ba007e A |
6157 | /*********************************************************************** |
6158 | * resolveClassMethod | |
6159 | * Call +resolveClassMethod, looking for a method to be added to class cls. | |
6160 | * cls should be a metaclass. | |
6161 | * Does not check if the method already exists. | |
6162 | **********************************************************************/ | |
1807f628 | 6163 | static void resolveClassMethod(id inst, SEL sel, Class cls) |
13ba007e A |
6164 | { |
6165 | runtimeLock.assertUnlocked(); | |
1807f628 A |
6166 | ASSERT(cls->isRealized()); |
6167 | ASSERT(cls->isMetaClass()); | |
13ba007e | 6168 | |
34d5b5e8 | 6169 | if (!lookUpImpOrNilTryCache(inst, @selector(resolveClassMethod:), cls)) { |
13ba007e A |
6170 | // Resolver not implemented. |
6171 | return; | |
6172 | } | |
6173 | ||
6174 | Class nonmeta; | |
6175 | { | |
6176 | mutex_locker_t lock(runtimeLock); | |
6177 | nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst); | |
6178 | // +initialize path should have realized nonmeta already | |
6179 | if (!nonmeta->isRealized()) { | |
6180 | _objc_fatal("nonmeta class %s (%p) unexpectedly not realized", | |
6181 | nonmeta->nameForLogging(), nonmeta); | |
6182 | } | |
6183 | } | |
6184 | BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend; | |
1807f628 | 6185 | bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel); |
13ba007e A |
6186 | |
6187 | // Cache the result (good or bad) so the resolver doesn't fire next time. | |
6188 | // +resolveClassMethod adds to self->ISA() a.k.a. cls | |
34d5b5e8 | 6189 | IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); |
13ba007e A |
6190 | |
6191 | if (resolved && PrintResolving) { | |
6192 | if (imp) { | |
6193 | _objc_inform("RESOLVE: method %c[%s %s] " | |
6194 | "dynamically resolved to %p", | |
6195 | cls->isMetaClass() ? '+' : '-', | |
6196 | cls->nameForLogging(), sel_getName(sel), imp); | |
6197 | } | |
6198 | else { | |
6199 | // Method resolver didn't add anything? | |
6200 | _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES" | |
6201 | ", but no new implementation of %c[%s %s] was found", | |
6202 | cls->nameForLogging(), sel_getName(sel), | |
6203 | cls->isMetaClass() ? '+' : '-', | |
6204 | cls->nameForLogging(), sel_getName(sel)); | |
6205 | } | |
6206 | } | |
6207 | } | |
6208 | ||
6209 | ||
6210 | /*********************************************************************** | |
6211 | * resolveInstanceMethod | |
6212 | * Call +resolveInstanceMethod, looking for a method to be added to class cls. | |
6213 | * cls may be a metaclass or a non-meta class. | |
6214 | * Does not check if the method already exists. | |
6215 | **********************************************************************/ | |
1807f628 | 6216 | static void resolveInstanceMethod(id inst, SEL sel, Class cls) |
13ba007e A |
6217 | { |
6218 | runtimeLock.assertUnlocked(); | |
1807f628 A |
6219 | ASSERT(cls->isRealized()); |
6220 | SEL resolve_sel = @selector(resolveInstanceMethod:); | |
13ba007e | 6221 | |
34d5b5e8 | 6222 | if (!lookUpImpOrNilTryCache(cls, resolve_sel, cls->ISA(/*authenticated*/true))) { |
13ba007e A |
6223 | // Resolver not implemented. |
6224 | return; | |
6225 | } | |
6226 | ||
6227 | BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend; | |
1807f628 | 6228 | bool resolved = msg(cls, resolve_sel, sel); |
13ba007e A |
6229 | |
6230 | // Cache the result (good or bad) so the resolver doesn't fire next time. | |
6231 | // +resolveInstanceMethod adds to self a.k.a. cls | |
34d5b5e8 | 6232 | IMP imp = lookUpImpOrNilTryCache(inst, sel, cls); |
13ba007e A |
6233 | |
6234 | if (resolved && PrintResolving) { | |
6235 | if (imp) { | |
6236 | _objc_inform("RESOLVE: method %c[%s %s] " | |
6237 | "dynamically resolved to %p", | |
6238 | cls->isMetaClass() ? '+' : '-', | |
6239 | cls->nameForLogging(), sel_getName(sel), imp); | |
6240 | } | |
6241 | else { | |
6242 | // Method resolver didn't add anything? | |
6243 | _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES" | |
6244 | ", but no new implementation of %c[%s %s] was found", | |
6245 | cls->nameForLogging(), sel_getName(sel), | |
6246 | cls->isMetaClass() ? '+' : '-', | |
6247 | cls->nameForLogging(), sel_getName(sel)); | |
6248 | } | |
6249 | } | |
6250 | } | |
6251 | ||
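/* Client-side sketch (illustrative) of the machinery above: a class that
 * satisfies an unknown selector from +resolveInstanceMethod: by installing
 * an IMP with class_addMethod(). The selector name "dynamicValue" and the
 * "@@:" type encoding are assumptions of the example. */
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

static id dynamicValueIMP(id self, SEL _cmd) { return @"resolved at runtime"; }

@interface Resolver : NSObject
@end

@implementation Resolver
+ (BOOL)resolveInstanceMethod:(SEL)sel {
    if (sel == sel_registerName("dynamicValue")) {
        class_addMethod(self, sel, (IMP)dynamicValueIMP, "@@:");
        return YES;   // the runtime retries the lookup and finds the new IMP
    }
    return [super resolveInstanceMethod:sel];
}
@end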
6252 | ||
6253 | /*********************************************************************** | |
1807f628 | 6254 | * resolveMethod_locked |
13ba007e | 6255 | * Call +resolveClassMethod or +resolveInstanceMethod. |
1807f628 A |
6256 | * |
6257 | * Called with the runtimeLock held to avoid pressure in the caller | |
6258 | * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller |
13ba007e | 6259 | **********************************************************************/ |
1807f628 A |
6260 | static NEVER_INLINE IMP |
6261 | resolveMethod_locked(id inst, SEL sel, Class cls, int behavior) | |
13ba007e | 6262 | { |
1807f628 A |
6263 | runtimeLock.assertLocked(); |
6264 | ASSERT(cls->isRealized()); | |
6265 | ||
6266 | runtimeLock.unlock(); | |
13ba007e A |
6267 | |
6268 | if (! cls->isMetaClass()) { | |
6269 | // try [cls resolveInstanceMethod:sel] | |
1807f628 | 6270 | resolveInstanceMethod(inst, sel, cls); |
13ba007e A |
6271 | } |
6272 | else { | |
6273 | // try [nonMetaClass resolveClassMethod:sel] | |
6274 | // and [cls resolveInstanceMethod:sel] | |
1807f628 | 6275 | resolveClassMethod(inst, sel, cls); |
34d5b5e8 | 6276 | if (!lookUpImpOrNilTryCache(inst, sel, cls)) { |
1807f628 | 6277 | resolveInstanceMethod(inst, sel, cls); |
13ba007e A |
6278 | } |
6279 | } | |
1807f628 A |
6280 | |
6281 | // chances are that calling the resolver has populated the cache |
6282 | // so attempt using it | |
34d5b5e8 | 6283 | return lookUpImpOrForwardTryCache(inst, sel, cls, behavior); |
13ba007e A |
6284 | } |
6285 | ||
6286 | ||
b3962a83 | 6287 | /*********************************************************************** |
7257e56c A |
6288 | * log_and_fill_cache |
6289 | * Log this method call. If the logger permits it, fill the method cache. | |
6290 | * cls is the method whose cache should be filled. | |
6291 | * implementer is the class that owns the implementation in question. | |
b3962a83 | 6292 | **********************************************************************/ |
7257e56c | 6293 | static void |
31875a97 | 6294 | log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer) |
b3962a83 | 6295 | { |
7257e56c | 6296 | #if SUPPORT_MESSAGE_LOGGING |
1807f628 | 6297 | if (slowpath(objcMsgLogEnabled && implementer)) { |
7257e56c | 6298 | bool cacheIt = logMessageSend(implementer->isMetaClass(), |
8070259c A |
6299 | cls->nameForLogging(), |
6300 | implementer->nameForLogging(), | |
7257e56c A |
6301 | sel); |
6302 | if (!cacheIt) return; | |
6303 | } | |
6304 | #endif | |
34d5b5e8 | 6305 | cls->cache.insert(sel, imp, receiver); |
b3962a83 A |
6306 | } |
6307 | ||
6308 | ||
6309 | /*********************************************************************** | |
34d5b5e8 A |
6310 | * realizeAndInitializeIfNeeded_locked |
6311 | * Realize the given class if not already realized, and initialize it if | |
6312 | * not already initialized. | |
6313 | * inst is an instance of cls or a subclass, or nil if none is known. | |
6314 | * cls is the class to initialize and realize. | |
6315 | * initialize is true to initialize the class, false to skip initialization. |
6316 | **********************************************************************/ | |
6317 | static Class | |
6318 | realizeAndInitializeIfNeeded_locked(id inst, Class cls, bool initialize) | |
6319 | { | |
6320 | runtimeLock.assertLocked(); | |
6321 | if (slowpath(!cls->isRealized())) { | |
6322 | cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock); | |
6323 | // runtimeLock may have been dropped but is now locked again | |
6324 | } | |
6325 | ||
6326 | if (slowpath(initialize && !cls->isInitialized())) { | |
6327 | cls = initializeAndLeaveLocked(cls, inst, runtimeLock); | |
6328 | // runtimeLock may have been dropped but is now locked again | |
6329 | ||
6330 | // If sel == initialize, class_initialize will send +initialize and | |
6331 | // then the messenger will send +initialize again after this | |
6332 | // procedure finishes. Of course, if this is not being called | |
6333 | // from the messenger then it won't happen. 2778172 | |
6334 | } | |
6335 | return cls; | |
6336 | } | |
6337 | ||
6338 | /*********************************************************************** | |
6339 | * lookUpImpOrForward / lookUpImpOrForwardTryCache / lookUpImpOrNilTryCache | |
6340 | * The standard IMP lookup. | |
6341 | * | |
6342 | * The TryCache variant attempts a fast-path lookup in the IMP Cache. | |
6343 | * Most callers should use lookUpImpOrForwardTryCache with LOOKUP_INITIALIZE | |
6344 | * | |
1807f628 | 6345 | * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails) |
34d5b5e8 A |
6346 | * With LOOKUP_NIL: returns nil on negative cache hits |
6347 | * | |
6348 | * inst is an instance of cls or a subclass thereof, or nil if none is known. | |
7257e56c A |
6349 | * If cls is an un-initialized metaclass then a non-nil inst is faster. |
6350 | * May return _objc_msgForward_impcache. IMPs destined for external use | |
6351 | * must be converted to _objc_msgForward or _objc_msgForward_stret. | |
1807f628 | 6352 | * If you don't want forwarding at all, use LOOKUP_NIL. |
b3962a83 | 6353 | **********************************************************************/ |
34d5b5e8 A |
6354 | ALWAYS_INLINE |
6355 | static IMP _lookUpImpTryCache(id inst, SEL sel, Class cls, int behavior) | |
6356 | { | |
6357 | runtimeLock.assertUnlocked(); | |
6358 | ||
6359 | if (slowpath(!cls->isInitialized())) { | |
6360 | // see comment in lookUpImpOrForward | |
6361 | return lookUpImpOrForward(inst, sel, cls, behavior); | |
6362 | } | |
6363 | ||
6364 | IMP imp = cache_getImp(cls, sel); | |
6365 | if (imp != NULL) goto done; | |
6366 | #if CONFIG_USE_PREOPT_CACHES | |
6367 | if (fastpath(cls->cache.isConstantOptimizedCache(/* strict */true))) { | |
6368 | imp = cache_getImp(cls->cache.preoptFallbackClass(), sel); | |
6369 | } | |
6370 | #endif | |
6371 | if (slowpath(imp == NULL)) { | |
6372 | return lookUpImpOrForward(inst, sel, cls, behavior); | |
6373 | } | |
6374 | ||
6375 | done: | |
6376 | if ((behavior & LOOKUP_NIL) && imp == (IMP)_objc_msgForward_impcache) { | |
6377 | return nil; | |
6378 | } | |
6379 | return imp; | |
6380 | } | |
6381 | ||
6382 | IMP lookUpImpOrForwardTryCache(id inst, SEL sel, Class cls, int behavior) | |
6383 | { | |
6384 | return _lookUpImpTryCache(inst, sel, cls, behavior); | |
6385 | } | |
6386 | ||
6387 | IMP lookUpImpOrNilTryCache(id inst, SEL sel, Class cls, int behavior) | |
6388 | { | |
6389 | return _lookUpImpTryCache(inst, sel, cls, behavior | LOOKUP_NIL); | |
6390 | } | |
6391 | ||
6392 | NEVER_INLINE | |
1807f628 | 6393 | IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior) |
b3962a83 | 6394 | { |
1807f628 | 6395 | const IMP forward_imp = (IMP)_objc_msgForward_impcache; |
7257e56c | 6396 | IMP imp = nil; |
1807f628 | 6397 | Class curClass; |
7af964d1 | 6398 | |
31875a97 | 6399 | runtimeLock.assertUnlocked(); |
7257e56c | 6400 | |
34d5b5e8 A |
6401 | if (slowpath(!cls->isInitialized())) { |
6402 | // The first message sent to a class is often +new or +alloc, or +self | |
6403 | // which goes through objc_opt_* or various optimized entry points. | |
6404 | // | |
6405 | // However, the class isn't realized/initialized yet at this point, | |
6406 | // and the optimized entry points fall down through objc_msgSend, | |
6407 | // which ends up here. | |
6408 | // | |
6409 | // We really want to avoid caching these, as it can cause IMP caches | |
6410 | // to be made with a single entry forever. | |
6411 | // | |
6412 | // Note that this check is racy as several threads might try to | |
6413 | // message a given class for the first time at the same time, | |
6414 | // in which case we might cache anyway. | |
6415 | behavior |= LOOKUP_NOCACHE; | |
7257e56c A |
6416 | } |
6417 | ||
bd8dfcfc A |
6418 | // runtimeLock is held during isRealized and isInitialized checking |
6419 | // to prevent races against concurrent realization. | |
6420 | ||
6421 | // runtimeLock is held during method search to make | |
6422 | // method-lookup + cache-fill atomic with respect to method addition. | |
6423 | // Otherwise, a category could be added but ignored indefinitely because | |
6424 | // the cache was re-filled with the old value after the cache flush on | |
6425 | // behalf of the category. | |
6426 | ||
66799735 | 6427 | runtimeLock.lock(); |
1807f628 A |
6428 | |
6429 | // We don't want people to be able to craft a binary blob that looks like | |
6430 | // a class but really isn't one and use it to mount a CFI attack. |
6431 | // | |
6432 | // To make these harder we want to make sure this is a class that was | |
6433 | // either built into the binary or legitimately registered through | |
6434 | // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair. | |
66799735 | 6435 | checkIsKnownClass(cls); |
bd8dfcfc | 6436 | |
34d5b5e8 A |
6437 | cls = realizeAndInitializeIfNeeded_locked(inst, cls, behavior & LOOKUP_INITIALIZE); |
6438 | // runtimeLock may have been dropped but is now locked again | |
66799735 | 6439 | runtimeLock.assertLocked(); |
1807f628 A |
6440 | curClass = cls; |
6441 | ||
34d5b5e8 | 6442 | // The code used to look up the class's cache again right after |
1807f628 A |
6443 | // we take the lock but for the vast majority of the cases |
6444 | // evidence shows this is a miss most of the time, hence a time loss. | |
6445 | // | |
6446 | // The only codepath calling into this without having performed some | |
6447 | // kind of cache lookup is class_getInstanceMethod(). | |
6448 | ||
6449 | for (unsigned attempts = unreasonableClassCount();;) { | |
34d5b5e8 A |
6450 | if (curClass->cache.isConstantOptimizedCache(/* strict */true)) { |
6451 | #if CONFIG_USE_PREOPT_CACHES | |
6452 | imp = cache_getImp(curClass, sel); | |
6453 | if (imp) goto done_unlock; | |
6454 | curClass = curClass->cache.preoptFallbackClass(); | |
6455 | #endif | |
6456 | } else { | |
6457 | // curClass method list. | |
6458 | Method meth = getMethodNoSuper_nolock(curClass, sel); | |
6459 | if (meth) { | |
6460 | imp = meth->imp(false); | |
6461 | goto done; | |
6462 | } | |
7257e56c | 6463 | |
34d5b5e8 A |
6464 | if (slowpath((curClass = curClass->getSuperclass()) == nil)) { |
6465 | // No implementation found, and method resolver didn't help. | |
6466 | // Use forwarding. | |
6467 | imp = forward_imp; | |
6468 | break; | |
6469 | } | |
7257e56c | 6470 | } |
7257e56c | 6471 | |
1807f628 A |
6472 | // Halt if there is a cycle in the superclass chain. |
6473 | if (slowpath(--attempts == 0)) { | |
6474 | _objc_fatal("Memory corruption in class list."); | |
6475 | } | |
7257e56c | 6476 | |
1807f628 A |
6477 | // Superclass cache. |
6478 | imp = cache_getImp(curClass, sel); | |
6479 | if (slowpath(imp == forward_imp)) { | |
6480 | // Found a forward:: entry in a superclass. | |
6481 | // Stop searching, but don't cache yet; call method | |
6482 | // resolver for this class first. | |
6483 | break; | |
6484 | } | |
6485 | if (fastpath(imp)) { | |
6486 | // Found the method in a superclass. Cache it in this class. | |
6487 | goto done; | |
6488 | } | |
7257e56c A |
6489 | } |
6490 | ||
1807f628 | 6491 | // No implementation found. Try method resolver once. |
7257e56c | 6492 | |
1807f628 A |
6493 | if (slowpath(behavior & LOOKUP_RESOLVER)) { |
6494 | behavior ^= LOOKUP_RESOLVER; | |
6495 | return resolveMethod_locked(inst, sel, cls, behavior); | |
6496 | } | |
7257e56c A |
6497 | |
6498 | done: | |
34d5b5e8 A |
6499 | if (fastpath((behavior & LOOKUP_NOCACHE) == 0)) { |
6500 | #if CONFIG_USE_PREOPT_CACHES | |
6501 | while (cls->cache.isConstantOptimizedCache(/* strict */true)) { | |
6502 | cls = cls->cache.preoptFallbackClass(); | |
6503 | } | |
6504 | #endif | |
6505 | log_and_fill_cache(cls, imp, sel, inst, curClass); | |
6506 | } | |
6507 | done_unlock: | |
66799735 | 6508 | runtimeLock.unlock(); |
1807f628 A |
6509 | if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) { |
6510 | return nil; | |
6511 | } | |
7257e56c | 6512 | return imp; |
b3962a83 A |
6513 | } |
6514 | ||
b3962a83 | 6515 | /*********************************************************************** |
7257e56c | 6516 | * lookupMethodInClassAndLoadCache. |
1807f628 | 6517 | * Like lookUpImpOrForward, but does not search superclasses. |
7257e56c | 6518 | * Caches and returns objc_msgForward if the method is not found in the class. |
b3962a83 | 6519 | **********************************************************************/ |
7257e56c | 6520 | IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel) |
b3962a83 | 6521 | { |
7257e56c A |
6522 | IMP imp; |
6523 | ||
c1e772c4 | 6524 | // fixme this is incomplete - no resolver, +initialize - |
7257e56c | 6525 | // but it's only used for .cxx_construct/destruct so we don't care |
1807f628 | 6526 | ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct); |
7257e56c A |
6527 | |
6528 | // Search cache first. | |
34d5b5e8 A |
6529 | // |
6530 | // If the cache used for the lookup is preoptimized, | |
6531 | // we ask for `_objc_msgForward_impcache` to be returned on cache misses, | |
6532 | // so that there's no TOCTOU race between using `isConstantOptimizedCache` | |
6533 | // and calling cache_getImp() when not under the runtime lock. | |
6534 | // | |
6535 | // For dynamic caches, a miss will return `nil` | |
6536 | imp = cache_getImp(cls, sel, _objc_msgForward_impcache); | |
7257e56c | 6537 | |
34d5b5e8 A |
6538 | if (slowpath(imp == nil)) { |
6539 | // Cache miss. Search method list. | |
7257e56c | 6540 | |
34d5b5e8 | 6541 | mutex_locker_t lock(runtimeLock); |
7257e56c | 6542 | |
34d5b5e8 A |
6543 | if (auto meth = getMethodNoSuper_nolock(cls, sel)) { |
6544 | // Hit in method list. Cache it. | |
6545 | imp = meth->imp(false); | |
6546 | } else { | |
6547 | imp = _objc_msgForward_impcache; | |
6548 | } | |
7257e56c | 6549 | |
34d5b5e8 A |
6550 | // Note, because we do not hold the runtime lock above |
6551 | // isConstantOptimizedCache might flip, so we need to double check | |
6552 | if (!cls->cache.isConstantOptimizedCache(true /* strict */)) { | |
6553 | cls->cache.insert(sel, imp, nil); | |
6554 | } | |
7257e56c | 6555 | } |
34d5b5e8 A |
6556 | |
6557 | return imp; | |
b3962a83 A |
6558 | } |
6559 | ||
6560 | ||
6561 | /*********************************************************************** | |
7257e56c | 6562 | * class_getProperty |
b3962a83 | 6563 | * fixme |
7257e56c | 6564 | * Locking: read-locks runtimeLock |
b3962a83 | 6565 | **********************************************************************/ |
7257e56c | 6566 | objc_property_t class_getProperty(Class cls, const char *name) |
b3962a83 | 6567 | { |
7257e56c | 6568 | if (!cls || !name) return nil; |
b3962a83 | 6569 | |
66799735 | 6570 | mutex_locker_t lock(runtimeLock); |
8972963c | 6571 | |
66799735 A |
6572 | checkIsKnownClass(cls); |
6573 | ||
1807f628 | 6574 | ASSERT(cls->isRealized()); |
b3962a83 | 6575 | |
34d5b5e8 | 6576 | for ( ; cls; cls = cls->getSuperclass()) { |
f192a3e2 | 6577 | for (auto& prop : cls->data()->properties()) { |
31875a97 A |
6578 | if (0 == strcmp(name, prop.name)) { |
6579 | return (objc_property_t)&prop; |
7257e56c A |
6580 | } |
6581 | } | |
6582 | } | |
31875a97 A |
6583 | |
6584 | return nil; | |
b3962a83 A |
6585 | } |
6586 | ||
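/* Usage sketch (illustrative): class_getProperty() walks the superclass
 * chain, so a property declared on an ancestor is still found. The helper
 * name is ours. */
#include <objc/runtime.h>
#include <stdio.h>

static void describeProperty(Class cls, const char *name)
{
    objc_property_t prop = class_getProperty(cls, name);
    if (prop) {
        printf("%s.%s: %s\n", class_getName(cls), name, property_getAttributes(prop));
    } else {
        printf("%s has no property named %s\n", class_getName(cls), name);
    }
}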
6587 | ||
6588 | /*********************************************************************** | |
6589 | * Locking: fixme | |
6590 | **********************************************************************/ | |
7257e56c A |
6591 | |
6592 | Class gdb_class_getClass(Class cls) | |
b3962a83 | 6593 | { |
8070259c | 6594 | const char *className = cls->mangledName(); |
7257e56c A |
6595 | if(!className || !strlen(className)) return Nil; |
6596 | Class rCls = look_up_class(className, NO, NO); | |
6597 | return rCls; | |
b3962a83 A |
6598 | } |
6599 | ||
7257e56c | 6600 | Class gdb_object_getClass(id obj) |
7af964d1 | 6601 | { |
7257e56c A |
6602 | if (!obj) return nil; |
6603 | return gdb_class_getClass(obj->getIsa()); | |
7af964d1 A |
6604 | } |
6605 | ||
6606 | ||
6607 | /*********************************************************************** | |
7257e56c | 6608 | * Locking: write-locks runtimeLock |
7af964d1 | 6609 | **********************************************************************/ |
7257e56c A |
6610 | void |
6611 | objc_class::setInitialized() | |
7af964d1 | 6612 | { |
7257e56c | 6613 | Class metacls; |
8070259c | 6614 | Class cls; |
7257e56c | 6615 | |
1807f628 | 6616 | ASSERT(!isMetaClass()); |
7257e56c | 6617 | |
8070259c A |
6618 | cls = (Class)this; |
6619 | metacls = cls->ISA(); | |
6620 | ||
66799735 | 6621 | mutex_locker_t lock(runtimeLock); |
8070259c | 6622 | |
8070259c | 6623 | // Special cases: |
1807f628 A |
6624 | // - NSObject AWZ class methods are default. |
6625 | // - NSObject RR class and instance methods are default. | |
6626 | // - NSObject Core class and instance methods are default. | |
6627 | // adjustCustomFlagsForMethodChange() also knows these special cases. | |
8070259c A |
6628 | // attachMethodLists() also knows these special cases. |
6629 | ||
1807f628 A |
6630 | objc::AWZScanner::scanInitializedClass(cls, metacls); |
6631 | objc::RRScanner::scanInitializedClass(cls, metacls); | |
6632 | objc::CoreScanner::scanInitializedClass(cls, metacls); | |
7257e56c | 6633 | |
34d5b5e8 A |
6634 | #if CONFIG_USE_PREOPT_CACHES |
6635 | cls->cache.maybeConvertToPreoptimized(); | |
6636 | metacls->cache.maybeConvertToPreoptimized(); | |
6637 | #endif | |
6638 | ||
6639 | if (PrintInitializing) { | |
6640 | _objc_inform("INITIALIZE: thread %p: setInitialized(%s)", | |
6641 | objc_thread_self(), cls->nameForLogging()); | |
6642 | } | |
8070259c A |
6643 | // Update the +initialize flags. |
6644 | // Do this last. | |
7257e56c | 6645 | metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING); |
7af964d1 A |
6646 | } |
6647 | ||
6648 | ||
1807f628 | 6649 | void |
c1e772c4 | 6650 | objc_class::printInstancesRequireRawIsa(bool inherited) |
8070259c | 6651 | { |
1807f628 A |
6652 | ASSERT(PrintRawIsa); |
6653 | ASSERT(instancesRequireRawIsa()); | |
8070259c A |
6654 | _objc_inform("RAW ISA: %s%s%s", nameForLogging(), |
6655 | isMetaClass() ? " (meta)" : "", | |
6656 | inherited ? " (inherited)" : ""); | |
6657 | } | |
6658 | ||
8070259c A |
6659 | /*********************************************************************** |
6660 | * Mark this class and all of its subclasses as requiring raw isa pointers | |
6661 | **********************************************************************/ | |
1807f628 | 6662 | void objc_class::setInstancesRequireRawIsaRecursively(bool inherited) |
8070259c A |
6663 | { |
6664 | Class cls = (Class)this; | |
66799735 | 6665 | runtimeLock.assertLocked(); |
8070259c | 6666 | |
c1e772c4 | 6667 | if (instancesRequireRawIsa()) return; |
8070259c | 6668 | |
1807f628 | 6669 | foreach_realized_class_and_subclass(cls, [=](Class c){ |
c1e772c4 | 6670 | if (c->instancesRequireRawIsa()) { |
1807f628 | 6671 | return false; |
cd5f04f5 | 6672 | } |
8070259c | 6673 | |
1807f628 | 6674 | c->setInstancesRequireRawIsa(); |
8070259c | 6675 | |
c1e772c4 | 6676 | if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls); |
1807f628 | 6677 | return true; |
cd5f04f5 | 6678 | }); |
8972963c A |
6679 | } |
6680 | ||
34d5b5e8 A |
6681 | #if CONFIG_USE_PREOPT_CACHES |
6682 | void objc_class::setDisallowPreoptCachesRecursively(const char *why) | |
6683 | { | |
6684 | Class cls = (Class)this; | |
6685 | runtimeLock.assertLocked(); | |
6686 | ||
6687 | if (!allowsPreoptCaches()) return; | |
6688 | ||
6689 | foreach_realized_class_and_subclass(cls, [=](Class c){ | |
6690 | if (!c->allowsPreoptCaches()) { | |
6691 | return false; | |
6692 | } | |
6693 | ||
6694 | if (c->cache.isConstantOptimizedCache(/* strict */true)) { | |
6695 | c->cache.eraseNolock(why); | |
6696 | } else { | |
6697 | if (PrintCaches) { | |
6698 | _objc_inform("CACHES: %sclass %s: disallow preopt cache (from %s)", | |
6699 | isMetaClass() ? "meta" : "", | |
6700 | nameForLogging(), why); | |
6701 | } | |
6702 | c->setDisallowPreoptCaches(); | |
6703 | } | |
6704 | return true; | |
6705 | }); | |
6706 | } | |
6707 | ||
6708 | void objc_class::setDisallowPreoptInlinedSelsRecursively(const char *why) | |
6709 | { | |
6710 | Class cls = (Class)this; | |
6711 | runtimeLock.assertLocked(); | |
6712 | ||
6713 | if (!allowsPreoptInlinedSels()) return; | |
6714 | ||
6715 | foreach_realized_class_and_subclass(cls, [=](Class c){ | |
6716 | if (!c->allowsPreoptInlinedSels()) { | |
6717 | return false; | |
6718 | } | |
6719 | ||
6720 | if (PrintCaches) { | |
6721 | _objc_inform("CACHES: %sclass %s: disallow sel-inlined preopt cache (from %s)", | |
6722 | isMetaClass() ? "meta" : "", | |
6723 | nameForLogging(), why); | |
6724 | } | |
6725 | ||
6726 | c->setDisallowPreoptInlinedSels(); | |
6727 | if (c->cache.isConstantOptimizedCacheWithInlinedSels()) { | |
6728 | c->cache.eraseNolock(why); | |
6729 | } | |
6730 | return true; | |
6731 | }); | |
6732 | } | |
6733 | #endif | |
8972963c | 6734 | |
c1e772c4 A |
6735 | /*********************************************************************** |
6736 | * Choose a class index. | |
6737 | * Set instancesRequireRawIsa if no more class indexes are available. | |
6738 | **********************************************************************/ | |
6739 | void objc_class::chooseClassArrayIndex() | |
6740 | { | |
6741 | #if SUPPORT_INDEXED_ISA | |
6742 | Class cls = (Class)this; | |
66799735 | 6743 | runtimeLock.assertLocked(); |
c1e772c4 A |
6744 | |
6745 | if (objc_indexed_classes_count >= ISA_INDEX_COUNT) { | |
6746 | // No more indexes available. | |
1807f628 A |
6747 | ASSERT(cls->classArrayIndex() == 0); |
6748 | cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/); | |
c1e772c4 A |
6749 | return; |
6750 | } | |
6751 | ||
6752 | unsigned index = objc_indexed_classes_count++; | |
6753 | if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused | |
6754 | classForIndex(index) = cls; | |
6755 | cls->setClassArrayIndex(index); | |
6756 | #endif | |
6757 | } | |
6758 | ||
34d5b5e8 A |
6759 | static const char *empty_lazyClassNamer(Class cls __unused) { |
6760 | return nullptr; | |
6761 | } | |
6762 | ||
6763 | static ChainedHookFunction<objc_hook_lazyClassNamer> LazyClassNamerHook{empty_lazyClassNamer}; | |
6764 | ||
6765 | void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue, | |
6766 | _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue) { | |
6767 | LazyClassNamerHook.set(newValue, oldOutValue); | |
6768 | } | |
6769 | ||
6770 | const char * objc_class::installMangledNameForLazilyNamedClass() { | |
6771 | auto lazyClassNamer = LazyClassNamerHook.get(); | |
6772 | if (!*lazyClassNamer) { | |
6773 | _objc_fatal("Lazily named class %p with no lazy name handler registered", this); | |
6774 | } | |
6775 | ||
6776 | // If this is called on a metaclass, extract the original class | |
6777 | // and make it do the installation instead. It will install | |
6778 | // the metaclass's name too. | |
6779 | if (isMetaClass()) { | |
6780 | Class nonMeta = bits.safe_ro()->getNonMetaclass(); | |
6781 | return nonMeta->installMangledNameForLazilyNamedClass(); | |
6782 | } | |
6783 | ||
6784 | Class cls = (Class)this; | |
6785 | Class metaclass = ISA(); | |
6786 | ||
6787 | const char *name = lazyClassNamer((Class)this); | |
6788 | if (!name) { | |
6789 | _objc_fatal("Lazily named class %p wasn't named by lazy name handler", this); | |
6790 | } | |
6791 | ||
6792 | // Emplace the name into the class_ro_t. If we lose the race, | |
6793 | // then we'll free our name and use whatever got placed there | |
6794 | // instead of our name. | |
6795 | const char *previously = NULL; | |
6796 | class_ro_t *ro = (class_ro_t *)cls->bits.safe_ro(); | |
6797 | bool wonRace = ro->name.compare_exchange_strong(previously, name, std::memory_order_release, std::memory_order_acquire); | |
6798 | if (!wonRace) { | |
6799 | free((void *)name); | |
6800 | name = previously; | |
6801 | } | |
6802 | ||
6803 | // Emplace whatever name won the race in the metaclass too. | |
6804 | class_ro_t *metaRO = (class_ro_t *)metaclass->bits.safe_ro(); | |
6805 | ||
6806 | // Write our pointer if the current value is NULL. There's no | |
6807 | // need to loop or check success, since the only way this can | |
6808 | // fail is if another thread succeeded in writing the exact | |
6809 | // same pointer. | |
6810 | const char *expected = NULL; | |
6811 | metaRO->name.compare_exchange_strong(expected, name, std::memory_order_release, std::memory_order_acquire); | |
6812 | ||
6813 | return name; | |
6814 | } | |
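/* Editor's illustration (not part of this file): how a client might chain a
 * lazy-class-namer hook through objc_setHook_lazyClassNamer. The helper
 * LookUpNameSomewhere() is hypothetical. The returned string must be
 * heap-allocated, because installMangledNameForLazilyNamedClass() above may
 * free() it if another thread wins the naming race.
 *
 *   static objc_hook_lazyClassNamer OldNamer;
 *
 *   static const char *MyLazyNamer(Class cls) {
 *       const char *found = LookUpNameSomewhere(cls);  // hypothetical lookup
 *       if (found) return strdup(found);               // runtime may free() this
 *       return OldNamer(cls);                          // chain to previous hook
 *   }
 *
 *   // at startup:
 *   objc_setHook_lazyClassNamer(MyLazyNamer, &OldNamer);
 */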
c1e772c4 | 6815 | |
cd5f04f5 A |
6816 | /*********************************************************************** |
6817 | * Update custom RR and AWZ when a method changes its IMP | |
6818 | **********************************************************************/ | |
6819 | static void | |
1807f628 | 6820 | adjustCustomFlagsForMethodChange(Class cls, method_t *meth) |
cd5f04f5 | 6821 | { |
1807f628 A |
6822 | objc::AWZScanner::scanChangedMethod(cls, meth); |
6823 | objc::RRScanner::scanChangedMethod(cls, meth); | |
6824 | objc::CoreScanner::scanChangedMethod(cls, meth); | |
cd5f04f5 A |
6825 | } |
6826 | ||
b3962a83 A |
6827 | |
6828 | /*********************************************************************** | |
6829 | * class_getIvarLayout | |
6830 | * Called by the garbage collector. | |
7257e56c | 6831 | * The class must be nil or already realized. |
b3962a83 A |
6832 | * Locking: none |
6833 | **********************************************************************/ | |
8972963c | 6834 | const uint8_t * |
7257e56c | 6835 | class_getIvarLayout(Class cls) |
b3962a83 | 6836 | { |
34d5b5e8 | 6837 | if (cls) return cls->data()->ro()->getIvarLayout(); |
7257e56c | 6838 | else return nil; |
b3962a83 A |
6839 | } |
6840 | ||
6841 | ||
6842 | /*********************************************************************** | |
6843 | * class_getWeakIvarLayout | |
6844 | * Called by the garbage collector. | |
7257e56c | 6845 | * The class must be nil or already realized. |
b3962a83 A |
6846 | * Locking: none |
6847 | **********************************************************************/ | |
8972963c | 6848 | const uint8_t * |
7257e56c | 6849 | class_getWeakIvarLayout(Class cls) |
b3962a83 | 6850 | { |
f192a3e2 | 6851 | if (cls) return cls->data()->ro()->weakIvarLayout; |
7257e56c | 6852 | else return nil; |
b3962a83 A |
6853 | } |
6854 | ||
6855 | ||
6856 | /*********************************************************************** | |
6857 | * class_setIvarLayout | |
c1e772c4 | 6858 | * Changes the class's ivar layout. |
7257e56c | 6859 | * nil layout means no unscanned ivars |
b3962a83 A |
6860 | * The class must be under construction. |
6861 | * fixme: sanity-check layout vs instance size? | |
6862 | * fixme: sanity-check layout vs superclass? | |
6863 | * Locking: acquires runtimeLock | |
6864 | **********************************************************************/ | |
6865 | void | |
7257e56c | 6866 | class_setIvarLayout(Class cls, const uint8_t *layout) |
b3962a83 | 6867 | { |
b3962a83 A |
6868 | if (!cls) return; |
6869 | ||
34d5b5e8 A |
6870 | ASSERT(!cls->isMetaClass()); |
6871 | ||
66799735 | 6872 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 6873 | |
66799735 A |
6874 | checkIsKnownClass(cls); |
6875 | ||
b3962a83 A |
6876 | // Can only change layout of in-construction classes. |
6877 | // note: if modifications to post-construction classes were | |
c1e772c4 | 6878 | // allowed, there would be a race below (us vs. concurrent object_setIvar) |
8972963c | 6879 | if (!(cls->data()->flags & RW_CONSTRUCTING)) { |
b3962a83 | 6880 | _objc_inform("*** Can't set ivar layout for already-registered " |
8070259c | 6881 | "class '%s'", cls->nameForLogging()); |
b3962a83 A |
6882 | return; |
6883 | } | |
6884 | ||
8972963c | 6885 | class_ro_t *ro_w = make_ro_writeable(cls->data()); |
b3962a83 | 6886 | |
34d5b5e8 | 6887 | try_free(ro_w->getIvarLayout()); |
31875a97 | 6888 | ro_w->ivarLayout = ustrdupMaybeNil(layout); |
8972963c A |
6889 | } |
6890 | ||
b3962a83 A |
6891 | |
6892 | /*********************************************************************** | |
6893 | * class_setWeakIvarLayout | |
c1e772c4 | 6894 | * Changes the class's weak ivar layout. |
7257e56c | 6895 | * nil layout means no weak ivars |
b3962a83 A |
6896 | * The class must be under construction. |
6897 | * fixme: sanity-check layout vs instance size? | |
6898 | * fixme: sanity-check layout vs superclass? | |
6899 | * Locking: acquires runtimeLock | |
6900 | **********************************************************************/ | |
6901 | void | |
7257e56c | 6902 | class_setWeakIvarLayout(Class cls, const uint8_t *layout) |
b3962a83 | 6903 | { |
b3962a83 A |
6904 | if (!cls) return; |
6905 | ||
66799735 | 6906 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 6907 | |
66799735 A |
6908 | checkIsKnownClass(cls); |
6909 | ||
b3962a83 A |
6910 | // Can only change layout of in-construction classes. |
6911 | // note: if modifications to post-construction classes were | |
c1e772c4 | 6912 | // allowed, there would be a race below (us vs. concurrent object_setIvar) |
8972963c | 6913 | if (!(cls->data()->flags & RW_CONSTRUCTING)) { |
b3962a83 | 6914 | _objc_inform("*** Can't set weak ivar layout for already-registered " |
8070259c | 6915 | "class '%s'", cls->nameForLogging()); |
b3962a83 A |
6916 | return; |
6917 | } | |
6918 | ||
8972963c | 6919 | class_ro_t *ro_w = make_ro_writeable(cls->data()); |
b3962a83 A |
6920 | |
6921 | try_free(ro_w->weakIvarLayout); | |
31875a97 | 6922 | ro_w->weakIvarLayout = ustrdupMaybeNil(layout); |
b3962a83 A |
6923 | } |
6924 | ||
6925 | ||
c1e772c4 A |
6926 | /*********************************************************************** |
6927 | * getIvar | |
6928 | * Look up an ivar by name. | |
6929 | * Locking: runtimeLock must be read- or write-locked by the caller. | |
6930 | **********************************************************************/ | |
6931 | static ivar_t *getIvar(Class cls, const char *name) | |
6932 | { | |
6933 | runtimeLock.assertLocked(); | |
6934 | ||
6935 | const ivar_list_t *ivars; | |
1807f628 | 6936 | ASSERT(cls->isRealized()); |
f192a3e2 | 6937 | if ((ivars = cls->data()->ro()->ivars)) { |
c1e772c4 A |
6938 | for (auto& ivar : *ivars) { |
6939 | if (!ivar.offset) continue; // anonymous bitfield | |
6940 | ||
6941 | // ivar.name may be nil for anonymous bitfields etc. | |
6942 | if (ivar.name && 0 == strcmp(name, ivar.name)) { | |
6943 | return &ivar; | |
6944 | } | |
6945 | } | |
6946 | } | |
6947 | ||
6948 | return nil; | |
6949 | } | |
6950 | ||
6951 | ||
6952 | /*********************************************************************** | |
6953 | * _class_getClassForIvar | |
6954 | * Given a class and an ivar that is in it or one of its superclasses, | |
6955 | * find the actual class that defined the ivar. | |
6956 | **********************************************************************/ | |
6957 | Class _class_getClassForIvar(Class cls, Ivar ivar) | |
6958 | { | |
66799735 | 6959 | mutex_locker_t lock(runtimeLock); |
c1e772c4 | 6960 | |
34d5b5e8 | 6961 | for ( ; cls; cls = cls->getSuperclass()) { |
f192a3e2 | 6962 | if (auto ivars = cls->data()->ro()->ivars) { |
c1e772c4 A |
6963 | if (ivars->containsIvar(ivar)) { |
6964 | return cls; | |
6965 | } | |
6966 | } | |
6967 | } | |
6968 | ||
6969 | return nil; | |
6970 | } | |
6971 | ||
6972 | ||
b3962a83 A |
6973 | /*********************************************************************** |
6974 | * _class_getVariable | |
6975 | * fixme | |
7af964d1 | 6976 | * Locking: read-locks runtimeLock |
b3962a83 | 6977 | **********************************************************************/ |
cd5f04f5 | 6978 | Ivar |
c1e772c4 | 6979 | _class_getVariable(Class cls, const char *name) |
b3962a83 | 6980 | { |
66799735 | 6981 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 6982 | |
34d5b5e8 | 6983 | for ( ; cls; cls = cls->getSuperclass()) { |
7257e56c | 6984 | ivar_t *ivar = getIvar(cls, name); |
b3962a83 | 6985 | if (ivar) { |
7257e56c | 6986 | return ivar; |
b3962a83 A |
6987 | } |
6988 | } | |
6989 | ||
7257e56c | 6990 | return nil; |
b3962a83 A |
6991 | } |
6992 | ||
6993 | ||
6994 | /*********************************************************************** | |
6995 | * class_conformsToProtocol | |
6996 | * fixme | |
7af964d1 | 6997 | * Locking: read-locks runtimeLock |
b3962a83 | 6998 | **********************************************************************/ |
7257e56c | 6999 | BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen) |
b3962a83 | 7000 | { |
8972963c | 7001 | protocol_t *proto = newprotocol(proto_gen); |
7af964d1 | 7002 | |
7257e56c | 7003 | if (!cls) return NO; |
8972963c | 7004 | if (!proto_gen) return NO; |
b3962a83 | 7005 | |
66799735 | 7006 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 7007 | |
66799735 A |
7008 | checkIsKnownClass(cls); |
7009 | ||
1807f628 | 7010 | ASSERT(cls->isRealized()); |
66799735 | 7011 | |
f192a3e2 | 7012 | for (const auto& proto_ref : cls->data()->protocols()) { |
31875a97 A |
7013 | protocol_t *p = remapProtocol(proto_ref); |
7014 | if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) { | |
7015 | return YES; | |
b3962a83 A |
7016 | } |
7017 | } | |
7018 | ||
31875a97 | 7019 | return NO; |
b3962a83 A |
7020 | } |
7021 | ||
34d5b5e8 A |
7022 | static void |
7023 | addMethods_finish(Class cls, method_list_t *newlist) | |
7024 | { | |
7025 | auto rwe = cls->data()->extAllocIfNeeded(); | |
7026 | ||
7027 | if (newlist->count > 1) { | |
7028 | method_t::SortBySELAddress sorter; | |
7029 | std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter); | |
7030 | } | |
7031 | ||
7032 | prepareMethodLists(cls, &newlist, 1, NO, NO, __func__); | |
7033 | rwe->methods.attachLists(&newlist, 1); | |
7034 | ||
7035 | // If the class being modified has a constant cache, | |
7036 | // then all child classes have flattened constant caches | |
7037 | // and need to be flushed as well. | |
7038 | flushCaches(cls, __func__, [](Class c){ | |
7039 | // constant caches have been dealt with in prepareMethodLists | |
7040 | // if the class is still constant here, it's fine to keep it | |
7041 | return !c->cache.isConstantOptimizedCache(); | |
7042 | }); | |
7043 | } | |
7044 | ||
b3962a83 | 7045 | |
7257e56c | 7046 | /********************************************************************** |
8972963c | 7047 | * addMethod |
b3962a83 | 7048 | * fixme |
8972963c | 7049 | * Locking: runtimeLock must be held by the caller |
b3962a83 A |
7050 | **********************************************************************/ |
7051 | static IMP | |
31875a97 | 7052 | addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace) |
b3962a83 | 7053 | { |
7257e56c | 7054 | IMP result = nil; |
b3962a83 | 7055 | |
66799735 | 7056 | runtimeLock.assertLocked(); |
b3962a83 | 7057 | |
66799735 A |
7058 | checkIsKnownClass(cls); |
7059 | ||
1807f628 A |
7060 | ASSERT(types); |
7061 | ASSERT(cls->isRealized()); | |
b3962a83 | 7062 | |
7af964d1 A |
7063 | method_t *m; |
7064 | if ((m = getMethodNoSuper_nolock(cls, name))) { | |
b3962a83 | 7065 | // already exists |
7af964d1 | 7066 | if (!replace) { |
bc4fafce | 7067 | result = m->imp(false); |
7af964d1 A |
7068 | } else { |
7069 | result = _method_setImplementation(cls, m, imp); | |
b3962a83 A |
7070 | } |
7071 | } else { | |
7072 | // fixme optimize | |
7af964d1 | 7073 | method_list_t *newlist; |
bc4fafce | 7074 | newlist = (method_list_t *)calloc(method_list_t::byteSize(method_t::bigSize, 1), 1); |
31875a97 | 7075 | newlist->entsizeAndFlags = |
bc4fafce | 7076 | (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list; |
b3962a83 | 7077 | newlist->count = 1; |
bc4fafce A |
7078 | auto &first = newlist->begin()->big(); |
7079 | first.name = name; | |
7080 | first.types = strdupIfMutable(types); | |
7081 | first.imp = imp; | |
b3962a83 | 7082 | |
34d5b5e8 | 7083 | addMethods_finish(cls, newlist); |
7257e56c | 7084 | result = nil; |
b3962a83 A |
7085 | } |
7086 | ||
b3962a83 A |
7087 | return result; |
7088 | } | |
7089 | ||
66799735 A |
7090 | /********************************************************************** |
7091 | * addMethods | |
7092 | * Add the given methods to a class in bulk. | |
7093 | * Returns the selectors that could not be added (when replace == NO and a | |
7094 | * method already exists). The returned list is NULL-terminated and must be | |
7095 | * freed by the caller; it is NULL if no failures occurred. | |
7096 | * Locking: runtimeLock must be held by the caller | |
7097 | **********************************************************************/ | |
7098 | static SEL * | |
7099 | addMethods(Class cls, const SEL *names, const IMP *imps, const char **types, | |
7100 | uint32_t count, bool replace, uint32_t *outFailedCount) | |
7101 | { | |
7102 | runtimeLock.assertLocked(); | |
7103 | ||
1807f628 A |
7104 | ASSERT(names); |
7105 | ASSERT(imps); | |
7106 | ASSERT(types); | |
7107 | ASSERT(cls->isRealized()); | |
66799735 A |
7108 | |
7109 | method_list_t *newlist; | |
bc4fafce | 7110 | size_t newlistSize = method_list_t::byteSize(sizeof(struct method_t::big), count); |
66799735 A |
7111 | newlist = (method_list_t *)calloc(newlistSize, 1); |
7112 | newlist->entsizeAndFlags = | |
bc4fafce | 7113 | (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list; |
66799735 A |
7114 | newlist->count = 0; |
7115 | ||
66799735 A |
7116 | SEL *failedNames = nil; |
7117 | uint32_t failedCount = 0; | |
7118 | ||
7119 | for (uint32_t i = 0; i < count; i++) { | |
7120 | method_t *m; | |
7121 | if ((m = getMethodNoSuper_nolock(cls, names[i]))) { | |
7122 | // already exists | |
7123 | if (!replace) { | |
7124 | // report failure | |
7125 | if (failedNames == nil) { | |
7126 | // allocate an extra entry for a trailing NULL in case | |
7127 | // every method fails | |
7128 | failedNames = (SEL *)calloc(sizeof(*failedNames), | |
7129 | count + 1); | |
7130 | } | |
bc4fafce | 7131 | failedNames[failedCount] = m->name(); |
66799735 A |
7132 | failedCount++; |
7133 | } else { | |
7134 | _method_setImplementation(cls, m, imps[i]); | |
7135 | } | |
7136 | } else { | |
bc4fafce A |
7137 | auto &newmethod = newlist->end()->big(); |
7138 | newmethod.name = names[i]; | |
7139 | newmethod.types = strdupIfMutable(types[i]); | |
7140 | newmethod.imp = imps[i]; | |
66799735 A |
7141 | newlist->count++; |
7142 | } | |
7143 | } | |
7144 | ||
7145 | if (newlist->count > 0) { | |
7146 | // fixme resize newlist because it may have been over-allocated above. | |
7147 | // Note that realloc() alone doesn't work due to ptrauth. | |
34d5b5e8 | 7148 | addMethods_finish(cls, newlist); |
66799735 A |
7149 | } else { |
7150 | // Attaching the method list to the class consumes it. If we don't | |
7151 | // do that, we have to free the memory ourselves. | |
7152 | free(newlist); | |
7153 | } | |
7154 | ||
7155 | if (outFailedCount) *outFailedCount = failedCount; | |
7156 | ||
7157 | return failedNames; | |
7158 | } | |
7159 | ||
b3962a83 A |
7160 | |
7161 | BOOL | |
7162 | class_addMethod(Class cls, SEL name, IMP imp, const char *types) | |
7163 | { | |
7164 | if (!cls) return NO; | |
7165 | ||
66799735 | 7166 | mutex_locker_t lock(runtimeLock); |
31875a97 | 7167 | return ! addMethod(cls, name, imp, types ?: "", NO); |
b3962a83 A |
7168 | } |
7169 | ||
7170 | ||
7171 | IMP | |
7172 | class_replaceMethod(Class cls, SEL name, IMP imp, const char *types) | |
7173 | { | |
7257e56c | 7174 | if (!cls) return nil; |
b3962a83 | 7175 | |
66799735 | 7176 | mutex_locker_t lock(runtimeLock); |
31875a97 | 7177 | return addMethod(cls, name, imp, types ?: "", YES); |
b3962a83 A |
7178 | } |
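/* Editor's illustration (not part of this file): typical use of the two
 * wrappers above. The class name, selector, and DoSomethingIMP are
 * hypothetical. class_addMethod() returns NO when the class itself already
 * implements the selector; class_replaceMethod() overrides in that case and
 * hands back the previous IMP.
 *
 *   static void DoSomethingIMP(id self, SEL _cmd) { }    // hypothetical IMP
 *
 *   Class cls = objc_getClass("MyClass");                // hypothetical class
 *   SEL sel = sel_registerName("doSomething");
 *   if (!class_addMethod(cls, sel, (IMP)DoSomethingIMP, "v@:")) {
 *       IMP previous = class_replaceMethod(cls, sel, (IMP)DoSomethingIMP, "v@:");
 *       // previous is the IMP that was installed before the replacement
 *   }
 */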
7179 | ||
7180 | ||
66799735 A |
7181 | SEL * |
7182 | class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps, | |
7183 | const char **types, uint32_t count, | |
7184 | uint32_t *outFailedCount) | |
7185 | { | |
7186 | if (!cls) { | |
7187 | if (outFailedCount) *outFailedCount = count; | |
7188 | return (SEL *)memdup(names, count * sizeof(*names)); | |
7189 | } | |
7190 | ||
7191 | mutex_locker_t lock(runtimeLock); | |
7192 | return addMethods(cls, names, imps, types, count, NO, outFailedCount); | |
7193 | } | |
7194 | ||
7195 | void | |
7196 | class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps, | |
7197 | const char **types, uint32_t count) | |
7198 | { | |
7199 | if (!cls) return; | |
7200 | ||
7201 | mutex_locker_t lock(runtimeLock); | |
7202 | addMethods(cls, names, imps, types, count, YES, nil); | |
7203 | } | |
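/* Editor's illustration (not part of this file): calling the bulk interface
 * defined above. FooIMP, BarIMP, and the class are hypothetical. The returned
 * array, when non-NULL, is NULL-terminated and owned by the caller.
 *
 *   SEL names[2] = { sel_registerName("foo"), sel_registerName("bar") };
 *   IMP imps[2]  = { (IMP)FooIMP, (IMP)BarIMP };         // hypothetical IMPs
 *   const char *types[2] = { "v@:", "v@:" };
 *   uint32_t failedCount = 0;
 *   SEL *failed = class_addMethodsBulk(cls, names, imps, types, 2, &failedCount);
 *   if (failed) {
 *       // failedCount selectors already existed and were left untouched
 *       free(failed);
 *   }
 */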
7204 | ||
7205 | ||
b3962a83 A |
7206 | /*********************************************************************** |
7207 | * class_addIvar | |
7208 | * Adds an ivar to a class. | |
7209 | * Locking: acquires runtimeLock | |
7210 | **********************************************************************/ | |
7211 | BOOL | |
7257e56c | 7212 | class_addIvar(Class cls, const char *name, size_t size, |
b3962a83 A |
7213 | uint8_t alignment, const char *type) |
7214 | { | |
b3962a83 A |
7215 | if (!cls) return NO; |
7216 | ||
7217 | if (!type) type = ""; | |
7257e56c | 7218 | if (name && 0 == strcmp(name, "")) name = nil; |
b3962a83 | 7219 | |
66799735 | 7220 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 7221 | |
66799735 | 7222 | checkIsKnownClass(cls); |
1807f628 | 7223 | ASSERT(cls->isRealized()); |
b3962a83 A |
7224 | |
7225 | // No class variables | |
7257e56c | 7226 | if (cls->isMetaClass()) { |
b3962a83 A |
7227 | return NO; |
7228 | } | |
7229 | ||
7230 | // Can only add ivars to in-construction classes. | |
8972963c | 7231 | if (!(cls->data()->flags & RW_CONSTRUCTING)) { |
b3962a83 A |
7232 | return NO; |
7233 | } | |
7234 | ||
7235 | // Check for existing ivar with this name, unless it's anonymous. | |
7236 | // Check for too-big ivar. | |
7237 | // fixme check for superclass ivar too? | |
7238 | if ((name && getIvar(cls, name)) || size > UINT32_MAX) { | |
b3962a83 A |
7239 | return NO; |
7240 | } | |
7241 | ||
8972963c | 7242 | class_ro_t *ro_w = make_ro_writeable(cls->data()); |
b3962a83 A |
7243 | |
7244 | // fixme allocate less memory here | |
7245 | ||
7246 | ivar_list_t *oldlist, *newlist; | |
f192a3e2 | 7247 | if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) { |
31875a97 A |
7248 | size_t oldsize = oldlist->byteSize(); |
7249 | newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1); | |
b3962a83 | 7250 | memcpy(newlist, oldlist, oldsize); |
31875a97 | 7251 | free(oldlist); |
b3962a83 | 7252 | } else { |
bc4fafce | 7253 | newlist = (ivar_list_t *)calloc(ivar_list_t::byteSize(sizeof(ivar_t), 1), 1); |
31875a97 | 7254 | newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t); |
b3962a83 A |
7255 | } |
7256 | ||
7257e56c | 7257 | uint32_t offset = cls->unalignedInstanceSize(); |
b3962a83 A |
7258 | uint32_t alignMask = (1<<alignment)-1; |
7259 | offset = (offset + alignMask) & ~alignMask; | |
7260 | ||
31875a97 | 7261 | ivar_t& ivar = newlist->get(newlist->count++); |
7257e56c A |
7262 | #if __x86_64__ |
7263 | // Deliberately over-allocate the ivar offset variable. | |
7264 | // Use calloc() to clear all 64 bits. See the note in struct ivar_t. | |
31875a97 | 7265 | ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1); |
7257e56c | 7266 | #else |
31875a97 | 7267 | ivar.offset = (int32_t *)malloc(sizeof(int32_t)); |
7257e56c | 7268 | #endif |
31875a97 | 7269 | *ivar.offset = offset; |
c1e772c4 A |
7270 | ivar.name = name ? strdupIfMutable(name) : nil; |
7271 | ivar.type = strdupIfMutable(type); | |
31875a97 A |
7272 | ivar.alignment_raw = alignment; |
7273 | ivar.size = (uint32_t)size; | |
b3962a83 A |
7274 | |
7275 | ro_w->ivars = newlist; | |
8070259c | 7276 | cls->setInstanceSize((uint32_t)(offset + size)); |
b3962a83 A |
7277 | |
7278 | // Ivar layout updated in registerClass. | |
7279 | ||
b3962a83 A |
7280 | return YES; |
7281 | } | |
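/* Editor's illustration (not part of this file): adding an ivar to a class
 * that is still under construction (between objc_allocateClassPair and
 * objc_registerClassPair). The alignment parameter is log2 of the byte
 * alignment; the values below assume a 64-bit target and a hypothetical
 * `_delegate` ivar of type id.
 *
 *   BOOL ok = class_addIvar(cls, "_delegate", sizeof(id),
 *                           3,      // log2(8): 8-byte alignment
 *                           "@");   // type encoding for id
 */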
7282 | ||
7283 | ||
7284 | /*********************************************************************** | |
7285 | * class_addProtocol | |
7286 | * Adds a protocol to a class. | |
7287 | * Locking: acquires runtimeLock | |
7288 | **********************************************************************/ | |
7257e56c | 7289 | BOOL class_addProtocol(Class cls, Protocol *protocol_gen) |
b3962a83 | 7290 | { |
b3962a83 | 7291 | protocol_t *protocol = newprotocol(protocol_gen); |
b3962a83 A |
7292 | |
7293 | if (!cls) return NO; | |
7257e56c | 7294 | if (class_conformsToProtocol(cls, protocol_gen)) return NO; |
b3962a83 | 7295 | |
66799735 | 7296 | mutex_locker_t lock(runtimeLock); |
f192a3e2 | 7297 | auto rwe = cls->data()->extAllocIfNeeded(); |
b3962a83 | 7298 | |
1807f628 | 7299 | ASSERT(cls->isRealized()); |
b3962a83 A |
7300 | |
7301 | // fixme optimize | |
31875a97 A |
7302 | protocol_list_t *protolist = (protocol_list_t *) |
7303 | malloc(sizeof(protocol_list_t) + sizeof(protocol_t *)); | |
7304 | protolist->count = 1; | |
7305 | protolist->list[0] = (protocol_ref_t)protocol; | |
b3962a83 | 7306 | |
f192a3e2 | 7307 | rwe->protocols.attachLists(&protolist, 1); |
b3962a83 A |
7308 | |
7309 | // fixme metaclass? | |
7310 | ||
b3962a83 A |
7311 | return YES; |
7312 | } | |
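/* Editor's illustration (not part of this file): recording a protocol
 * conformance on a class. Adding the protocol does not add its methods;
 * those still have to be provided with class_addMethod.
 *
 *   Protocol *proto = objc_getProtocol("NSCopying");
 *   if (proto && !class_conformsToProtocol(cls, proto)) {
 *       class_addProtocol(cls, proto);
 *   }
 */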
7313 | ||
7314 | ||
8972963c A |
7315 | /*********************************************************************** |
7316 | * class_addProperty | |
7317 | * Adds a property to a class. | |
7318 | * Locking: acquires runtimeLock | |
7319 | **********************************************************************/ | |
31875a97 | 7320 | static bool |
7257e56c | 7321 | _class_addProperty(Class cls, const char *name, |
8972963c | 7322 | const objc_property_attribute_t *attrs, unsigned int count, |
31875a97 | 7323 | bool replace) |
8972963c | 7324 | { |
8972963c A |
7325 | if (!cls) return NO; |
7326 | if (!name) return NO; | |
7327 | ||
7257e56c | 7328 | property_t *prop = class_getProperty(cls, name); |
8972963c A |
7329 | if (prop && !replace) { |
7330 | // already exists, refuse to replace | |
7331 | return NO; | |
7332 | } | |
7333 | else if (prop) { | |
7334 | // replace existing | |
66799735 | 7335 | mutex_locker_t lock(runtimeLock); |
8972963c A |
7336 | try_free(prop->attributes); |
7337 | prop->attributes = copyPropertyAttributeString(attrs, count); | |
8972963c A |
7338 | return YES; |
7339 | } | |
7340 | else { | |
66799735 | 7341 | mutex_locker_t lock(runtimeLock); |
f192a3e2 | 7342 | auto rwe = cls->data()->extAllocIfNeeded(); |
8972963c | 7343 | |
1807f628 | 7344 | ASSERT(cls->isRealized()); |
8972963c | 7345 | |
31875a97 | 7346 | property_list_t *proplist = (property_list_t *) |
bc4fafce | 7347 | malloc(property_list_t::byteSize(sizeof(property_t), 1)); |
31875a97 | 7348 | proplist->count = 1; |
bc4fafce A |
7349 | proplist->entsizeAndFlags = sizeof(property_t); |
7350 | proplist->begin()->name = strdupIfMutable(name); | |
7351 | proplist->begin()->attributes = copyPropertyAttributeString(attrs, count); | |
8972963c | 7352 | |
f192a3e2 | 7353 | rwe->properties.attachLists(&proplist, 1); |
8972963c A |
7354 | |
7355 | return YES; | |
7356 | } | |
7357 | } | |
7358 | ||
7359 | BOOL | |
7257e56c | 7360 | class_addProperty(Class cls, const char *name, |
8972963c A |
7361 | const objc_property_attribute_t *attrs, unsigned int n) |
7362 | { | |
7257e56c | 7363 | return _class_addProperty(cls, name, attrs, n, NO); |
8972963c A |
7364 | } |
7365 | ||
7366 | void | |
7257e56c | 7367 | class_replaceProperty(Class cls, const char *name, |
8972963c A |
7368 | const objc_property_attribute_t *attrs, unsigned int n) |
7369 | { | |
7257e56c | 7370 | _class_addProperty(cls, name, attrs, n, YES); |
8972963c A |
7371 | } |
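/* Editor's illustration (not part of this file): declaring a copy NSString
 * property named "name" backed by a hypothetical "_name" ivar, using the
 * documented attribute encodings ("T" type, "C" copy, "V" backing ivar).
 *
 *   objc_property_attribute_t type = { "T", "@\"NSString\"" };
 *   objc_property_attribute_t copy = { "C", "" };
 *   objc_property_attribute_t ivar = { "V", "_name" };
 *   objc_property_attribute_t attrs[] = { type, copy, ivar };
 *   class_addProperty(cls, "name", attrs, 3);
 */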
7372 | ||
7373 | ||
b3962a83 A |
7374 | /*********************************************************************** |
7375 | * look_up_class | |
7376 | * Look up a class by name, and realize it. | |
7377 | * Locking: acquires runtimeLock | |
b3962a83 | 7378 | **********************************************************************/ |
13ba007e A |
7379 | static BOOL empty_getClass(const char *name, Class *outClass) |
7380 | { | |
7381 | *outClass = nil; | |
7382 | return NO; | |
7383 | } | |
7384 | ||
7385 | static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass}; | |
7386 | ||
7387 | void objc_setHook_getClass(objc_hook_getClass newValue, | |
7388 | objc_hook_getClass *outOldValue) | |
7389 | { | |
7390 | GetClassHook.set(newValue, outOldValue); | |
7391 | } | |
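/* Editor's illustration (not part of this file): chaining a getClass hook.
 * The class name and CreateMyLazyClass() are hypothetical; a hook must hand
 * back an already-realized class (see the ASSERT in look_up_class below).
 *
 *   static objc_hook_getClass OldGetClassHook;
 *
 *   static BOOL MyGetClassHook(const char *name, Class *outClass) {
 *       if (strcmp(name, "MyLazyClass") == 0) {
 *           *outClass = CreateMyLazyClass();    // hypothetical; must be realized
 *           return YES;
 *       }
 *       return OldGetClassHook(name, outClass); // chain to previous hook
 *   }
 *
 *   // at startup:
 *   objc_setHook_getClass(MyGetClassHook, &OldGetClassHook);
 */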
7392 | ||
7257e56c | 7393 | Class |
b3962a83 | 7394 | look_up_class(const char *name, |
31875a97 A |
7395 | bool includeUnconnected __attribute__((unused)), |
7396 | bool includeClassHandler __attribute__((unused))) | |
b3962a83 A |
7397 | { |
7398 | if (!name) return nil; | |
7399 | ||
31875a97 A |
7400 | Class result; |
7401 | bool unrealized; | |
7402 | { | |
13ba007e A |
7403 | runtimeLock.lock(); |
7404 | result = getClassExceptSomeSwift(name); | |
31875a97 | 7405 | unrealized = result && !result->isRealized(); |
13ba007e A |
7406 | if (unrealized) { |
7407 | result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock); | |
7408 | // runtimeLock is now unlocked | |
7409 | } else { | |
7410 | runtimeLock.unlock(); | |
7411 | } | |
31875a97 | 7412 | } |
13ba007e A |
7413 | |
7414 | if (!result) { | |
7415 | // Ask Swift about its un-instantiated classes. | |
7416 | ||
7417 | // We use thread-local storage to prevent infinite recursion | |
7418 | // if the hook function provokes another lookup of the same name | |
7419 | // (for example, if the hook calls objc_allocateClassPair) | |
7420 | ||
7421 | auto *tls = _objc_fetch_pthread_data(true); | |
7422 | ||
7423 | // Stop if this thread is already looking up this name. | |
7424 | for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) { | |
7425 | if (0 == strcmp(name, tls->classNameLookups[i])) { | |
7426 | return nil; | |
7427 | } | |
7428 | } | |
7429 | ||
7430 | // Save this lookup in tls. | |
7431 | if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) { | |
7432 | tls->classNameLookupsAllocated = | |
7433 | (tls->classNameLookupsAllocated * 2 ?: 1); | |
7434 | size_t size = tls->classNameLookupsAllocated * | |
7435 | sizeof(tls->classNameLookups[0]); | |
7436 | tls->classNameLookups = (const char **) | |
7437 | realloc(tls->classNameLookups, size); | |
7438 | } | |
7439 | tls->classNameLookups[tls->classNameLookupsUsed++] = name; | |
7440 | ||
7441 | // Call the hook. | |
7442 | Class swiftcls = nil; | |
7443 | if (GetClassHook.get()(name, &swiftcls)) { | |
1807f628 | 7444 | ASSERT(swiftcls->isRealized()); |
13ba007e A |
7445 | result = swiftcls; |
7446 | } | |
7447 | ||
7448 | // Erase the name from tls. | |
7449 | unsigned slot = --tls->classNameLookupsUsed; | |
1807f628 A |
7450 | ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated); |
7451 | ASSERT(name == tls->classNameLookups[slot]); | |
13ba007e | 7452 | tls->classNameLookups[slot] = nil; |
7af964d1 | 7453 | } |
13ba007e | 7454 | |
7257e56c | 7455 | return result; |
b3962a83 A |
7456 | } |
7457 | ||
7458 | ||
7459 | /*********************************************************************** | |
7460 | * objc_duplicateClass | |
7461 | * fixme | |
7462 | * Locking: acquires runtimeLock | |
7463 | **********************************************************************/ | |
7464 | Class | |
7257e56c | 7465 | objc_duplicateClass(Class original, const char *name, |
b3962a83 A |
7466 | size_t extraBytes) |
7467 | { | |
7257e56c | 7468 | Class duplicate; |
b3962a83 | 7469 | |
66799735 A |
7470 | mutex_locker_t lock(runtimeLock); |
7471 | ||
7472 | checkIsKnownClass(original); | |
b3962a83 | 7473 | |
f192a3e2 A |
7474 | auto orig_rw = original->data(); |
7475 | auto orig_rwe = orig_rw->ext(); | |
7476 | auto orig_ro = orig_rw->ro(); | |
7477 | ||
1807f628 A |
7478 | ASSERT(original->isRealized()); |
7479 | ASSERT(!original->isMetaClass()); | |
b3962a83 | 7480 | |
8070259c | 7481 | duplicate = alloc_class_for_subclass(original, extraBytes); |
b3962a83 | 7482 | |
8070259c | 7483 | duplicate->initClassIsa(original->ISA()); |
34d5b5e8 | 7484 | duplicate->setSuperclass(original->getSuperclass()); |
7257e56c | 7485 | |
31875a97 | 7486 | duplicate->cache.initializeToEmpty(); |
b3962a83 | 7487 | |
f192a3e2 A |
7488 | class_rw_t *rw = objc::zalloc<class_rw_t>(); |
7489 | rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING); | |
8070259c A |
7490 | rw->firstSubclass = nil; |
7491 | rw->nextSiblingClass = nil; | |
b3962a83 | 7492 | |
8070259c A |
7493 | duplicate->bits = original->bits; |
7494 | duplicate->setData(rw); | |
7495 | ||
f192a3e2 A |
7496 | auto ro = orig_ro->duplicate(); |
7497 | *(char **)&ro->name = strdupIfMutable(name); | |
7498 | rw->set_ro(ro); | |
31875a97 | 7499 | |
f192a3e2 A |
7500 | if (orig_rwe) { |
7501 | auto rwe = rw->extAllocIfNeeded(); | |
7502 | rwe->version = orig_rwe->version; | |
bc4fafce | 7503 | orig_rwe->methods.duplicateInto(rwe->methods); |
b3962a83 | 7504 | |
f192a3e2 A |
7505 | // fixme dies when categories are added to the base |
7506 | rwe->properties = orig_rwe->properties; | |
7507 | rwe->protocols = orig_rwe->protocols; | |
7508 | } else if (ro->baseMethods()) { | |
7509 | // if we have base methods, we need to make a deep copy | |
7510 | // which requires a class_rw_ext_t to be allocated | |
7511 | rw->deepCopy(ro); | |
7512 | } | |
b3962a83 | 7513 | |
c1e772c4 A |
7514 | duplicate->chooseClassArrayIndex(); |
7515 | ||
34d5b5e8 A |
7516 | if (duplicate->getSuperclass()) { |
7517 | addSubclass(duplicate->getSuperclass(), duplicate); | |
c1e772c4 A |
7518 | // duplicate->isa == original->isa so don't addSubclass() for it |
7519 | } else { | |
7520 | addRootClass(duplicate); | |
b3962a83 A |
7521 | } |
7522 | ||
7af964d1 A |
7523 | // Don't methodize class - construction above is correct |
7524 | ||
34d5b5e8 | 7525 | addNamedClass(duplicate, ro->getName()); |
66799735 A |
7526 | addClassTableEntry(duplicate, /*addMeta=*/false); |
7527 | ||
b3962a83 A |
7528 | if (PrintConnecting) { |
7529 | _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p", | |
f192a3e2 | 7530 | name, original->nameForLogging(), (void*)duplicate, ro); |
b3962a83 A |
7531 | } |
7532 | ||
8070259c A |
7533 | duplicate->clearInfo(RW_REALIZING); |
7534 | ||
7257e56c | 7535 | return duplicate; |
b3962a83 A |
7536 | } |
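/* Editor's illustration (not part of this file): objc_duplicateClass is
 * intended for Foundation's Key-Value Observing machinery rather than for
 * general use; a minimal, purely hypothetical call looks like this.
 *
 *   Class dup = objc_duplicateClass(objc_getClass("MyClass"), "MyClassCopy", 0);
 *   id obj = class_createInstance(dup, 0);
 */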
7537 | ||
b3962a83 | 7538 | /*********************************************************************** |
7af964d1 A |
7539 | * objc_initializeClassPair |
7540 | * Locking: runtimeLock must be write-locked by the caller | |
b3962a83 | 7541 | **********************************************************************/ |
8972963c A |
7542 | |
7543 | // &UnsetLayout is the default ivar layout during class construction | |
7544 | static const uint8_t UnsetLayout = 0; | |
7545 | ||
7257e56c | 7546 | static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta) |
b3962a83 | 7547 | { |
66799735 | 7548 | runtimeLock.assertLocked(); |
7af964d1 | 7549 | |
b3962a83 | 7550 | class_ro_t *cls_ro_w, *meta_ro_w; |
f192a3e2 | 7551 | class_rw_t *cls_rw_w, *meta_rw_w; |
b3962a83 | 7552 | |
f192a3e2 A |
7553 | cls_rw_w = objc::zalloc<class_rw_t>(); |
7554 | meta_rw_w = objc::zalloc<class_rw_t>(); | |
31875a97 A |
7555 | cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1); |
7556 | meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1); | |
f192a3e2 A |
7557 | |
7558 | cls->setData(cls_rw_w); | |
7559 | cls_rw_w->set_ro(cls_ro_w); | |
7560 | meta->setData(meta_rw_w); | |
7561 | meta_rw_w->set_ro(meta_ro_w); | |
b3962a83 A |
7562 | |
7563 | // Set basic info | |
b3962a83 | 7564 | |
f192a3e2 A |
7565 | cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING; |
7566 | meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META; | |
b3962a83 A |
7567 | |
7568 | cls_ro_w->flags = 0; | |
7569 | meta_ro_w->flags = RO_META; | |
b3962a83 | 7570 | if (superclass) { |
13ba007e | 7571 | uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS; |
f192a3e2 | 7572 | cls_rw_w->flags |= superclass->data()->flags & flagsToCopy; |
7257e56c A |
7573 | cls_ro_w->instanceStart = superclass->unalignedInstanceSize(); |
7574 | meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize(); | |
8070259c A |
7575 | cls->setInstanceSize(cls_ro_w->instanceStart); |
7576 | meta->setInstanceSize(meta_ro_w->instanceStart); | |
b3962a83 | 7577 | } else { |
f192a3e2 A |
7578 | cls_ro_w->flags |= RO_ROOT; |
7579 | meta_ro_w->flags |= RO_ROOT; | |
b3962a83 | 7580 | cls_ro_w->instanceStart = 0; |
7257e56c | 7581 | meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class); |
8070259c A |
7582 | cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa |
7583 | meta->setInstanceSize(meta_ro_w->instanceStart); | |
b3962a83 A |
7584 | } |
7585 | ||
34d5b5e8 A |
7586 | cls_ro_w->name.store(strdupIfMutable(name), std::memory_order_release); |
7587 | meta_ro_w->name.store(strdupIfMutable(name), std::memory_order_release); | |
b3962a83 | 7588 | |
8972963c A |
7589 | cls_ro_w->ivarLayout = &UnsetLayout; |
7590 | cls_ro_w->weakIvarLayout = &UnsetLayout; | |
7591 | ||
c1e772c4 A |
7592 | meta->chooseClassArrayIndex(); |
7593 | cls->chooseClassArrayIndex(); | |
7594 | ||
1807f628 A |
7595 | // This absolutely needs to be done before addSubclass |
7596 | // as initializeToEmpty() clobbers the FAST_CACHE bits | |
7597 | cls->cache.initializeToEmpty(); | |
7598 | meta->cache.initializeToEmpty(); | |
7599 | ||
7600 | #if FAST_CACHE_META | |
7601 | meta->cache.setBit(FAST_CACHE_META); | |
7602 | #endif | |
7603 | meta->setInstancesRequireRawIsa(); | |
7604 | ||
b3962a83 | 7605 | // Connect to superclasses and metaclasses |
8070259c | 7606 | cls->initClassIsa(meta); |
1807f628 | 7607 | |
b3962a83 | 7608 | if (superclass) { |
8070259c | 7609 | meta->initClassIsa(superclass->ISA()->ISA()); |
34d5b5e8 A |
7610 | cls->setSuperclass(superclass); |
7611 | meta->setSuperclass(superclass->ISA()); | |
b3962a83 | 7612 | addSubclass(superclass, cls); |
7257e56c | 7613 | addSubclass(superclass->ISA(), meta); |
b3962a83 | 7614 | } else { |
8070259c | 7615 | meta->initClassIsa(meta); |
34d5b5e8 A |
7616 | cls->setSuperclass(Nil); |
7617 | meta->setSuperclass(cls); | |
c1e772c4 | 7618 | addRootClass(cls); |
b3962a83 A |
7619 | addSubclass(cls, meta); |
7620 | } | |
c1e772c4 | 7621 | |
66799735 | 7622 | addClassTableEntry(cls); |
7af964d1 A |
7623 | } |
7624 | ||
8070259c A |
7625 | |
7626 | /*********************************************************************** | |
7627 | * verifySuperclass | |
7628 | * Sanity-check the superclass provided to | |
7629 | * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair. | |
7630 | **********************************************************************/ | |
7631 | bool | |
7632 | verifySuperclass(Class superclass, bool rootOK) | |
7633 | { | |
7634 | if (!superclass) { | |
7635 | // Superclass does not exist. | |
7636 | // If subclass may be a root class, this is OK. | |
7637 | // If subclass must not be a root class, this is bad. | |
7638 | return rootOK; | |
7639 | } | |
7640 | ||
7641 | // Superclass must be realized. | |
7642 | if (! superclass->isRealized()) return false; | |
7643 | ||
7644 | // Superclass must not be under construction. | |
7645 | if (superclass->data()->flags & RW_CONSTRUCTING) return false; | |
7646 | ||
7647 | return true; | |
7648 | } | |
7649 | ||
7650 | ||
7af964d1 A |
7651 | /*********************************************************************** |
7652 | * objc_initializeClassPair | |
7653 | **********************************************************************/ | |
7257e56c | 7654 | Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta) |
7af964d1 | 7655 | { |
13ba007e A |
7656 | // Fail if the class name is in use. |
7657 | if (look_up_class(name, NO, NO)) return nil; | |
7658 | ||
66799735 | 7659 | mutex_locker_t lock(runtimeLock); |
7af964d1 | 7660 | |
8070259c A |
7661 | // Fail if the class name is in use. |
7662 | // Fail if the superclass isn't kosher. | |
13ba007e A |
7663 | if (getClassExceptSomeSwift(name) || |
7664 | !verifySuperclass(superclass, true/*rootOK*/)) | |
7665 | { | |
8070259c | 7666 | return nil; |
7af964d1 A |
7667 | } |
7668 | ||
7257e56c | 7669 | objc_initializeClassPair_internal(superclass, name, cls, meta); |
7af964d1 | 7670 | |
7257e56c | 7671 | return cls; |
7af964d1 A |
7672 | } |
7673 | ||
8070259c | 7674 | |
7af964d1 A |
7675 | /*********************************************************************** |
7676 | * objc_allocateClassPair | |
7677 | * fixme | |
7678 | * Locking: acquires runtimeLock | |
7679 | **********************************************************************/ | |
7257e56c | 7680 | Class objc_allocateClassPair(Class superclass, const char *name, |
7af964d1 A |
7681 | size_t extraBytes) |
7682 | { | |
7af964d1 A |
7683 | Class cls, meta; |
7684 | ||
13ba007e A |
7685 | // Fail if the class name is in use. |
7686 | if (look_up_class(name, NO, NO)) return nil; | |
7687 | ||
66799735 | 7688 | mutex_locker_t lock(runtimeLock); |
7af964d1 | 7689 | |
8070259c A |
7690 | // Fail if the class name is in use. |
7691 | // Fail if the superclass isn't kosher. | |
13ba007e A |
7692 | if (getClassExceptSomeSwift(name) || |
7693 | !verifySuperclass(superclass, true/*rootOK*/)) | |
7694 | { | |
8070259c | 7695 | return nil; |
7af964d1 A |
7696 | } |
7697 | ||
7af964d1 | 7698 | // Allocate new classes. |
8070259c A |
7699 | cls = alloc_class_for_subclass(superclass, extraBytes); |
7700 | meta = alloc_class_for_subclass(superclass, extraBytes); | |
b3962a83 | 7701 | |
8070259c | 7702 | // fixme mangle the name if it looks swift-y? |
7257e56c | 7703 | objc_initializeClassPair_internal(superclass, name, cls, meta); |
7af964d1 | 7704 | |
7257e56c | 7705 | return cls; |
b3962a83 A |
7706 | } |
7707 | ||
7708 | ||
7709 | /*********************************************************************** | |
7710 | * objc_registerClassPair | |
7711 | * fixme | |
7712 | * Locking: acquires runtimeLock | |
7713 | **********************************************************************/ | |
7257e56c | 7714 | void objc_registerClassPair(Class cls) |
b3962a83 | 7715 | { |
66799735 | 7716 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 7717 | |
66799735 A |
7718 | checkIsKnownClass(cls); |
7719 | ||
7720 | if ((cls->data()->flags & RW_CONSTRUCTED) || | |
7257e56c | 7721 | (cls->ISA()->data()->flags & RW_CONSTRUCTED)) |
b3962a83 A |
7722 | { |
7723 | _objc_inform("objc_registerClassPair: class '%s' was already " | |
34d5b5e8 | 7724 | "registered!", cls->data()->ro()->getName()); |
b3962a83 A |
7725 | return; |
7726 | } | |
7727 | ||
8972963c | 7728 | if (!(cls->data()->flags & RW_CONSTRUCTING) || |
7257e56c | 7729 | !(cls->ISA()->data()->flags & RW_CONSTRUCTING)) |
b3962a83 A |
7730 | { |
7731 | _objc_inform("objc_registerClassPair: class '%s' was not " | |
7732 | "allocated with objc_allocateClassPair!", | |
34d5b5e8 | 7733 | cls->data()->ro()->getName()); |
b3962a83 A |
7734 | return; |
7735 | } | |
7736 | ||
b3962a83 | 7737 | // Clear "under construction" bit, set "done constructing" bit |
8070259c A |
7738 | cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING); |
7739 | cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING); | |
b3962a83 | 7740 | |
c1e772c4 | 7741 | // Add to named class table. |
34d5b5e8 | 7742 | addNamedClass(cls, cls->data()->ro()->getName()); |
b3962a83 A |
7743 | } |
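/* Editor's illustration (not part of this file): the usual allocate/populate/
 * register sequence for a runtime-built class. DoSomethingIMP is hypothetical.
 *
 *   Class cls = objc_allocateClassPair(objc_getClass("NSObject"),
 *                                      "MyDynamicClass", 0);
 *   if (cls) {
 *       // Methods, ivars, protocols and properties may only be added while
 *       // the pair is still under construction:
 *       class_addMethod(cls, sel_registerName("doSomething"),
 *                       (IMP)DoSomethingIMP, "v@:");
 *       objc_registerClassPair(cls);   // marks CONSTRUCTED, adds to name table
 *   }
 *   // An unused pair can later be torn down with objc_disposeClassPair(cls).
 */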
7744 | ||
7745 | ||
8070259c A |
7746 | /*********************************************************************** |
7747 | * objc_readClassPair() | |
7748 | * Read a class and metaclass as written by a compiler. | |
7749 | * Assumes the class and metaclass are not referenced by other things | |
7750 | * that might need to be fixed up (such as categories and subclasses). | |
7751 | * Does not call +load. | |
7752 | * Returns the class pointer, or nil. | |
7753 | * | |
7754 | * Locking: runtimeLock acquired by map_images | |
7755 | **********************************************************************/ | |
7756 | Class objc_readClassPair(Class bits, const struct objc_image_info *info) | |
7757 | { | |
66799735 | 7758 | mutex_locker_t lock(runtimeLock); |
8070259c A |
7759 | |
7760 | // No info bits are significant yet. | |
7761 | (void)info; | |
7762 | ||
8070259c | 7763 | // Fail if the superclass isn't kosher. |
8070259c | 7764 | bool rootOK = bits->data()->flags & RO_ROOT; |
34d5b5e8 | 7765 | if (!verifySuperclass(bits->getSuperclass(), rootOK)){ |
8070259c A |
7766 | return nil; |
7767 | } | |
7768 | ||
66799735 A |
7769 | // Duplicate classes are allowed, just like they are for image loading. |
7770 | // readClass will complain about the duplicate. | |
7771 | ||
8070259c A |
7772 | Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/); |
7773 | if (cls != bits) { | |
7774 | // This function isn't allowed to remap anything. | |
7775 | _objc_fatal("objc_readClassPair for class %s changed %p to %p", | |
7776 | cls->nameForLogging(), bits, cls); | |
7777 | } | |
13ba007e A |
7778 | |
7779 | // The only client of this function is old Swift. | |
7780 | // Stable Swift won't use it. | |
7781 | // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()). | |
1807f628 | 7782 | cls = realizeClassWithoutSwift(cls, nil); |
8070259c A |
7783 | |
7784 | return cls; | |
7785 | } | |
7786 | ||
7787 | ||
cd5f04f5 A |
7788 | /*********************************************************************** |
7789 | * detach_class | |
7790 | * Disconnect a class from other data structures. | |
7791 | * Exception: does not remove the class from the +load list | |
7792 | * Call this before free_class. | |
7793 | * Locking: runtimeLock must be held by the caller. | |
7794 | **********************************************************************/ | |
31875a97 | 7795 | static void detach_class(Class cls, bool isMeta) |
b3962a83 | 7796 | { |
66799735 | 7797 | runtimeLock.assertLocked(); |
b3962a83 | 7798 | |
7af964d1 | 7799 | // categories not yet attached to this class |
1807f628 | 7800 | objc::unattachedCategories.eraseClass(cls); |
b3962a83 | 7801 | |
cd5f04f5 | 7802 | // superclass's subclass list |
7257e56c | 7803 | if (cls->isRealized()) { |
34d5b5e8 | 7804 | Class supercls = cls->getSuperclass(); |
cd5f04f5 A |
7805 | if (supercls) { |
7806 | removeSubclass(supercls, cls); | |
c1e772c4 A |
7807 | } else { |
7808 | removeRootClass(cls); | |
cd5f04f5 A |
7809 | } |
7810 | } | |
7811 | ||
7af964d1 A |
7812 | // class tables and +load queue |
7813 | if (!isMeta) { | |
8070259c | 7814 | removeNamedClass(cls, cls->mangledName()); |
b3962a83 | 7815 | } |
1807f628 | 7816 | objc::allocatedClasses.get().erase(cls); |
cd5f04f5 | 7817 | } |
b3962a83 | 7818 | |
b3962a83 | 7819 | |
cd5f04f5 A |
7820 | /*********************************************************************** |
7821 | * free_class | |
7822 | * Frees a class's data structures. | |
7823 | * Call this after detach_class. | |
7824 | * Locking: runtimeLock must be held by the caller | |
7825 | **********************************************************************/ | |
7257e56c | 7826 | static void free_class(Class cls) |
cd5f04f5 | 7827 | { |
66799735 | 7828 | runtimeLock.assertLocked(); |
b3962a83 | 7829 | |
7257e56c | 7830 | if (! cls->isRealized()) return; |
7af964d1 | 7831 | |
31875a97 | 7832 | auto rw = cls->data(); |
f192a3e2 A |
7833 | auto rwe = rw->ext(); |
7834 | auto ro = rw->ro(); | |
7af964d1 | 7835 | |
34d5b5e8 | 7836 | cls->cache.destroy(); |
f192a3e2 A |
7837 | |
7838 | if (rwe) { | |
7839 | for (auto& meth : rwe->methods) { | |
bc4fafce | 7840 | try_free(meth.types()); |
f192a3e2 A |
7841 | } |
7842 | rwe->methods.tryFree(); | |
cd5f04f5 | 7843 | } |
cd5f04f5 | 7844 | |
31875a97 A |
7845 | const ivar_list_t *ivars = ro->ivars; |
7846 | if (ivars) { | |
7847 | for (auto& ivar : *ivars) { | |
7848 | try_free(ivar.offset); | |
7849 | try_free(ivar.name); | |
7850 | try_free(ivar.type); | |
8972963c | 7851 | } |
31875a97 A |
7852 | try_free(ivars); |
7853 | } | |
7854 | ||
f192a3e2 A |
7855 | if (rwe) { |
7856 | for (auto& prop : rwe->properties) { | |
7857 | try_free(prop.name); | |
7858 | try_free(prop.attributes); | |
7859 | } | |
7860 | rwe->properties.tryFree(); | |
31875a97 | 7861 | |
f192a3e2 A |
7862 | rwe->protocols.tryFree(); |
7863 | } | |
cd5f04f5 | 7864 | |
34d5b5e8 | 7865 | try_free(ro->getIvarLayout()); |
31875a97 | 7866 | try_free(ro->weakIvarLayout); |
34d5b5e8 | 7867 | try_free(ro->getName()); |
31875a97 | 7868 | try_free(ro); |
f192a3e2 A |
7869 | objc::zfree(rwe); |
7870 | objc::zfree(rw); | |
cd5f04f5 | 7871 | try_free(cls); |
b3962a83 A |
7872 | } |
7873 | ||
cd5f04f5 | 7874 | |
7257e56c | 7875 | void objc_disposeClassPair(Class cls) |
b3962a83 | 7876 | { |
66799735 | 7877 | mutex_locker_t lock(runtimeLock); |
b3962a83 | 7878 | |
66799735 A |
7879 | checkIsKnownClass(cls); |
7880 | ||
7881 | if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) || | |
7257e56c | 7882 | !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING))) |
b3962a83 A |
7883 | { |
7884 | // class not allocated with objc_allocateClassPair | |
7885 | // disposing still-unregistered class is OK! | |
7886 | _objc_inform("objc_disposeClassPair: class '%s' was not " | |
7887 | "allocated with objc_allocateClassPair!", | |
34d5b5e8 | 7888 | cls->data()->ro()->getName()); |
b3962a83 A |
7889 | return; |
7890 | } | |
7891 | ||
7257e56c | 7892 | if (cls->isMetaClass()) { |
b3962a83 | 7893 | _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, " |
34d5b5e8 | 7894 | "not a class!", cls->data()->ro()->getName()); |
b3962a83 A |
7895 | return; |
7896 | } | |
7897 | ||
b3962a83 | 7898 | // Shouldn't have any live subclasses. |
8972963c | 7899 | if (cls->data()->firstSubclass) { |
b3962a83 | 7900 | _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, " |
34d5b5e8 | 7901 | "including '%s'!", cls->data()->ro()->getName(), |
8070259c | 7902 | cls->data()->firstSubclass->nameForLogging()); |
b3962a83 | 7903 | } |
7257e56c | 7904 | if (cls->ISA()->data()->firstSubclass) { |
b3962a83 | 7905 | _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, " |
34d5b5e8 | 7906 | "including '%s'!", cls->data()->ro()->getName(), |
8070259c | 7907 | cls->ISA()->data()->firstSubclass->nameForLogging()); |
b3962a83 A |
7908 | } |
7909 | ||
7af964d1 A |
7910 | // don't remove_class_from_loadable_list() |
7911 | // - it's not there and we don't have the lock | |
7257e56c | 7912 | detach_class(cls->ISA(), YES); |
cd5f04f5 | 7913 | detach_class(cls, NO); |
7257e56c | 7914 | free_class(cls->ISA()); |
cd5f04f5 | 7915 | free_class(cls); |
b3962a83 A |
7916 | } |
7917 | ||
7918 | ||
8070259c A |
7919 | /*********************************************************************** |
7920 | * objc_constructInstance | |
7921 | * Creates an instance of `cls` at the location pointed to by `bytes`. | |
7922 | * `bytes` must point to at least class_getInstanceSize(cls) bytes of | |
7923 | * well-aligned zero-filled memory. | |
7924 | * The new object's isa is set. Any C++ constructors are called. | |
7925 | * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is | |
7926 | * nil, or if C++ constructors fail. | |
7927 | * Note: class_createInstance() and class_createInstances() preflight this. | |
7928 | **********************************************************************/ | |
7929 | id | |
1807f628 | 7930 | objc_constructInstance(Class cls, void *bytes) |
8070259c A |
7931 | { |
7932 | if (!cls || !bytes) return nil; | |
7933 | ||
7934 | id obj = (id)bytes; | |
7935 | ||
7936 | // Read class's info bits all at once for performance | |
7937 | bool hasCxxCtor = cls->hasCxxCtor(); | |
7938 | bool hasCxxDtor = cls->hasCxxDtor(); | |
c1e772c4 | 7939 | bool fast = cls->canAllocNonpointer(); |
8070259c | 7940 | |
c1e772c4 | 7941 | if (fast) { |
8070259c A |
7942 | obj->initInstanceIsa(cls, hasCxxDtor); |
7943 | } else { | |
7944 | obj->initIsa(cls); | |
7945 | } | |
7946 | ||
7947 | if (hasCxxCtor) { | |
1807f628 | 7948 | return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE); |
8070259c A |
7949 | } else { |
7950 | return obj; | |
7951 | } | |
7952 | } | |
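/* Editor's illustration (not part of this file): constructing and destroying
 * an instance in caller-provided memory. calloc() satisfies the zero-filled,
 * well-aligned requirement on Apple platforms. "MyClass" is hypothetical.
 *
 *   Class cls = objc_getClass("MyClass");
 *   void *bytes = calloc(1, class_getInstanceSize(cls));
 *   id obj = objc_constructInstance(cls, bytes);   // sets isa, runs C++ ctors
 *   // ... use obj ...
 *   objc_destructInstance(obj);                    // C++ dtors, assoc refs, weak refs
 *   free(bytes);
 */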
7953 | ||
7954 | ||
b3962a83 | 7955 | /*********************************************************************** |
8972963c | 7956 | * class_createInstance |
b3962a83 A |
7957 | * fixme |
7958 | * Locking: none | |
1807f628 A |
7959 | * |
7960 | * Note: this function has been carefully written so that the fastpath | |
7961 | * takes no branch. | |
b3962a83 | 7962 | **********************************************************************/ |
1807f628 A |
7963 | static ALWAYS_INLINE id |
7964 | _class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone, | |
7965 | int construct_flags = OBJECT_CONSTRUCT_NONE, | |
7966 | bool cxxConstruct = true, | |
31875a97 | 7967 | size_t *outAllocatedSize = nil) |
b3962a83 | 7968 | { |
1807f628 | 7969 | ASSERT(cls->isRealized()); |
8972963c | 7970 | |
8070259c | 7971 | // Read class's info bits all at once for performance |
1807f628 | 7972 | bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor(); |
8070259c | 7973 | bool hasCxxDtor = cls->hasCxxDtor(); |
c1e772c4 | 7974 | bool fast = cls->canAllocNonpointer(); |
1807f628 | 7975 | size_t size; |
8972963c | 7976 | |
1807f628 | 7977 | size = cls->instanceSize(extraBytes); |
31875a97 | 7978 | if (outAllocatedSize) *outAllocatedSize = size; |
8972963c A |
7979 | |
7980 | id obj; | |
1807f628 A |
7981 | if (zone) { |
7982 | obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size); | |
7983 | } else { | |
8070259c | 7984 | obj = (id)calloc(1, size); |
1807f628 A |
7985 | } |
7986 | if (slowpath(!obj)) { | |
7987 | if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) { | |
7988 | return _objc_callBadAllocHandler(cls); | |
8070259c | 7989 | } |
1807f628 A |
7990 | return nil; |
7991 | } | |
8972963c | 7992 | |
1807f628 A |
7993 | if (!zone && fast) { |
7994 | obj->initInstanceIsa(cls, hasCxxDtor); | |
7995 | } else { | |
7996 | // Use raw pointer isa on the assumption that they might be | |
8070259c A |
7997 | // doing something weird with the zone or RR. |
7998 | obj->initIsa(cls); | |
7999 | } | |
8972963c | 8000 | |
1807f628 A |
8001 | if (fastpath(!hasCxxCtor)) { |
8002 | return obj; | |
8972963c A |
8003 | } |
8004 | ||
1807f628 A |
8005 | construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE; |
8006 | return object_cxxConstructFromClass(obj, cls, construct_flags); | |
b3962a83 A |
8007 | } |
8008 | ||
1807f628 | 8009 | id |
b3962a83 A |
8010 | class_createInstance(Class cls, size_t extraBytes) |
8011 | { | |
1807f628 | 8012 | if (!cls) return nil; |
7257e56c | 8013 | return _class_createInstanceFromZone(cls, extraBytes, nil); |
b3962a83 A |
8014 | } |
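/* Editor's illustration (not part of this file): requesting extra bytes after
 * the declared ivars and reaching them with object_getIndexedIvars().
 *
 *   id obj = class_createInstance(cls, 16);        // 16 extra bytes, zero-filled
 *   void *extra = object_getIndexedIvars(obj);     // points at the extra bytes
 *   // ... use extra ...
 *   object_dispose(obj);
 */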
8015 | ||
1807f628 A |
8016 | NEVER_INLINE |
8017 | id | |
8018 | _objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused) | |
8019 | { | |
8020 | // allocWithZone under __OBJC2__ ignores the zone parameter | |
8021 | return _class_createInstanceFromZone(cls, 0, nil, | |
8022 | OBJECT_CONSTRUCT_CALL_BADALLOC); | |
8023 | } | |
8070259c | 8024 | |
8972963c A |
8025 | /*********************************************************************** |
8026 | * class_createInstances | |
8027 | * fixme | |
8028 | * Locking: none | |
8029 | **********************************************************************/ | |
8070259c A |
8030 | #if SUPPORT_NONPOINTER_ISA |
8031 | #warning fixme optimize class_createInstances | |
8032 | #endif | |
8972963c A |
8033 | unsigned |
8034 | class_createInstances(Class cls, size_t extraBytes, | |
8035 | id *results, unsigned num_requested) | |
8036 | { | |
7257e56c | 8037 | return _class_createInstancesFromZone(cls, extraBytes, nil, |
8972963c A |
8038 | results, num_requested); |
8039 | } | |
8040 | ||
b3962a83 A |
8041 | /*********************************************************************** |
8042 | * object_copyFromZone | |
8043 | * fixme | |
8044 | * Locking: none | |
8045 | **********************************************************************/ | |
8972963c A |
8046 | static id |
8047 | _object_copyFromZone(id oldObj, size_t extraBytes, void *zone) | |
b3962a83 | 8048 | { |
34d5b5e8 | 8049 | if (oldObj->isTaggedPointerOrNil()) return oldObj; |
b3962a83 | 8050 | |
31875a97 A |
8051 | // fixme this doesn't handle C++ ivars correctly (#4619414) |
8052 | ||
34d5b5e8 | 8053 | Class cls = oldObj->ISA(/*authenticated*/true); |
31875a97 | 8054 | size_t size; |
1807f628 A |
8055 | id obj = _class_createInstanceFromZone(cls, extraBytes, zone, |
8056 | OBJECT_CONSTRUCT_NONE, false, &size); | |
b3962a83 A |
8057 | if (!obj) return nil; |
8058 | ||
31875a97 A |
8059 | // Copy everything except the isa, which was already set above. |
8060 | uint8_t *copyDst = (uint8_t *)obj + sizeof(Class); | |
8061 | uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class); | |
8062 | size_t copySize = size - sizeof(Class); | |
31875a97 | 8063 | memmove(copyDst, copySrc, copySize); |
7af964d1 | 8064 | |
c1e772c4 | 8065 | fixupCopiedIvars(obj, oldObj); |
b3962a83 A |
8066 | |
8067 | return obj; | |
8068 | } | |
8069 | ||
8070 | ||
8071 | /*********************************************************************** | |
8072 | * object_copy | |
8073 | * fixme | |
8074 | * Locking: none | |
8075 | **********************************************************************/ | |
8076 | id | |
8077 | object_copy(id oldObj, size_t extraBytes) | |
8078 | { | |
8972963c A |
8079 | return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone()); |
8080 | } | |


#if SUPPORT_ZONES

/***********************************************************************
* class_createInstanceFromZone
* fixme
* Locking: none
**********************************************************************/
id
class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    if (!cls) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, zone);
}

/***********************************************************************
* object_copyFromZone
* fixme
* Locking: none
**********************************************************************/
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    return _object_copyFromZone(oldObj, extraBytes, zone);
}

#endif


/***********************************************************************
* objc_destructInstance
* Destroys an instance without freeing memory.
* Calls C++ destructors.
* Calls ARC ivar cleanup.
* Removes associative references.
* Returns `obj`. Does nothing if `obj` is nil.
**********************************************************************/
void *objc_destructInstance(id obj)
{
    if (obj) {
        // Read all of the flags at once for performance.
        bool cxx = obj->hasCxxDtor();
        bool assoc = obj->hasAssociatedObjects();

        // This order is important.
        if (cxx) object_cxxDestruct(obj);
        if (assoc) _object_remove_assocations(obj, /*deallocating*/true);
        obj->clearDeallocating();
    }

    return obj;
}
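// Illustrative sketch (not part of the runtime): objc_destructInstance() is
// the "tear down but don't free" half of object_dispose(), useful when the
// object's memory is owned by the caller, e.g. instances built in place with
// objc_constructInstance().  The helper name below is hypothetical; note that
// objc_constructInstance() is not available to ARC code.
#if 0
static void examplePlacementLifecycle(Class cls)
{
    void *bytes = calloc(1, class_getInstanceSize(cls));
    id obj = objc_constructInstance(cls, bytes);  // sets isa, runs C++ constructors
    // ... use obj ...
    objc_destructInstance(obj);  // C++ dtors, associations, weak refs -- no free()
    free(bytes);                 // the caller still owns the memory
}
#endif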


/***********************************************************************
* object_dispose
* fixme
* Locking: none
**********************************************************************/
id
object_dispose(id obj)
{
    if (!obj) return nil;

    objc_destructInstance(obj);
    free(obj);

    return nil;
}


/***********************************************************************
* _objc_getFreedObjectClass
* fixme
* Locking: none
**********************************************************************/
Class _objc_getFreedObjectClass (void)
{
    return nil;
}


/***********************************************************************
* Tagged pointer objects.
*
* Tagged pointer objects store the class and the object value in the
* object pointer; the "pointer" does not actually point to anything.
*
* Tagged pointer objects currently use this representation:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  tag index
* 60 bits  payload
* (MSB)
* The tag index defines the object's class.
* The payload format is defined by the object's class.
*
* If the tag index is 0b111, the tagged pointer object uses an
* "extended" representation, allowing more classes but with smaller payloads:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  0b111
*  8 bits  extended tag index
* 52 bits  payload
* (MSB)
*
* Some architectures reverse the MSB and LSB in these representations.
*
* This representation is subject to change. Representation-agnostic SPI is:
* objc-internal.h for class implementers.
* objc-gdb.h for debuggers.
**********************************************************************/
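// Illustrative sketch (not part of the runtime): decoding the basic 60-bit
// representation described above, assuming the LSB-tagged layout and no
// obfuscation.  Real clients should use the representation-agnostic SPI in
// objc-internal.h (_objc_isTaggedPointer, _objc_getTaggedPointerTag,
// _objc_getTaggedPointerValue) rather than open-coding the bit layout.
#if 0
static bool exampleDecodeBasicTag(uintptr_t bits, unsigned *outTag, uintptr_t *outPayload)
{
    if ((bits & 1) == 0) return false;       // low bit clear: ordinary pointer
    *outTag = (unsigned)((bits >> 1) & 0x7);  // 3-bit tag index selects the class
    *outPayload = bits >> 4;                  // remaining 60 bits are the payload
    return true;
}
#endif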
#if !SUPPORT_TAGGED_POINTERS

// These variables are always provided for debuggers.
uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned  objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
unsigned  objc_debug_taggedpointer_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_payload_rshift = 0;
Class objc_debug_taggedpointer_classes[1] = { nil };

uintptr_t objc_debug_taggedpointer_ext_mask = 0;
unsigned  objc_debug_taggedpointer_ext_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = 0;
Class objc_debug_taggedpointer_ext_classes[1] = { nil };

uintptr_t objc_debug_constant_cfstring_tag_bits = 0;

static void
disableTaggedPointers() { }

static void
initializeTaggedPointerObfuscator(void) { }

#else

// The "slot" used in the class table and given to the debugger
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.

uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned  objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
unsigned  objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_classes is defined in objc-msg-*.s

uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
unsigned  objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s

#if OBJC_SPLIT_TAGGED_POINTERS
uint8_t objc_debug_tag60_permutations[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
uintptr_t objc_debug_constant_cfstring_tag_bits = _OBJC_TAG_EXT_MASK | ((uintptr_t)(OBJC_TAG_Constant_CFString - OBJC_TAG_First52BitPayload) << _OBJC_TAG_EXT_SLOT_SHIFT);
#else
uintptr_t objc_debug_constant_cfstring_tag_bits = 0;
#endif

static void
disableTaggedPointers()
{
    objc_debug_taggedpointer_mask = 0;
    objc_debug_taggedpointer_slot_shift = 0;
    objc_debug_taggedpointer_slot_mask = 0;
    objc_debug_taggedpointer_payload_lshift = 0;
    objc_debug_taggedpointer_payload_rshift = 0;

    objc_debug_taggedpointer_ext_mask = 0;
    objc_debug_taggedpointer_ext_slot_shift = 0;
    objc_debug_taggedpointer_ext_slot_mask = 0;
    objc_debug_taggedpointer_ext_payload_lshift = 0;
    objc_debug_taggedpointer_ext_payload_rshift = 0;
}


// Returns a pointer to the class's storage in the tagged class arrays.
// Assumes the tag is a valid basic tag.
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
#if OBJC_SPLIT_TAGGED_POINTERS
    uintptr_t obfuscatedTag = _objc_basicTagToObfuscatedTag(tag);
    return &objc_tag_classes[obfuscatedTag];
#else
    uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                >> _OBJC_TAG_INDEX_SHIFT)
                               & _OBJC_TAG_INDEX_MASK);
    uintptr_t obfuscatedTag = tag ^ tagObfuscator;

    // Array index in objc_tag_classes includes the tagged bit itself
# if SUPPORT_MSB_TAGGED_POINTERS
    return &objc_tag_classes[0x8 | obfuscatedTag];
# else
    return &objc_tag_classes[(obfuscatedTag << 1) | 1];
# endif
#endif
}


// Returns a pointer to the class's storage in the tagged class arrays,
// or nil if the tag is out of range.
static Class *
classSlotForTagIndex(objc_tag_index_t tag)
{
    if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
        return classSlotForBasicTagIndex(tag);
    }

    if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
        int index = tag - OBJC_TAG_First52BitPayload;
#if OBJC_SPLIT_TAGGED_POINTERS
        if (tag >= OBJC_TAG_FirstUnobfuscatedSplitTag)
            return &objc_tag_ext_classes[index];
#endif
        uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                    >> _OBJC_TAG_EXT_INDEX_SHIFT)
                                   & _OBJC_TAG_EXT_INDEX_MASK);
        return &objc_tag_ext_classes[index ^ tagObfuscator];
    }

    return nil;
}

/***********************************************************************
* initializeTaggedPointerObfuscator
* Initialize objc_debug_taggedpointer_obfuscator with randomness.
*
* The tagged pointer obfuscator is intended to make it more difficult
* for an attacker to construct a particular object as a tagged pointer,
* in the presence of a buffer overflow or other write control over some
* memory. The obfuscator is XORed with the tagged pointers when setting
* or retrieving payload values. It is filled with randomness on first
* use.
**********************************************************************/
static void
initializeTaggedPointerObfuscator(void)
{
    if (!DisableTaggedPointerObfuscation && dyld_program_sdk_at_least(dyld_fall_2018_os_versions)) {
        // Pull random data into the variable, then shift away all non-payload bits.
        arc4random_buf(&objc_debug_taggedpointer_obfuscator,
                       sizeof(objc_debug_taggedpointer_obfuscator));
        objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;

#if OBJC_SPLIT_TAGGED_POINTERS
        // The obfuscator doesn't apply to any of the extended tag mask or the no-obfuscation bit.
        objc_debug_taggedpointer_obfuscator &= ~(_OBJC_TAG_EXT_MASK | _OBJC_TAG_NO_OBFUSCATION_MASK);

        // Shuffle the first seven entries of the tag permutator.
        int max = 7;
        for (int i = max - 1; i >= 0; i--) {
            int target = arc4random_uniform(i + 1);
            swap(objc_debug_tag60_permutations[i],
                 objc_debug_tag60_permutations[target]);
        }
#endif
    } else {
        // Set the obfuscator to zero for apps linked against older SDKs,
        // in case they're relying on the tagged pointer representation.
        objc_debug_taggedpointer_obfuscator = 0;
    }
}
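// Illustrative sketch (not part of the runtime): the obfuscator is applied as
// a plain XOR, so the same operation both encodes and decodes a tagged
// pointer's bits.  The real encode/decode paths are the inline helpers in
// objc-internal.h (_objc_makeTaggedPointer / _objc_getTaggedPointerValue);
// this only shows the step described in the comment above, assuming the
// non-split representation.
#if 0
static uintptr_t exampleApplyObfuscator(uintptr_t taggedBits)
{
    // XOR is its own inverse: applying this twice returns the original bits.
    return taggedBits ^ objc_debug_taggedpointer_obfuscator;
}
#endif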


/***********************************************************************
* _objc_registerTaggedPointerClass
* Set the class to use for the given tagged pointer index.
* Aborts if the tag is out of range, or if the tag is already
* used by some other class.
**********************************************************************/
void
_objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
{
    if (objc_debug_taggedpointer_mask == 0) {
        _objc_fatal("tagged pointers are disabled");
    }

    Class *slot = classSlotForTagIndex(tag);
    if (!slot) {
        _objc_fatal("tag index %u is invalid", (unsigned int)tag);
    }

    Class oldCls = *slot;

    if (cls && oldCls && cls != oldCls) {
        _objc_fatal("tag index %u used for two different classes "
                    "(was %p %s, now %p %s)", tag,
                    oldCls, oldCls->nameForLogging(),
                    cls, cls->nameForLogging());
    }

    *slot = cls;

    // Store a placeholder class in the basic tag slot that is
    // reserved for the extended tag space, if it isn't set already.
    // Do this lazily when the first extended tag is registered so
    // that old debuggers characterize bogus pointers correctly more often.
    if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
        Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
        if (*extSlot == nil) {
            extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
            *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
        }
    }
}
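// Illustrative sketch (not part of the runtime): how a Foundation-style class
// could claim a tag at load time and then mint payload-only instances.  The
// tag choice and helper name are hypothetical; the SPI calls are the ones
// declared in objc-internal.h.
#if 0
static void exampleAdoptTag(Class cls)
{
    _objc_registerTaggedPointerClass(OBJC_TAG_NSDate, cls);

    // Later, an instance can live entirely inside the pointer:
    uintptr_t payload = 42;
    id obj = (id)_objc_makeTaggedPointer(OBJC_TAG_NSDate, payload);
    (void)obj;  // _objc_getTaggedPointerValue(obj) would recover the payload
}
#endif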


/***********************************************************************
* _objc_getClassForTag
* Returns the class that is using the given tagged pointer tag.
* Returns nil if no class is using that tag or the tag is out of range.
**********************************************************************/
Class
_objc_getClassForTag(objc_tag_index_t tag)
{
    Class *slot = classSlotForTagIndex(tag);
    if (slot) return *slot;
    else return nil;
}

#endif


#if SUPPORT_FIXUP

OBJC_EXTERN void objc_msgSend_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
OBJC_EXTERN void objc_msgSend_stret_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
#endif

OBJC_EXTERN void objc_msgSend_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
#endif

/***********************************************************************
* fixupMessageRef
* Repairs an old vtable dispatch call site.
* vtable dispatch itself is not supported.
**********************************************************************/
static void
fixupMessageRef(message_ref_t *msg)
{
    msg->sel = sel_registerName((const char *)msg->sel);

    if (msg->imp == &objc_msgSend_fixup) {
        if (msg->sel == @selector(alloc)) {
            msg->imp = (IMP)&objc_alloc;
        } else if (msg->sel == @selector(allocWithZone:)) {
            msg->imp = (IMP)&objc_allocWithZone;
        } else if (msg->sel == @selector(retain)) {
            msg->imp = (IMP)&objc_retain;
        } else if (msg->sel == @selector(release)) {
            msg->imp = (IMP)&objc_release;
        } else if (msg->sel == @selector(autorelease)) {
            msg->imp = (IMP)&objc_autorelease;
        } else {
            msg->imp = &objc_msgSend_fixedup;
        }
    }
    else if (msg->imp == &objc_msgSendSuper2_fixup) {
        msg->imp = &objc_msgSendSuper2_fixedup;
    }
    else if (msg->imp == &objc_msgSend_stret_fixup) {
        msg->imp = &objc_msgSend_stret_fixedup;
    }
    else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
        msg->imp = &objc_msgSendSuper2_stret_fixedup;
    }
#if defined(__i386__) || defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fpret_fixup) {
        msg->imp = &objc_msgSend_fpret_fixedup;
    }
#endif
#if defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
        msg->imp = &objc_msgSend_fp2ret_fixedup;
    }
#endif
}

// SUPPORT_FIXUP
#endif


// ProKit SPI
static Class setSuperclass(Class cls, Class newSuper)
{
    Class oldSuper;

    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    ASSERT(newSuper->isRealized());

    oldSuper = cls->getSuperclass();
    removeSubclass(oldSuper, cls);
    removeSubclass(oldSuper->ISA(), cls->ISA());

    cls->setSuperclass(newSuper);
    cls->ISA()->setSuperclass(newSuper->ISA(/*authenticated*/true));
    addSubclass(newSuper, cls);
    addSubclass(newSuper->ISA(), cls->ISA());

    // Flush the method caches of cls, cls's metaclass, and all their subclasses.
    flushCaches(cls, __func__, [](Class c){ return true; });
    flushCaches(cls->ISA(), __func__, [](Class c){ return true; });

    return oldSuper;
}


Class class_setSuperclass(Class cls, Class newSuper)
{
    mutex_locker_t lock(runtimeLock);
    return setSuperclass(cls, newSuper);
}
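// Illustrative sketch (not part of the runtime): class_setSuperclass() is
// public API, though documented as fragile because existing instances and
// subclasses keep the ivar layout computed against the old superclass.  The
// helper name below is hypothetical.
#if 0
static Class exampleReparent(Class cls, Class newSuper)
{
    // Rewires cls and its metaclass onto the new superclass pair, flushes
    // both method caches, and returns the previous superclass.
    return class_setSuperclass(cls, newSuper);
}
#endif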

void runtime_init(void)
{
    objc::unattachedCategories.init(32);
    objc::allocatedClasses.init();
}

// __OBJC2__
#endif