2 * Copyright (c) 2005-2007 Apple Inc. All Rights Reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
24 #ifndef _OBJC_RUNTIME_NEW_H
25 #define _OBJC_RUNTIME_NEW_H
28 typedef uint32_t mask_t
; // x86_64 & arm64 asm are less efficient with 16-bits
30 typedef uint16_t mask_t
;
32 typedef uintptr_t SEL
;
// Atomicity flag: Atomic maps to true, NotAtomic to false.
36 enum Atomicity
{ Atomic
= true, NotAtomic
= false };
40 // IMP-first is better for arm64e ptrauth and no worse for arm64.
41 // SEL-first is better for armv7* and i386 and x86_64.
50 // Compute the ptrauth signing modifier from &_imp and newSel
51 uintptr_t modifierForSEL(SEL newSel
) const {
52 return (uintptr_t)&_imp
^ (uintptr_t)newSel
;
55 // Sign newImp, with &_imp and newSel as modifiers.
56 uintptr_t signIMP(IMP newImp
, SEL newSel
) const {
57 if (!newImp
) return 0;
59 ptrauth_auth_and_resign(newImp
,
60 ptrauth_key_function_pointer
, 0,
61 ptrauth_key_process_dependent_code
,
62 modifierForSEL(newSel
));
// Accessor: returns the stored _sel member unchanged.
66 inline SEL
sel() const { return _sel
; }
68 inline IMP
imp() const {
69 if (!_imp
) return nil
;
71 ptrauth_auth_and_resign((const void *)_imp
,
72 ptrauth_key_process_dependent_code
,
74 ptrauth_key_function_pointer
, 0);
78 void set(SEL newSel
, IMP newImp
);
83 struct bucket_t
*_buckets
;
88 struct bucket_t
*buckets();
91 void incrementOccupied();
92 void setBucketsAndMask(struct bucket_t
*newBuckets
, mask_t newMask
);
93 void initializeToEmpty();
96 bool isConstantEmptyCache();
99 static size_t bytesForCapacity(uint32_t cap
);
100 static struct bucket_t
* endMarker(struct bucket_t
*b
, uint32_t cap
);
103 void reallocate(mask_t oldCapacity
, mask_t newCapacity
);
104 struct bucket_t
* find(SEL sel
, id receiver
);
106 static void bad_cache(id receiver
, SEL sel
, Class isa
) __attribute__((noreturn
));
110 // classref_t is unremapped class_t*
111 typedef struct classref
* classref_t
;
113 /***********************************************************************
114 * entsize_list_tt<Element, List, FlagMask>
115 * Generic implementation of an array of non-fragile structs.
117 * Element is the struct type (e.g. method_t)
118 * List is the specialization of entsize_list_tt (e.g. method_list_t)
119 * FlagMask is used to stash extra bits in the entsize field
120 * (e.g. method list fixup markers)
121 **********************************************************************/
122 template <typename Element
, typename List
, uint32_t FlagMask
>
123 struct entsize_list_tt
{
124 uint32_t entsizeAndFlags
;
128 uint32_t entsize() const {
129 return entsizeAndFlags
& ~FlagMask
;
131 uint32_t flags() const {
132 return entsizeAndFlags
& FlagMask
;
135 Element
& getOrEnd(uint32_t i
) const {
137 return *(Element
*)((uint8_t *)&first
+ i
*entsize());
139 Element
& get(uint32_t i
) const {
144 size_t byteSize() const {
145 return byteSize(entsize(), count
);
148 static size_t byteSize(uint32_t entsize
, uint32_t count
) {
149 return sizeof(entsize_list_tt
) + (count
-1)*entsize
;
152 List
*duplicate() const {
153 auto *dup
= (List
*)calloc(this->byteSize(), 1);
154 dup
->entsizeAndFlags
= this->entsizeAndFlags
;
155 dup
->count
= this->count
;
156 std::copy(begin(), end(), dup
->begin());
161 const iterator
begin() const {
162 return iterator(*static_cast<const List
*>(this), 0);
165 return iterator(*static_cast<const List
*>(this), 0);
167 const iterator
end() const {
168 return iterator(*static_cast<const List
*>(this), count
);
171 return iterator(*static_cast<const List
*>(this), count
);
176 uint32_t index
; // keeping track of this saves a divide in operator-
179 typedef std::random_access_iterator_tag iterator_category
;
180 typedef Element value_type
;
181 typedef ptrdiff_t difference_type
;
182 typedef Element
* pointer
;
183 typedef Element
& reference
;
187 iterator(const List
& list
, uint32_t start
= 0)
188 : entsize(list
.entsize())
190 , element(&list
.getOrEnd(start
))
193 const iterator
& operator += (ptrdiff_t delta
) {
194 element
= (Element
*)((uint8_t *)element
+ delta
*entsize
);
195 index
+= (int32_t)delta
;
198 const iterator
& operator -= (ptrdiff_t delta
) {
199 element
= (Element
*)((uint8_t *)element
- delta
*entsize
);
200 index
-= (int32_t)delta
;
203 const iterator
operator + (ptrdiff_t delta
) const {
204 return iterator(*this) += delta
;
206 const iterator
operator - (ptrdiff_t delta
) const {
207 return iterator(*this) -= delta
;
// Pre-increment: advances one element by delegating to operator+=, then returns *this.
210 iterator
& operator ++ () { *this += 1; return *this; }
// Pre-decrement: steps back one element by delegating to operator-=, then returns *this.
211 iterator
& operator -- () { *this -= 1; return *this; }
212 iterator
operator ++ (int) {
213 iterator
result(*this); *this += 1; return result
;
215 iterator
operator -- (int) {
216 iterator
result(*this); *this -= 1; return result
;
219 ptrdiff_t operator - (const iterator
& rhs
) const {
220 return (ptrdiff_t)this->index
- (ptrdiff_t)rhs
.index
;
// Dereference: yields a reference to the current element.
223 Element
& operator * () const { return *element
; }
// Member access: yields the raw element pointer.
224 Element
* operator -> () const { return element
; }
// Implicit conversion to Element&, equivalent to operator*.
226 operator Element
& () const { return *element
; }
228 bool operator == (const iterator
& rhs
) const {
229 return this->element
== rhs
.element
;
231 bool operator != (const iterator
& rhs
) const {
232 return this->element
!= rhs
.element
;
235 bool operator < (const iterator
& rhs
) const {
236 return this->element
< rhs
.element
;
238 bool operator > (const iterator
& rhs
) const {
239 return this->element
> rhs
.element
;
250 struct SortBySELAddress
:
251 public std::binary_function
<const method_t
&,
252 const method_t
&, bool>
254 bool operator() (const method_t
& lhs
,
256 { return lhs
.name
< rhs
.name
; }
262 // *offset was originally 64-bit on some x86_64 platforms.
263 // We read and write only 32 bits of it.
264 // Some metadata provides all 64 bits. This is harmless for unsigned
265 // little-endian values.
266 // Some code uses all 64 bits. class_addIvar() over-allocates the
267 // offset for their benefit.
272 // alignment is sometimes -1; use alignment() instead
273 uint32_t alignment_raw
;
276 uint32_t alignment() const {
277 if (alignment_raw
== ~(uint32_t)0) return 1U << WORD_SHIFT
;
278 return 1 << alignment_raw
;
284 const char *attributes
;
287 // Two bits of entsize are used for fixup markers.
288 struct method_list_t
: entsize_list_tt
<method_t
, method_list_t
, 0x3> {
289 bool isFixedUp() const;
292 uint32_t indexOfMethod(const method_t
*meth
) const {
294 (uint32_t)(((uintptr_t)meth
- (uintptr_t)this) / entsize());
300 struct ivar_list_t
: entsize_list_tt
<ivar_t
, ivar_list_t
, 0> {
301 bool containsIvar(Ivar ivar
) const {
302 return (ivar
>= (Ivar
)&*begin() && ivar
< (Ivar
)&*end());
306 struct property_list_t
: entsize_list_tt
<property_t
, property_list_t
, 0> {
310 typedef uintptr_t protocol_ref_t
; // protocol_t *, but unremapped
312 // Values for protocol_t->flags
313 #define PROTOCOL_FIXED_UP_2 (1<<31) // must never be set by compiler
314 #define PROTOCOL_FIXED_UP_1 (1<<30) // must never be set by compiler
315 // Bits 0..15 are reserved for Swift's use.
317 #define PROTOCOL_FIXED_UP_MASK (PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2)
319 struct protocol_t
: objc_object
{
320 const char *mangledName
;
321 struct protocol_list_t
*protocols
;
322 method_list_t
*instanceMethods
;
323 method_list_t
*classMethods
;
324 method_list_t
*optionalInstanceMethods
;
325 method_list_t
*optionalClassMethods
;
326 property_list_t
*instanceProperties
;
327 uint32_t size
; // sizeof(protocol_t)
329 // Fields below this point are not always present on disk.
330 const char **_extendedMethodTypes
;
331 const char *_demangledName
;
332 property_list_t
*_classProperties
;
334 const char *demangledName();
336 const char *nameForLogging() {
337 return demangledName();
340 bool isFixedUp() const;
343 # define HAS_FIELD(f) (size >= offsetof(protocol_t, f) + sizeof(f))
345 bool hasExtendedMethodTypesField() const {
346 return HAS_FIELD(_extendedMethodTypes
);
348 bool hasDemangledNameField() const {
349 return HAS_FIELD(_demangledName
);
351 bool hasClassPropertiesField() const {
352 return HAS_FIELD(_classProperties
);
357 const char **extendedMethodTypes() const {
358 return hasExtendedMethodTypesField() ? _extendedMethodTypes
: nil
;
361 property_list_t
*classProperties() const {
362 return hasClassPropertiesField() ? _classProperties
: nil
;
366 struct protocol_list_t
{
367 // count is 64-bit by accident.
369 protocol_ref_t list
[0]; // variable-size
371 size_t byteSize() const {
372 return sizeof(*this) + count
*sizeof(list
[0]);
375 protocol_list_t
*duplicate() const {
376 return (protocol_list_t
*)memdup(this, this->byteSize());
379 typedef protocol_ref_t
* iterator
;
380 typedef const protocol_ref_t
* const_iterator
;
382 const_iterator
begin() const {
388 const_iterator
end() const {
396 struct locstamped_category_t
{
398 struct header_info
*hi
;
401 struct locstamped_category_list_t
{
406 locstamped_category_t list
[0];
410 // class_data_bits_t is the class_t->data field (class_rw_t pointer plus flags)
411 // The extra bits are optimized for the retain/release and alloc/dealloc paths.
413 // Values for class_ro_t->flags
414 // These are emitted by the compiler and are part of the ABI.
415 // Note: See CGObjCNonFragileABIMac::BuildClassRoTInitializer in clang
416 // class is a metaclass
417 #define RO_META (1<<0)
418 // class is a root class
419 #define RO_ROOT (1<<1)
420 // class has .cxx_construct/destruct implementations
421 #define RO_HAS_CXX_STRUCTORS (1<<2)
422 // class has +load implementation
423 // #define RO_HAS_LOAD_METHOD (1<<3)
424 // class has visibility=hidden set
425 #define RO_HIDDEN (1<<4)
426 // class has attribute(objc_exception): OBJC_EHTYPE_$_ThisClass is non-weak
427 #define RO_EXCEPTION (1<<5)
428 // class has ro field for Swift metadata initializer callback
429 #define RO_HAS_SWIFT_INITIALIZER (1<<6)
430 // class compiled with ARC
431 #define RO_IS_ARC (1<<7)
432 // class has .cxx_destruct but no .cxx_construct (with RO_HAS_CXX_STRUCTORS)
433 #define RO_HAS_CXX_DTOR_ONLY (1<<8)
434 // class is not ARC but has ARC-style weak ivar layout
435 #define RO_HAS_WEAK_WITHOUT_ARC (1<<9)
436 // class does not allow associated objects on instances
437 #define RO_FORBIDS_ASSOCIATED_OBJECTS (1<<10)
439 // class is in an unloadable bundle - must never be set by compiler
440 #define RO_FROM_BUNDLE (1<<29)
441 // class is unrealized future class - must never be set by compiler
442 #define RO_FUTURE (1<<30)
443 // class is realized - must never be set by compiler
444 #define RO_REALIZED (1<<31)
446 // Values for class_rw_t->flags
447 // These are not emitted by the compiler and are never used in class_ro_t.
448 // Their presence should be considered in future ABI versions.
449 // class_t->data is class_rw_t, not class_ro_t
450 #define RW_REALIZED (1<<31)
451 // class is unresolved future class
452 #define RW_FUTURE (1<<30)
453 // class is initialized
454 #define RW_INITIALIZED (1<<29)
455 // class is initializing
456 #define RW_INITIALIZING (1<<28)
457 // class_rw_t->ro is heap copy of class_ro_t
458 #define RW_COPIED_RO (1<<27)
459 // class allocated but not yet registered
460 #define RW_CONSTRUCTING (1<<26)
461 // class allocated and registered
462 #define RW_CONSTRUCTED (1<<25)
463 // available for use; was RW_FINALIZE_ON_MAIN_THREAD
464 // #define RW_24 (1<<24)
465 // class +load has been called
466 #define RW_LOADED (1<<23)
467 #if !SUPPORT_NONPOINTER_ISA
468 // class instances may have associative references
469 #define RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS (1<<22)
471 // class has instance-specific GC layout
472 #define RW_HAS_INSTANCE_SPECIFIC_LAYOUT (1 << 21)
473 // class does not allow associated objects on its instances
474 #define RW_FORBIDS_ASSOCIATED_OBJECTS (1<<20)
475 // class has started realizing but not yet completed it
476 #define RW_REALIZING (1<<19)
478 // NOTE: MORE RW_ FLAGS DEFINED BELOW
481 // Values for class_rw_t->flags or class_t->bits
482 // These flags are optimized for retain/release and alloc/dealloc
483 // 64-bit stores more of them in class_t->bits to reduce pointer indirection.
487 // class or superclass has .cxx_construct implementation
488 #define RW_HAS_CXX_CTOR (1<<18)
489 // class or superclass has .cxx_destruct implementation
490 #define RW_HAS_CXX_DTOR (1<<17)
491 // class or superclass has default alloc/allocWithZone: implementation
492 // Note this is stored in the metaclass.
493 #define RW_HAS_DEFAULT_AWZ (1<<16)
494 // class's instances requires raw isa
495 #if SUPPORT_NONPOINTER_ISA
496 #define RW_REQUIRES_RAW_ISA (1<<15)
498 // class or superclass has default retain/release/autorelease/retainCount/
499 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
500 #define RW_HAS_DEFAULT_RR (1<<14)
502 // class is a Swift class from the pre-stable Swift ABI
503 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
504 // class is a Swift class from the stable Swift ABI
505 #define FAST_IS_SWIFT_STABLE (1UL<<1)
507 #define FAST_DATA_MASK 0xfffffffcUL
510 // Leaks-compatible version that steals low bits only.
512 // class or superclass has .cxx_construct implementation
513 #define RW_HAS_CXX_CTOR (1<<18)
514 // class or superclass has .cxx_destruct implementation
515 #define RW_HAS_CXX_DTOR (1<<17)
516 // class or superclass has default alloc/allocWithZone: implementation
517 // Note this is stored in the metaclass.
518 #define RW_HAS_DEFAULT_AWZ (1<<16)
519 // class's instances requires raw isa
520 #define RW_REQUIRES_RAW_ISA (1<<15)
522 // class is a Swift class from the pre-stable Swift ABI
523 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
524 // class is a Swift class from the stable Swift ABI
525 #define FAST_IS_SWIFT_STABLE (1UL<<1)
526 // class or superclass has default retain/release/autorelease/retainCount/
527 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
528 #define FAST_HAS_DEFAULT_RR (1UL<<2)
530 #define FAST_DATA_MASK 0x00007ffffffffff8UL
533 // Leaks-incompatible version that steals lots of bits.
535 // class is a Swift class from the pre-stable Swift ABI
536 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
537 // class is a Swift class from the stable Swift ABI
538 #define FAST_IS_SWIFT_STABLE (1UL<<1)
539 // summary bit for fast alloc path: !hasCxxCtor and
540 // !instancesRequireRawIsa and instanceSize fits into shiftedSize
541 #define FAST_ALLOC (1UL<<2)
543 #define FAST_DATA_MASK 0x00007ffffffffff8UL
544 // class or superclass has .cxx_construct implementation
545 #define FAST_HAS_CXX_CTOR (1UL<<47)
546 // class or superclass has default alloc/allocWithZone: implementation
547 // Note this is stored in the metaclass.
548 #define FAST_HAS_DEFAULT_AWZ (1UL<<48)
549 // class or superclass has default retain/release/autorelease/retainCount/
550 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
551 #define FAST_HAS_DEFAULT_RR (1UL<<49)
552 // class's instances requires raw isa
553 // This bit is aligned with isa_t->hasCxxDtor to save an instruction.
554 #define FAST_REQUIRES_RAW_ISA (1UL<<50)
555 // class or superclass has .cxx_destruct implementation
556 #define FAST_HAS_CXX_DTOR (1UL<<51)
557 // instance size in units of 16 bytes
558 // or 0 if the instance size is too big in this field
559 // This field must be LAST
560 #define FAST_SHIFTED_SIZE_SHIFT 52
563 // FAST_HAS_CXX_CTOR is set
564 // FAST_REQUIRES_RAW_ISA is not set
565 // FAST_SHIFTED_SIZE is not zero
566 // FAST_ALLOC does NOT check FAST_HAS_DEFAULT_AWZ because that
567 // bit is stored on the metaclass.
568 #define FAST_ALLOC_MASK (FAST_HAS_CXX_CTOR | FAST_REQUIRES_RAW_ISA)
569 #define FAST_ALLOC_VALUE (0)
573 // The Swift ABI requires that these bits be defined like this on all platforms.
// Compile-time pins: FAST_IS_SWIFT_LEGACY is (1UL<<0) == 1 and
// FAST_IS_SWIFT_STABLE is (1UL<<1) == 2; these exact values are required.
574 static_assert(FAST_IS_SWIFT_LEGACY
== 1, "resistance is futile");
575 static_assert(FAST_IS_SWIFT_STABLE
== 2, "resistance is futile");
580 uint32_t instanceStart
;
581 uint32_t instanceSize
;
586 const uint8_t * ivarLayout
;
589 method_list_t
* baseMethodList
;
590 protocol_list_t
* baseProtocols
;
591 const ivar_list_t
* ivars
;
593 const uint8_t * weakIvarLayout
;
594 property_list_t
*baseProperties
;
596 // This field exists only when RO_HAS_SWIFT_INITIALIZER is set.
597 _objc_swiftMetadataInitializer __ptrauth_objc_method_list_imp _swiftMetadataInitializer_NEVER_USE
[0];
599 _objc_swiftMetadataInitializer
swiftMetadataInitializer() const {
600 if (flags
& RO_HAS_SWIFT_INITIALIZER
) {
601 return _swiftMetadataInitializer_NEVER_USE
[0];
607 method_list_t
*baseMethods() const {
608 return baseMethodList
;
611 class_ro_t
*duplicate() const {
612 if (flags
& RO_HAS_SWIFT_INITIALIZER
) {
613 size_t size
= sizeof(*this) + sizeof(_swiftMetadataInitializer_NEVER_USE
[0]);
614 class_ro_t
*ro
= (class_ro_t
*)memdup(this, size
);
615 ro
->_swiftMetadataInitializer_NEVER_USE
[0] = this->_swiftMetadataInitializer_NEVER_USE
[0];
618 size_t size
= sizeof(*this);
619 class_ro_t
*ro
= (class_ro_t
*)memdup(this, size
);
626 /***********************************************************************
627 * list_array_tt<Element, List>
628 * Generic implementation for metadata that can be augmented by categories.
630 * Element is the underlying metadata type (e.g. method_t)
631 * List is the metadata's list type (e.g. method_list_t)
633 * A list_array_tt has one of three values:
635 * - a pointer to a single list
636 * - an array of pointers to lists
638 * countLists/beginLists/endLists iterate the metadata lists
639 * count/begin/end iterate the underlying metadata elements
640 **********************************************************************/
641 template <typename Element
, typename List
>
642 class list_array_tt
{
647 static size_t byteSize(uint32_t count
) {
648 return sizeof(array_t
) + count
*sizeof(lists
[0]);
651 return byteSize(count
);
659 typename
List::iterator m
, mEnd
;
662 iterator(List
**begin
, List
**end
)
663 : lists(begin
), listsEnd(end
)
666 m
= (*begin
)->begin();
667 mEnd
= (*begin
)->end();
671 const Element
& operator * () const {
674 Element
& operator * () {
678 bool operator != (const iterator
& rhs
) const {
679 if (lists
!= rhs
.lists
) return true;
680 if (lists
== listsEnd
) return false; // m is undefined
681 if (m
!= rhs
.m
) return true;
685 const iterator
& operator ++ () {
689 assert(lists
!= listsEnd
);
691 if (lists
!= listsEnd
) {
692 m
= (*lists
)->begin();
693 mEnd
= (*lists
)->end();
703 uintptr_t arrayAndFlag
;
706 bool hasArray() const {
707 return arrayAndFlag
& 1;
711 return (array_t
*)(arrayAndFlag
& ~1);
714 void setArray(array_t
*array
) {
715 arrayAndFlag
= (uintptr_t)array
| 1;
722 for (auto lists
= beginLists(), end
= endLists();
726 result
+= (*lists
)->count
;
732 return iterator(beginLists(), endLists());
736 List
**e
= endLists();
737 return iterator(e
, e
);
741 uint32_t countLists() {
743 return array()->count
;
751 List
** beginLists() {
753 return array()->lists
;
761 return array()->lists
+ array()->count
;
769 void attachLists(List
* const * addedLists
, uint32_t addedCount
) {
770 if (addedCount
== 0) return;
773 // many lists -> many lists
774 uint32_t oldCount
= array()->count
;
775 uint32_t newCount
= oldCount
+ addedCount
;
776 setArray((array_t
*)realloc(array(), array_t::byteSize(newCount
)));
777 array()->count
= newCount
;
778 memmove(array()->lists
+ addedCount
, array()->lists
,
779 oldCount
* sizeof(array()->lists
[0]));
780 memcpy(array()->lists
, addedLists
,
781 addedCount
* sizeof(array()->lists
[0]));
783 else if (!list
&& addedCount
== 1) {
785 list
= addedLists
[0];
788 // 1 list -> many lists
789 List
* oldList
= list
;
790 uint32_t oldCount
= oldList
? 1 : 0;
791 uint32_t newCount
= oldCount
+ addedCount
;
792 setArray((array_t
*)malloc(array_t::byteSize(newCount
)));
793 array()->count
= newCount
;
794 if (oldList
) array()->lists
[addedCount
] = oldList
;
795 memcpy(array()->lists
, addedLists
,
796 addedCount
* sizeof(array()->lists
[0]));
802 for (uint32_t i
= 0; i
< array()->count
; i
++) {
803 try_free(array()->lists
[i
]);
812 template<typename Result
>
817 array_t
*a
= array();
818 result
.setArray((array_t
*)memdup(a
, a
->byteSize()));
819 for (uint32_t i
= 0; i
< a
->count
; i
++) {
820 result
.array()->lists
[i
] = a
->lists
[i
]->duplicate();
823 result
.list
= list
->duplicate();
833 class method_array_t
:
834 public list_array_tt
<method_t
, method_list_t
>
836 typedef list_array_tt
<method_t
, method_list_t
> Super
;
839 method_list_t
**beginCategoryMethodLists() {
843 method_list_t
**endCategoryMethodLists(Class cls
);
845 method_array_t
duplicate() {
846 return Super::duplicate
<method_array_t
>();
851 class property_array_t
:
852 public list_array_tt
<property_t
, property_list_t
>
854 typedef list_array_tt
<property_t
, property_list_t
> Super
;
857 property_array_t
duplicate() {
858 return Super::duplicate
<property_array_t
>();
863 class protocol_array_t
:
864 public list_array_tt
<protocol_ref_t
, protocol_list_t
>
866 typedef list_array_tt
<protocol_ref_t
, protocol_list_t
> Super
;
869 protocol_array_t
duplicate() {
870 return Super::duplicate
<protocol_array_t
>();
876 // Be warned that Symbolication knows the layout of this structure.
880 const class_ro_t
*ro
;
882 method_array_t methods
;
883 property_array_t properties
;
884 protocol_array_t protocols
;
887 Class nextSiblingClass
;
891 #if SUPPORT_INDEXED_ISA
895 void setFlags(uint32_t set
)
897 OSAtomicOr32Barrier(set
, &flags
);
900 void clearFlags(uint32_t clear
)
902 OSAtomicXor32Barrier(clear
, &flags
);
905 // set and clear must not overlap
906 void changeFlags(uint32_t set
, uint32_t clear
)
908 assert((set
& clear
) == 0);
913 newf
= (oldf
| set
) & ~clear
;
914 } while (!OSAtomicCompareAndSwap32Barrier(oldf
, newf
, (volatile int32_t *)&flags
));
919 struct class_data_bits_t
{
921 // Values are the FAST_ flags above.
924 bool getBit(uintptr_t bit
)
930 // On entry, `newBits` is a bits value after setting and/or clearing
931 // the bits in `change`. Fix the fast-alloc parts of newBits if necessary
932 // and return the updated value.
933 static uintptr_t updateFastAlloc(uintptr_t newBits
, uintptr_t change
)
935 if (change
& FAST_ALLOC_MASK
) {
936 if (((newBits
& FAST_ALLOC_MASK
) == FAST_ALLOC_VALUE
) &&
937 ((newBits
>> FAST_SHIFTED_SIZE_SHIFT
) != 0))
939 newBits
|= FAST_ALLOC
;
941 newBits
&= ~FAST_ALLOC
;
947 static uintptr_t updateFastAlloc(uintptr_t newBits
, uintptr_t change
) {
952 // Atomically set the bits in `set` and clear the bits in `clear`.
953 // set and clear must not overlap.
954 void setAndClearBits(uintptr_t set
, uintptr_t clear
)
956 assert((set
& clear
) == 0);
960 oldBits
= LoadExclusive(&bits
);
961 newBits
= updateFastAlloc((oldBits
| set
) & ~clear
, set
| clear
);
962 } while (!StoreReleaseExclusive(&bits
, oldBits
, newBits
));
965 void setBits(uintptr_t set
) {
966 setAndClearBits(set
, 0);
969 void clearBits(uintptr_t clear
) {
970 setAndClearBits(0, clear
);
976 return (class_rw_t
*)(bits
& FAST_DATA_MASK
);
978 void setData(class_rw_t
*newData
)
980 assert(!data() || (newData
->flags
& (RW_REALIZING
| RW_FUTURE
)));
981 // Set during realization or construction only. No locking needed.
982 // Use a store-release fence because there may be concurrent
983 // readers of data and data's contents.
984 uintptr_t newBits
= (bits
& ~FAST_DATA_MASK
) | (uintptr_t)newData
;
985 atomic_thread_fence(memory_order_release
);
989 // Get the class's ro data, even in the presence of concurrent realization.
990 // fixme this isn't really safe without a compiler barrier at least
991 // and probably a memory barrier when realizeClass changes the data field
992 const class_ro_t
*safe_ro() {
993 class_rw_t
*maybe_rw
= data();
994 if (maybe_rw
->flags
& RW_REALIZED
) {
998 // maybe_rw is actually ro
999 return (class_ro_t
*)maybe_rw
;
1003 #if FAST_HAS_DEFAULT_RR
1004 bool hasDefaultRR() {
1005 return getBit(FAST_HAS_DEFAULT_RR
);
1007 void setHasDefaultRR() {
1008 setBits(FAST_HAS_DEFAULT_RR
);
1010 void setHasCustomRR() {
1011 clearBits(FAST_HAS_DEFAULT_RR
);
1014 bool hasDefaultRR() {
1015 return data()->flags
& RW_HAS_DEFAULT_RR
;
1017 void setHasDefaultRR() {
1018 data()->setFlags(RW_HAS_DEFAULT_RR
);
1020 void setHasCustomRR() {
1021 data()->clearFlags(RW_HAS_DEFAULT_RR
);
1025 #if FAST_HAS_DEFAULT_AWZ
1026 bool hasDefaultAWZ() {
1027 return getBit(FAST_HAS_DEFAULT_AWZ
);
1029 void setHasDefaultAWZ() {
1030 setBits(FAST_HAS_DEFAULT_AWZ
);
1032 void setHasCustomAWZ() {
1033 clearBits(FAST_HAS_DEFAULT_AWZ
);
1036 bool hasDefaultAWZ() {
1037 return data()->flags
& RW_HAS_DEFAULT_AWZ
;
1039 void setHasDefaultAWZ() {
1040 data()->setFlags(RW_HAS_DEFAULT_AWZ
);
1042 void setHasCustomAWZ() {
1043 data()->clearFlags(RW_HAS_DEFAULT_AWZ
);
1047 #if FAST_HAS_CXX_CTOR
1049 return getBit(FAST_HAS_CXX_CTOR
);
1051 void setHasCxxCtor() {
1052 setBits(FAST_HAS_CXX_CTOR
);
1056 return data()->flags
& RW_HAS_CXX_CTOR
;
1058 void setHasCxxCtor() {
1059 data()->setFlags(RW_HAS_CXX_CTOR
);
1063 #if FAST_HAS_CXX_DTOR
1065 return getBit(FAST_HAS_CXX_DTOR
);
1067 void setHasCxxDtor() {
1068 setBits(FAST_HAS_CXX_DTOR
);
1072 return data()->flags
& RW_HAS_CXX_DTOR
;
1074 void setHasCxxDtor() {
1075 data()->setFlags(RW_HAS_CXX_DTOR
);
1079 #if FAST_REQUIRES_RAW_ISA
1080 bool instancesRequireRawIsa() {
1081 return getBit(FAST_REQUIRES_RAW_ISA
);
1083 void setInstancesRequireRawIsa() {
1084 setBits(FAST_REQUIRES_RAW_ISA
);
1086 #elif SUPPORT_NONPOINTER_ISA
1087 bool instancesRequireRawIsa() {
1088 return data()->flags
& RW_REQUIRES_RAW_ISA
;
1090 void setInstancesRequireRawIsa() {
1091 data()->setFlags(RW_REQUIRES_RAW_ISA
);
1094 bool instancesRequireRawIsa() {
1097 void setInstancesRequireRawIsa() {
1103 size_t fastInstanceSize()
1105 assert(bits
& FAST_ALLOC
);
1106 return (bits
>> FAST_SHIFTED_SIZE_SHIFT
) * 16;
1108 void setFastInstanceSize(size_t newSize
)
1110 // Set during realization or construction only. No locking needed.
1111 assert(data()->flags
& RW_REALIZING
);
1113 // Round up to 16-byte boundary, then divide to get 16-byte units
1114 newSize
= ((newSize
+ 15) & ~15) / 16;
1116 uintptr_t newBits
= newSize
<< FAST_SHIFTED_SIZE_SHIFT
;
1117 if ((newBits
>> FAST_SHIFTED_SIZE_SHIFT
) == newSize
) {
1118 int shift
= WORD_BITS
- FAST_SHIFTED_SIZE_SHIFT
;
1119 uintptr_t oldBits
= (bits
<< shift
) >> shift
;
1120 if ((oldBits
& FAST_ALLOC_MASK
) == FAST_ALLOC_VALUE
) {
1121 newBits
|= FAST_ALLOC
;
1123 bits
= oldBits
| newBits
;
1127 bool canAllocFast() {
1128 return bits
& FAST_ALLOC
;
1131 size_t fastInstanceSize() {
1134 void setFastInstanceSize(size_t) {
1137 bool canAllocFast() {
1142 void setClassArrayIndex(unsigned Idx
) {
1143 #if SUPPORT_INDEXED_ISA
1144 // 0 is unused as then we can rely on zero-initialisation from calloc.
1146 data()->index
= Idx
;
1150 unsigned classArrayIndex() {
1151 #if SUPPORT_INDEXED_ISA
1152 return data()->index
;
1159 return isSwiftStable() || isSwiftLegacy();
1162 bool isSwiftStable() {
1163 return getBit(FAST_IS_SWIFT_STABLE
);
1165 void setIsSwiftStable() {
1166 setAndClearBits(FAST_IS_SWIFT_STABLE
, FAST_IS_SWIFT_LEGACY
);
1169 bool isSwiftLegacy() {
1170 return getBit(FAST_IS_SWIFT_LEGACY
);
1172 void setIsSwiftLegacy() {
1173 setAndClearBits(FAST_IS_SWIFT_LEGACY
, FAST_IS_SWIFT_STABLE
);
1176 // fixme remove this once the Swift runtime uses the stable bits
1177 bool isSwiftStable_ButAllowLegacyForNow() {
1178 return isAnySwift();
1181 _objc_swiftMetadataInitializer
swiftMetadataInitializer() {
1182 // This function is called on un-realized classes without
1183 // holding any locks.
1184 // Beware of races with other realizers.
1185 return safe_ro()->swiftMetadataInitializer();
1190 struct objc_class
: objc_object
{
1193 cache_t cache
; // formerly cache pointer and vtable
1194 class_data_bits_t bits
; // class_rw_t * plus custom rr/alloc flags
1196 class_rw_t
*data() {
1199 void setData(class_rw_t
*newData
) {
1200 bits
.setData(newData
);
1203 void setInfo(uint32_t set
) {
1204 assert(isFuture() || isRealized());
1205 data()->setFlags(set
);
1208 void clearInfo(uint32_t clear
) {
1209 assert(isFuture() || isRealized());
1210 data()->clearFlags(clear
);
1213 // set and clear must not overlap
1214 void changeInfo(uint32_t set
, uint32_t clear
) {
1215 assert(isFuture() || isRealized());
1216 assert((set
& clear
) == 0);
1217 data()->changeFlags(set
, clear
);
1220 bool hasCustomRR() {
1221 return ! bits
.hasDefaultRR();
1223 void setHasDefaultRR() {
1224 assert(isInitializing());
1225 bits
.setHasDefaultRR();
1227 void setHasCustomRR(bool inherited
= false);
1228 void printCustomRR(bool inherited
);
1230 bool hasCustomAWZ() {
1231 return ! bits
.hasDefaultAWZ();
1233 void setHasDefaultAWZ() {
1234 assert(isInitializing());
1235 bits
.setHasDefaultAWZ();
1237 void setHasCustomAWZ(bool inherited
= false);
1238 void printCustomAWZ(bool inherited
);
1240 bool instancesRequireRawIsa() {
1241 return bits
.instancesRequireRawIsa();
1243 void setInstancesRequireRawIsa(bool inherited
= false);
1244 void printInstancesRequireRawIsa(bool inherited
);
1246 bool canAllocNonpointer() {
1247 assert(!isFuture());
1248 return !instancesRequireRawIsa();
1250 bool canAllocFast() {
1251 assert(!isFuture());
1252 return bits
.canAllocFast();
1257 // addSubclass() propagates this flag from the superclass.
1258 assert(isRealized());
1259 return bits
.hasCxxCtor();
1261 void setHasCxxCtor() {
1262 bits
.setHasCxxCtor();
1266 // addSubclass() propagates this flag from the superclass.
1267 assert(isRealized());
1268 return bits
.hasCxxDtor();
1270 void setHasCxxDtor() {
1271 bits
.setHasCxxDtor();
1275 bool isSwiftStable() {
1276 return bits
.isSwiftStable();
1279 bool isSwiftLegacy() {
1280 return bits
.isSwiftLegacy();
1284 return bits
.isAnySwift();
1287 bool isSwiftStable_ButAllowLegacyForNow() {
1288 return bits
.isSwiftStable_ButAllowLegacyForNow();
1291 // Swift stable ABI built for old deployment targets looks weird.
1292 // The is-legacy bit is set for compatibility with old libobjc.
1293 // We are on a "new" deployment target so we need to rewrite that bit.
1294 // These stable-with-legacy-bit classes are distinguished from real
1295 // legacy classes using another bit in the Swift data
1296 // (ClassFlags::IsSwiftPreStableABI)
1298 bool isUnfixedBackwardDeployingStableSwift() {
1299 // Only classes marked as Swift legacy need apply.
1300 if (!bits
.isSwiftLegacy()) return false;
1302 // Check the true legacy vs stable distinguisher.
1303 // The low bit of Swift's ClassFlags is SET for true legacy
1304 // and UNSET for stable pretending to be legacy.
1305 uint32_t swiftClassFlags
= *(uint32_t *)(&bits
+ 1);
1306 bool isActuallySwiftLegacy
= bool(swiftClassFlags
& 1);
1307 return !isActuallySwiftLegacy
;
1310 void fixupBackwardDeployingStableSwift() {
1311 if (isUnfixedBackwardDeployingStableSwift()) {
1312 // Class really is stable Swift, pretending to be pre-stable.
1314 bits
.setIsSwiftStable();
1318 _objc_swiftMetadataInitializer
swiftMetadataInitializer() {
1319 return bits
.swiftMetadataInitializer();
1322 // Return YES if the class's ivars are managed by ARC,
1323 // or the class is MRC but has ARC-style weak ivars.
1324 bool hasAutomaticIvars() {
1325 return data()->ro
->flags
& (RO_IS_ARC
| RO_HAS_WEAK_WITHOUT_ARC
);
1328 // Return YES if the class's ivars are managed by ARC.
1330 return data()->ro
->flags
& RO_IS_ARC
;
1334 bool forbidsAssociatedObjects() {
1335 return (data()->flags
& RW_FORBIDS_ASSOCIATED_OBJECTS
);
1338 #if SUPPORT_NONPOINTER_ISA
1339 // Tracked in non-pointer isas; not tracked otherwise
1341 bool instancesHaveAssociatedObjects() {
1342 // this may be an unrealized future class in the CF-bridged case
1343 assert(isFuture() || isRealized());
1344 return data()->flags
& RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS
;
1347 void setInstancesHaveAssociatedObjects() {
1348 // this may be an unrealized future class in the CF-bridged case
1349 assert(isFuture() || isRealized());
1350 setInfo(RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS
);
// Policy hook for the method-cache grower. The extraction dropped the
// body of shouldGrowCache(); restored (`return true;`) per upstream —
// without it the non-void function falls off the end (UB).
bool shouldGrowCache() {
    return true;
}

void setShouldGrowCache(bool) {
    // fixme good or bad for memory use?
}
1362 bool isInitializing() {
1363 return getMeta()->data()->flags
& RW_INITIALIZING
;
1366 void setInitializing() {
1367 assert(!isMetaClass());
1368 ISA()->setInfo(RW_INITIALIZING
);
1371 bool isInitialized() {
1372 return getMeta()->data()->flags
& RW_INITIALIZED
;
1375 void setInitialized();
1378 assert(isRealized());
1379 return true; // any class registered for +load is definitely loadable
1382 IMP
getLoadMethod();
1384 // Locking: To prevent concurrent realization, hold runtimeLock.
1386 return data()->flags
& RW_REALIZED
;
1389 // Returns true if this is an unrealized future class.
1390 // Locking: To prevent concurrent realization, hold runtimeLock.
1392 return data()->flags
& RW_FUTURE
;
1395 bool isMetaClass() {
1397 assert(isRealized());
1398 return data()->ro
->flags
& RO_META
;
1401 // Like isMetaClass, but also valid on un-realized classes
1402 bool isMetaClassMaybeUnrealized() {
1403 return bits
.safe_ro()->flags
& RO_META
;
1406 // NOT identical to this->ISA when this is a metaclass
1408 if (isMetaClass()) return (Class
)this;
1409 else return this->ISA();
1412 bool isRootClass() {
1413 return superclass
== nil
;
1415 bool isRootMetaclass() {
1416 return ISA() == (Class
)this;
1419 const char *mangledName() {
1420 // fixme can't assert locks here
1423 if (isRealized() || isFuture()) {
1424 return data()->ro
->name
;
1426 return ((const class_ro_t
*)data())->name
;
1430 const char *demangledName();
1431 const char *nameForLogging();
1433 // May be unaligned depending on class's ivars.
1434 uint32_t unalignedInstanceStart() {
1435 assert(isRealized());
1436 return data()->ro
->instanceStart
;
1439 // Class's instance start rounded up to a pointer-size boundary.
1440 // This is used for ARC layout bitmaps.
1441 uint32_t alignedInstanceStart() {
1442 return word_align(unalignedInstanceStart());
1445 // May be unaligned depending on class's ivars.
1446 uint32_t unalignedInstanceSize() {
1447 assert(isRealized());
1448 return data()->ro
->instanceSize
;
1451 // Class's ivar size rounded up to a pointer-size boundary.
1452 uint32_t alignedInstanceSize() {
1453 return word_align(unalignedInstanceSize());
1456 size_t instanceSize(size_t extraBytes
) {
1457 size_t size
= alignedInstanceSize() + extraBytes
;
1458 // CF requires all objects be at least 16 bytes.
1459 if (size
< 16) size
= 16;
1463 void setInstanceSize(uint32_t newSize
) {
1464 assert(isRealized());
1465 if (newSize
!= data()->ro
->instanceSize
) {
1466 assert(data()->flags
& RW_COPIED_RO
);
1467 *const_cast<uint32_t *>(&data()->ro
->instanceSize
) = newSize
;
1469 bits
.setFastInstanceSize(newSize
);
1472 void chooseClassArrayIndex();
1474 void setClassArrayIndex(unsigned Idx
) {
1475 bits
.setClassArrayIndex(Idx
);
1478 unsigned classArrayIndex() {
1479 return bits
.classArrayIndex();
1485 struct swift_class_t
: objc_class
{
1487 uint32_t instanceAddressOffset
;
1488 uint32_t instanceSize
;
1489 uint16_t instanceAlignMask
;
1493 uint32_t classAddressOffset
;
1497 void *baseAddress() {
1498 return (void *)((uint8_t *)this - classAddressOffset
);
1506 struct method_list_t
*instanceMethods
;
1507 struct method_list_t
*classMethods
;
1508 struct protocol_list_t
*protocols
;
1509 struct property_list_t
*instanceProperties
;
1510 // Fields below this point are not always present on disk.
1511 struct property_list_t
*_classProperties
;
1513 method_list_t
*methodsForMeta(bool isMeta
) {
1514 if (isMeta
) return classMethods
;
1515 else return instanceMethods
;
1518 property_list_t
*propertiesForMeta(bool isMeta
, struct header_info
*hi
);
1521 struct objc_super2
{
1523 Class current_class
;
1526 struct message_ref_t
{
1532 extern Method
protocol_getMethod(protocol_t
*p
, SEL sel
, bool isRequiredMethod
, bool isInstanceMethod
, bool recursive
);
1535 foreach_realized_class_and_subclass_2(Class top
, unsigned& count
,
1536 std::function
<bool (Class
)> code
)
1538 // runtimeLock.assertLocked();
1543 _objc_fatal("Memory corruption in class list.");
1545 if (!code(cls
)) break;
1547 if (cls
->data()->firstSubclass
) {
1548 cls
= cls
->data()->firstSubclass
;
1550 while (!cls
->data()->nextSiblingClass
&& cls
!= top
) {
1551 cls
= cls
->superclass
;
1553 _objc_fatal("Memory corruption in class list.");
1556 if (cls
== top
) break;
1557 cls
= cls
->data()->nextSiblingClass
;
1562 extern Class
firstRealizedClass();
1563 extern unsigned int unreasonableClassCount();
1565 // Enumerates a class and all of its realized subclasses.
1567 foreach_realized_class_and_subclass(Class top
,
1568 std::function
<void (Class
)> code
)
1570 unsigned int count
= unreasonableClassCount();
1572 foreach_realized_class_and_subclass_2(top
, count
,
1573 [&code
](Class cls
) -> bool
1580 // Enumerates all realized classes and metaclasses.
1582 foreach_realized_class_and_metaclass(std::function
<void (Class
)> code
)
1584 unsigned int count
= unreasonableClassCount();
1586 for (Class top
= firstRealizedClass();
1588 top
= top
->data()->nextSiblingClass
)
1590 foreach_realized_class_and_subclass_2(top
, count
,
1591 [&code
](Class cls
) -> bool