2 * Copyright (c) 2005-2007 Apple Inc. All Rights Reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
24 #ifndef _OBJC_RUNTIME_NEW_H
25 #define _OBJC_RUNTIME_NEW_H
28 typedef uint32_t mask_t
; // x86_64 & arm64 asm are less efficient with 16-bits
30 typedef uint16_t mask_t
;
32 typedef uintptr_t cache_key_t
;
39 // IMP-first is better for arm64e ptrauth and no worse for arm64.
40 // SEL-first is better for armv7* and i386 and x86_64.
50 inline cache_key_t
key() const { return _key
; }
51 inline IMP
imp() const { return (IMP
)_imp
; }
52 inline void setKey(cache_key_t newKey
) { _key
= newKey
; }
53 inline void setImp(IMP newImp
) { _imp
= newImp
; }
// Stores both key and imp into this bucket; definition is out of line
// (not visible in this chunk).
void set(cache_key_t newKey, IMP newImp);
60 struct bucket_t
*_buckets
;
65 struct bucket_t
*buckets();
68 void incrementOccupied();
69 void setBucketsAndMask(struct bucket_t
*newBuckets
, mask_t newMask
);
70 void initializeToEmpty();
73 bool isConstantEmptyCache();
76 static size_t bytesForCapacity(uint32_t cap
);
77 static struct bucket_t
* endMarker(struct bucket_t
*b
, uint32_t cap
);
80 void reallocate(mask_t oldCapacity
, mask_t newCapacity
);
81 struct bucket_t
* find(cache_key_t key
, id receiver
);
83 static void bad_cache(id receiver
, SEL sel
, Class isa
) __attribute__((noreturn
));
87 // classref_t is unremapped class_t*
88 typedef struct classref
* classref_t
;
90 /***********************************************************************
91 * entsize_list_tt<Element, List, FlagMask>
92 * Generic implementation of an array of non-fragile structs.
94 * Element is the struct type (e.g. method_t)
95 * List is the specialization of entsize_list_tt (e.g. method_list_t)
96 * FlagMask is used to stash extra bits in the entsize field
97 * (e.g. method list fixup markers)
98 **********************************************************************/
99 template <typename Element
, typename List
, uint32_t FlagMask
>
100 struct entsize_list_tt
{
101 uint32_t entsizeAndFlags
;
105 uint32_t entsize() const {
106 return entsizeAndFlags
& ~FlagMask
;
108 uint32_t flags() const {
109 return entsizeAndFlags
& FlagMask
;
112 Element
& getOrEnd(uint32_t i
) const {
114 return *(Element
*)((uint8_t *)&first
+ i
*entsize());
116 Element
& get(uint32_t i
) const {
121 size_t byteSize() const {
122 return byteSize(entsize(), count
);
125 static size_t byteSize(uint32_t entsize
, uint32_t count
) {
126 return sizeof(entsize_list_tt
) + (count
-1)*entsize
;
129 List
*duplicate() const {
130 auto *dup
= (List
*)calloc(this->byteSize(), 1);
131 dup
->entsizeAndFlags
= this->entsizeAndFlags
;
132 dup
->count
= this->count
;
133 std::copy(begin(), end(), dup
->begin());
138 const iterator
begin() const {
139 return iterator(*static_cast<const List
*>(this), 0);
142 return iterator(*static_cast<const List
*>(this), 0);
144 const iterator
end() const {
145 return iterator(*static_cast<const List
*>(this), count
);
148 return iterator(*static_cast<const List
*>(this), count
);
153 uint32_t index
; // keeping track of this saves a divide in operator-
156 typedef std::random_access_iterator_tag iterator_category
;
157 typedef Element value_type
;
158 typedef ptrdiff_t difference_type
;
159 typedef Element
* pointer
;
160 typedef Element
& reference
;
164 iterator(const List
& list
, uint32_t start
= 0)
165 : entsize(list
.entsize())
167 , element(&list
.getOrEnd(start
))
170 const iterator
& operator += (ptrdiff_t delta
) {
171 element
= (Element
*)((uint8_t *)element
+ delta
*entsize
);
172 index
+= (int32_t)delta
;
175 const iterator
& operator -= (ptrdiff_t delta
) {
176 element
= (Element
*)((uint8_t *)element
- delta
*entsize
);
177 index
-= (int32_t)delta
;
180 const iterator
operator + (ptrdiff_t delta
) const {
181 return iterator(*this) += delta
;
183 const iterator
operator - (ptrdiff_t delta
) const {
184 return iterator(*this) -= delta
;
187 iterator
& operator ++ () { *this += 1; return *this; }
188 iterator
& operator -- () { *this -= 1; return *this; }
189 iterator
operator ++ (int) {
190 iterator
result(*this); *this += 1; return result
;
192 iterator
operator -- (int) {
193 iterator
result(*this); *this -= 1; return result
;
196 ptrdiff_t operator - (const iterator
& rhs
) const {
197 return (ptrdiff_t)this->index
- (ptrdiff_t)rhs
.index
;
200 Element
& operator * () const { return *element
; }
201 Element
* operator -> () const { return element
; }
203 operator Element
& () const { return *element
; }
205 bool operator == (const iterator
& rhs
) const {
206 return this->element
== rhs
.element
;
208 bool operator != (const iterator
& rhs
) const {
209 return this->element
!= rhs
.element
;
212 bool operator < (const iterator
& rhs
) const {
213 return this->element
< rhs
.element
;
215 bool operator > (const iterator
& rhs
) const {
216 return this->element
> rhs
.element
;
227 struct SortBySELAddress
:
228 public std::binary_function
<const method_t
&,
229 const method_t
&, bool>
231 bool operator() (const method_t
& lhs
,
233 { return lhs
.name
< rhs
.name
; }
239 // *offset was originally 64-bit on some x86_64 platforms.
240 // We read and write only 32 bits of it.
241 // Some metadata provides all 64 bits. This is harmless for unsigned
242 // little-endian values.
243 // Some code uses all 64 bits. class_addIvar() over-allocates the
244 // offset for their benefit.
249 // alignment is sometimes -1; use alignment() instead
250 uint32_t alignment_raw
;
253 uint32_t alignment() const {
254 if (alignment_raw
== ~(uint32_t)0) return 1U << WORD_SHIFT
;
255 return 1 << alignment_raw
;
261 const char *attributes
;
264 // Two bits of entsize are used for fixup markers.
265 struct method_list_t
: entsize_list_tt
<method_t
, method_list_t
, 0x3> {
266 bool isFixedUp() const;
269 uint32_t indexOfMethod(const method_t
*meth
) const {
271 (uint32_t)(((uintptr_t)meth
- (uintptr_t)this) / entsize());
277 struct ivar_list_t
: entsize_list_tt
<ivar_t
, ivar_list_t
, 0> {
278 bool containsIvar(Ivar ivar
) const {
279 return (ivar
>= (Ivar
)&*begin() && ivar
< (Ivar
)&*end());
283 struct property_list_t
: entsize_list_tt
<property_t
, property_list_t
, 0> {
287 typedef uintptr_t protocol_ref_t
; // protocol_t *, but unremapped
289 // Values for protocol_t->flags
290 #define PROTOCOL_FIXED_UP_2 (1<<31) // must never be set by compiler
291 #define PROTOCOL_FIXED_UP_1 (1<<30) // must never be set by compiler
292 // Bits 0..15 are reserved for Swift's use.
294 #define PROTOCOL_FIXED_UP_MASK (PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2)
296 struct protocol_t
: objc_object
{
297 const char *mangledName
;
298 struct protocol_list_t
*protocols
;
299 method_list_t
*instanceMethods
;
300 method_list_t
*classMethods
;
301 method_list_t
*optionalInstanceMethods
;
302 method_list_t
*optionalClassMethods
;
303 property_list_t
*instanceProperties
;
304 uint32_t size
; // sizeof(protocol_t)
306 // Fields below this point are not always present on disk.
307 const char **_extendedMethodTypes
;
308 const char *_demangledName
;
309 property_list_t
*_classProperties
;
311 const char *demangledName();
313 const char *nameForLogging() {
314 return demangledName();
317 bool isFixedUp() const;
320 # define HAS_FIELD(f) (size >= offsetof(protocol_t, f) + sizeof(f))
322 bool hasExtendedMethodTypesField() const {
323 return HAS_FIELD(_extendedMethodTypes
);
325 bool hasDemangledNameField() const {
326 return HAS_FIELD(_demangledName
);
328 bool hasClassPropertiesField() const {
329 return HAS_FIELD(_classProperties
);
334 const char **extendedMethodTypes() const {
335 return hasExtendedMethodTypesField() ? _extendedMethodTypes
: nil
;
338 property_list_t
*classProperties() const {
339 return hasClassPropertiesField() ? _classProperties
: nil
;
343 struct protocol_list_t
{
344 // count is 64-bit by accident.
346 protocol_ref_t list
[0]; // variable-size
348 size_t byteSize() const {
349 return sizeof(*this) + count
*sizeof(list
[0]);
352 protocol_list_t
*duplicate() const {
353 return (protocol_list_t
*)memdup(this, this->byteSize());
356 typedef protocol_ref_t
* iterator
;
357 typedef const protocol_ref_t
* const_iterator
;
359 const_iterator
begin() const {
365 const_iterator
end() const {
373 struct locstamped_category_t
{
375 struct header_info
*hi
;
378 struct locstamped_category_list_t
{
383 locstamped_category_t list
[0];
387 // class_data_bits_t is the class_t->data field (class_rw_t pointer plus flags)
388 // The extra bits are optimized for the retain/release and alloc/dealloc paths.
390 // Values for class_ro_t->flags
391 // These are emitted by the compiler and are part of the ABI.
392 // Note: See CGObjCNonFragileABIMac::BuildClassRoTInitializer in clang
393 // class is a metaclass
394 #define RO_META (1<<0)
395 // class is a root class
396 #define RO_ROOT (1<<1)
397 // class has .cxx_construct/destruct implementations
398 #define RO_HAS_CXX_STRUCTORS (1<<2)
399 // class has +load implementation
400 // #define RO_HAS_LOAD_METHOD (1<<3)
401 // class has visibility=hidden set
402 #define RO_HIDDEN (1<<4)
403 // class has attribute(objc_exception): OBJC_EHTYPE_$_ThisClass is non-weak
404 #define RO_EXCEPTION (1<<5)
405 // this bit is available for reassignment
406 // #define RO_REUSE_ME (1<<6)
407 // class compiled with ARC
408 #define RO_IS_ARC (1<<7)
409 // class has .cxx_destruct but no .cxx_construct (with RO_HAS_CXX_STRUCTORS)
410 #define RO_HAS_CXX_DTOR_ONLY (1<<8)
411 // class is not ARC but has ARC-style weak ivar layout
412 #define RO_HAS_WEAK_WITHOUT_ARC (1<<9)
414 // class is in an unloadable bundle - must never be set by compiler
415 #define RO_FROM_BUNDLE (1<<29)
416 // class is unrealized future class - must never be set by compiler
417 #define RO_FUTURE (1<<30)
418 // class is realized - must never be set by compiler
419 #define RO_REALIZED (1<<31)
421 // Values for class_rw_t->flags
422 // These are not emitted by the compiler and are never used in class_ro_t.
423 // Their presence should be considered in future ABI versions.
424 // class_t->data is class_rw_t, not class_ro_t
425 #define RW_REALIZED (1<<31)
426 // class is unresolved future class
427 #define RW_FUTURE (1<<30)
428 // class is initialized
429 #define RW_INITIALIZED (1<<29)
430 // class is initializing
431 #define RW_INITIALIZING (1<<28)
432 // class_rw_t->ro is heap copy of class_ro_t
433 #define RW_COPIED_RO (1<<27)
434 // class allocated but not yet registered
435 #define RW_CONSTRUCTING (1<<26)
436 // class allocated and registered
437 #define RW_CONSTRUCTED (1<<25)
438 // available for use; was RW_FINALIZE_ON_MAIN_THREAD
439 // #define RW_24 (1<<24)
440 // class +load has been called
441 #define RW_LOADED (1<<23)
442 #if !SUPPORT_NONPOINTER_ISA
443 // class instances may have associative references
444 #define RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS (1<<22)
446 // class has instance-specific GC layout
447 #define RW_HAS_INSTANCE_SPECIFIC_LAYOUT (1 << 21)
449 // #define RW_20 (1<<20)
450 // class has started realizing but not yet completed it
451 #define RW_REALIZING (1<<19)
453 // NOTE: MORE RW_ FLAGS DEFINED BELOW
456 // Values for class_rw_t->flags or class_t->bits
457 // These flags are optimized for retain/release and alloc/dealloc
458 // 64-bit stores more of them in class_t->bits to reduce pointer indirection.
462 // class or superclass has .cxx_construct implementation
463 #define RW_HAS_CXX_CTOR (1<<18)
464 // class or superclass has .cxx_destruct implementation
465 #define RW_HAS_CXX_DTOR (1<<17)
466 // class or superclass has default alloc/allocWithZone: implementation
467 // Note this is stored in the metaclass.
468 #define RW_HAS_DEFAULT_AWZ (1<<16)
469 // class's instances requires raw isa
470 #if SUPPORT_NONPOINTER_ISA
471 #define RW_REQUIRES_RAW_ISA (1<<15)
473 // class or superclass has default retain/release/autorelease/retainCount/
474 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
475 #define RW_HAS_DEFAULT_RR (1<<14)
477 // class is a Swift class from the pre-stable Swift ABI
478 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
479 // class is a Swift class from the stable Swift ABI
480 #define FAST_IS_SWIFT_STABLE (1UL<<1)
482 #define FAST_DATA_MASK 0xfffffffcUL
485 // Leaks-compatible version that steals low bits only.
487 // class or superclass has .cxx_construct implementation
488 #define RW_HAS_CXX_CTOR (1<<18)
489 // class or superclass has .cxx_destruct implementation
490 #define RW_HAS_CXX_DTOR (1<<17)
491 // class or superclass has default alloc/allocWithZone: implementation
492 // Note this is stored in the metaclass.
493 #define RW_HAS_DEFAULT_AWZ (1<<16)
494 // class's instances requires raw isa
495 #define RW_REQUIRES_RAW_ISA (1<<15)
497 // class is a Swift class from the pre-stable Swift ABI
498 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
499 // class is a Swift class from the stable Swift ABI
500 #define FAST_IS_SWIFT_STABLE (1UL<<1)
501 // class or superclass has default retain/release/autorelease/retainCount/
502 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
503 #define FAST_HAS_DEFAULT_RR (1UL<<2)
505 #define FAST_DATA_MASK 0x00007ffffffffff8UL
508 // Leaks-incompatible version that steals lots of bits.
510 // class is a Swift class from the pre-stable Swift ABI
511 #define FAST_IS_SWIFT_LEGACY (1UL<<0)
512 // class is a Swift class from the stable Swift ABI
513 #define FAST_IS_SWIFT_STABLE (1UL<<1)
514 // summary bit for fast alloc path: !hasCxxCtor and
515 // !instancesRequireRawIsa and instanceSize fits into shiftedSize
516 #define FAST_ALLOC (1UL<<2)
518 #define FAST_DATA_MASK 0x00007ffffffffff8UL
519 // class or superclass has .cxx_construct implementation
520 #define FAST_HAS_CXX_CTOR (1UL<<47)
521 // class or superclass has default alloc/allocWithZone: implementation
522 // Note this is stored in the metaclass.
523 #define FAST_HAS_DEFAULT_AWZ (1UL<<48)
524 // class or superclass has default retain/release/autorelease/retainCount/
525 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
526 #define FAST_HAS_DEFAULT_RR (1UL<<49)
527 // class's instances requires raw isa
528 // This bit is aligned with isa_t->hasCxxDtor to save an instruction.
529 #define FAST_REQUIRES_RAW_ISA (1UL<<50)
530 // class or superclass has .cxx_destruct implementation
531 #define FAST_HAS_CXX_DTOR (1UL<<51)
532 // instance size in units of 16 bytes
533 // or 0 if the instance size is too big in this field
534 // This field must be LAST
535 #define FAST_SHIFTED_SIZE_SHIFT 52
538 // FAST_HAS_CXX_CTOR is set
539 // FAST_REQUIRES_RAW_ISA is not set
540 // FAST_SHIFTED_SIZE is not zero
541 // FAST_ALLOC does NOT check FAST_HAS_DEFAULT_AWZ because that
542 // bit is stored on the metaclass.
543 #define FAST_ALLOC_MASK (FAST_HAS_CXX_CTOR | FAST_REQUIRES_RAW_ISA)
544 #define FAST_ALLOC_VALUE (0)
548 // The Swift ABI requires that these bits be defined like this on all platforms.
549 static_assert(FAST_IS_SWIFT_LEGACY
== 1, "resistance is futile");
550 static_assert(FAST_IS_SWIFT_STABLE
== 2, "resistance is futile");
555 uint32_t instanceStart
;
556 uint32_t instanceSize
;
561 const uint8_t * ivarLayout
;
564 method_list_t
* baseMethodList
;
565 protocol_list_t
* baseProtocols
;
566 const ivar_list_t
* ivars
;
568 const uint8_t * weakIvarLayout
;
569 property_list_t
*baseProperties
;
571 method_list_t
*baseMethods() const {
572 return baseMethodList
;
577 /***********************************************************************
578 * list_array_tt<Element, List>
579 * Generic implementation for metadata that can be augmented by categories.
581 * Element is the underlying metadata type (e.g. method_t)
582 * List is the metadata's list type (e.g. method_list_t)
584 * A list_array_tt has one of three values:
586 * - a pointer to a single list
587 * - an array of pointers to lists
589 * countLists/beginLists/endLists iterate the metadata lists
590 * count/begin/end iterate the underlying metadata elements
591 **********************************************************************/
592 template <typename Element
, typename List
>
593 class list_array_tt
{
598 static size_t byteSize(uint32_t count
) {
599 return sizeof(array_t
) + count
*sizeof(lists
[0]);
602 return byteSize(count
);
610 typename
List::iterator m
, mEnd
;
613 iterator(List
**begin
, List
**end
)
614 : lists(begin
), listsEnd(end
)
617 m
= (*begin
)->begin();
618 mEnd
= (*begin
)->end();
622 const Element
& operator * () const {
625 Element
& operator * () {
629 bool operator != (const iterator
& rhs
) const {
630 if (lists
!= rhs
.lists
) return true;
631 if (lists
== listsEnd
) return false; // m is undefined
632 if (m
!= rhs
.m
) return true;
636 const iterator
& operator ++ () {
640 assert(lists
!= listsEnd
);
642 if (lists
!= listsEnd
) {
643 m
= (*lists
)->begin();
644 mEnd
= (*lists
)->end();
654 uintptr_t arrayAndFlag
;
657 bool hasArray() const {
658 return arrayAndFlag
& 1;
662 return (array_t
*)(arrayAndFlag
& ~1);
665 void setArray(array_t
*array
) {
666 arrayAndFlag
= (uintptr_t)array
| 1;
673 for (auto lists
= beginLists(), end
= endLists();
677 result
+= (*lists
)->count
;
683 return iterator(beginLists(), endLists());
687 List
**e
= endLists();
688 return iterator(e
, e
);
692 uint32_t countLists() {
694 return array()->count
;
702 List
** beginLists() {
704 return array()->lists
;
712 return array()->lists
+ array()->count
;
720 void attachLists(List
* const * addedLists
, uint32_t addedCount
) {
721 if (addedCount
== 0) return;
724 // many lists -> many lists
725 uint32_t oldCount
= array()->count
;
726 uint32_t newCount
= oldCount
+ addedCount
;
727 setArray((array_t
*)realloc(array(), array_t::byteSize(newCount
)));
728 array()->count
= newCount
;
729 memmove(array()->lists
+ addedCount
, array()->lists
,
730 oldCount
* sizeof(array()->lists
[0]));
731 memcpy(array()->lists
, addedLists
,
732 addedCount
* sizeof(array()->lists
[0]));
734 else if (!list
&& addedCount
== 1) {
736 list
= addedLists
[0];
739 // 1 list -> many lists
740 List
* oldList
= list
;
741 uint32_t oldCount
= oldList
? 1 : 0;
742 uint32_t newCount
= oldCount
+ addedCount
;
743 setArray((array_t
*)malloc(array_t::byteSize(newCount
)));
744 array()->count
= newCount
;
745 if (oldList
) array()->lists
[addedCount
] = oldList
;
746 memcpy(array()->lists
, addedLists
,
747 addedCount
* sizeof(array()->lists
[0]));
753 for (uint32_t i
= 0; i
< array()->count
; i
++) {
754 try_free(array()->lists
[i
]);
763 template<typename Result
>
768 array_t
*a
= array();
769 result
.setArray((array_t
*)memdup(a
, a
->byteSize()));
770 for (uint32_t i
= 0; i
< a
->count
; i
++) {
771 result
.array()->lists
[i
] = a
->lists
[i
]->duplicate();
774 result
.list
= list
->duplicate();
784 class method_array_t
:
785 public list_array_tt
<method_t
, method_list_t
>
787 typedef list_array_tt
<method_t
, method_list_t
> Super
;
790 method_list_t
**beginCategoryMethodLists() {
794 method_list_t
**endCategoryMethodLists(Class cls
);
796 method_array_t
duplicate() {
797 return Super::duplicate
<method_array_t
>();
802 class property_array_t
:
803 public list_array_tt
<property_t
, property_list_t
>
805 typedef list_array_tt
<property_t
, property_list_t
> Super
;
808 property_array_t
duplicate() {
809 return Super::duplicate
<property_array_t
>();
814 class protocol_array_t
:
815 public list_array_tt
<protocol_ref_t
, protocol_list_t
>
817 typedef list_array_tt
<protocol_ref_t
, protocol_list_t
> Super
;
820 protocol_array_t
duplicate() {
821 return Super::duplicate
<protocol_array_t
>();
827 // Be warned that Symbolication knows the layout of this structure.
831 const class_ro_t
*ro
;
833 method_array_t methods
;
834 property_array_t properties
;
835 protocol_array_t protocols
;
838 Class nextSiblingClass
;
842 #if SUPPORT_INDEXED_ISA
846 void setFlags(uint32_t set
)
848 OSAtomicOr32Barrier(set
, &flags
);
851 void clearFlags(uint32_t clear
)
853 OSAtomicXor32Barrier(clear
, &flags
);
856 // set and clear must not overlap
857 void changeFlags(uint32_t set
, uint32_t clear
)
859 assert((set
& clear
) == 0);
864 newf
= (oldf
| set
) & ~clear
;
865 } while (!OSAtomicCompareAndSwap32Barrier(oldf
, newf
, (volatile int32_t *)&flags
));
870 struct class_data_bits_t
{
872 // Values are the FAST_ flags above.
875 bool getBit(uintptr_t bit
)
881 static uintptr_t updateFastAlloc(uintptr_t oldBits
, uintptr_t change
)
883 if (change
& FAST_ALLOC_MASK
) {
884 if (((oldBits
& FAST_ALLOC_MASK
) == FAST_ALLOC_VALUE
) &&
885 ((oldBits
>> FAST_SHIFTED_SIZE_SHIFT
) != 0))
887 oldBits
|= FAST_ALLOC
;
889 oldBits
&= ~FAST_ALLOC
;
895 static uintptr_t updateFastAlloc(uintptr_t oldBits
, uintptr_t change
) {
900 void setBits(uintptr_t set
)
905 oldBits
= LoadExclusive(&bits
);
906 newBits
= updateFastAlloc(oldBits
| set
, set
);
907 } while (!StoreReleaseExclusive(&bits
, oldBits
, newBits
));
910 void clearBits(uintptr_t clear
)
915 oldBits
= LoadExclusive(&bits
);
916 newBits
= updateFastAlloc(oldBits
& ~clear
, clear
);
917 } while (!StoreReleaseExclusive(&bits
, oldBits
, newBits
));
923 return (class_rw_t
*)(bits
& FAST_DATA_MASK
);
925 void setData(class_rw_t
*newData
)
927 assert(!data() || (newData
->flags
& (RW_REALIZING
| RW_FUTURE
)));
928 // Set during realization or construction only. No locking needed.
929 // Use a store-release fence because there may be concurrent
930 // readers of data and data's contents.
931 uintptr_t newBits
= (bits
& ~FAST_DATA_MASK
) | (uintptr_t)newData
;
932 atomic_thread_fence(memory_order_release
);
936 #if FAST_HAS_DEFAULT_RR
937 bool hasDefaultRR() {
938 return getBit(FAST_HAS_DEFAULT_RR
);
940 void setHasDefaultRR() {
941 setBits(FAST_HAS_DEFAULT_RR
);
943 void setHasCustomRR() {
944 clearBits(FAST_HAS_DEFAULT_RR
);
947 bool hasDefaultRR() {
948 return data()->flags
& RW_HAS_DEFAULT_RR
;
950 void setHasDefaultRR() {
951 data()->setFlags(RW_HAS_DEFAULT_RR
);
953 void setHasCustomRR() {
954 data()->clearFlags(RW_HAS_DEFAULT_RR
);
958 #if FAST_HAS_DEFAULT_AWZ
959 bool hasDefaultAWZ() {
960 return getBit(FAST_HAS_DEFAULT_AWZ
);
962 void setHasDefaultAWZ() {
963 setBits(FAST_HAS_DEFAULT_AWZ
);
965 void setHasCustomAWZ() {
966 clearBits(FAST_HAS_DEFAULT_AWZ
);
969 bool hasDefaultAWZ() {
970 return data()->flags
& RW_HAS_DEFAULT_AWZ
;
972 void setHasDefaultAWZ() {
973 data()->setFlags(RW_HAS_DEFAULT_AWZ
);
975 void setHasCustomAWZ() {
976 data()->clearFlags(RW_HAS_DEFAULT_AWZ
);
980 #if FAST_HAS_CXX_CTOR
982 return getBit(FAST_HAS_CXX_CTOR
);
984 void setHasCxxCtor() {
985 setBits(FAST_HAS_CXX_CTOR
);
989 return data()->flags
& RW_HAS_CXX_CTOR
;
991 void setHasCxxCtor() {
992 data()->setFlags(RW_HAS_CXX_CTOR
);
996 #if FAST_HAS_CXX_DTOR
998 return getBit(FAST_HAS_CXX_DTOR
);
1000 void setHasCxxDtor() {
1001 setBits(FAST_HAS_CXX_DTOR
);
1005 return data()->flags
& RW_HAS_CXX_DTOR
;
1007 void setHasCxxDtor() {
1008 data()->setFlags(RW_HAS_CXX_DTOR
);
1012 #if FAST_REQUIRES_RAW_ISA
1013 bool instancesRequireRawIsa() {
1014 return getBit(FAST_REQUIRES_RAW_ISA
);
1016 void setInstancesRequireRawIsa() {
1017 setBits(FAST_REQUIRES_RAW_ISA
);
1019 #elif SUPPORT_NONPOINTER_ISA
1020 bool instancesRequireRawIsa() {
1021 return data()->flags
& RW_REQUIRES_RAW_ISA
;
1023 void setInstancesRequireRawIsa() {
1024 data()->setFlags(RW_REQUIRES_RAW_ISA
);
1027 bool instancesRequireRawIsa() {
1030 void setInstancesRequireRawIsa() {
1036 size_t fastInstanceSize()
1038 assert(bits
& FAST_ALLOC
);
1039 return (bits
>> FAST_SHIFTED_SIZE_SHIFT
) * 16;
1041 void setFastInstanceSize(size_t newSize
)
1043 // Set during realization or construction only. No locking needed.
1044 assert(data()->flags
& RW_REALIZING
);
1046 // Round up to 16-byte boundary, then divide to get 16-byte units
1047 newSize
= ((newSize
+ 15) & ~15) / 16;
1049 uintptr_t newBits
= newSize
<< FAST_SHIFTED_SIZE_SHIFT
;
1050 if ((newBits
>> FAST_SHIFTED_SIZE_SHIFT
) == newSize
) {
1051 int shift
= WORD_BITS
- FAST_SHIFTED_SIZE_SHIFT
;
1052 uintptr_t oldBits
= (bits
<< shift
) >> shift
;
1053 if ((oldBits
& FAST_ALLOC_MASK
) == FAST_ALLOC_VALUE
) {
1054 newBits
|= FAST_ALLOC
;
1056 bits
= oldBits
| newBits
;
1060 bool canAllocFast() {
1061 return bits
& FAST_ALLOC
;
1064 size_t fastInstanceSize() {
1067 void setFastInstanceSize(size_t) {
1070 bool canAllocFast() {
1075 void setClassArrayIndex(unsigned Idx
) {
1076 #if SUPPORT_INDEXED_ISA
1077 // 0 is unused as then we can rely on zero-initialisation from calloc.
1079 data()->index
= Idx
;
1083 unsigned classArrayIndex() {
1084 #if SUPPORT_INDEXED_ISA
1085 return data()->index
;
1092 return isSwiftStable() || isSwiftLegacy();
1095 bool isSwiftStable() {
1096 return getBit(FAST_IS_SWIFT_STABLE
);
1098 void setIsSwiftStable() {
1099 setBits(FAST_IS_SWIFT_STABLE
);
1102 bool isSwiftLegacy() {
1103 return getBit(FAST_IS_SWIFT_LEGACY
);
1105 void setIsSwiftLegacy() {
1106 setBits(FAST_IS_SWIFT_LEGACY
);
1111 struct objc_class
: objc_object
{
1114 cache_t cache
; // formerly cache pointer and vtable
1115 class_data_bits_t bits
; // class_rw_t * plus custom rr/alloc flags
1117 class_rw_t
*data() {
1120 void setData(class_rw_t
*newData
) {
1121 bits
.setData(newData
);
1124 void setInfo(uint32_t set
) {
1125 assert(isFuture() || isRealized());
1126 data()->setFlags(set
);
1129 void clearInfo(uint32_t clear
) {
1130 assert(isFuture() || isRealized());
1131 data()->clearFlags(clear
);
1134 // set and clear must not overlap
1135 void changeInfo(uint32_t set
, uint32_t clear
) {
1136 assert(isFuture() || isRealized());
1137 assert((set
& clear
) == 0);
1138 data()->changeFlags(set
, clear
);
1141 bool hasCustomRR() {
1142 return ! bits
.hasDefaultRR();
1144 void setHasDefaultRR() {
1145 assert(isInitializing());
1146 bits
.setHasDefaultRR();
1148 void setHasCustomRR(bool inherited
= false);
1149 void printCustomRR(bool inherited
);
1151 bool hasCustomAWZ() {
1152 return ! bits
.hasDefaultAWZ();
1154 void setHasDefaultAWZ() {
1155 assert(isInitializing());
1156 bits
.setHasDefaultAWZ();
1158 void setHasCustomAWZ(bool inherited
= false);
1159 void printCustomAWZ(bool inherited
);
1161 bool instancesRequireRawIsa() {
1162 return bits
.instancesRequireRawIsa();
1164 void setInstancesRequireRawIsa(bool inherited
= false);
1165 void printInstancesRequireRawIsa(bool inherited
);
1167 bool canAllocNonpointer() {
1168 assert(!isFuture());
1169 return !instancesRequireRawIsa();
1171 bool canAllocFast() {
1172 assert(!isFuture());
1173 return bits
.canAllocFast();
1178 // addSubclass() propagates this flag from the superclass.
1179 assert(isRealized());
1180 return bits
.hasCxxCtor();
1182 void setHasCxxCtor() {
1183 bits
.setHasCxxCtor();
1187 // addSubclass() propagates this flag from the superclass.
1188 assert(isRealized());
1189 return bits
.hasCxxDtor();
1191 void setHasCxxDtor() {
1192 bits
.setHasCxxDtor();
1196 bool isSwiftStable() {
1197 return bits
.isSwiftStable();
1200 bool isSwiftLegacy() {
1201 return bits
.isSwiftLegacy();
1205 return bits
.isAnySwift();
1209 // Return YES if the class's ivars are managed by ARC,
1210 // or the class is MRC but has ARC-style weak ivars.
1211 bool hasAutomaticIvars() {
1212 return data()->ro
->flags
& (RO_IS_ARC
| RO_HAS_WEAK_WITHOUT_ARC
);
1215 // Return YES if the class's ivars are managed by ARC.
1217 return data()->ro
->flags
& RO_IS_ARC
;
1221 #if SUPPORT_NONPOINTER_ISA
1222 // Tracked in non-pointer isas; not tracked otherwise
1224 bool instancesHaveAssociatedObjects() {
1225 // this may be an unrealized future class in the CF-bridged case
1226 assert(isFuture() || isRealized());
1227 return data()->flags
& RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS
;
1230 void setInstancesHaveAssociatedObjects() {
1231 // this may be an unrealized future class in the CF-bridged case
1232 assert(isFuture() || isRealized());
1233 setInfo(RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS
);
1237 bool shouldGrowCache() {
1241 void setShouldGrowCache(bool) {
1242 // fixme good or bad for memory use?
1245 bool isInitializing() {
1246 return getMeta()->data()->flags
& RW_INITIALIZING
;
1249 void setInitializing() {
1250 assert(!isMetaClass());
1251 ISA()->setInfo(RW_INITIALIZING
);
1254 bool isInitialized() {
1255 return getMeta()->data()->flags
& RW_INITIALIZED
;
1258 void setInitialized();
1261 assert(isRealized());
1262 return true; // any class registered for +load is definitely loadable
1265 IMP
getLoadMethod();
1267 // Locking: To prevent concurrent realization, hold runtimeLock.
1269 return data()->flags
& RW_REALIZED
;
1272 // Returns true if this is an unrealized future class.
1273 // Locking: To prevent concurrent realization, hold runtimeLock.
1275 return data()->flags
& RW_FUTURE
;
1278 bool isMetaClass() {
1280 assert(isRealized());
1281 return data()->ro
->flags
& RO_META
;
1284 // NOT identical to this->ISA when this is a metaclass
1286 if (isMetaClass()) return (Class
)this;
1287 else return this->ISA();
1290 bool isRootClass() {
1291 return superclass
== nil
;
1293 bool isRootMetaclass() {
1294 return ISA() == (Class
)this;
1297 const char *mangledName() {
1298 // fixme can't assert locks here
1301 if (isRealized() || isFuture()) {
1302 return data()->ro
->name
;
1304 return ((const class_ro_t
*)data())->name
;
1308 const char *demangledName(bool realize
= false);
1309 const char *nameForLogging();
1311 // May be unaligned depending on class's ivars.
1312 uint32_t unalignedInstanceStart() {
1313 assert(isRealized());
1314 return data()->ro
->instanceStart
;
1317 // Class's instance start rounded up to a pointer-size boundary.
1318 // This is used for ARC layout bitmaps.
1319 uint32_t alignedInstanceStart() {
1320 return word_align(unalignedInstanceStart());
1323 // May be unaligned depending on class's ivars.
1324 uint32_t unalignedInstanceSize() {
1325 assert(isRealized());
1326 return data()->ro
->instanceSize
;
1329 // Class's ivar size rounded up to a pointer-size boundary.
1330 uint32_t alignedInstanceSize() {
1331 return word_align(unalignedInstanceSize());
1334 size_t instanceSize(size_t extraBytes
) {
1335 size_t size
= alignedInstanceSize() + extraBytes
;
1336 // CF requires all objects be at least 16 bytes.
1337 if (size
< 16) size
= 16;
1341 void setInstanceSize(uint32_t newSize
) {
1342 assert(isRealized());
1343 if (newSize
!= data()->ro
->instanceSize
) {
1344 assert(data()->flags
& RW_COPIED_RO
);
1345 *const_cast<uint32_t *>(&data()->ro
->instanceSize
) = newSize
;
1347 bits
.setFastInstanceSize(newSize
);
1350 void chooseClassArrayIndex();
1352 void setClassArrayIndex(unsigned Idx
) {
1353 bits
.setClassArrayIndex(Idx
);
1356 unsigned classArrayIndex() {
1357 return bits
.classArrayIndex();
1363 struct swift_class_t
: objc_class
{
1365 uint32_t instanceAddressOffset
;
1366 uint32_t instanceSize
;
1367 uint16_t instanceAlignMask
;
1371 uint32_t classAddressOffset
;
1375 void *baseAddress() {
1376 return (void *)((uint8_t *)this - classAddressOffset
);
1384 struct method_list_t
*instanceMethods
;
1385 struct method_list_t
*classMethods
;
1386 struct protocol_list_t
*protocols
;
1387 struct property_list_t
*instanceProperties
;
1388 // Fields below this point are not always present on disk.
1389 struct property_list_t
*_classProperties
;
1391 method_list_t
*methodsForMeta(bool isMeta
) {
1392 if (isMeta
) return classMethods
;
1393 else return instanceMethods
;
1396 property_list_t
*propertiesForMeta(bool isMeta
, struct header_info
*hi
);
1399 struct objc_super2
{
1401 Class current_class
;
1404 struct message_ref_t
{
1410 extern Method
protocol_getMethod(protocol_t
*p
, SEL sel
, bool isRequiredMethod
, bool isInstanceMethod
, bool recursive
);
1413 foreach_realized_class_and_subclass_2(Class top
, unsigned& count
,
1414 std::function
<bool (Class
)> code
)
1416 // runtimeLock.assertLocked();
1421 _objc_fatal("Memory corruption in class list.");
1423 if (!code(cls
)) break;
1425 if (cls
->data()->firstSubclass
) {
1426 cls
= cls
->data()->firstSubclass
;
1428 while (!cls
->data()->nextSiblingClass
&& cls
!= top
) {
1429 cls
= cls
->superclass
;
1431 _objc_fatal("Memory corruption in class list.");
1434 if (cls
== top
) break;
1435 cls
= cls
->data()->nextSiblingClass
;
1440 extern Class
firstRealizedClass();
1441 extern unsigned int unreasonableClassCount();
1443 // Enumerates a class and all of its realized subclasses.
1445 foreach_realized_class_and_subclass(Class top
,
1446 std::function
<void (Class
)> code
)
1448 unsigned int count
= unreasonableClassCount();
1450 foreach_realized_class_and_subclass_2(top
, count
,
1451 [&code
](Class cls
) -> bool
1458 // Enumerates all realized classes and metaclasses.
1460 foreach_realized_class_and_metaclass(std::function
<void (Class
)> code
)
1462 unsigned int count
= unreasonableClassCount();
1464 for (Class top
= firstRealizedClass();
1466 top
= top
->data()->nextSiblingClass
)
1468 foreach_realized_class_and_subclass_2(top
, count
,
1469 [&code
](Class cls
) -> bool