2 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
25 /***********************************************************************
26 * Inlineable parts of NSObject / objc_object implementation
27 **********************************************************************/
29 #ifndef _OBJC_OBJCOBJECT_H_
30 #define _OBJC_OBJCOBJECT_H_
32 #include "objc-private.h"
34 static ALWAYS_INLINE
bool fastAutoreleaseForReturn(id obj
);
35 static ALWAYS_INLINE
bool fastRetainFromReturn(id obj
);
#if SUPPORT_TAGGED_POINTERS

// NOTE(review): TAG_COUNT, the #else branch, and the closing #endif were lost
// in transcription and restored from the upstream objc4 source; verify.
#define TAG_COUNT 8
#define TAG_SLOT_MASK 0xf

#if SUPPORT_MSB_TAGGED_POINTERS
// Tag lives in the most significant bits of the pointer.
# define TAG_MASK (1ULL<<63)
# define TAG_SLOT_SHIFT 60
# define TAG_PAYLOAD_LSHIFT 4
# define TAG_PAYLOAD_RSHIFT 4
#else
// Tag lives in the least significant bits of the pointer.
# define TAG_MASK 1
# define TAG_SLOT_SHIFT 0
# define TAG_PAYLOAD_LSHIFT 0
# define TAG_PAYLOAD_RSHIFT 4
#endif

// Table mapping tag slots to their registered classes.
extern "C" { extern Class objc_debug_taggedpointer_classes[TAG_COUNT*2]; }
#define objc_tag_classes objc_debug_taggedpointer_classes

#endif
62 objc_object::isClass()
64 if (isTaggedPointer()) return false;
65 return ISA()->isMetaClass();
68 #if SUPPORT_NONPOINTER_ISA
70 # if !SUPPORT_TAGGED_POINTERS
78 assert(!isTaggedPointer());
79 return (Class
)(isa
.bits
& ISA_MASK
);
84 objc_object::hasIndexedIsa()
92 if (isTaggedPointer()) {
93 uintptr_t slot
= ((uintptr_t)this >> TAG_SLOT_SHIFT
) & TAG_SLOT_MASK
;
94 return objc_tag_classes
[slot
];
101 objc_object::initIsa(Class cls
)
103 initIsa(cls
, false, false);
107 objc_object::initClassIsa(Class cls
)
109 if (DisableIndexedIsa
) {
110 initIsa(cls
, false, false);
112 initIsa(cls
, true, false);
117 objc_object::initProtocolIsa(Class cls
)
119 return initClassIsa(cls
);
123 objc_object::initInstanceIsa(Class cls
, bool hasCxxDtor
)
126 assert(!cls
->requiresRawIsa());
127 assert(hasCxxDtor
== cls
->hasCxxDtor());
129 initIsa(cls
, true, hasCxxDtor
);
133 objc_object::initIsa(Class cls
, bool indexed
, bool hasCxxDtor
)
135 assert(!isTaggedPointer());
140 assert(!DisableIndexedIsa
);
141 isa
.bits
= ISA_MAGIC_VALUE
;
142 // isa.magic is part of ISA_MAGIC_VALUE
143 // isa.indexed is part of ISA_MAGIC_VALUE
144 isa
.has_cxx_dtor
= hasCxxDtor
;
145 isa
.shiftcls
= (uintptr_t)cls
>> 3;
151 objc_object::changeIsa(Class newCls
)
153 assert(!isTaggedPointer());
158 bool sideTableLocked
= false;
159 bool transcribeToSideTable
= false;
162 transcribeToSideTable
= false;
163 oldisa
= LoadExclusive(&isa
.bits
);
164 if ((oldisa
.bits
== 0 || oldisa
.indexed
) &&
165 newCls
->canAllocIndexed())
168 // indexed -> indexed
169 if (oldisa
.bits
== 0) newisa
.bits
= ISA_MAGIC_VALUE
;
170 else newisa
= oldisa
;
171 // isa.magic is part of ISA_MAGIC_VALUE
172 // isa.indexed is part of ISA_MAGIC_VALUE
173 newisa
.has_cxx_dtor
= newCls
->hasCxxDtor();
174 newisa
.shiftcls
= (uintptr_t)newCls
>> 3;
176 else if (oldisa
.indexed
) {
177 // indexed -> not indexed
178 // Need to copy retain count et al to side table.
179 // Acquire side table lock before setting isa to
180 // prevent races such as concurrent -release.
181 if (!sideTableLocked
) sidetable_lock();
182 sideTableLocked
= true;
183 transcribeToSideTable
= true;
187 // not indexed -> not indexed
190 } while (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
));
192 if (transcribeToSideTable
) {
193 // Copy oldisa's retain count et al to side table.
194 // oldisa.weakly_referenced: nothing to do
195 // oldisa.has_assoc: nothing to do
196 // oldisa.has_cxx_dtor: nothing to do
197 sidetable_moveExtraRC_nolock(oldisa
.extra_rc
,
199 oldisa
.weakly_referenced
);
202 if (sideTableLocked
) sidetable_unlock();
205 if (oldisa
.indexed
) oldCls
= (Class
)((uintptr_t)oldisa
.shiftcls
<< 3);
206 else oldCls
= oldisa
.cls
;
213 objc_object::isTaggedPointer()
215 return ((uintptr_t)this & TAG_MASK
);
220 objc_object::hasAssociatedObjects()
222 if (isTaggedPointer()) return true;
223 if (isa
.indexed
) return isa
.has_assoc
;
229 objc_object::setHasAssociatedObjects()
231 if (isTaggedPointer()) return;
234 isa_t oldisa
= LoadExclusive(&isa
.bits
);
235 isa_t newisa
= oldisa
;
236 if (!newisa
.indexed
) return;
237 if (newisa
.has_assoc
) return;
238 newisa
.has_assoc
= true;
239 if (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
)) goto retry
;
244 objc_object::isWeaklyReferenced()
246 assert(!isTaggedPointer());
247 if (isa
.indexed
) return isa
.weakly_referenced
;
248 else return sidetable_isWeaklyReferenced();
253 objc_object::setWeaklyReferenced_nolock()
256 isa_t oldisa
= LoadExclusive(&isa
.bits
);
257 isa_t newisa
= oldisa
;
258 if (!newisa
.indexed
) return sidetable_setWeaklyReferenced_nolock();
259 if (newisa
.weakly_referenced
) return;
260 newisa
.weakly_referenced
= true;
261 if (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
)) goto retry
;
266 objc_object::hasCxxDtor()
268 assert(!isTaggedPointer());
269 if (isa
.indexed
) return isa
.has_cxx_dtor
;
270 else return isa
.cls
->hasCxxDtor();
276 objc_object::rootIsDeallocating()
280 if (isTaggedPointer()) return false;
281 if (isa
.indexed
) return isa
.deallocating
;
282 return sidetable_isDeallocating();
287 objc_object::clearDeallocating()
290 sidetable_clearDeallocating();
292 else if (isa
.weakly_referenced
) {
293 clearDeallocating_weak();
296 assert(!sidetable_present());
301 objc_object::rootDealloc()
304 if (isTaggedPointer()) return;
307 !isa
.weakly_referenced
&&
311 assert(!sidetable_present());
315 object_dispose((id
)this);
320 // Equivalent to calling [this retain], with shortcuts if there is no override
322 objc_object::retain()
324 // UseGC is allowed here, but requires hasCustomRR.
325 assert(!UseGC
|| ISA()->hasCustomRR());
326 assert(!isTaggedPointer());
328 if (! ISA()->hasCustomRR()) {
332 return ((id(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_retain
);
336 // Base retain implementation, ignoring overrides.
337 // This does not check isa.fast_rr; if there is an RR override then
338 // it was already called and it chose to call [super retain].
340 // tryRetain=true is the -_tryRetain path.
341 // handleOverflow=false is the frameless fast path.
342 // handleOverflow=true is the framed slow path including overflow to side table
343 // The code is structured this way to prevent duplication.
346 objc_object::rootRetain()
348 return rootRetain(false, false);
352 objc_object::rootTryRetain()
354 return rootRetain(true, false) ? true : false;
358 objc_object::rootRetain(bool tryRetain
, bool handleOverflow
)
361 if (isTaggedPointer()) return (id
)this;
363 bool sideTableLocked
= false;
364 bool transcribeToSideTable
= false;
370 transcribeToSideTable
= false;
371 oldisa
= LoadExclusive(&isa
.bits
);
373 if (!newisa
.indexed
) goto unindexed
;
374 // don't check newisa.fast_rr; we already called any RR overrides
375 if (tryRetain
&& newisa
.deallocating
) goto tryfail
;
377 newisa
.bits
= addc(newisa
.bits
, RC_ONE
, 0, &carry
); // extra_rc++
380 // newisa.extra_rc++ overflowed
381 if (!handleOverflow
) return rootRetain_overflow(tryRetain
);
382 // Leave half of the retain counts inline and
383 // prepare to copy the other half to the side table.
384 if (!tryRetain
&& !sideTableLocked
) sidetable_lock();
385 sideTableLocked
= true;
386 transcribeToSideTable
= true;
387 newisa
.extra_rc
= RC_HALF
;
388 newisa
.has_sidetable_rc
= true;
390 } while (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
));
392 if (transcribeToSideTable
) {
393 // Copy the other half of the retain counts to the side table.
394 sidetable_addExtraRC_nolock(RC_HALF
);
397 if (!tryRetain
&& sideTableLocked
) sidetable_unlock();
401 if (!tryRetain
&& sideTableLocked
) sidetable_unlock();
405 if (!tryRetain
&& sideTableLocked
) sidetable_unlock();
406 if (tryRetain
) return sidetable_tryRetain() ? (id
)this : nil
;
407 else return sidetable_retain();
411 // Equivalent to calling [this release], with shortcuts if there is no override
413 objc_object::release()
415 // UseGC is allowed here, but requires hasCustomRR.
416 assert(!UseGC
|| ISA()->hasCustomRR());
417 assert(!isTaggedPointer());
419 if (! ISA()->hasCustomRR()) {
424 ((void(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_release
);
428 // Base release implementation, ignoring overrides.
429 // Does not call -dealloc.
430 // Returns true if the object should now be deallocated.
431 // This does not check isa.fast_rr; if there is an RR override then
432 // it was already called and it chose to call [super release].
434 // handleUnderflow=false is the frameless fast path.
435 // handleUnderflow=true is the framed slow path including side table borrow
436 // The code is structured this way to prevent duplication.
439 objc_object::rootRelease()
441 return rootRelease(true, false);
445 objc_object::rootReleaseShouldDealloc()
447 return rootRelease(false, false);
451 objc_object::rootRelease(bool performDealloc
, bool handleUnderflow
)
454 if (isTaggedPointer()) return false;
456 bool sideTableLocked
= false;
463 oldisa
= LoadExclusive(&isa
.bits
);
465 if (!newisa
.indexed
) goto unindexed
;
466 // don't check newisa.fast_rr; we already called any RR overrides
468 newisa
.bits
= subc(newisa
.bits
, RC_ONE
, 0, &carry
); // extra_rc--
469 if (carry
) goto underflow
;
470 } while (!StoreReleaseExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
));
472 if (sideTableLocked
) sidetable_unlock();
476 // newisa.extra_rc-- underflowed: borrow from side table or deallocate
478 // abandon newisa to undo the decrement
481 if (newisa
.has_sidetable_rc
) {
482 if (!handleUnderflow
) {
483 return rootRelease_underflow(performDealloc
);
485 // Add some retain counts inline and prepare
486 // to remove them from the side table.
487 if (!sideTableLocked
) sidetable_lock();
488 sideTableLocked
= true;
489 newisa
.extra_rc
= RC_HALF
- 1; // redo the decrement
490 if (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
)) goto retry
;
492 // Remove the retain counts from the side table.
493 bool zeroed
= sidetable_subExtraRC_nolock(RC_HALF
);
495 // Side table count is now zero. Clear the marker bit.
497 oldisa
= LoadExclusive(&isa
.bits
);
498 newisa
.has_sidetable_rc
= false;
499 } while (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
));
502 // Decrement successful after borrowing from side table.
503 // This decrement cannot be the deallocating decrement - the side
504 // table lock and has_sidetable_rc bit ensure that if everyone
505 // else tried to -release while we worked, the last one would block.
510 // Really deallocate.
512 if (sideTableLocked
) sidetable_unlock();
514 if (newisa
.deallocating
) {
515 return overrelease_error();
517 newisa
.deallocating
= true;
518 if (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
)) goto retry
;
519 __sync_synchronize();
520 if (performDealloc
) {
521 ((void(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_dealloc
);
526 if (sideTableLocked
) sidetable_unlock();
527 return sidetable_release(performDealloc
);
531 // Equivalent to [this autorelease], with shortcuts if there is no override
533 objc_object::autorelease()
535 // UseGC is allowed here, but requires hasCustomRR.
536 assert(!UseGC
|| ISA()->hasCustomRR());
538 if (isTaggedPointer()) return (id
)this;
539 if (! ISA()->hasCustomRR()) return rootAutorelease();
541 return ((id(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_autorelease
);
545 // Base autorelease implementation, ignoring overrides.
547 objc_object::rootAutorelease()
551 if (isTaggedPointer()) return (id
)this;
552 if (fastAutoreleaseForReturn((id
)this)) return (id
)this;
554 return rootAutorelease2();
559 objc_object::rootRetainCount()
562 if (isTaggedPointer()) return (uintptr_t)this;
565 isa_t bits
= LoadExclusive(&isa
.bits
);
567 uintptr_t rc
= 1 + bits
.extra_rc
;
568 if (bits
.has_sidetable_rc
) {
569 rc
+= sidetable_getExtraRC_nolock();
576 return sidetable_retainCount();
580 // SUPPORT_NONPOINTER_ISA
582 // not SUPPORT_NONPOINTER_ISA
588 assert(!isTaggedPointer());
594 objc_object::hasIndexedIsa()
601 objc_object::getIsa()
603 #if SUPPORT_TAGGED_POINTERS
604 if (isTaggedPointer()) {
605 uintptr_t slot
= ((uintptr_t)this >> TAG_SLOT_SHIFT
) & TAG_SLOT_MASK
;
606 return objc_tag_classes
[slot
];
614 objc_object::initIsa(Class cls
)
616 assert(!isTaggedPointer());
617 isa
= (uintptr_t)cls
;
622 objc_object::initClassIsa(Class cls
)
629 objc_object::initProtocolIsa(Class cls
)
636 objc_object::initInstanceIsa(Class cls
, bool)
643 objc_object::initIsa(Class cls
, bool, bool)
650 objc_object::changeIsa(Class cls
)
652 assert(!isTaggedPointer());
654 isa_t oldisa
, newisa
;
657 oldisa
= LoadExclusive(&isa
.bits
);
658 } while (!StoreExclusive(&isa
.bits
, oldisa
.bits
, newisa
.bits
));
660 if (oldisa
.cls
&& oldisa
.cls
->instancesHaveAssociatedObjects()) {
661 cls
->setInstancesHaveAssociatedObjects();
669 objc_object::isTaggedPointer()
671 #if SUPPORT_TAGGED_POINTERS
672 return ((uintptr_t)this & TAG_MASK
);
680 objc_object::hasAssociatedObjects()
684 return getIsa()->instancesHaveAssociatedObjects();
689 objc_object::setHasAssociatedObjects()
693 getIsa()->setInstancesHaveAssociatedObjects();
698 objc_object::isWeaklyReferenced()
700 assert(!isTaggedPointer());
703 return sidetable_isWeaklyReferenced();
708 objc_object::setWeaklyReferenced_nolock()
710 assert(!isTaggedPointer());
713 sidetable_setWeaklyReferenced_nolock();
718 objc_object::hasCxxDtor()
720 assert(!isTaggedPointer());
721 return isa
.cls
->hasCxxDtor();
726 objc_object::rootIsDeallocating()
730 if (isTaggedPointer()) return false;
731 return sidetable_isDeallocating();
736 objc_object::clearDeallocating()
738 sidetable_clearDeallocating();
743 objc_object::rootDealloc()
745 if (isTaggedPointer()) return;
746 object_dispose((id
)this);
750 // Equivalent to calling [this retain], with shortcuts if there is no override
752 objc_object::retain()
754 // UseGC is allowed here, but requires hasCustomRR.
755 assert(!UseGC
|| ISA()->hasCustomRR());
756 assert(!isTaggedPointer());
758 if (! ISA()->hasCustomRR()) {
759 return sidetable_retain();
762 return ((id(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_retain
);
766 // Base retain implementation, ignoring overrides.
767 // This does not check isa.fast_rr; if there is an RR override then
768 // it was already called and it chose to call [super retain].
770 objc_object::rootRetain()
774 if (isTaggedPointer()) return (id
)this;
775 return sidetable_retain();
779 // Equivalent to calling [this release], with shortcuts if there is no override
781 objc_object::release()
783 // UseGC is allowed here, but requires hasCustomRR.
784 assert(!UseGC
|| ISA()->hasCustomRR());
785 assert(!isTaggedPointer());
787 if (! ISA()->hasCustomRR()) {
792 ((void(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_release
);
796 // Base release implementation, ignoring overrides.
797 // Does not call -dealloc.
798 // Returns true if the object should now be deallocated.
799 // This does not check isa.fast_rr; if there is an RR override then
800 // it was already called and it chose to call [super release].
802 objc_object::rootRelease()
806 if (isTaggedPointer()) return false;
807 return sidetable_release(true);
811 objc_object::rootReleaseShouldDealloc()
813 if (isTaggedPointer()) return false;
814 return sidetable_release(false);
818 // Equivalent to [this autorelease], with shortcuts if there is no override
820 objc_object::autorelease()
822 // UseGC is allowed here, but requires hasCustomRR.
823 assert(!UseGC
|| ISA()->hasCustomRR());
825 if (isTaggedPointer()) return (id
)this;
826 if (! ISA()->hasCustomRR()) return rootAutorelease();
828 return ((id(*)(objc_object
*, SEL
))objc_msgSend
)(this, SEL_autorelease
);
832 // Base autorelease implementation, ignoring overrides.
834 objc_object::rootAutorelease()
838 if (isTaggedPointer()) return (id
)this;
839 if (fastAutoreleaseForReturn((id
)this)) return (id
)this;
841 return rootAutorelease2();
845 // Base tryRetain implementation, ignoring overrides.
846 // This does not check isa.fast_rr; if there is an RR override then
847 // it was already called and it chose to call [super _tryRetain].
849 objc_object::rootTryRetain()
853 if (isTaggedPointer()) return true;
854 return sidetable_tryRetain();
859 objc_object::rootRetainCount()
863 if (isTaggedPointer()) return (uintptr_t)this;
864 return sidetable_retainCount();
868 // not SUPPORT_NONPOINTER_ISA
872 #if SUPPORT_RETURN_AUTORELEASE
874 /***********************************************************************
875 Fast handling of returned autoreleased values.
876 The caller and callee cooperate to keep the returned object
877 out of the autorelease pool.
881 objc_retainAutoreleasedReturnValue(ret);
887 return objc_autoreleaseReturnValue(ret);
889 objc_autoreleaseReturnValue() examines the caller's instructions following
890 the return. If the caller's instructions immediately call
891 objc_autoreleaseReturnValue, then the callee omits the -autorelease and saves
892 the result in thread-local storage. If the caller does not look like it
893 cooperates, then the callee calls -autorelease as usual.
895 objc_autoreleaseReturnValue checks if the returned value is the same as the
896 one in thread-local storage. If it is, the value is used directly. If not,
897 the value is assumed to be truly autoreleased and is retained again. In
898 either case, the caller now has a retained reference to the value.
900 Tagged pointer objects do participate in the fast autorelease scheme,
901 because it saves message sends. They are not entered in the autorelease
902 pool in the slow case.
903 **********************************************************************/
907 static ALWAYS_INLINE
bool
908 callerAcceptsFastAutorelease(const void * const ra0
)
910 const uint8_t *ra1
= (const uint8_t *)ra0
;
912 const uint32_t *ra4
= (const uint32_t *)ra1
;
915 #define PREFER_GOTPCREL 0
917 // 48 89 c7 movq %rax,%rdi
918 // ff 15 callq *symbol@GOTPCREL(%rip)
919 if (*ra4
!= 0xffc78948) {
922 if (ra1
[4] != 0x15) {
927 // 48 89 c7 movq %rax,%rdi
929 if (*ra4
!= 0xe8c78948) {
932 ra1
+= (long)*(const int32_t *)(ra1
+ 4) + 8l;
933 ra2
= (const uint16_t *)ra1
;
934 // ff 25 jmpq *symbol@DYLDMAGIC(%rip)
935 if (*ra2
!= 0x25ff) {
939 ra1
+= 6l + (long)*(const int32_t *)(ra1
+ 2);
940 sym
= (const void **)ra1
;
941 if (*sym
!= objc_retainAutoreleasedReturnValue
)
952 static ALWAYS_INLINE
bool
953 callerAcceptsFastAutorelease(const void *ra
)
955 // if the low bit is set, we're returning to thumb mode
956 if ((uintptr_t)ra
& 1) {
958 // we mask off the low bit via subtraction
959 if (*(uint16_t *)((uint8_t *)ra
- 1) == 0x463f) {
963 // 07 70 a0 e1 mov r7, r7
964 if (*(uint32_t *)ra
== 0xe1a07007) {
974 static ALWAYS_INLINE
bool
975 callerAcceptsFastAutorelease(const void *ra
)
977 // fd 03 1d aa mov fp, fp
978 if (*(uint32_t *)ra
== 0xaa1d03fd) {
985 # elif __i386__ && TARGET_IPHONE_SIMULATOR
988 callerAcceptsFastAutorelease(const void *ra
)
993 // __i386__ && TARGET_IPHONE_SIMULATOR
996 #warning unknown architecture
998 static ALWAYS_INLINE
bool
999 callerAcceptsFastAutorelease(const void *ra
)
1004 // unknown architecture
1008 static ALWAYS_INLINE
1009 bool fastAutoreleaseForReturn(id obj
)
1011 assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY
) == nil
);
1013 if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
1014 tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY
, obj
);
1022 static ALWAYS_INLINE
1023 bool fastRetainFromReturn(id obj
)
1025 if (obj
== tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY
)) {
1026 tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY
, 0);
1034 // SUPPORT_RETURN_AUTORELEASE
1036 // not SUPPORT_RETURN_AUTORELEASE
1039 static ALWAYS_INLINE
1040 bool fastAutoreleaseForReturn(id obj
)
1046 static ALWAYS_INLINE
1047 bool fastRetainFromReturn(id obj
)
1053 // not SUPPORT_RETURN_AUTORELEASE