/*
 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */


/***********************************************************************
* Inlineable parts of NSObject / objc_object implementation
**********************************************************************/

#ifndef _OBJC_OBJCOBJECT_H_
#define _OBJC_OBJCOBJECT_H_

#include "objc-private.h"

static ALWAYS_INLINE bool fastAutoreleaseForReturn(id obj);
static ALWAYS_INLINE bool fastRetainFromReturn(id obj);


#if SUPPORT_TAGGED_POINTERS

#define TAG_COUNT 8
#define TAG_SLOT_MASK 0xf

#if SUPPORT_MSB_TAGGED_POINTERS
# define TAG_MASK (1ULL<<63)
# define TAG_SLOT_SHIFT 60
# define TAG_PAYLOAD_LSHIFT 4
# define TAG_PAYLOAD_RSHIFT 4
#else
# define TAG_MASK 1
# define TAG_SLOT_SHIFT 0
# define TAG_PAYLOAD_LSHIFT 0
# define TAG_PAYLOAD_RSHIFT 4
#endif
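
// In the MSB scheme the tag bit is the top bit and the class slot sits in
// the bits just below it; in the LSB scheme the tag bit is bit 0, so the
// 4-bit slot includes the tag bit itself. Either way the payload is
// recovered via TAG_PAYLOAD_LSHIFT/TAG_PAYLOAD_RSHIFT, and a table of
// TAG_COUNT*2 classes covers whichever slot values the scheme produces.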

extern "C" { extern Class objc_debug_taggedpointer_classes[TAG_COUNT*2]; }
#define objc_tag_classes objc_debug_taggedpointer_classes

#endif


inline bool
objc_object::isClass()
{
    if (isTaggedPointer()) return false;
    return ISA()->isMetaClass();
}

#if SUPPORT_NONPOINTER_ISA

# if !SUPPORT_TAGGED_POINTERS
#   error SUPPORT_NONPOINTER_ISA requires SUPPORT_TAGGED_POINTERS
# endif

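// ISA() recovers the class pointer from a packed isa by masking off the
// non-pointer bits. Tagged pointers have no isa field at all, so this
// must not be called on them.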
inline Class
objc_object::ISA()
{
    assert(!isTaggedPointer());
    return (Class)(isa.bits & ISA_MASK);
}

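// An "indexed" isa packs the class pointer together with the inline retain
// count and lifecycle flags, instead of storing a raw Class pointer.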
inline bool
objc_object::hasIndexedIsa()
{
    return isa.indexed;
}

inline Class
objc_object::getIsa()
{
    if (isTaggedPointer()) {
        uintptr_t slot = ((uintptr_t)this >> TAG_SLOT_SHIFT) & TAG_SLOT_MASK;
        return objc_tag_classes[slot];
    }
    return ISA();
}


inline void
objc_object::initIsa(Class cls)
{
    initIsa(cls, false, false);
}

inline void
objc_object::initClassIsa(Class cls)
{
    if (DisableIndexedIsa) {
        initIsa(cls, false, false);
    } else {
        initIsa(cls, true, false);
    }
}

inline void
objc_object::initProtocolIsa(Class cls)
{
    return initClassIsa(cls);
}

inline void
objc_object::initInstanceIsa(Class cls, bool hasCxxDtor)
{
    assert(!UseGC);
    assert(!cls->requiresRawIsa());
    assert(hasCxxDtor == cls->hasCxxDtor());

    initIsa(cls, true, hasCxxDtor);
}

inline void
objc_object::initIsa(Class cls, bool indexed, bool hasCxxDtor)
{
    assert(!isTaggedPointer());

    if (!indexed) {
        isa.cls = cls;
    } else {
        assert(!DisableIndexedIsa);
        isa.bits = ISA_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.indexed is part of ISA_MAGIC_VALUE
        isa.has_cxx_dtor = hasCxxDtor;
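        // Classes are allocated with at least 8-byte alignment, so the low
        // 3 bits of the class pointer are always zero and can be shifted
        // away to fit into the shiftcls field.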
        isa.shiftcls = (uintptr_t)cls >> 3;
    }
}


inline Class
objc_object::changeIsa(Class newCls)
{
    assert(!isTaggedPointer());

    isa_t oldisa;
    isa_t newisa;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        if ((oldisa.bits == 0 || oldisa.indexed) &&
            newCls->canAllocIndexed())
        {
            // 0 -> indexed
            // indexed -> indexed
            if (oldisa.bits == 0) newisa.bits = ISA_MAGIC_VALUE;
            else newisa = oldisa;
            // isa.magic is part of ISA_MAGIC_VALUE
            // isa.indexed is part of ISA_MAGIC_VALUE
            newisa.has_cxx_dtor = newCls->hasCxxDtor();
            newisa.shiftcls = (uintptr_t)newCls >> 3;
        }
        else if (oldisa.indexed) {
            // indexed -> not indexed
            // Need to copy retain count et al to side table.
            // Acquire side table lock before setting isa to
            // prevent races such as concurrent -release.
            if (!sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.cls = newCls;
        }
        else {
            // not indexed -> not indexed
            newisa.cls = newCls;
        }
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (transcribeToSideTable) {
        // Copy oldisa's retain count et al to side table.
        // oldisa.weakly_referenced: nothing to do
        // oldisa.has_assoc: nothing to do
        // oldisa.has_cxx_dtor: nothing to do
        sidetable_moveExtraRC_nolock(oldisa.extra_rc,
                                     oldisa.deallocating,
                                     oldisa.weakly_referenced);
    }

    if (sideTableLocked) sidetable_unlock();

    Class oldCls;
    if (oldisa.indexed) oldCls = (Class)((uintptr_t)oldisa.shiftcls << 3);
    else oldCls = oldisa.cls;

    return oldCls;
}


inline bool
objc_object::isTaggedPointer()
{
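    // An object is tagged if its tag bit (MSB or LSB depending on the
    // platform) is set; no memory access is required.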
    return ((uintptr_t)this & TAG_MASK);
}


inline bool
objc_object::hasAssociatedObjects()
{
    if (isTaggedPointer()) return true;
    if (isa.indexed) return isa.has_assoc;
    return true;
}


inline void
objc_object::setHasAssociatedObjects()
{
    if (isTaggedPointer()) return;

 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (!newisa.indexed) return;
    if (newisa.has_assoc) return;
    newisa.has_assoc = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}


inline bool
objc_object::isWeaklyReferenced()
{
    assert(!isTaggedPointer());
    if (isa.indexed) return isa.weakly_referenced;
    else return sidetable_isWeaklyReferenced();
}


inline void
objc_object::setWeaklyReferenced_nolock()
{
 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (!newisa.indexed) return sidetable_setWeaklyReferenced_nolock();
    if (newisa.weakly_referenced) return;
    newisa.weakly_referenced = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}


inline bool
objc_object::hasCxxDtor()
{
    assert(!isTaggedPointer());
    if (isa.indexed) return isa.has_cxx_dtor;
    else return isa.cls->hasCxxDtor();
}



inline bool
objc_object::rootIsDeallocating()
{
    assert(!UseGC);

    if (isTaggedPointer()) return false;
    if (isa.indexed) return isa.deallocating;
    return sidetable_isDeallocating();
}


inline void
objc_object::clearDeallocating()
{
    if (!isa.indexed) {
        sidetable_clearDeallocating();
    }
    else if (isa.weakly_referenced) {
        clearDeallocating_weak();
    }

    assert(!sidetable_present());
}


inline void
objc_object::rootDealloc()
{
    assert(!UseGC);
    if (isTaggedPointer()) return;

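    // Fast path: no weak references, no associated objects, no C++
    // destructor, and (asserted below) nothing in the side tables, so the
    // memory can be freed directly instead of going through object_dispose.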
    if (isa.indexed &&
        !isa.weakly_referenced &&
        !isa.has_assoc &&
        !isa.has_cxx_dtor)
    {
        assert(!sidetable_present());
        free(this);
    }
    else {
        object_dispose((id)this);
    }
}


// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());
    assert(!isTaggedPointer());

    if (! ISA()->hasCustomRR()) {
        return rootRetain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_retain);
}


// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
//
// tryRetain=true is the -_tryRetain path.
// handleOverflow=false is the frameless fast path.
// handleOverflow=true is the framed slow path including overflow to side table
// The code is structured this way to prevent duplication.

ALWAYS_INLINE id
objc_object::rootRetain()
{
    return rootRetain(false, false);
}

ALWAYS_INLINE bool
objc_object::rootTryRetain()
{
    return rootRetain(true, false) ? true : false;
}

ALWAYS_INLINE id
objc_object::rootRetain(bool tryRetain, bool handleOverflow)
{
    assert(!UseGC);
    if (isTaggedPointer()) return (id)this;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    isa_t oldisa;
    isa_t newisa;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (!newisa.indexed) goto unindexed;
        // don't check newisa.fast_rr; we already called any RR overrides
        if (tryRetain && newisa.deallocating) goto tryfail;
        uintptr_t carry;
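        // extra_rc occupies the high bits of the isa word, so a carry out
        // of this 64-bit add means the inline retain count overflowed.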
        newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc++

        if (carry) {
            // newisa.extra_rc++ overflowed
            if (!handleOverflow) return rootRetain_overflow(tryRetain);
            // Leave half of the retain counts inline and
            // prepare to copy the other half to the side table.
            if (!tryRetain && !sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.extra_rc = RC_HALF;
            newisa.has_sidetable_rc = true;
        }
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (transcribeToSideTable) {
        // Copy the other half of the retain counts to the side table.
        sidetable_addExtraRC_nolock(RC_HALF);
    }

    if (!tryRetain && sideTableLocked) sidetable_unlock();
    return (id)this;

 tryfail:
    if (!tryRetain && sideTableLocked) sidetable_unlock();
    return nil;

 unindexed:
    if (!tryRetain && sideTableLocked) sidetable_unlock();
    if (tryRetain) return sidetable_tryRetain() ? (id)this : nil;
    else return sidetable_retain();
}


// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());
    assert(!isTaggedPointer());

    if (! ISA()->hasCustomRR()) {
        rootRelease();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_release);
}


// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
//
// handleUnderflow=false is the frameless fast path.
// handleUnderflow=true is the framed slow path including side table borrow
// The code is structured this way to prevent duplication.

ALWAYS_INLINE bool
objc_object::rootRelease()
{
    return rootRelease(true, false);
}

ALWAYS_INLINE bool
objc_object::rootReleaseShouldDealloc()
{
    return rootRelease(false, false);
}

ALWAYS_INLINE bool
objc_object::rootRelease(bool performDealloc, bool handleUnderflow)
{
    assert(!UseGC);
    if (isTaggedPointer()) return false;

    bool sideTableLocked = false;

    isa_t oldisa;
    isa_t newisa;

 retry:
    do {
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (!newisa.indexed) goto unindexed;
        // don't check newisa.fast_rr; we already called any RR overrides
        uintptr_t carry;
        newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc--
        if (carry) goto underflow;
    } while (!StoreReleaseExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (sideTableLocked) sidetable_unlock();
    return false;

 underflow:
    // newisa.extra_rc-- underflowed: borrow from side table or deallocate

    // abandon newisa to undo the decrement
    newisa = oldisa;

    if (newisa.has_sidetable_rc) {
        if (!handleUnderflow) {
            return rootRelease_underflow(performDealloc);
        }
        // Add some retain counts inline and prepare
        // to remove them from the side table.
        if (!sideTableLocked) sidetable_lock();
        sideTableLocked = true;
        newisa.extra_rc = RC_HALF - 1;  // redo the decrement
        if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;

        // Remove the retain counts from the side table.
        bool zeroed = sidetable_subExtraRC_nolock(RC_HALF);
        if (zeroed) {
            // Side table count is now zero. Clear the marker bit.
            do {
                oldisa = LoadExclusive(&isa.bits);
                newisa = oldisa;  // rebuild from the fresh value so that
                                  // concurrent isa changes are not clobbered
                newisa.has_sidetable_rc = false;
            } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));
        }

        // Decrement successful after borrowing from side table.
        // This decrement cannot be the deallocating decrement - the side
        // table lock and has_sidetable_rc bit ensure that if everyone
        // else tried to -release while we worked, the last one would block.
        sidetable_unlock();
        return false;
    }

    // Really deallocate.

    if (sideTableLocked) sidetable_unlock();

    if (newisa.deallocating) {
        return overrelease_error();
    }
    newisa.deallocating = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
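    // Full barrier: make sure all prior loads and stores complete before
    // -dealloc starts tearing the object down.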
    __sync_synchronize();
    if (performDealloc) {
        ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_dealloc);
    }
    return true;

 unindexed:
    if (sideTableLocked) sidetable_unlock();
    return sidetable_release(performDealloc);
}


// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());

    if (isTaggedPointer()) return (id)this;
    if (! ISA()->hasCustomRR()) return rootAutorelease();

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_autorelease);
}


// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    assert(!UseGC);

    if (isTaggedPointer()) return (id)this;
    if (fastAutoreleaseForReturn((id)this)) return (id)this;

    return rootAutorelease2();
}


inline uintptr_t
objc_object::rootRetainCount()
{
    assert(!UseGC);
    if (isTaggedPointer()) return (uintptr_t)this;

    sidetable_lock();
    isa_t bits = LoadExclusive(&isa.bits);
    if (bits.indexed) {
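        // extra_rc counts retains in excess of the initial +1, hence 1 + here.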
        uintptr_t rc = 1 + bits.extra_rc;
        if (bits.has_sidetable_rc) {
            rc += sidetable_getExtraRC_nolock();
        }
        sidetable_unlock();
        return rc;
    }

    sidetable_unlock();
    return sidetable_retainCount();
}


// SUPPORT_NONPOINTER_ISA
#else
// not SUPPORT_NONPOINTER_ISA


inline Class
objc_object::ISA()
{
    assert(!isTaggedPointer());
    return isa.cls;
}


inline bool
objc_object::hasIndexedIsa()
{
    return false;
}


inline Class
objc_object::getIsa()
{
#if SUPPORT_TAGGED_POINTERS
    if (isTaggedPointer()) {
        uintptr_t slot = ((uintptr_t)this >> TAG_SLOT_SHIFT) & TAG_SLOT_MASK;
        return objc_tag_classes[slot];
    }
#endif
    return ISA();
}


inline void
objc_object::initIsa(Class cls)
{
    assert(!isTaggedPointer());
    isa = (uintptr_t)cls;
}


inline void
objc_object::initClassIsa(Class cls)
{
    initIsa(cls);
}


inline void
objc_object::initProtocolIsa(Class cls)
{
    initIsa(cls);
}


inline void
objc_object::initInstanceIsa(Class cls, bool)
{
    initIsa(cls);
}


inline void
objc_object::initIsa(Class cls, bool, bool)
{
    initIsa(cls);
}


inline Class
objc_object::changeIsa(Class cls)
{
    assert(!isTaggedPointer());

    isa_t oldisa, newisa;
    newisa.cls = cls;
    do {
        oldisa = LoadExclusive(&isa.bits);
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

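    // A raw isa has no per-object has_assoc bit; the flag lives on the
    // class, so carry it over to the new class to keep associated-object
    // cleanup working at dealloc time.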
    if (oldisa.cls && oldisa.cls->instancesHaveAssociatedObjects()) {
        cls->setInstancesHaveAssociatedObjects();
    }

    return oldisa.cls;
}


inline bool
objc_object::isTaggedPointer()
{
#if SUPPORT_TAGGED_POINTERS
    return ((uintptr_t)this & TAG_MASK);
#else
    return false;
#endif
}


inline bool
objc_object::hasAssociatedObjects()
{
    assert(!UseGC);

    return getIsa()->instancesHaveAssociatedObjects();
}


inline void
objc_object::setHasAssociatedObjects()
{
    assert(!UseGC);

    getIsa()->setInstancesHaveAssociatedObjects();
}


inline bool
objc_object::isWeaklyReferenced()
{
    assert(!isTaggedPointer());
    assert(!UseGC);

    return sidetable_isWeaklyReferenced();
}


inline void
objc_object::setWeaklyReferenced_nolock()
{
    assert(!isTaggedPointer());
    assert(!UseGC);

    sidetable_setWeaklyReferenced_nolock();
}


inline bool
objc_object::hasCxxDtor()
{
    assert(!isTaggedPointer());
    return isa.cls->hasCxxDtor();
}


inline bool
objc_object::rootIsDeallocating()
{
    assert(!UseGC);

    if (isTaggedPointer()) return false;
    return sidetable_isDeallocating();
}


inline void
objc_object::clearDeallocating()
{
    sidetable_clearDeallocating();
}


inline void
objc_object::rootDealloc()
{
    if (isTaggedPointer()) return;
    object_dispose((id)this);
}


// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());
    assert(!isTaggedPointer());

    if (! ISA()->hasCustomRR()) {
        return sidetable_retain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_retain);
}


// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
inline id
objc_object::rootRetain()
{
    assert(!UseGC);

    if (isTaggedPointer()) return (id)this;
    return sidetable_retain();
}


// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());
    assert(!isTaggedPointer());

    if (! ISA()->hasCustomRR()) {
        sidetable_release();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_release);
}


// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
inline bool
objc_object::rootRelease()
{
    assert(!UseGC);

    if (isTaggedPointer()) return false;
    return sidetable_release(true);
}

inline bool
objc_object::rootReleaseShouldDealloc()
{
    if (isTaggedPointer()) return false;
    return sidetable_release(false);
}


// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    // UseGC is allowed here, but requires hasCustomRR.
    assert(!UseGC || ISA()->hasCustomRR());

    if (isTaggedPointer()) return (id)this;
    if (! ISA()->hasCustomRR()) return rootAutorelease();

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_autorelease);
}


// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    assert(!UseGC);

    if (isTaggedPointer()) return (id)this;
    if (fastAutoreleaseForReturn((id)this)) return (id)this;

    return rootAutorelease2();
}


// Base tryRetain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super _tryRetain].
inline bool
objc_object::rootTryRetain()
{
    assert(!UseGC);

    if (isTaggedPointer()) return true;
    return sidetable_tryRetain();
}


inline uintptr_t
objc_object::rootRetainCount()
{
    assert(!UseGC);

    if (isTaggedPointer()) return (uintptr_t)this;
    return sidetable_retainCount();
}


// not SUPPORT_NONPOINTER_ISA
#endif


#if SUPPORT_RETURN_AUTORELEASE

/***********************************************************************
  Fast handling of returned autoreleased values.
  The caller and callee cooperate to keep the returned object
  out of the autorelease pool.

  Caller:
    ret = callee();
    objc_retainAutoreleasedReturnValue(ret);
    // use ret here

  Callee:
    // compute ret
    [ret retain];
    return objc_autoreleaseReturnValue(ret);

  objc_autoreleaseReturnValue() examines the caller's instructions following
  the return. If the caller's instructions immediately call
  objc_retainAutoreleasedReturnValue, then the callee omits the -autorelease
  and saves the result in thread-local storage. If the caller does not look
  like it cooperates, then the callee calls -autorelease as usual.

  objc_retainAutoreleasedReturnValue checks whether the returned value is
  the same as the one in thread-local storage. If it is, the value is used
  directly. If not, the value is assumed to be truly autoreleased and is
  retained again. In either case, the caller ends up with a retained
  reference to the value.

  Tagged pointer objects do participate in the fast autorelease scheme,
  because it saves message sends. They are not entered in the autorelease
  pool in the slow case.
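
  For illustration only (a hand-written equivalent, not actual compiler
  output): under ARC a call such as

      id obj = [factory makeObject];

  compiles to roughly

      id obj = objc_retainAutoreleasedReturnValue([factory makeObject]);

  and on some architectures the compiler also emits a recognizable no-op
  marker instruction before the objc_retainAutoreleasedReturnValue call;
  the callerAcceptsFastAutorelease implementations below look for exactly
  these patterns.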
**********************************************************************/

# if __x86_64__

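// Look for the caller's cooperation sequence at the return address:
//   movq %rax,%rdi ; callq objc_retainAutoreleasedReturnValue
// then follow the call through its jump stub and verify that the final
// target really is objc_retainAutoreleasedReturnValue.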
static ALWAYS_INLINE bool
callerAcceptsFastAutorelease(const void * const ra0)
{
    const uint8_t *ra1 = (const uint8_t *)ra0;
    const uint16_t *ra2;
    const uint32_t *ra4 = (const uint32_t *)ra1;
    const void **sym;

#define PREFER_GOTPCREL 0
#if PREFER_GOTPCREL
    // 48 89 c7    movq  %rax,%rdi
    // ff 15       callq *symbol@GOTPCREL(%rip)
    if (*ra4 != 0xffc78948) {
        return false;
    }
    if (ra1[4] != 0x15) {
        return false;
    }
    ra1 += 3;
#else
    // 48 89 c7    movq  %rax,%rdi
    // e8          callq symbol
    if (*ra4 != 0xe8c78948) {
        return false;
    }
    ra1 += (long)*(const int32_t *)(ra1 + 4) + 8l;
    ra2 = (const uint16_t *)ra1;
    // ff 25       jmpq *symbol@DYLDMAGIC(%rip)
    if (*ra2 != 0x25ff) {
        return false;
    }
#endif
    ra1 += 6l + (long)*(const int32_t *)(ra1 + 2);
    sym = (const void **)ra1;
    if (*sym != objc_retainAutoreleasedReturnValue)
    {
        return false;
    }

    return true;
}

// __x86_64__
# elif __arm__

static ALWAYS_INLINE bool
callerAcceptsFastAutorelease(const void *ra)
{
    // if the low bit is set, we're returning to thumb mode
    if ((uintptr_t)ra & 1) {
        // 3f 46          mov r7, r7
        // we mask off the low bit via subtraction
        if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
            return true;
        }
    } else {
        // 07 70 a0 e1    mov r7, r7
        if (*(uint32_t *)ra == 0xe1a07007) {
            return true;
        }
    }
    return false;
}

// __arm__
# elif __arm64__

static ALWAYS_INLINE bool
callerAcceptsFastAutorelease(const void *ra)
{
    // fd 03 1d aa    mov fp, fp
    if (*(uint32_t *)ra == 0xaa1d03fd) {
        return true;
    }
    return false;
}

// __arm64__
# elif __i386__ && TARGET_IPHONE_SIMULATOR

static inline bool
callerAcceptsFastAutorelease(const void *ra)
{
    return false;
}

// __i386__ && TARGET_IPHONE_SIMULATOR
# else

#warning unknown architecture

static ALWAYS_INLINE bool
callerAcceptsFastAutorelease(const void *ra)
{
    return false;
}

// unknown architecture
# endif


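// Callee side of the optimization: if the caller's code cooperates, stash
// the object in thread-local storage instead of autoreleasing it.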
static ALWAYS_INLINE
bool fastAutoreleaseForReturn(id obj)
{
    assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY) == nil);

    if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
        tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, obj);
        return true;
    }

    return false;
}

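// Caller side of the optimization: if the callee stashed this object in
// thread-local storage, claim it and skip the extra retain/autorelease
// round trip.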
static ALWAYS_INLINE
bool fastRetainFromReturn(id obj)
{
    if (obj == tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY)) {
        tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, 0);
        return true;
    }

    return false;
}


// SUPPORT_RETURN_AUTORELEASE
#else
// not SUPPORT_RETURN_AUTORELEASE


static ALWAYS_INLINE
bool fastAutoreleaseForReturn(id obj)
{
    return false;
}


static ALWAYS_INLINE
bool fastRetainFromReturn(id obj)
{
    return false;
}


// not SUPPORT_RETURN_AUTORELEASE
#endif


// _OBJC_OBJECT_H_
#endif