/*
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/********************************************************************
 * objc-msg-arm64.s - ARM64 code to support objc messaging
 ********************************************************************/

#include "objc-config.h"
#include "arm64-asm.h"

#if TARGET_OS_IPHONE && __LP64__
	.section	__TEXT,__objc_methname,cstring_literals
l_MagicSelector: /* the shared cache builder knows about this value */
	.byte	0xf0, 0x9f, 0xa4, 0xaf, 0

	.section	__DATA,__objc_selrefs,literal_pointers,no_dead_strip
	.p2align	3
_MagicSelRef:
	.quad	l_MagicSelector
#endif

// _objc_restartableRanges is used by method dispatch
// caching code to figure out whether any threads are actively
// in the cache for dispatching. The labels surround the asm code
// that does cache lookups. The tables are zero-terminated.

.macro RestartableEntry
#if __LP64__
	.quad	LLookupStart$0
#else
	.long	LLookupStart$0
	.long	0
#endif
	.short	LLookupEnd$0 - LLookupStart$0
	.short	LLookupRecover$0 - LLookupStart$0
	.long	0
.endmacro

	.private_extern _objc_restartableRanges
_objc_restartableRanges:
	RestartableEntry _cache_getImp
	RestartableEntry _objc_msgSend
	RestartableEntry _objc_msgSendSuper2
	RestartableEntry _objc_msgLookup
	RestartableEntry _objc_msgLookupSuper2
	.fill	16, 1, 0

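// Illustration (not part of the build): each RestartableEntry above emits
// a record the kernel can interpret roughly as this C struct; field names
// here are a sketch, not a definition from any header.
//
//   struct restartable_range {
//       uint64_t location;       // address of LLookupStart<fn>
//       uint16_t length;         // LLookupEnd<fn>     - LLookupStart<fn>
//       uint16_t recovery_offs;  // LLookupRecover<fn> - LLookupStart<fn>
//       uint32_t flags;          // emitted as 0
//   };
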
/* objc_super parameter to sendSuper */
#define RECEIVER         0
#define CLASS            __SIZEOF_POINTER__

/* Selected field offsets in class structure */
#define SUPERCLASS       __SIZEOF_POINTER__
#define CACHE            (2 * __SIZEOF_POINTER__)

/* Selected field offsets in method structure */
#define METHOD_NAME      0
#define METHOD_TYPES     __SIZEOF_POINTER__
#define METHOD_IMP       (2 * __SIZEOF_POINTER__)

#define BUCKET_SIZE      (2 * __SIZEOF_POINTER__)

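// Illustration (not part of the build): the offsets above mirror structs of
// roughly this shape; field names are a sketch, not the runtime's headers.
//
//   struct objc_super_layout {
//       void *receiver;        // RECEIVER
//       void *cls;             // CLASS
//   };
//   struct objc_class_layout {
//       void *isa;
//       void *superclass;      // SUPERCLASS
//       void *cache;           // CACHE: packed buckets pointer and mask
//   };
//   struct method_layout {
//       void *name;            // METHOD_NAME (SEL)
//       void *types;           // METHOD_TYPES (type encoding string)
//       void *imp;             // METHOD_IMP (function pointer)
//   };
//   // each cache bucket is an {imp, sel} pair: BUCKET_SIZE == 2 pointers
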
/********************************************************************
 * GetClassFromIsa_p16 src, needs_auth, auth_address
 * src is a raw isa field. Sets p16 to the corresponding class pointer.
 * The raw isa might be an indexed isa to be decoded, or a
 * packed isa that needs to be masked.
 *
 * On exit:
 *   src is unchanged
 *   p16 is a class pointer
 ********************************************************************/
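
// Illustration (not part of the build): a rough C equivalent of the two
// decoding strategies below. ISA_MASK and the ISA_INDEX_* constants come
// from arm64-asm.h / objc-config.h; the helper itself is a sketch.
//
//   extern void *objc_indexed_classes[];
//   static void *class_from_raw_isa(unsigned long isa) {
//   #if SUPPORT_INDEXED_ISA
//       if (isa & (1UL << ISA_INDEX_IS_NPI_BIT)) {       // non-pointer isa
//           unsigned long idx =
//               (isa >> ISA_INDEX_SHIFT) & ((1UL << ISA_INDEX_BITS) - 1);
//           return objc_indexed_classes[idx];            // indexed isa
//       }
//       return (void *)isa;                              // raw class pointer
//   #else
//       return (void *)(isa & ISA_MASK);                 // packed isa
//   #endif
//   }
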
#if SUPPORT_INDEXED_ISA
	.globl _objc_indexed_classes
_objc_indexed_classes:
	.fill ISA_INDEX_COUNT, PTRSIZE, 0
#endif

.macro GetClassFromIsa_p16 src, needs_auth, auth_address /* note: auth_address is not required if !needs_auth */

#if SUPPORT_INDEXED_ISA
	// Indexed isa
	mov	p16, \src			// optimistically set dst = src
	tbz	p16, #ISA_INDEX_IS_NPI_BIT, 1f	// done if not non-pointer isa
	// isa in p16 is indexed
	adrp	x10, _objc_indexed_classes@PAGE
	add	x10, x10, _objc_indexed_classes@PAGEOFF
	ubfx	p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS	// extract index
	ldr	p16, [x10, p16, UXTP #PTRSHIFT]	// load class from array
1:

#elif __LP64__
.if \needs_auth == 0 // _cache_getImp takes an authed class already
	mov	p16, \src
.else
	// 64-bit packed isa
	ExtractISA p16, \src, \auth_address
.endif
#endif

.endmacro

/********************************************************************
 * ENTRY functionName
 * STATIC_ENTRY functionName
 * END_ENTRY functionName
 ********************************************************************/

.macro ENTRY /* name */
	.text
	.align 5
	.globl    $0
$0:
.endmacro

.macro STATIC_ENTRY /*name*/
	.text
	.align 5
	.private_extern $0
$0:
.endmacro

.macro END_ENTRY /* name */
LExit$0:
.endmacro

/********************************************************************
 * UNWIND name, flags
 * Unwind info generation
 ********************************************************************/
.macro UNWIND
	.section __LD,__compact_unwind,regular,debug
	PTR $0
	.set	LUnwind$0, LExit$0 - $0
	.long	LUnwind$0
	.long	$1
	PTR 0	 /* no personality */
	PTR 0	 /* no LSDA */
	.text
.endmacro

#define NoFrame 0x02000000		// no frame, no SP adjustment
#define FrameWithNoSaves 0x04000000	// frame, no non-volatile saves

// "kind" constants for SAVE_REGS / RESTORE_REGS
#define MSGSEND 100
#define METHOD_INVOKE 101

//////////////////////////////////////////////////////////////////////
//
// SAVE_REGS
//
// Create a stack frame and save all argument registers in preparation
// for a function call.
//////////////////////////////////////////////////////////////////////

.macro SAVE_REGS kind
	// push frame
	SignLR
	stp	fp, lr, [sp, #-16]!
	mov	fp, sp

	// save parameter registers: x0..x8, q0..q7
	sub	sp, sp, #(10*8 + 8*16)
	stp	q0, q1, [sp, #(0*16)]
	stp	q2, q3, [sp, #(2*16)]
	stp	q4, q5, [sp, #(4*16)]
	stp	q6, q7, [sp, #(6*16)]
	stp	x0, x1, [sp, #(8*16+0*8)]
	stp	x2, x3, [sp, #(8*16+2*8)]
	stp	x4, x5, [sp, #(8*16+4*8)]
	stp	x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
	stp	x8, x15, [sp, #(8*16+8*8)]
	mov	x16, x15	// stashed by CacheLookup, restore to x16
.elseif \kind == METHOD_INVOKE
	str	x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif
.endmacro

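// Illustration (not part of the build): after SAVE_REGS the stack holds
// roughly this C layout, growing down from the pushed fp/lr pair (names
// are a sketch):
//
//   struct save_regs_area {      // 208 bytes == 10*8 + 8*16
//       __uint128_t q0_q7[8];    // at sp + 0*16 .. 7*16
//       uint64_t    x0_x7[8];    // at sp + 8*16 + 0*8 ..
//       uint64_t    x8;          // at sp + 8*16 + 8*8
//       uint64_t    x15_or_pad;  // x15 for MSGSEND, unused for METHOD_INVOKE
//   };                           // fp, lr sit just above this area
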
//////////////////////////////////////////////////////////////////////
//
// RESTORE_REGS
//
// Restore all argument registers and pop the stack frame created by
// SAVE_REGS.
//////////////////////////////////////////////////////////////////////

.macro RESTORE_REGS kind
	ldp	q0, q1, [sp, #(0*16)]
	ldp	q2, q3, [sp, #(2*16)]
	ldp	q4, q5, [sp, #(4*16)]
	ldp	q6, q7, [sp, #(6*16)]
	ldp	x0, x1, [sp, #(8*16+0*8)]
	ldp	x2, x3, [sp, #(8*16+2*8)]
	ldp	x4, x5, [sp, #(8*16+4*8)]
	ldp	x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
	ldp	x8, x16, [sp, #(8*16+8*8)]
	orr	x16, x16, #2	// for the sake of instrumentation, remember it was the slow path
.elseif \kind == METHOD_INVOKE
	ldr	x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif
	mov	sp, fp
	ldp	fp, lr, [sp], #16
	AuthenticateLR
.endmacro

/********************************************************************
 *
 * CacheLookup NORMAL|GETIMP|LOOKUP <function> MissLabelDynamic MissLabelConstant
 *
 * MissLabelConstant is only used for the GETIMP variant.
 *
 * Locate the implementation for a selector in a class's method cache.
 *
 * When this is used in a function that doesn't hold the runtime lock,
 * this represents the critical section that may access dead memory.
 * If the kernel causes one of these functions to go down the recovery
 * path, we pretend the lookup failed by jumping to the JumpMiss branch.
 *
 * Takes:
 *	 x1 = selector
 *	 x16 = class to be searched
 *
 * Kills:
 *	 x9,x10,x11,x12,x13,x15,x17
 *
 * On exit: (found) calls or returns IMP
 *                  with x16 = class, x17 = IMP
 *                  In LOOKUP mode, the two low bits of x16 are set to 0x3
 *                  if we hit a constant cache (used in objc_trace)
 *          (not found) jumps to LCacheMiss
 *                  For constant caches in LOOKUP mode, the low bit
 *                  of x16 is set to 0x1 to indicate we had to fall back.
 *          In addition, when LCacheMiss is __objc_msgSend_uncached or
 *          __objc_msgLookup_uncached, 0x2 will be set in x16
 *          to remember we took the slow path.
 *          So the two low bits of x16 on exit mean:
 *            0: dynamic hit
 *            1: fallback to the parent class, when there is a preoptimized cache
 *            2: slow path
 *            3: preoptimized cache hit
 *
 ********************************************************************/
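
// Illustration (not part of the build): the two probe loops inside
// CacheLookup are roughly this C, assuming buckets of {imp, sel} and a
// power-of-two capacity (mask == capacity - 1). The starting index is
// (_cmd & mask), or ((_cmd ^ (_cmd >> 7)) & mask) with preopt caches.
// Names are a sketch.
//
//   typedef struct { uintptr_t imp, sel; } bucket_t;
//   static uintptr_t cache_lookup(bucket_t *buckets, uintptr_t mask,
//                                 uintptr_t cmd, uintptr_t index) {
//       bucket_t *b = buckets + index;          // first probed bucket
//       bucket_t *first = b;
//       do {                                    // scan backward
//           if (b->sel == cmd) return b->imp;   // hit
//           if (b->sel == 0)  return 0;         // empty slot: miss
//       } while (--b >= buckets);
//       b = buckets + mask;                     // wrap to the last bucket
//       do {                                    // stop at the first probed
//           if (b->sel == cmd) return b->imp;   // bucket, not bucket 0, so
//           if (b->sel == 0)  return 0;         // a full cache terminates
//       } while (--b > first);
//       return 0;                               // full circle: miss
//   }
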
#define NORMAL 0
#define GETIMP 1
#define LOOKUP 2

// CacheHit: x17 = cached IMP, x10 = address of buckets, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
	TailCallCachedImp x17, x10, x1, x16	// authenticate and call imp
.elseif $0 == GETIMP
	mov	p0, p17
	cbz	p0, 9f			// don't ptrauth a nil imp
	AuthAndResignAsIMP x0, x10, x1, x16	// authenticate imp and re-sign as IMP
9:	ret				// return IMP
.elseif $0 == LOOKUP
	// No nil check for ptrauth: the caller would crash anyway when they
	// jump to a nil IMP. We don't care if that jump also fails ptrauth.
	AuthAndResignAsIMP x17, x10, x1, x16	// authenticate imp and re-sign as IMP
	cmp	x16, x15
	cinc	x16, x16, ne		// x16 += 1 when x15 != x16 (for instrumentation; fallback to the parent class)
	ret				// return imp via x17
.endif
.endmacro

.macro CacheLookup Mode, Function, MissLabelDynamic, MissLabelConstant
	//
	// Restart protocol:
	//
	//   As soon as we're past the LLookupStart\Function label we may have
	//   loaded an invalid cache pointer or mask.
	//
	//   When task_restartable_ranges_synchronize() is called
	//   (or when a signal hits us) before we're past LLookupEnd\Function,
	//   then our PC will be reset to LLookupRecover\Function, which forcefully
	//   jumps to the cache-miss codepath, which has the following
	//   requirements:
	//
	//   GETIMP:
	//     The cache miss is just returning NULL (setting x0 to 0).
	//
	//   NORMAL and LOOKUP:
	//   - x0 contains the receiver
	//   - x1 contains the selector
	//   - x16 contains the isa
	//   - other registers are set as per calling conventions
	//
	mov	x15, x16			// stash the original isa
LLookupStart\Function:
	// p1 = SEL, p16 = isa
#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
	ldr	p10, [x16, #CACHE]		// p10 = mask|buckets
	lsr	p11, p10, #48			// p11 = mask
	and	p10, p10, #0xffffffffffff	// p10 = buckets
	and	w12, w1, w11			// x12 = _cmd & mask
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
	ldr	p11, [x16, #CACHE]		// p11 = mask|buckets
#if CONFIG_USE_PREOPT_CACHES
#if __has_feature(ptrauth_calls)
	tbnz	p11, #0, LLookupPreopt\Function
	and	p10, p11, #0x0000ffffffffffff	// p10 = buckets
#else
	and	p10, p11, #0x0000fffffffffffe	// p10 = buckets
	tbnz	p11, #0, LLookupPreopt\Function
#endif
	eor	p12, p1, p1, LSR #7
	and	p12, p12, p11, LSR #48		// x12 = (_cmd ^ (_cmd >> 7)) & mask
#else
	and	p10, p11, #0x0000ffffffffffff	// p10 = buckets
	and	p12, p1, p11, LSR #48		// x12 = _cmd & mask
#endif // CONFIG_USE_PREOPT_CACHES
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
	ldr	p11, [x16, #CACHE]		// p11 = mask|buckets
	and	p10, p11, #~0xf			// p10 = buckets
	and	p11, p11, #0xf			// p11 = maskShift
	mov	p12, #0xffff
	lsr	p11, p12, p11			// p11 = mask = 0xffff >> p11
	and	p12, p1, p11			// x12 = _cmd & mask
#else
#error Unsupported cache mask storage for ARM64.
#endif

	add	p13, p10, p12, LSL #(1+PTRSHIFT)
						// p13 = buckets + ((_cmd & mask) << (1+PTRSHIFT))
						// do {
1:	ldp	p17, p9, [x13], #-BUCKET_SIZE	//     {imp, sel} = *bucket--
	cmp	p9, p1				//     if (sel != _cmd) {
	b.ne	3f				//         scan more
						//     } else {
2:	CacheHit \Mode				// hit:    call or return imp
						//     }
3:	cbz	p9, \MissLabelDynamic		//     if (sel == 0) goto Miss;
	cmp	p13, p10			// } while (bucket >= buckets)
	b.hs	1b

	// wrap-around:
	//   p10 = first bucket
	//   p11 = mask (and maybe other bits on LP64)
	//   p12 = _cmd & mask
	//
	// A full cache can happen with CACHE_ALLOW_FULL_UTILIZATION.
	// So stop when we circle back to the first probed bucket
	// rather than when hitting the first bucket again.
	//
	// Note that we might probe the initial bucket twice
	// when the first probed slot is the last entry.

#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
	add	p13, p10, w11, UXTW #(1+PTRSHIFT)
						// p13 = buckets + (mask << 1+PTRSHIFT)
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
	add	p13, p10, p11, LSR #(48 - (1+PTRSHIFT))
						// p13 = buckets + (mask << 1+PTRSHIFT)
						// see comment about maskZeroBits
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
	add	p13, p10, p11, LSL #(1+PTRSHIFT)
						// p13 = buckets + (mask << 1+PTRSHIFT)
#else
#error Unsupported cache mask storage for ARM64.
#endif
	add	p12, p10, p12, LSL #(1+PTRSHIFT)
						// p12 = first probed bucket
						// do {
4:	ldp	p17, p9, [x13], #-BUCKET_SIZE	//     {imp, sel} = *bucket--
	cmp	p9, p1				//     if (sel == _cmd)
	b.eq	2b				//         goto hit
	cmp	p9, #0				// } while (sel != 0 &&
	ccmp	p13, p12, #0, ne		//     bucket > first_probed)
	b.hi	4b

LLookupEnd\Function:
LLookupRecover\Function:
	b	\MissLabelDynamic

#if CONFIG_USE_PREOPT_CACHES
#if CACHE_MASK_STORAGE != CACHE_MASK_STORAGE_HIGH_16
#error config unsupported
#endif
LLookupPreopt\Function:
#if __has_feature(ptrauth_calls)
	and	p10, p11, #0x007ffffffffffffe	// p10 = buckets
	autdb	x10, x16			// auth as early as possible
#endif

	// x12 = (_cmd - first_shared_cache_sel)
	adrp	x9, _MagicSelRef@PAGE
	ldr	p9, [x9, _MagicSelRef@PAGEOFF]
	sub	p12, p1, p9

	// w9  = ((_cmd - first_shared_cache_sel) >> hash_shift & hash_mask)
#if __has_feature(ptrauth_calls)
	// bits 63..60 of x11 are the number of bits in hash_mask
	// bits 59..55 of x11 are hash_shift

	lsr	x17, x11, #55			// w17 = (hash_shift, ...)
	lsr	w9, w12, w17			// >>= shift

	lsr	x17, x11, #60			// w17 = mask_bits
	mov	x11, #0x7fff
	lsr	x11, x11, x17			// p11 = mask (0x7fff >> mask_bits)
	and	x9, x9, x11			// &= mask
#else
	// bits 63..53 of x11 are hash_mask
	// bits 52..48 of x11 are hash_shift
	lsr	x17, x11, #48			// w17 = (hash_shift, hash_mask)
	lsr	w9, w12, w17			// >>= shift
	and	x9, x9, x11, LSR #53		// &= mask
#endif

	ldr	x17, [x10, x9, LSL #3]		// x17 == sel_offs | (imp_offs << 32)
	cmp	x12, w17, uxtw

.if \Mode == GETIMP
	b.ne	\MissLabelConstant		// cache miss
	sub	x0, x16, x17, LSR #32		// imp = isa - imp_offs
	SignAsImp x0
	ret
.else
	b.ne	5f				// cache miss
	sub	x17, x16, x17, LSR #32		// imp = isa - imp_offs
.if \Mode == NORMAL
	br	x17
.elseif \Mode == LOOKUP
	orr	x16, x16, #3	// for instrumentation, note that we hit a constant cache
	SignAsImp x17
	ret
.else
.abort unhandled mode \Mode
.endif

5:	ldursw	x9, [x10, #-8]			// offset -8 is the fallback offset
	add	x16, x16, x9			// compute the fallback isa
	b	LLookupStart\Function		// lookup again with a new isa
.endif
#endif // CONFIG_USE_PREOPT_CACHES
.endmacro

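// Illustration (not part of the build): the preoptimized (shared cache)
// lookup above, in rough C. The entry packing follows the comments above
// (sel_offs in the low 32 bits, imp_offs in the high 32); names are a
// sketch.
//
//   typedef struct { uint32_t sel_offs, imp_offs; } preopt_entry_t;
//   static uintptr_t preopt_lookup(preopt_entry_t *entries, uintptr_t isa,
//                                  uintptr_t cmd, uintptr_t first_sel,
//                                  unsigned shift, uint32_t mask) {
//       uint64_t delta = cmd - first_sel;
//       uint32_t i = ((uint32_t)delta >> shift) & mask;  // hash index
//       if (entries[i].sel_offs != delta)
//           return 0;  // miss: the caller retries with the fallback isa,
//                      // isa + ((int32_t *)entries)[-2]  (byte offset -8)
//       return isa - entries[i].imp_offs;       // imp = isa - imp_offs
//   }
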
/********************************************************************
 *
 * id objc_msgSend(id self, SEL _cmd, ...);
 * IMP objc_msgLookup(id self, SEL _cmd, ...);
 *
 * objc_msgLookup ABI:
 * IMP returned in x17
 * x16 reserved for our use but not used
 *
 ********************************************************************/

#if SUPPORT_TAGGED_POINTERS
	.data
	.align 3
	.globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
	.fill 256, 8, 0

// Dispatch for split tagged pointers takes advantage of the fact that
// the extended tag classes array immediately precedes the standard
// tag array. The .alt_entry directive ensures that the two stay
// together. This is harmless when using non-split tagged pointers.
	.globl _objc_debug_taggedpointer_classes
	.alt_entry _objc_debug_taggedpointer_classes
_objc_debug_taggedpointer_classes:
	.fill 16, 8, 0
#endif

// Look up the class for a tagged pointer in x0, placing it in x16.
.macro GetTaggedClass

	and	x10, x0, #0x7		// x10 = small tag
	asr	x11, x0, #55		// x11 = large tag with 1s filling the top (because bit 63 is 1 on a tagged pointer)
	cmp	x10, #7			// tag == 7?
	csel	x12, x11, x10, eq	// x12 = index in tagged pointer classes array, negative for extended tags.
	// The extended tag array is placed immediately before the basic tag array,
	// so this looks into the right place either way. The sign extension done
	// by the asr instruction produces the value extended_tag - 256, which is
	// the correct index into the extended tagged pointer classes array.

	// x16 = _objc_debug_taggedpointer_classes[x12]
	adrp	x10, _objc_debug_taggedpointer_classes@PAGE
	add	x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
	ldr	x16, [x10, x12, LSL #3]

.endmacro

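// Illustration (not part of the build): the index computation above in
// rough C; the negative indexing is a pointer-arithmetic sketch relying
// on the ext array sitting immediately before the basic array.
//
//   extern void *objc_debug_taggedpointer_ext_classes[256];
//   extern void *objc_debug_taggedpointer_classes[16];
//   static void *tagged_class(uintptr_t ptr) {
//       intptr_t small = ptr & 0x7;            // low 3 bits: basic tag
//       intptr_t large = (intptr_t)ptr >> 55;  // sign-extended: ext_tag - 256
//       intptr_t idx = (small == 7) ? large : small;
//       // negative idx lands in the preceding extended-tag array
//       return objc_debug_taggedpointer_classes[idx];
//   }
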
	ENTRY _objc_msgSend
	UNWIND _objc_msgSend, NoFrame

	cmp	p0, #0			// nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
	b.le	LNilOrTagged		//  (MSB tagged pointer looks negative)
#else
	b.eq	LReturnZero
#endif
	ldr	p13, [x0]		// p13 = isa
	GetClassFromIsa_p16 p13, 1, x0	// p16 = class
LGetIsaDone:
	// calls imp or objc_msgSend_uncached
	CacheLookup NORMAL, _objc_msgSend, __objc_msgSend_uncached

#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
	b.eq	LReturnZero		// nil check
	GetTaggedClass
	b	LGetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LReturnZero:
	// x0 is already zero
	mov	x1, #0
	movi	d0, #0
	movi	d1, #0
	movi	d2, #0
	movi	d3, #0
	ret

	END_ENTRY _objc_msgSend

	ENTRY _objc_msgLookup
	UNWIND _objc_msgLookup, NoFrame

	cmp	p0, #0			// nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
	b.le	LLookup_NilOrTagged	//  (MSB tagged pointer looks negative)
#else
	b.eq	LLookup_Nil
#endif
	ldr	p13, [x0]		// p13 = isa
	GetClassFromIsa_p16 p13, 1, x0	// p16 = class
LLookup_GetIsaDone:
	// returns imp
	CacheLookup LOOKUP, _objc_msgLookup, __objc_msgLookup_uncached

#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
	b.eq	LLookup_Nil		// nil check
	GetTaggedClass
	b	LLookup_GetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LLookup_Nil:
	adr	x17, __objc_msgNil
	SignAsImp x17
	ret

	END_ENTRY _objc_msgLookup

	STATIC_ENTRY __objc_msgNil

	// x0 is already zero
	mov	x1, #0
	movi	d0, #0
	movi	d1, #0
	movi	d2, #0
	movi	d3, #0
	ret

	END_ENTRY __objc_msgNil

	ENTRY _objc_msgSendSuper
	UNWIND _objc_msgSendSuper, NoFrame

	ldp	p0, p16, [x0]		// p0 = real receiver, p16 = class
	b	L_objc_msgSendSuper2_body

	END_ENTRY _objc_msgSendSuper

	// no _objc_msgLookupSuper

	ENTRY _objc_msgSendSuper2
	UNWIND _objc_msgSendSuper2, NoFrame

#if __has_feature(ptrauth_calls)
	ldp	x0, x17, [x0]		// x0 = real receiver, x17 = class
	add	x17, x17, #SUPERCLASS	// x17 = &class->superclass
	ldr	x16, [x17]		// x16 = class->superclass
	AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
#else
	ldp	p0, p16, [x0]		// p0 = real receiver, p16 = class
	ldr	p16, [x16, #SUPERCLASS]	// p16 = class->superclass
#endif
L_objc_msgSendSuper2_body:
	CacheLookup NORMAL, _objc_msgSendSuper2, __objc_msgSend_uncached

	END_ENTRY _objc_msgSendSuper2

	ENTRY _objc_msgLookupSuper2
	UNWIND _objc_msgLookupSuper2, NoFrame

#if __has_feature(ptrauth_calls)
	ldp	x0, x17, [x0]		// x0 = real receiver, x17 = class
	add	x17, x17, #SUPERCLASS	// x17 = &class->superclass
	ldr	x16, [x17]		// x16 = class->superclass
	AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
LMsgLookupSuperResume:
#else
	ldp	p0, p16, [x0]		// p0 = real receiver, p16 = class
	ldr	p16, [x16, #SUPERCLASS]	// p16 = class->superclass
#endif
	CacheLookup LOOKUP, _objc_msgLookupSuper2, __objc_msgLookup_uncached

	END_ENTRY _objc_msgLookupSuper2

.macro MethodTableLookup

	SAVE_REGS MSGSEND

	// lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
	// receiver and selector already in x0 and x1
	mov	x2, x16
	mov	x3, #3
	bl	_lookUpImpOrForward

	// IMP in x0
	mov	x17, x0

	RESTORE_REGS MSGSEND

.endmacro

	STATIC_ENTRY __objc_msgSend_uncached
	UNWIND __objc_msgSend_uncached, FrameWithNoSaves

	// THIS IS NOT A CALLABLE C FUNCTION
	// Out-of-band p15 is the class to search

	MethodTableLookup
	TailCallFunctionPointer x17

	END_ENTRY __objc_msgSend_uncached

	STATIC_ENTRY __objc_msgLookup_uncached
	UNWIND __objc_msgLookup_uncached, FrameWithNoSaves

	// THIS IS NOT A CALLABLE C FUNCTION
	// Out-of-band p15 is the class to search

	MethodTableLookup
	ret

	END_ENTRY __objc_msgLookup_uncached

	STATIC_ENTRY _cache_getImp

	GetClassFromIsa_p16 p0, 0
	CacheLookup GETIMP, _cache_getImp, LGetImpMissDynamic, LGetImpMissConstant

LGetImpMissDynamic:
	mov	p0, #0
	ret

LGetImpMissConstant:
	mov	p0, p2
	ret

	END_ENTRY _cache_getImp

/********************************************************************
 *
 * id _objc_msgForward(id self, SEL _cmd, ...);
 *
 * _objc_msgForward is the externally-callable
 *   function returned by things like method_getImplementation().
 * _objc_msgForward_impcache is the function pointer actually stored in
 *   method caches.
 *
 ********************************************************************/

	STATIC_ENTRY __objc_msgForward_impcache

	// No stret specialization.
	b	__objc_msgForward

	END_ENTRY __objc_msgForward_impcache

	ENTRY __objc_msgForward

	adrp	x17, __objc_forward_handler@PAGE
	ldr	p17, [x17, __objc_forward_handler@PAGEOFF]
	TailCallFunctionPointer x17

	END_ENTRY __objc_msgForward

	ENTRY _objc_msgSend_noarg
	b	_objc_msgSend
	END_ENTRY _objc_msgSend_noarg

	ENTRY _objc_msgSend_debug
	b	_objc_msgSend
	END_ENTRY _objc_msgSend_debug

	ENTRY _objc_msgSendSuper2_debug
	b	_objc_msgSendSuper2
	END_ENTRY _objc_msgSendSuper2_debug

	ENTRY _method_invoke

	// See if this is a small method.
	tbnz	p1, #0, L_method_invoke_small

	// We can directly load the IMP from big methods.
	// x1 is the method triplet instead of a SEL
	add	p16, p1, #METHOD_IMP
	ldr	p17, [x16]
	ldr	p1, [x1, #METHOD_NAME]
	TailCallMethodListImp x17, x16

L_method_invoke_small:
	// Small methods require a call to handle swizzling.
	SAVE_REGS METHOD_INVOKE
	mov	p0, p1
	bl	__method_getImplementationAndName
	// ARM64_32 packs both return values into x0, with the SEL in the high bits and the IMP in the low.
	// ARM64 just returns them in x0 and x1.
	mov	x17, x0
#if __LP64__
	mov	x16, x1
#else
	lsr	x16, x0, #32
#endif
	RESTORE_REGS METHOD_INVOKE
	mov	x1, x16
	TailCallFunctionPointer x17

	END_ENTRY _method_invoke
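
// Illustration (not part of the build): the return-value unpacking above,
// in rough C for the two ABIs described in the comments (names are a
// sketch):
//
//   static void unpack_imp_and_sel(uint64_t x0, uint64_t x1,
//                                  uintptr_t *imp, uintptr_t *sel) {
//   #if __LP64__
//       *imp = x0;                    // IMP in x0
//       *sel = x1;                    // SEL in x1
//   #else
//       *imp = (uint32_t)x0;          // IMP in the low 32 bits
//       *sel = (uint32_t)(x0 >> 32);  // SEL in the high 32 bits
//   #endif
//   }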