/*
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/********************************************************************
 *
 * objc-msg-arm64.s - ARM64 code to support objc messaging
 *
 ********************************************************************/

#ifdef __arm64__

#include <arm/arch.h>
#include "isa.h"
#include "objc-config.h"
#include "arm64-asm.h"

#if TARGET_OS_IPHONE && __LP64__
        .section __TEXT,__objc_methname,cstring_literals
l_MagicSelector: /* the shared cache builder knows about this value */
        .byte 0xf0, 0x9f, 0xa4, 0xaf, 0

        .section __DATA,__objc_selrefs,literal_pointers,no_dead_strip
        .p2align 3
_MagicSelRef:
        .quad l_MagicSelector
#endif

.data

// _objc_restartableRanges is used by the method dispatch
// caching code to figure out whether any threads are actively
// in the cache for dispatching. The labels surround the asm code
// that does cache lookups. The table is zero-terminated.

.macro RestartableEntry
#if __LP64__
        .quad   LLookupStart$0
#else
        .long   LLookupStart$0
        .long   0
#endif
        .short  LLookupEnd$0 - LLookupStart$0
        .short  LLookupRecover$0 - LLookupStart$0
        .long   0
.endmacro

        .align 4
        .private_extern _objc_restartableRanges
_objc_restartableRanges:
        RestartableEntry _cache_getImp
        RestartableEntry _objc_msgSend
        RestartableEntry _objc_msgSendSuper2
        RestartableEntry _objc_msgLookup
        RestartableEntry _objc_msgLookupSuper2
        .fill   16, 1, 0
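
// Each RestartableEntry above mirrors the record the kernel consumes
// (a sketch; the authoritative layout is task_restartable_range_t in
// xnu's kern/restartable.h):
//
//   typedef struct task_restartable_range {
//       mach_vm_address_t location;       // LLookupStart$0
//       unsigned short    length;         // LLookupEnd$0 - LLookupStart$0
//       unsigned short    recovery_offs;  // LLookupRecover$0 - LLookupStart$0
//       unsigned int      flags;          // 0
//   } task_restartable_range_t;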


/* objc_super parameter to sendSuper */
#define RECEIVER         0
#define CLASS            __SIZEOF_POINTER__

/* Selected field offsets in class structure */
#define SUPERCLASS       __SIZEOF_POINTER__
#define CACHE            (2 * __SIZEOF_POINTER__)

/* Selected field offsets in method structure */
#define METHOD_NAME      0
#define METHOD_TYPES     __SIZEOF_POINTER__
#define METHOD_IMP       (2 * __SIZEOF_POINTER__)

#define BUCKET_SIZE      (2 * __SIZEOF_POINTER__)


/********************************************************************
 * GetClassFromIsa_p16 src, needs_auth, auth_address
 * src is a raw isa field. Sets p16 to the corresponding class pointer.
 * The raw isa might be an indexed isa to be decoded, or a
 * packed isa that needs to be masked.
 *
 * On exit:
 *   src is unchanged
 *   p16 is a class pointer
 *   x10 is clobbered
 ********************************************************************/

#if SUPPORT_INDEXED_ISA
        .align 3
        .globl _objc_indexed_classes
_objc_indexed_classes:
        .fill ISA_INDEX_COUNT, PTRSIZE, 0
#endif

.macro GetClassFromIsa_p16 src, needs_auth, auth_address /* note: auth_address is not required if !needs_auth */

#if SUPPORT_INDEXED_ISA
        // Indexed isa
        mov     p16, \src                       // optimistically set dst = src
        tbz     p16, #ISA_INDEX_IS_NPI_BIT, 1f  // done if not non-pointer isa
        // isa in p16 is indexed
        adrp    x10, _objc_indexed_classes@PAGE
        add     x10, x10, _objc_indexed_classes@PAGEOFF
        ubfx    p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS  // extract index
        ldr     p16, [x10, p16, UXTP #PTRSHIFT] // load class from array
1:

#elif __LP64__
.if \needs_auth == 0 // _cache_getImp takes an authed class already
        mov     p16, \src
.else
        // 64-bit packed isa
        ExtractISA p16, \src, \auth_address
.endif
#else
        // 32-bit raw isa
        mov     p16, \src

#endif

.endmacro
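
// In rough C, the packed-isa case above is (a sketch; ISA_MASK comes from
// isa.h, and arm64e additionally authenticates the pointer bits):
//
//   Class cls = (Class)(isa_bits & ISA_MASK);
//
// and the indexed-isa case:
//
//   Class cls = _objc_indexed_classes[(isa_bits >> ISA_INDEX_SHIFT)
//                                     & ((1 << ISA_INDEX_BITS) - 1)];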


/********************************************************************
 * ENTRY functionName
 * STATIC_ENTRY functionName
 * END_ENTRY functionName
 ********************************************************************/

.macro ENTRY /* name */
        .text
        .align 5
        .globl $0
$0:
.endmacro

.macro STATIC_ENTRY /*name*/
        .text
        .align 5
        .private_extern $0
$0:
.endmacro

.macro END_ENTRY /* name */
LExit$0:
.endmacro


/********************************************************************
 * UNWIND name, flags
 * Unwind info generation
 ********************************************************************/
.macro UNWIND
        .section __LD,__compact_unwind,regular,debug
        PTR $0
        .set LUnwind$0, LExit$0 - $0
        .long LUnwind$0
        .long $1
        PTR 0 /* no personality */
        PTR 0 /* no LSDA */
        .text
.endmacro
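
// Each UNWIND expansion emits one compact-unwind record, roughly (a
// sketch of the layout ld64 consumes; the encoding constants are in
// compact_unwind_encoding.h):
//
//   struct compact_unwind_entry {
//       uintptr_t start;        // $0
//       uint32_t  length;       // LExit$0 - $0
//       uint32_t  encoding;     // $1 (NoFrame or FrameWithNoSaves below)
//       uintptr_t personality;  // 0
//       uintptr_t lsda;         // 0
//   };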

#define NoFrame 0x02000000          // no frame, no SP adjustment
#define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves


#define MSGSEND 100
#define METHOD_INVOKE 101

//////////////////////////////////////////////////////////////////////
//
// SAVE_REGS
//
// Create a stack frame and save all argument registers in preparation
// for a function call.
//////////////////////////////////////////////////////////////////////

.macro SAVE_REGS kind

        // push frame
        SignLR
        stp     fp, lr, [sp, #-16]!
        mov     fp, sp

        // save parameter registers: x0..x8, q0..q7
        sub     sp, sp, #(10*8 + 8*16)
        stp     q0, q1, [sp, #(0*16)]
        stp     q2, q3, [sp, #(2*16)]
        stp     q4, q5, [sp, #(4*16)]
        stp     q6, q7, [sp, #(6*16)]
        stp     x0, x1, [sp, #(8*16+0*8)]
        stp     x2, x3, [sp, #(8*16+2*8)]
        stp     x4, x5, [sp, #(8*16+4*8)]
        stp     x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
        stp     x8, x15, [sp, #(8*16+8*8)]
        mov     x16, x15 // stashed by CacheLookup, restore to x16
.elseif \kind == METHOD_INVOKE
        str     x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif

.endmacro
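
// Frame layout after SAVE_REGS, derived directly from the stores above
// (208 bytes of spill space below the saved fp/lr pair):
//
//   fp+8    saved lr        fp+0    saved fp
//   sp+192  x8 (MSGSEND also saves x15 at sp+200)
//   sp+128  x0..x7
//   sp+0    q0..q7 (16 bytes each)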


//////////////////////////////////////////////////////////////////////
//
// RESTORE_REGS
//
// Restore all argument registers and pop the stack frame created by
// SAVE_REGS.
//////////////////////////////////////////////////////////////////////

.macro RESTORE_REGS kind

        ldp     q0, q1, [sp, #(0*16)]
        ldp     q2, q3, [sp, #(2*16)]
        ldp     q4, q5, [sp, #(4*16)]
        ldp     q6, q7, [sp, #(6*16)]
        ldp     x0, x1, [sp, #(8*16+0*8)]
        ldp     x2, x3, [sp, #(8*16+2*8)]
        ldp     x4, x5, [sp, #(8*16+4*8)]
        ldp     x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
        ldp     x8, x16, [sp, #(8*16+8*8)]
        orr     x16, x16, #2 // for the sake of instrumentation, remember it was the slowpath
.elseif \kind == METHOD_INVOKE
        ldr     x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif

        mov     sp, fp
        ldp     fp, lr, [sp], #16
        AuthenticateLR

.endmacro


/********************************************************************
 *
 * CacheLookup NORMAL|GETIMP|LOOKUP <function> MissLabelDynamic MissLabelConstant
 *
 * MissLabelConstant is only used for the GETIMP variant.
 *
 * Locate the implementation for a selector in a class's method cache.
 *
 * When this is used in a function that doesn't hold the runtime lock,
 * this represents the critical section that may access dead memory.
 * If the kernel causes one of these functions to go down the recovery
 * path, we pretend the lookup failed by jumping to the cache-miss branch.
 *
 * Takes:
 *   x1 = selector
 *   x16 = class to be searched
 *
 * Kills:
 *   x9,x10,x11,x12,x13,x15,x17
 *
 * Untouched:
 *   x14
 *
 * On exit: (found) calls or returns IMP
 *                  with x16 = class, x17 = IMP
 *                  In LOOKUP mode, the two low bits are set to 0x3
 *                  if we hit a constant cache (used in objc_trace)
 *          (not found) jumps to the miss label (MissLabelDynamic)
 *                  with x15 = class
 *                  For constant caches in LOOKUP mode, the low bit
 *                  of x16 is set to 0x1 to indicate we had to fall back.
 *                  In addition, when the miss label is __objc_msgSend_uncached or
 *                  __objc_msgLookup_uncached, 0x2 will be set in x16
 *                  to remember we took the slowpath.
 *                  So the two low bits of x16 on exit mean:
 *                    0: dynamic hit
 *                    1: fallback to the parent class, when there is a preoptimized cache
 *                    2: slowpath
 *                    3: preoptimized cache hit
 *
 ********************************************************************/

#define NORMAL 0
#define GETIMP 1
#define LOOKUP 2

// CacheHit: x17 = cached IMP, x10 = address of buckets, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
        TailCallCachedImp x17, x10, x1, x16     // authenticate and call imp
.elseif $0 == GETIMP
        mov     p0, p17
        cbz     p0, 9f                          // don't ptrauth a nil imp
        AuthAndResignAsIMP x0, x10, x1, x16     // authenticate imp and re-sign as IMP
9:      ret                                     // return IMP
.elseif $0 == LOOKUP
        // No nil check for ptrauth: the caller would crash anyway when they
        // jump to a nil IMP. We don't care if that jump also fails ptrauth.
        AuthAndResignAsIMP x17, x10, x1, x16    // authenticate imp and re-sign as IMP
        cmp     x16, x15
        cinc    x16, x16, ne                    // x16 += 1 when x15 != x16 (for instrumentation; fell back to the parent class)
        ret                                     // return imp via x17
.else
.abort oops
.endif
.endmacro

.macro CacheLookup Mode, Function, MissLabelDynamic, MissLabelConstant
        //
        // Restart protocol:
        //
        // As soon as we're past the LLookupStart\Function label we may have
        // loaded an invalid cache pointer or mask.
        //
        // When task_restartable_ranges_synchronize() is called (or when a
        // signal hits us) before we're past LLookupEnd\Function, our PC
        // will be reset to LLookupRecover\Function, which forcefully
        // jumps to the cache-miss codepath, which has the following
        // requirements:
        //
        // GETIMP:
        //   The cache-miss is just returning NULL (setting x0 to 0)
        //
        // NORMAL and LOOKUP:
        // - x0 contains the receiver
        // - x1 contains the selector
        // - x16 contains the isa
        // - other registers are set as per calling conventions
        //

        mov     x15, x16                        // stash the original isa
LLookupStart\Function:
        // p1 = SEL, p16 = isa
#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
        ldr     p10, [x16, #CACHE]              // p10 = mask|buckets
        lsr     p11, p10, #48                   // p11 = mask
        and     p10, p10, #0xffffffffffff       // p10 = buckets
        and     w12, w1, w11                    // x12 = _cmd & mask
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
        ldr     p11, [x16, #CACHE]              // p11 = mask|buckets
#if CONFIG_USE_PREOPT_CACHES
#if __has_feature(ptrauth_calls)
        tbnz    p11, #0, LLookupPreopt\Function
        and     p10, p11, #0x0000ffffffffffff   // p10 = buckets
#else
        and     p10, p11, #0x0000fffffffffffe   // p10 = buckets
        tbnz    p11, #0, LLookupPreopt\Function
#endif
        eor     p12, p1, p1, LSR #7
        and     p12, p12, p11, LSR #48          // x12 = (_cmd ^ (_cmd >> 7)) & mask
#else
        and     p10, p11, #0x0000ffffffffffff   // p10 = buckets
        and     p12, p1, p11, LSR #48           // x12 = _cmd & mask
#endif // CONFIG_USE_PREOPT_CACHES
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
        ldr     p11, [x16, #CACHE]              // p11 = mask|buckets
        and     p10, p11, #~0xf                 // p10 = buckets
        and     p11, p11, #0xf                  // p11 = maskShift
        mov     p12, #0xffff
        lsr     p11, p12, p11                   // p11 = mask = 0xffff >> p11
        and     p12, p1, p11                    // x12 = _cmd & mask
#else
#error Unsupported cache mask storage for ARM64.
#endif

        add     p13, p10, p12, LSL #(1+PTRSHIFT)
                                                // p13 = buckets + ((_cmd & mask) << (1+PTRSHIFT))

                                                // do {
1:      ldp     p17, p9, [x13], #-BUCKET_SIZE   //     {imp, sel} = *bucket--
        cmp     p9, p1                          //     if (sel != _cmd) {
        b.ne    3f                              //         scan more
                                                //     } else {
2:      CacheHit \Mode                          // hit:    call or return imp
                                                //     }
3:      cbz     p9, \MissLabelDynamic           //     if (sel == 0) goto Miss;
        cmp     p13, p10                        // } while (bucket >= buckets)
        b.hs    1b

        // wrap-around:
        //   p10 = first bucket
        //   p11 = mask (and maybe other bits on LP64)
        //   p12 = _cmd & mask
        //
        // A full cache can happen with CACHE_ALLOW_FULL_UTILIZATION.
        // So stop when we circle back to the first probed bucket
        // rather than when hitting the first bucket again.
        //
        // Note that we might probe the initial bucket twice
        // when the first probed slot is the last entry.


#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
        add     p13, p10, w11, UXTW #(1+PTRSHIFT)
                                                // p13 = buckets + (mask << 1+PTRSHIFT)
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
        add     p13, p10, p11, LSR #(48 - (1+PTRSHIFT))
                                                // p13 = buckets + (mask << 1+PTRSHIFT)
                                                // see comment about maskZeroBits
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
        add     p13, p10, p11, LSL #(1+PTRSHIFT)
                                                // p13 = buckets + (mask << 1+PTRSHIFT)
#else
#error Unsupported cache mask storage for ARM64.
#endif
        add     p12, p10, p12, LSL #(1+PTRSHIFT)
                                                // p12 = first probed bucket

                                                // do {
4:      ldp     p17, p9, [x13], #-BUCKET_SIZE   //     {imp, sel} = *bucket--
        cmp     p9, p1                          //     if (sel == _cmd)
        b.eq    2b                              //         goto hit
        cmp     p9, #0                          // } while (sel != 0 &&
        ccmp    p13, p12, #0, ne                //          bucket > first_probed)
        b.hi    4b

LLookupEnd\Function:
LLookupRecover\Function:
        b       \MissLabelDynamic

#if CONFIG_USE_PREOPT_CACHES
#if CACHE_MASK_STORAGE != CACHE_MASK_STORAGE_HIGH_16
#error config unsupported
#endif
LLookupPreopt\Function:
#if __has_feature(ptrauth_calls)
        and     p10, p11, #0x007ffffffffffffe   // p10 = buckets
        autdb   x10, x16                        // auth as early as possible
#endif

        // x12 = (_cmd - first_shared_cache_sel)
        adrp    x9, _MagicSelRef@PAGE
        ldr     p9, [x9, _MagicSelRef@PAGEOFF]
        sub     p12, p1, p9

        // w9 = ((_cmd - first_shared_cache_sel) >> hash_shift & hash_mask)
#if __has_feature(ptrauth_calls)
        // bits 63..60 of x11 are the number of bits in hash_mask
        // bits 59..55 of x11 are hash_shift

        lsr     x17, x11, #55                   // w17 = (hash_shift, ...)
        lsr     w9, w12, w17                    // >>= shift

        lsr     x17, x11, #60                   // w17 = mask_bits
        mov     x11, #0x7fff
        lsr     x11, x11, x17                   // p11 = mask (0x7fff >> mask_bits)
        and     x9, x9, x11                     // &= mask
#else
        // bits 63..53 of x11 are hash_mask
        // bits 52..48 of x11 are hash_shift
        lsr     x17, x11, #48                   // w17 = (hash_shift, hash_mask)
        lsr     w9, w12, w17                    // >>= shift
        and     x9, x9, x11, LSR #53            // &= mask
#endif

        ldr     x17, [x10, x9, LSL #3]          // x17 == sel_offs | (imp_offs << 32)
        cmp     x12, w17, uxtw

.if \Mode == GETIMP
        b.ne    \MissLabelConstant              // cache miss
        sub     x0, x16, x17, LSR #32           // imp = isa - imp_offs
        SignAsImp x0
        ret
.else
        b.ne    5f                              // cache miss
        sub     x17, x16, x17, LSR #32          // imp = isa - imp_offs
.if \Mode == NORMAL
        br      x17
.elseif \Mode == LOOKUP
        orr     x16, x16, #3 // for instrumentation, note that we hit a constant cache
        SignAsImp x17
        ret
.else
.abort unhandled mode \Mode
.endif

5:      ldursw  x9, [x10, #-8]                  // offset -8 is the fallback offset
        add     x16, x16, x9                    // compute the fallback isa
        b       LLookupStart\Function           // lookup again with a new isa
.endif
#endif // CONFIG_USE_PREOPT_CACHES

.endmacro
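
// In rough C, the dynamic-cache probe above does the following (a sketch;
// cache_t and bucket_t are defined in objc-runtime-new.h, and the real
// hash also mixes in (_cmd >> 7) when preopt caches are enabled):
//
//   bucket_t *buckets = cls->cache.buckets();
//   uintptr_t mask = cls->cache.mask();
//   bucket_t *first = buckets + (sel & mask);    // first probed bucket
//   bucket_t *b = first;
//   do {
//       if (b->sel == sel) return b->imp;        // hit
//       if (b->sel == 0)   return MISS;          // empty slot ends the probe
//       if (b-- == buckets) b = buckets + mask;  // scan down, wrap to the top
//   } while (b != first);  // the asm may re-probe `first` once; same result
//   return MISS;
//
// The preoptimized (shared cache) path instead hashes the selector's
// offset from the first shared-cache selector and checks one packed
// 64-bit entry (a sketch):
//
//   uint64_t e = preopt_buckets[hash];           // sel_offs | (imp_offs << 32)
//   if ((uint32_t)e == sel - first_sel)
//       imp = (IMP)((uintptr_t)cls - (e >> 32)); // imp = isa - imp_offs
//   else
//       retry with the fallback class stored at an offset before buckets;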


/********************************************************************
 *
 * id objc_msgSend(id self, SEL _cmd, ...);
 * IMP objc_msgLookup(id self, SEL _cmd, ...);
 *
 * objc_msgLookup ABI:
 * IMP returned in x17
 * x16 reserved for our use but not used
 *
 ********************************************************************/

#if SUPPORT_TAGGED_POINTERS
        .data
        .align 3
        .globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
        .fill 256, 8, 0

// Dispatch for split tagged pointers takes advantage of the fact that
// the extended tag classes array immediately precedes the standard
// tag array. The .alt_entry directive ensures that the two stay
// together. This is harmless when using non-split tagged pointers.
        .globl _objc_debug_taggedpointer_classes
        .alt_entry _objc_debug_taggedpointer_classes
_objc_debug_taggedpointer_classes:
        .fill 16, 8, 0

// Look up the class for a tagged pointer in x0, placing it in x16.
.macro GetTaggedClass

        and     x10, x0, #0x7           // x10 = small tag
        asr     x11, x0, #55            // x11 = large tag with 1s filling the top (because bit 63 is 1 on a tagged pointer)
        cmp     x10, #7                 // tag == 7?
        csel    x12, x11, x10, eq       // x12 = index in tagged pointer classes array, negative for extended tags.
        // The extended tag array is placed immediately before the basic tag array
        // so this looks into the right place either way. The sign extension done
        // by the asr instruction produces the value extended_tag - 256, which produces
        // the correct index in the extended tagged pointer classes array.

        // x16 = _objc_debug_taggedpointer_classes[x12]
        adrp    x10, _objc_debug_taggedpointer_classes@PAGE
        add     x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
        ldr     x16, [x10, x12, LSL #3]

.endmacro
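
// Equivalent C for the lookup above (a sketch; negative indexes reach
// back into _objc_debug_taggedpointer_ext_classes, which is placed
// immediately before the basic array):
//
//   intptr_t small = obj & 7;
//   intptr_t large = (intptr_t)obj >> 55;  // sign-extended: ext_tag - 256
//   intptr_t idx   = (small == 7) ? large : small;
//   Class cls = _objc_debug_taggedpointer_classes[idx];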
#endif

ENTRY _objc_msgSend
        UNWIND _objc_msgSend, NoFrame

        cmp     p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
        b.le    LNilOrTagged            //  (MSB tagged pointer looks negative)
#else
        b.eq    LReturnZero
#endif
        ldr     p13, [x0]               // p13 = isa
        GetClassFromIsa_p16 p13, 1, x0  // p16 = class
LGetIsaDone:
        // calls imp or objc_msgSend_uncached
        CacheLookup NORMAL, _objc_msgSend, __objc_msgSend_uncached

#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
        b.eq    LReturnZero             // nil check
        GetTaggedClass
        b       LGetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LReturnZero:
        // x0 is already zero
        mov     x1, #0
        movi    d0, #0
        movi    d1, #0
        movi    d2, #0
        movi    d3, #0
        ret

END_ENTRY _objc_msgSend
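
// The fast path above is, in rough C (a sketch; the real nil path also
// zeroes the vector return registers, as LReturnZero shows):
//
//   id objc_msgSend(id self, SEL _cmd, ...) {
//       if (!self) return nil;                       // LReturnZero
//       Class cls = isTaggedPointer(self)
//                 ? taggedPointerClass(self)         // GetTaggedClass
//                 : (Class)(self->isa & ISA_MASK);   // GetClassFromIsa_p16
//       IMP imp = cacheLookup(cls, _cmd);            // CacheLookup NORMAL
//       if (!imp) imp = lookUpImpOrForward(self, _cmd, cls, 3); // uncached
//       return imp(self, _cmd, ...);
//   }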


ENTRY _objc_msgLookup
        UNWIND _objc_msgLookup, NoFrame
        cmp     p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
        b.le    LLookup_NilOrTagged     //  (MSB tagged pointer looks negative)
#else
        b.eq    LLookup_Nil
#endif
        ldr     p13, [x0]               // p13 = isa
        GetClassFromIsa_p16 p13, 1, x0  // p16 = class
LLookup_GetIsaDone:
        // returns imp
        CacheLookup LOOKUP, _objc_msgLookup, __objc_msgLookup_uncached

#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
        b.eq    LLookup_Nil             // nil check
        GetTaggedClass
        b       LLookup_GetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LLookup_Nil:
        adr     x17, __objc_msgNil
        SignAsImp x17
        ret

END_ENTRY _objc_msgLookup


STATIC_ENTRY __objc_msgNil

        // x0 is already zero
        mov     x1, #0
        movi    d0, #0
        movi    d1, #0
        movi    d2, #0
        movi    d3, #0
        ret

END_ENTRY __objc_msgNil


ENTRY _objc_msgSendSuper
        UNWIND _objc_msgSendSuper, NoFrame

        ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
        b       L_objc_msgSendSuper2_body

END_ENTRY _objc_msgSendSuper

// no _objc_msgLookupSuper

ENTRY _objc_msgSendSuper2
        UNWIND _objc_msgSendSuper2, NoFrame

#if __has_feature(ptrauth_calls)
        ldp     x0, x17, [x0]           // x0 = real receiver, x17 = class
        add     x17, x17, #SUPERCLASS   // x17 = &class->superclass
        ldr     x16, [x17]              // x16 = class->superclass
        AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
LMsgSendSuperResume:
#else
        ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
        ldr     p16, [x16, #SUPERCLASS] // p16 = class->superclass
#endif
L_objc_msgSendSuper2_body:
        CacheLookup NORMAL, _objc_msgSendSuper2, __objc_msgSend_uncached

END_ENTRY _objc_msgSendSuper2


ENTRY _objc_msgLookupSuper2
        UNWIND _objc_msgLookupSuper2, NoFrame

#if __has_feature(ptrauth_calls)
        ldp     x0, x17, [x0]           // x0 = real receiver, x17 = class
        add     x17, x17, #SUPERCLASS   // x17 = &class->superclass
        ldr     x16, [x17]              // x16 = class->superclass
        AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
LMsgLookupSuperResume:
#else
        ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
        ldr     p16, [x16, #SUPERCLASS] // p16 = class->superclass
#endif
        CacheLookup LOOKUP, _objc_msgLookupSuper2, __objc_msgLookup_uncached

END_ENTRY _objc_msgLookupSuper2


.macro MethodTableLookup

        SAVE_REGS MSGSEND

        // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
        // receiver and selector already in x0 and x1
        mov     x2, x16
        mov     x3, #3
        bl      _lookUpImpOrForward

        // IMP in x0
        mov     x17, x0

        RESTORE_REGS MSGSEND

.endmacro
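
// The C call this expands to, for reference (a sketch; the behavior flags
// live in objc-runtime-new.h, where LOOKUP_INITIALIZE == 1 and
// LOOKUP_RESOLVER == 2, hence the literal 3 loaded into x3 above):
//
//   IMP imp = lookUpImpOrForward(obj, sel, cls,
//                                LOOKUP_INITIALIZE | LOOKUP_RESOLVER);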

STATIC_ENTRY __objc_msgSend_uncached
UNWIND __objc_msgSend_uncached, FrameWithNoSaves

        // THIS IS NOT A CALLABLE C FUNCTION
        // Out-of-band p15 is the class to search

        MethodTableLookup
        TailCallFunctionPointer x17

END_ENTRY __objc_msgSend_uncached


STATIC_ENTRY __objc_msgLookup_uncached
UNWIND __objc_msgLookup_uncached, FrameWithNoSaves

        // THIS IS NOT A CALLABLE C FUNCTION
        // Out-of-band p15 is the class to search

        MethodTableLookup
        ret

END_ENTRY __objc_msgLookup_uncached


STATIC_ENTRY _cache_getImp

        GetClassFromIsa_p16 p0, 0
        CacheLookup GETIMP, _cache_getImp, LGetImpMissDynamic, LGetImpMissConstant

LGetImpMissDynamic:
        mov     p0, #0
        ret

LGetImpMissConstant:
        mov     p0, p2
        ret

END_ENTRY _cache_getImp


/********************************************************************
 *
 * id _objc_msgForward(id self, SEL _cmd,...);
 *
 * _objc_msgForward is the externally-callable
 * function returned by things like method_getImplementation().
 * _objc_msgForward_impcache is the function pointer actually stored in
 * method caches.
 *
 ********************************************************************/

STATIC_ENTRY __objc_msgForward_impcache

        // No stret specialization.
        b       __objc_msgForward

END_ENTRY __objc_msgForward_impcache


ENTRY __objc_msgForward

        adrp    x17, __objc_forward_handler@PAGE
        ldr     p17, [x17, __objc_forward_handler@PAGEOFF]
        TailCallFunctionPointer x17

END_ENTRY __objc_msgForward


ENTRY _objc_msgSend_noarg
        b       _objc_msgSend
END_ENTRY _objc_msgSend_noarg

ENTRY _objc_msgSend_debug
        b       _objc_msgSend
END_ENTRY _objc_msgSend_debug

ENTRY _objc_msgSendSuper2_debug
        b       _objc_msgSendSuper2
END_ENTRY _objc_msgSendSuper2_debug


ENTRY _method_invoke

        // See if this is a small method.
        tbnz    p1, #0, L_method_invoke_small

        // We can directly load the IMP from big methods.
        // x1 is the method triplet instead of a SEL
        add     p16, p1, #METHOD_IMP
        ldr     p17, [x16]
        ldr     p1, [x1, #METHOD_NAME]
        TailCallMethodListImp x17, x16

L_method_invoke_small:
        // Small methods require a call to handle swizzling.
        SAVE_REGS METHOD_INVOKE
        mov     p0, p1
        bl      __method_getImplementationAndName
        // ARM64_32 packs both return values into x0, with the SEL in the
        // high bits and the IMP in the low. ARM64 just returns them in
        // x0 and x1.
        mov     x17, x0
#if __LP64__
        mov     x16, x1
#endif
        RESTORE_REGS METHOD_INVOKE
#if __LP64__
        mov     x1, x16
#else
        lsr     x1, x17, #32
        mov     w17, w17
#endif
        TailCallFunctionPointer x17

END_ENTRY _method_invoke
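
// "Big" methods are the classic three-pointer method_t whose offsets are
// defined near the top of this file (METHOD_NAME/TYPES/IMP). Small methods,
// flagged by the low bit tested by the tbnz above, instead store 32-bit
// relative offsets, roughly (a sketch with an illustrative name; the real
// definition is method_t's small variant in objc-runtime-new.h):
//
//   struct small_method_t {
//       int32_t name;   // relative offset to the selector
//       int32_t types;  // relative offset to the type encoding
//       int32_t imp;    // relative offset to the implementation
//   };
//
// Resolving those offsets (and any swizzling) is done in C by
// _method_getImplementationAndName, hence the call above.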

#endif