/*
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/********************************************************************
 *
 * objc-msg-arm64.s - ARM64 code to support objc messaging
 *
 ********************************************************************/

#ifdef __arm64__

#include <arm/arch.h>
#include "isa.h"
#include "objc-config.h"
#include "arm64-asm.h"

#if TARGET_OS_IPHONE && __LP64__
    .section __TEXT,__objc_methname,cstring_literals
l_MagicSelector: /* the shared cache builder knows about this value */
    .byte 0xf0, 0x9f, 0xa4, 0xaf, 0

    .section __DATA,__objc_selrefs,literal_pointers,no_dead_strip
    .p2align 3
_MagicSelRef:
    .quad l_MagicSelector
#endif

.data

// _objc_restartableRanges is used by method dispatch
// caching code to figure out whether any threads are actively
// in the cache for dispatching. The labels surround the asm code
// that does cache lookups. The tables are zero-terminated.

.macro RestartableEntry
#if __LP64__
    .quad LLookupStart$0
#else
    .long LLookupStart$0
    .long 0
#endif
    .short LLookupEnd$0 - LLookupStart$0
    .short LLookupRecover$0 - LLookupStart$0
    .long 0
.endmacro

    .align 4
    .private_extern _objc_restartableRanges
_objc_restartableRanges:
    RestartableEntry _cache_getImp
    RestartableEntry _objc_msgSend
    RestartableEntry _objc_msgSendSuper2
    RestartableEntry _objc_msgLookup
    RestartableEntry _objc_msgLookupSuper2
    .fill 16, 1, 0

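// Each RestartableEntry is, in rough C terms, one kernel-style
// restartable-range record (a sketch for orientation only; the
// authoritative layout is the macro above and the kernel's
// task_restartable_range_t):
//
//     struct RestartableEntry {
//         uint64_t location;       // LLookupStart$0 (a .long pair when !__LP64__)
//         uint16_t length;         // LLookupEnd$0 - LLookupStart$0
//         uint16_t recovery_offs;  // LLookupRecover$0 - LLookupStart$0
//         uint32_t flags;          // always 0 here
//     };
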
/* objc_super parameter to sendSuper */
#define RECEIVER        0
#define CLASS           __SIZEOF_POINTER__

/* Selected field offsets in class structure */
#define SUPERCLASS      __SIZEOF_POINTER__
#define CACHE           (2 * __SIZEOF_POINTER__)

/* Selected field offsets in method structure */
#define METHOD_NAME     0
#define METHOD_TYPES    __SIZEOF_POINTER__
#define METHOD_IMP      (2 * __SIZEOF_POINTER__)

#define BUCKET_SIZE     (2 * __SIZEOF_POINTER__)

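// For orientation, these offsets mirror C layouts along these lines
// (a sketch; the real definitions live in the runtime's C++ headers,
// and the {imp, sel} bucket order matches the ldp in CacheLookup below):
//
//     struct objc_super { id receiver; Class super_class; };  // RECEIVER, CLASS
//     struct objc_class { Class isa; Class superclass;        // SUPERCLASS
//                         cache_t cache; /* ... */ };         // CACHE
//     struct method_t   { SEL name; const char *types; IMP imp; };
//     struct bucket_t   { uintptr_t imp; SEL sel; };          // BUCKET_SIZE
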
/********************************************************************
 * GetClassFromIsa_p16 src, needs_auth, auth_address
 * src is a raw isa field. Sets p16 to the corresponding class pointer.
 * The raw isa might be an indexed isa to be decoded, or a
 * packed isa that needs to be masked.
 *
 * On exit:
 *   src is unchanged
 *   p16 is a class pointer
 *   x10 is clobbered
 ********************************************************************/

#if SUPPORT_INDEXED_ISA
    .align 3
    .globl _objc_indexed_classes
_objc_indexed_classes:
    .fill ISA_INDEX_COUNT, PTRSIZE, 0
#endif

.macro GetClassFromIsa_p16 src, needs_auth, auth_address /* note: auth_address is not required if !needs_auth */

#if SUPPORT_INDEXED_ISA
    // Indexed isa
    mov p16, \src                       // optimistically set dst = src
    tbz p16, #ISA_INDEX_IS_NPI_BIT, 1f  // done if not non-pointer isa
    // isa in p16 is indexed
    adrp x10, _objc_indexed_classes@PAGE
    add x10, x10, _objc_indexed_classes@PAGEOFF
    ubfx p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS  // extract index
    ldr p16, [x10, p16, UXTP #PTRSHIFT]  // load class from array
1:

#elif __LP64__
.if \needs_auth == 0 // _cache_getImp takes an authed class already
    mov p16, \src
.else
    // 64-bit packed isa
    ExtractISA p16, \src, \auth_address
.endif
#else
    // 32-bit raw isa
    mov p16, \src

#endif

.endmacro

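// In C terms the macro decodes roughly as follows (a sketch; the masks
// and shifts come from isa.h, and ExtractISA's pointer authentication
// is elided here):
//
//     Class GetClassFromIsa(uintptr_t isa) {
//     #if SUPPORT_INDEXED_ISA
//         if (isa & 1)        // non-pointer, indexed isa
//             return objc_indexed_classes[(isa >> ISA_INDEX_SHIFT)
//                                         & ((1 << ISA_INDEX_BITS) - 1)];
//         return (Class)isa;  // raw pointer isa
//     #elif __LP64__
//         return (Class)(isa & ISA_MASK);  // packed isa: mask off extra bits
//     #else
//         return (Class)isa;  // 32-bit raw isa
//     #endif
//     }
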
/********************************************************************
 * ENTRY functionName
 * STATIC_ENTRY functionName
 * END_ENTRY functionName
 ********************************************************************/

.macro ENTRY /* name */
    .text
    .align 5
    .globl $0
$0:
.endmacro

.macro STATIC_ENTRY /*name*/
    .text
    .align 5
    .private_extern $0
$0:
.endmacro

.macro END_ENTRY /* name */
LExit$0:
.endmacro

/********************************************************************
 * UNWIND name, flags
 * Unwind info generation
 ********************************************************************/
.macro UNWIND
    .section __LD,__compact_unwind,regular,debug
    PTR $0
    .set LUnwind$0, LExit$0 - $0
    .long LUnwind$0
    .long $1
    PTR 0 /* no personality */
    PTR 0 /* no LSDA */
    .text
.endmacro

#define NoFrame 0x02000000          // no frame, no SP adjustment
#define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves
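
// Each UNWIND expansion emits one record into __LD,__compact_unwind,
// which in C looks roughly like this (a sketch of the entry layout as
// the macro above fills it in):
//
//     struct compact_unwind_entry {
//         const void *start;       // PTR $0: function address
//         uint32_t    length;      // LExit$0 - $0: function size in bytes
//         uint32_t    encoding;    // $1: NoFrame or FrameWithNoSaves
//         const void *personality; // 0: none
//         const void *lsda;        // 0: none
//     };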


#define MSGSEND 100
#define METHOD_INVOKE 101

//////////////////////////////////////////////////////////////////////
//
// SAVE_REGS
//
// Create a stack frame and save all argument registers in preparation
// for a function call.
//////////////////////////////////////////////////////////////////////

.macro SAVE_REGS kind

    // push frame
    SignLR
    stp fp, lr, [sp, #-16]!
    mov fp, sp

    // save parameter registers: x0..x8, q0..q7
    sub sp, sp, #(10*8 + 8*16)
    stp q0, q1, [sp, #(0*16)]
    stp q2, q3, [sp, #(2*16)]
    stp q4, q5, [sp, #(4*16)]
    stp q6, q7, [sp, #(6*16)]
    stp x0, x1, [sp, #(8*16+0*8)]
    stp x2, x3, [sp, #(8*16+2*8)]
    stp x4, x5, [sp, #(8*16+4*8)]
    stp x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
    stp x8, x15, [sp, #(8*16+8*8)]
    mov x16, x15                       // stashed by CacheLookup, restore to x16
.elseif \kind == METHOD_INVOKE
    str x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif

.endmacro
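
// The resulting frame, sketched by offset from the post-SAVE_REGS sp
// (208 bytes of spill space below the saved fp/lr pair):
//
//     [fp, #8]   saved lr            [fp]  saved fp
//     [sp, #192] x8 (+ x15 for MSGSEND; METHOD_INVOKE saves x8 only)
//     [sp, #128] x0..x7              (8 x 8 bytes)
//     [sp, #0]   q0..q7              (8 x 16 bytes)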

//////////////////////////////////////////////////////////////////////
//
// RESTORE_REGS
//
// Restore all argument registers and pop the stack frame created by
// SAVE_REGS.
//////////////////////////////////////////////////////////////////////

.macro RESTORE_REGS kind

    ldp q0, q1, [sp, #(0*16)]
    ldp q2, q3, [sp, #(2*16)]
    ldp q4, q5, [sp, #(4*16)]
    ldp q6, q7, [sp, #(6*16)]
    ldp x0, x1, [sp, #(8*16+0*8)]
    ldp x2, x3, [sp, #(8*16+2*8)]
    ldp x4, x5, [sp, #(8*16+4*8)]
    ldp x6, x7, [sp, #(8*16+6*8)]
.if \kind == MSGSEND
    ldp x8, x16, [sp, #(8*16+8*8)]
    orr x16, x16, #2                   // for the sake of instrumentation, remember it was the slowpath
.elseif \kind == METHOD_INVOKE
    ldr x8, [sp, #(8*16+8*8)]
.else
.abort Unknown kind.
.endif

    mov sp, fp
    ldp fp, lr, [sp], #16
    AuthenticateLR

.endmacro


/********************************************************************
 *
 * CacheLookup NORMAL|GETIMP|LOOKUP <function> MissLabelDynamic MissLabelConstant
 *
 * MissLabelConstant is only used for the GETIMP variant.
 *
 * Locate the implementation for a selector in a class's method cache.
 *
 * When this is used in a function that doesn't hold the runtime lock,
 * this represents the critical section that may access dead memory.
 * If the kernel causes one of these functions to go down the recovery
 * path, we pretend the lookup failed by jumping to the cache-miss path.
 *
 * Takes:
 *   x1 = selector
 *   x16 = class to be searched
 *
 * Kills:
 *   x9,x10,x11,x12,x13,x15,x17
 *
 * Untouched:
 *   x14
 *
 * On exit: (found) calls or returns IMP
 *                  with x16 = class, x17 = IMP
 *                  In LOOKUP mode, the two low bits are set to 0x3
 *                  if we hit a constant cache (used in objc_trace)
 *          (not found) jumps to LCacheMiss
 *                  with x15 = class
 *                  For constant caches in LOOKUP mode, the low bit
 *                  of x16 is set to 0x1 to indicate we had to fall back.
 *                  In addition, when LCacheMiss is __objc_msgSend_uncached or
 *                  __objc_msgLookup_uncached, 0x2 will be set in x16
 *                  to remember we took the slowpath.
 *                  So the two low bits of x16 on exit mean:
 *                    0: dynamic hit
 *                    1: fallback to the parent class, when there is a preoptimized cache
 *                    2: slowpath
 *                    3: preoptimized cache hit
 *
 ********************************************************************/

#define NORMAL 0
#define GETIMP 1
#define LOOKUP 2

// CacheHit: x17 = cached IMP, x10 = address of buckets, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
    TailCallCachedImp x17, x10, x1, x16  // authenticate and call imp
.elseif $0 == GETIMP
    mov p0, p17
    cbz p0, 9f                           // don't ptrauth a nil imp
    AuthAndResignAsIMP x0, x10, x1, x16  // authenticate imp and re-sign as IMP
9:  ret                                  // return IMP
.elseif $0 == LOOKUP
    // No nil check for ptrauth: the caller would crash anyway when they
    // jump to a nil IMP. We don't care if that jump also fails ptrauth.
    AuthAndResignAsIMP x17, x10, x1, x16 // authenticate imp and re-sign as IMP
    cmp x16, x15
    cinc x16, x16, ne                    // x16 += 1 when x15 != x16 (for instrumentation; fallback to the parent class)
    ret                                  // return imp via x17
.else
.abort oops
.endif
.endmacro

.macro CacheLookup Mode, Function, MissLabelDynamic, MissLabelConstant
    //
    // Restart protocol:
    //
    // As soon as we're past the LLookupStart\Function label we may have
    // loaded an invalid cache pointer or mask.
    //
    // When task_restartable_ranges_synchronize() is called
    // (or when a signal hits us) before we're past LLookupEnd\Function,
    // then our PC will be reset to LLookupRecover\Function, which forcefully
    // jumps to the cache-miss codepath, which has the following
    // requirements:
    //
    // GETIMP:
    //   The cache-miss is just returning NULL (setting x0 to 0).
    //
    // NORMAL and LOOKUP:
    //   - x0 contains the receiver
    //   - x1 contains the selector
    //   - x16 contains the isa
    //   - other registers are set as per calling conventions
    //

    mov x15, x16                       // stash the original isa
LLookupStart\Function:
    // p1 = SEL, p16 = isa
#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
    ldr p10, [x16, #CACHE]             // p10 = mask|buckets
    lsr p11, p10, #48                  // p11 = mask
    and p10, p10, #0xffffffffffff      // p10 = buckets
    and w12, w1, w11                   // x12 = _cmd & mask
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    ldr p11, [x16, #CACHE]             // p11 = mask|buckets
#if CONFIG_USE_PREOPT_CACHES
#if __has_feature(ptrauth_calls)
    tbnz p11, #0, LLookupPreopt\Function
    and p10, p11, #0x0000ffffffffffff  // p10 = buckets
#else
    and p10, p11, #0x0000fffffffffffe  // p10 = buckets
    tbnz p11, #0, LLookupPreopt\Function
#endif
    eor p12, p1, p1, LSR #7
    and p12, p12, p11, LSR #48         // x12 = (_cmd ^ (_cmd >> 7)) & mask
#else
    and p10, p11, #0x0000ffffffffffff  // p10 = buckets
    and p12, p1, p11, LSR #48          // x12 = _cmd & mask
#endif // CONFIG_USE_PREOPT_CACHES
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    ldr p11, [x16, #CACHE]             // p11 = mask|buckets
    and p10, p11, #~0xf                // p10 = buckets
    and p11, p11, #0xf                 // p11 = maskShift
    mov p12, #0xffff
    lsr p11, p12, p11                  // p11 = mask = 0xffff >> p11
    and p12, p1, p11                   // x12 = _cmd & mask
#else
#error Unsupported cache mask storage for ARM64.
#endif

    add p13, p10, p12, LSL #(1+PTRSHIFT)
                                       // p13 = buckets + ((_cmd & mask) << (1+PTRSHIFT))

    // do {
1:  ldp p17, p9, [x13], #-BUCKET_SIZE  //     {imp, sel} = *bucket--
    cmp p9, p1                         //     if (sel != _cmd) {
    b.ne 3f                            //         scan more
                                       //     } else {
2:  CacheHit \Mode                     //         hit: call or return imp
                                       //     }
3:  cbz p9, \MissLabelDynamic          //     if (sel == 0) goto Miss;
    cmp p13, p10                       // } while (bucket >= buckets)
    b.hs 1b

    // wrap-around:
    //   p10 = first bucket
    //   p11 = mask (and maybe other bits on LP64)
    //   p12 = _cmd & mask
    //
    // A full cache can happen with CACHE_ALLOW_FULL_UTILIZATION.
    // So stop when we circle back to the first probed bucket
    // rather than when hitting the first bucket again.
    //
    // Note that we might probe the initial bucket twice
    // when the first probed slot is the last entry.

#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16_BIG_ADDRS
    add p13, p10, w11, UXTW #(1+PTRSHIFT)
                                       // p13 = buckets + (mask << 1+PTRSHIFT)
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    add p13, p10, p11, LSR #(48 - (1+PTRSHIFT))
                                       // p13 = buckets + (mask << 1+PTRSHIFT)
                                       // see comment about maskZeroBits
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    add p13, p10, p11, LSL #(1+PTRSHIFT)
                                       // p13 = buckets + (mask << 1+PTRSHIFT)
#else
#error Unsupported cache mask storage for ARM64.
#endif
    add p12, p10, p12, LSL #(1+PTRSHIFT)
                                       // p12 = first probed bucket

    // do {
4:  ldp p17, p9, [x13], #-BUCKET_SIZE  //     {imp, sel} = *bucket--
    cmp p9, p1                         //     if (sel == _cmd)
    b.eq 2b                            //         goto hit
    cmp p9, #0                         // } while (sel != 0 &&
    ccmp p13, p12, #0, ne              //        bucket > first_probed)
    b.hi 4b

LLookupEnd\Function:
LLookupRecover\Function:
    b \MissLabelDynamic

#if CONFIG_USE_PREOPT_CACHES
#if CACHE_MASK_STORAGE != CACHE_MASK_STORAGE_HIGH_16
#error config unsupported
#endif
LLookupPreopt\Function:
#if __has_feature(ptrauth_calls)
    and p10, p11, #0x007ffffffffffffe  // p10 = buckets
    autdb x10, x16                     // auth as early as possible
#endif

    // x12 = (_cmd - first_shared_cache_sel)
    adrp x9, _MagicSelRef@PAGE
    ldr p9, [x9, _MagicSelRef@PAGEOFF]
    sub p12, p1, p9

    // w9 = ((_cmd - first_shared_cache_sel) >> hash_shift & hash_mask)
#if __has_feature(ptrauth_calls)
    // bits 63..60 of x11 are the number of bits in hash_mask
    // bits 59..55 of x11 are hash_shift

    lsr x17, x11, #55                  // w17 = (hash_shift, ...)
    lsr w9, w12, w17                   // >>= shift

    lsr x17, x11, #60                  // w17 = mask_bits
    mov x11, #0x7fff
    lsr x11, x11, x17                  // p11 = mask (0x7fff >> mask_bits)
    and x9, x9, x11                    // &= mask
#else
    // bits 63..53 of x11 are hash_mask
    // bits 52..48 of x11 are hash_shift
    lsr x17, x11, #48                  // w17 = (hash_shift, hash_mask)
    lsr w9, w12, w17                   // >>= shift
    and x9, x9, x11, LSR #53           // &= mask
#endif

    ldr x17, [x10, x9, LSL #3]         // x17 == sel_offs | (imp_offs << 32)
    cmp x12, w17, uxtw

.if \Mode == GETIMP
    b.ne \MissLabelConstant            // cache miss
    sub x0, x16, x17, LSR #32          // imp = isa - imp_offs
    SignAsImp x0
    ret
.else
    b.ne 5f                            // cache miss
    sub x17, x16, x17, LSR #32         // imp = isa - imp_offs
.if \Mode == NORMAL
    br x17
.elseif \Mode == LOOKUP
    orr x16, x16, #3                   // for instrumentation, note that we hit a constant cache
    SignAsImp x17
    ret
.else
.abort unhandled mode \Mode
.endif

5:  ldursw x9, [x10, #-8]              // offset -8 is the fallback offset
    add x16, x16, x9                   // compute the fallback isa
    b LLookupStart\Function            // lookup again with a new isa
.endif
#endif // CONFIG_USE_PREOPT_CACHES

.endmacro

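// The dynamic probe above, restated as C pseudocode (a simplified
// sketch of the CACHE_MASK_STORAGE_HIGH_16 case; bucket_t here is
// shorthand for the runtime's real type):
//
//     bucket_t *buckets = ...;           // low 48 bits of the cache field
//     uintptr_t mask    = ...;           // high 16 bits of the cache field
//     uintptr_t i = (cmd ^ (cmd >> 7)) & mask;  // plain cmd & mask without preopt caches
//     bucket_t *first = buckets + i, *b = first;
//     do {                               // scan backwards from the hash slot
//         if (b->sel == cmd) return CacheHit(b->imp);
//         if (b->sel == 0)   goto miss;  // empty slot terminates the probe
//     } while (--b >= buckets);
//     for (b = buckets + mask; b > first; b--) {  // wrap: scan down from the last bucket
//         if (b->sel == cmd) return CacheHit(b->imp);
//         if (b->sel == 0)   break;
//     }
//     miss: /* b \MissLabelDynamic */
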
/********************************************************************
 *
 * id objc_msgSend(id self, SEL _cmd, ...);
 * IMP objc_msgLookup(id self, SEL _cmd, ...);
 *
 * objc_msgLookup ABI:
 * IMP returned in x17
 * x16 reserved for our use but not used
 *
 ********************************************************************/

#if SUPPORT_TAGGED_POINTERS
    .data
    .align 3
    .globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
    .fill 256, 8, 0

// Dispatch for split tagged pointers takes advantage of the fact that
// the extended tag classes array immediately precedes the standard
// tag array. The .alt_entry directive ensures that the two stay
// together. This is harmless when using non-split tagged pointers.
    .globl _objc_debug_taggedpointer_classes
    .alt_entry _objc_debug_taggedpointer_classes
_objc_debug_taggedpointer_classes:
    .fill 16, 8, 0

// Look up the class for a tagged pointer in x0, placing it in x16.
.macro GetTaggedClass

    and x10, x0, #0x7                  // x10 = small tag
    asr x11, x0, #55                   // x11 = large tag with 1s filling the top (because bit 63 is 1 on a tagged pointer)
    cmp x10, #7                        // tag == 7?
    csel x12, x11, x10, eq             // x12 = index in tagged pointer classes array, negative for extended tags.
    // The extended tag array is placed immediately before the basic tag array
    // so this looks into the right place either way. The sign extension done
    // by the asr instruction produces the value extended_tag - 256, which is
    // the correct index into the extended tagged pointer classes array.

    // x16 = _objc_debug_taggedpointer_classes[x12]
    adrp x10, _objc_debug_taggedpointer_classes@PAGE
    add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
    ldr x16, [x10, x12, LSL #3]

.endmacro
#endif

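// The same index computation in C, as a sketch (field widths follow the
// asm above; the authoritative constants live elsewhere in the runtime):
//
//     uintptr_t small = ptr & 0x7;            // basic tag, bits 0..2
//     intptr_t  large = (intptr_t)ptr >> 55;  // top 9 bits, sign-extended:
//                                             // extended_tag - 256, since bit 63 is set
//     intptr_t  index = (small == 7) ? large : (intptr_t)small;
//     Class cls = _objc_debug_taggedpointer_classes[index];
//     // A negative index lands in _objc_debug_taggedpointer_ext_classes,
//     // which immediately precedes the basic array.
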
    ENTRY _objc_msgSend
    UNWIND _objc_msgSend, NoFrame

    cmp p0, #0                         // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le LNilOrTagged                  // (MSB tagged pointer looks negative)
#else
    b.eq LReturnZero
#endif
    ldr p13, [x0]                      // p13 = isa
    GetClassFromIsa_p16 p13, 1, x0     // p16 = class
LGetIsaDone:
    // calls imp or objc_msgSend_uncached
    CacheLookup NORMAL, _objc_msgSend, __objc_msgSend_uncached

#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
    b.eq LReturnZero                   // nil check
    GetTaggedClass
    b LGetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LReturnZero:
    // x0 is already zero
    mov x1, #0
    movi d0, #0
    movi d1, #0
    movi d2, #0
    movi d3, #0
    ret

    END_ENTRY _objc_msgSend
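
// The whole fast path, as C pseudocode (a sketch; the helper names are
// stand-ins for the macros above, not real runtime functions):
//
//     id objc_msgSend(id self, SEL _cmd, ...) {
//         if (self == nil) return 0;                // zeroes x1 and d0..d3 too
//         Class cls = is_tagged(self) ? tagged_class(self)      // GetTaggedClass
//                                     : decode_isa(self->isa);  // GetClassFromIsa_p16
//         IMP imp = cache_lookup(cls, _cmd);        // CacheLookup NORMAL
//         // on a miss, CacheLookup branches to __objc_msgSend_uncached,
//         // which calls lookUpImpOrForward() and then tail-calls the IMP
//         return imp(self, _cmd, ...);
//     }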


    ENTRY _objc_msgLookup
    UNWIND _objc_msgLookup, NoFrame

    cmp p0, #0                         // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le LLookup_NilOrTagged           // (MSB tagged pointer looks negative)
#else
    b.eq LLookup_Nil
#endif
    ldr p13, [x0]                      // p13 = isa
    GetClassFromIsa_p16 p13, 1, x0     // p16 = class
LLookup_GetIsaDone:
    // returns imp
    CacheLookup LOOKUP, _objc_msgLookup, __objc_msgLookup_uncached

#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
    b.eq LLookup_Nil                   // nil check
    GetTaggedClass
    b LLookup_GetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LLookup_Nil:
    adr x17, __objc_msgNil
    SignAsImp x17
    ret

    END_ENTRY _objc_msgLookup


    STATIC_ENTRY __objc_msgNil

    // x0 is already zero
    mov x1, #0
    movi d0, #0
    movi d1, #0
    movi d2, #0
    movi d3, #0
    ret

    END_ENTRY __objc_msgNil


    ENTRY _objc_msgSendSuper
    UNWIND _objc_msgSendSuper, NoFrame

    ldp p0, p16, [x0]                  // p0 = real receiver, p16 = class
    b L_objc_msgSendSuper2_body

    END_ENTRY _objc_msgSendSuper

    // no _objc_msgLookupSuper

    ENTRY _objc_msgSendSuper2
    UNWIND _objc_msgSendSuper2, NoFrame

#if __has_feature(ptrauth_calls)
    ldp x0, x17, [x0]                  // x0 = real receiver, x17 = class
    add x17, x17, #SUPERCLASS          // x17 = &class->superclass
    ldr x16, [x17]                     // x16 = class->superclass
    AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
LMsgSendSuperResume:
#else
    ldp p0, p16, [x0]                  // p0 = real receiver, p16 = class
    ldr p16, [x16, #SUPERCLASS]        // p16 = class->superclass
#endif
L_objc_msgSendSuper2_body:
    CacheLookup NORMAL, _objc_msgSendSuper2, __objc_msgSend_uncached

    END_ENTRY _objc_msgSendSuper2


    ENTRY _objc_msgLookupSuper2
    UNWIND _objc_msgLookupSuper2, NoFrame

#if __has_feature(ptrauth_calls)
    ldp x0, x17, [x0]                  // x0 = real receiver, x17 = class
    add x17, x17, #SUPERCLASS          // x17 = &class->superclass
    ldr x16, [x17]                     // x16 = class->superclass
    AuthISASuper x16, x17, ISA_SIGNING_DISCRIMINATOR_CLASS_SUPERCLASS
LMsgLookupSuperResume:
#else
    ldp p0, p16, [x0]                  // p0 = real receiver, p16 = class
    ldr p16, [x16, #SUPERCLASS]        // p16 = class->superclass
#endif
    CacheLookup LOOKUP, _objc_msgLookupSuper2, __objc_msgLookup_uncached

    END_ENTRY _objc_msgLookupSuper2


.macro MethodTableLookup

    SAVE_REGS MSGSEND

    // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
    // receiver and selector already in x0 and x1
    mov x2, x16
    mov x3, #3
    bl _lookUpImpOrForward

    // IMP in x0
    mov x17, x0

    RESTORE_REGS MSGSEND

.endmacro
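
// The C-side counterpart being called (prototype per the comment above;
// the individual flag values 1 and 2 are inferred from the mov x3, #3,
// so treat them as an assumption):
//
//     IMP lookUpImpOrForward(id obj, SEL sel, Class cls, int behavior);
//     // behavior == LOOKUP_INITIALIZE | LOOKUP_RESOLVER == 1 | 2 == 3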

    STATIC_ENTRY __objc_msgSend_uncached
    UNWIND __objc_msgSend_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p15 is the class to search

    MethodTableLookup
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgSend_uncached


    STATIC_ENTRY __objc_msgLookup_uncached
    UNWIND __objc_msgLookup_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p15 is the class to search

    MethodTableLookup
    ret

    END_ENTRY __objc_msgLookup_uncached


    STATIC_ENTRY _cache_getImp

    GetClassFromIsa_p16 p0, 0
    CacheLookup GETIMP, _cache_getImp, LGetImpMissDynamic, LGetImpMissConstant

LGetImpMissDynamic:
    mov p0, #0
    ret

LGetImpMissConstant:
    mov p0, p2
    ret

    END_ENTRY _cache_getImp


/********************************************************************
*
* id _objc_msgForward(id self, SEL _cmd,...);
*
* _objc_msgForward is the externally-callable
* function returned by things like method_getImplementation().
* _objc_msgForward_impcache is the function pointer actually stored in
* method caches.
*
********************************************************************/

    STATIC_ENTRY __objc_msgForward_impcache

    // No stret specialization.
    b __objc_msgForward

    END_ENTRY __objc_msgForward_impcache


    ENTRY __objc_msgForward

    adrp x17, __objc_forward_handler@PAGE
    ldr p17, [x17, __objc_forward_handler@PAGEOFF]
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgForward


    ENTRY _objc_msgSend_noarg
    b _objc_msgSend
    END_ENTRY _objc_msgSend_noarg

    ENTRY _objc_msgSend_debug
    b _objc_msgSend
    END_ENTRY _objc_msgSend_debug

    ENTRY _objc_msgSendSuper2_debug
    b _objc_msgSendSuper2
    END_ENTRY _objc_msgSendSuper2_debug


    ENTRY _method_invoke

    // See if this is a small method.
    tbnz p1, #0, L_method_invoke_small

    // We can directly load the IMP from big methods.
    // x1 is the method triplet instead of a SEL
    add p16, p1, #METHOD_IMP
    ldr p17, [x16]
    ldr p1, [x1, #METHOD_NAME]
    TailCallMethodListImp x17, x16

L_method_invoke_small:
    // Small methods require a call to handle swizzling.
    SAVE_REGS METHOD_INVOKE
    mov p0, p1
    bl __method_getImplementationAndName
    // ARM64_32 packs both return values into x0, with the SEL in the high
    // bits and the IMP in the low. ARM64 just returns them in x0 and x1.
    mov x17, x0
#if __LP64__
    mov x16, x1
#endif
    RESTORE_REGS METHOD_INVOKE
#if __LP64__
    mov x1, x16
#else
    lsr x1, x17, #32
    mov w17, w17
#endif
    TailCallFunctionPointer x17

    END_ENTRY _method_invoke
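
// Unpacking those return values in C terms (a sketch; "ret0"/"ret1"
// stand for the raw x0/x1 values after the call):
//
//     #if __LP64__
//         IMP imp = (IMP)ret0;            // x0
//         SEL sel = (SEL)ret1;            // x1
//     #else  // ARM64_32: both packed into the 64-bit x0
//         IMP imp = (IMP)(uint32_t)ret0;  // low 32 bits (mov w17, w17)
//         SEL sel = (SEL)(ret0 >> 32);    // high 32 bits (lsr x1, x17, #32)
//     #endif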

#endif