/*
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/********************************************************************
 *
 * objc-msg-arm64.s - ARM64 code to support objc messaging
 *
 ********************************************************************/
#include "arm64-asm.h"
#include "objc-config.h"
// _objc_restartableRanges is used by the method dispatch
// caching code to figure out whether any threads are actively
// in the cache for dispatching. The labels surround the asm code
// that does cache lookups. The tables are zero-terminated.
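// Conceptually, each entry below describes one protected PC range to the
// kernel. A hedged C-level sketch of what an entry encodes (field names are
// illustrative, not the exact kernel declaration):
//
//   struct restartable_range {
//       uintptr_t location;          // start of the cache-lookup code
//       uint16_t  length;            // LLookupEnd - LLookupStart
//       uint16_t  recovery_offset;   // LLookupRecover - LLookupStart
//   };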
.macro RestartableEntry
#if __LP64__
    .quad   LLookupStart$0
#else
    .long   LLookupStart$0
    .long   0
#endif
    .short  LLookupEnd$0 - LLookupStart$0
    .short  LLookupRecover$0 - LLookupStart$0
    .long   0
.endmacro
    .private_extern _objc_restartableRanges
_objc_restartableRanges:
    RestartableEntry _cache_getImp
    RestartableEntry _objc_msgSend
    RestartableEntry _objc_msgSendSuper
    RestartableEntry _objc_msgSendSuper2
    RestartableEntry _objc_msgLookup
    RestartableEntry _objc_msgLookupSuper2
    .fill   16, 1, 0    // zero-terminate the table
/* objc_super parameter to sendSuper */
#define RECEIVER         0
#define CLASS            __SIZEOF_POINTER__

/* Selected field offsets in class structure */
#define SUPERCLASS       __SIZEOF_POINTER__
#define CACHE            (2 * __SIZEOF_POINTER__)

/* Selected field offsets in method structure */
#define METHOD_NAME      0
#define METHOD_TYPES     __SIZEOF_POINTER__
#define METHOD_IMP       (2 * __SIZEOF_POINTER__)

#define BUCKET_SIZE      (2 * __SIZEOF_POINTER__)
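/* For reference, these offsets mirror the C-side layouts, roughly (a sketch,
 * not the authoritative runtime declarations):
 *
 *   struct objc_super { id receiver; Class cls; };                 // RECEIVER, CLASS
 *   struct objc_class { Class isa; Class superclass;
 *                       cache_t cache; ... };                      // SUPERCLASS, CACHE
 *   struct method_t   { SEL name; const char *types; IMP imp; };   // METHOD_*
 *   struct bucket_t   { uintptr_t imp; SEL sel; };                 // BUCKET_SIZE ({imp, sel}, see CacheLookup)
 */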
/********************************************************************
 * GetClassFromIsa_p16 src
 * src is a raw isa field. Sets p16 to the corresponding class pointer.
 * The raw isa might be an indexed isa to be decoded, or a
 * packed isa that needs to be masked.
 *
 * On exit:
 *   p16 is a class pointer
 *   x10 is clobbered
 ********************************************************************/
#if SUPPORT_INDEXED_ISA
    .align 3
    .globl _objc_indexed_classes
_objc_indexed_classes:
    .fill ISA_INDEX_COUNT, PTRSIZE, 0
#endif
.macro GetClassFromIsa_p16 /* src */

#if SUPPORT_INDEXED_ISA
    // Indexed isa
    mov     p16, $0                 // optimistically set dst = src
    tbz     p16, #ISA_INDEX_IS_NPI_BIT, 1f  // done if not non-pointer isa
    // isa in p16 is indexed
    adrp    x10, _objc_indexed_classes@PAGE
    add     x10, x10, _objc_indexed_classes@PAGEOFF
    ubfx    p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS  // extract index
    ldr     p16, [x10, p16, UXTP #PTRSHIFT]  // load class from array
1:

#elif __LP64__
    // 64-bit packed isa
    and     p16, $0, #ISA_MASK

#else
    // 32-bit raw isa
    mov     p16, $0

#endif

.endmacro
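// In C terms the decode above is roughly (a sketch; the constants come from
// isa.h and depend on the target configuration):
//
//   if (SUPPORT_INDEXED_ISA && (isa & (1UL << ISA_INDEX_IS_NPI_BIT))) {
//       unsigned idx = (isa >> ISA_INDEX_SHIFT) & ((1u << ISA_INDEX_BITS) - 1);
//       cls = objc_indexed_classes[idx];
//   } else if (SUPPORT_INDEXED_ISA) {
//       cls = (Class)isa;                  // already a raw class pointer
//   } else {
//       cls = (Class)(isa & ISA_MASK);     // packed isa
//   }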
/********************************************************************
 * ENTRY functionName
 * STATIC_ENTRY functionName
 * END_ENTRY functionName
 ********************************************************************/
.macro ENTRY /* name */
    .text
    .align 5
    .globl    $0
$0:
.endmacro

.macro STATIC_ENTRY /*name*/
    .text
    .align 5
    .private_extern $0
$0:
.endmacro

.macro END_ENTRY /* name */
LExit$0:
.endmacro
/********************************************************************
 * UNWIND name, flags
 * Unwind info generation
 ********************************************************************/
.macro UNWIND
    .section __LD,__compact_unwind,regular,debug
    PTR $0
    .set  LUnwind$0, LExit$0 - $0
    .long LUnwind$0
    .long $1
    PTR 0   /* no personality */
    PTR 0   /* no LSDA */
    .text
.endmacro

#define NoFrame 0x02000000          // no frame, no SP adjustment
#define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves
/********************************************************************
 *
 * CacheLookup NORMAL|GETIMP|LOOKUP <function>
 *
 * Locate the implementation for a selector in a class's method cache.
 *
 * When this is used in a function that doesn't hold the runtime lock,
 * this represents the critical section that may access dead memory.
 * If the kernel causes one of these functions to go down the recovery
 * path, we pretend the lookup failed by jumping to the JumpMiss branch.
 *
 * Takes:
 *   x1 = selector
 *   x16 = class to be searched
 *
 * Kills:
 *   x9,x10,x11,x12, x17
 *
 * On exit: (found) calls or returns IMP
 *                  with x16 = class, x17 = IMP
 *          (not found) jumps to LCacheMiss
 *
 ********************************************************************/
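// In C terms, the fast path implemented below is roughly this sketch
// (cache_buckets()/cache_mask() are illustrative helpers, not runtime API,
// and pointer authentication is omitted):
//
//   bucket_t *buckets = cache_buckets(cls);
//   uintptr_t mask    = cache_mask(cls);
//   uintptr_t i       = (uintptr_t)_cmd & mask;
//   for (;;) {
//       if (buckets[i].sel == _cmd) return buckets[i].imp;  // CacheHit
//       if (buckets[i].sel == 0)    break;                  // CheckMiss
//       i = i ? i - 1 : mask;                               // scan backwards, wrap once
//   }
//   // miss: fall through to the uncached lookup (or return nil for GETIMP)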
// CacheHit: x17 = cached IMP, x12 = address of cached IMP, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
    TailCallCachedImp x17, x12, x1, x16     // authenticate and call imp
.elseif $0 == GETIMP
    mov     p0, p17
    cbz     p0, 9f                  // don't ptrauth a nil imp
    AuthAndResignAsIMP x0, x12, x1, x16     // authenticate imp and re-sign as IMP
9:  ret                             // return IMP
.elseif $0 == LOOKUP
    // No nil check for ptrauth: the caller would crash anyway when they
    // jump to a nil IMP. We don't care if that jump also fails ptrauth.
    AuthAndResignAsIMP x17, x12, x1, x16    // authenticate imp and re-sign as IMP
    ret                             // return imp via x17
.else
.abort oops
.endif
.endmacro
.macro CheckMiss
    // miss if bucket->sel == 0
.if $0 == GETIMP
    cbz     p9, LGetImpMiss
.elseif $0 == NORMAL
    cbz     p9, __objc_msgSend_uncached
.elseif $0 == LOOKUP
    cbz     p9, __objc_msgLookup_uncached
.else
.abort oops
.endif
.endmacro

.macro JumpMiss
.if $0 == GETIMP
    b       LGetImpMiss
.elseif $0 == NORMAL
    b       __objc_msgSend_uncached
.elseif $0 == LOOKUP
    b       __objc_msgLookup_uncached
.else
.abort oops
.endif
.endmacro
.macro CacheLookup
    //
    // Restart protocol:
    //
    //   As soon as we're past the LLookupStart$1 label we may have loaded
    //   an invalid cache pointer or mask.
    //
    //   When task_restartable_ranges_synchronize() is called
    //   (or when a signal hits us) before we're past LLookupEnd$1,
    //   then our PC will be reset to LLookupRecover$1, which forcefully
    //   jumps to the cache-miss codepath, which has the following
    //   requirements:
    //
    //   GETIMP:
    //     The cache-miss is just returning NULL (setting x0 to 0).
    //
    //   NORMAL and LOOKUP:
    //     - x0 contains the receiver
    //     - x1 contains the selector
    //     - x16 contains the isa
    //     - other registers are set as per calling conventions
    //
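    // For reference, registration with the kernel looks roughly like this
    // (C sketch; the exact prototypes belong to the Mach restartable-ranges
    // interface and may differ):
    //   task_restartable_ranges_register(mach_task_self(), ranges, count);
    //   // ...later, whenever cache memory is about to be freed:
    //   task_restartable_ranges_synchronize(mach_task_self());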
LLookupStart$1:

    // p1 = SEL, p16 = isa
    ldr     p11, [x16, #CACHE]              // p11 = mask|buckets

#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    and     p10, p11, #0x0000ffffffffffff   // p10 = buckets
    and     p12, p1, p11, LSR #48           // x12 = _cmd & mask
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    and     p10, p11, #~0xf                 // p10 = buckets
    and     p11, p11, #0xf                  // p11 = maskShift
    mov     p12, #0xffff
    lsr     p11, p12, p11                   // p11 = mask = 0xffff >> p11
    and     p12, p1, p11                    // x12 = _cmd & mask
#else
#error Unsupported cache mask storage for ARM64.
#endif

    add     p12, p10, p12, LSL #(1+PTRSHIFT)
                // p12 = buckets + ((_cmd & mask) << (1+PTRSHIFT))
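    // C equivalent of the two storage schemes above (sketch):
    //   HIGH_16: buckets = v & 0x0000ffffffffffff;  index = _cmd & (v >> 48);
    //   LOW_4:   buckets = v & ~0xf;                index = _cmd & (0xffff >> (v & 0xf));
    // then: bucket = buckets + index * sizeof(bucket_t)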
    ldp     p17, p9, [x12]          // {imp, sel} = *bucket
1:  cmp     p9, p1                  // if (bucket->sel != _cmd)
    b.ne    2f                      //     scan more
    CacheHit $0                     // call or return imp

2:  // not hit: p12 = not-hit bucket
    CheckMiss $0                    // miss if bucket->sel == 0
    cmp     p12, p10                // wrap if bucket == buckets
    b.eq    3f
    ldp     p17, p9, [x12, #-BUCKET_SIZE]!  // {imp, sel} = *--bucket
    b       1b                      // loop
3:  // wrap: p12 = first bucket, w11 = mask
#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    add     p12, p12, p11, LSR #(48 - (1+PTRSHIFT))
                                    // p12 = buckets + (mask << 1+PTRSHIFT)
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    add     p12, p12, p11, LSL #(1+PTRSHIFT)
                                    // p12 = buckets + (mask << 1+PTRSHIFT)
#else
#error Unsupported cache mask storage for ARM64.
#endif
    // Clone scanning loop to miss instead of hang when cache is corrupt.
    // The slow path may detect any corruption and halt later.

    ldp     p17, p9, [x12]          // {imp, sel} = *bucket
1:  cmp     p9, p1                  // if (bucket->sel != _cmd)
    b.ne    2f                      //     scan more
    CacheHit $0                     // call or return imp

2:  // not hit: p12 = not-hit bucket
    CheckMiss $0                    // miss if bucket->sel == 0
    cmp     p12, p10                // wrap if bucket == buckets
    b.eq    3f
    ldp     p17, p9, [x12, #-BUCKET_SIZE]!  // {imp, sel} = *--bucket
    b       1b                      // loop

LLookupEnd$1:
LLookupRecover$1:
3:  // double wrap
    JumpMiss $0

.endmacro
/********************************************************************
 *
 * id objc_msgSend(id self, SEL _cmd, ...);
 * IMP objc_msgLookup(id self, SEL _cmd, ...);
 *
 * objc_msgLookup ABI:
 * IMP returned in x17
 * x16 reserved for our use but not used
 *
 ********************************************************************/
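// Caller-side usage sketch for objc_msgLookup (illustrative, not a required
// instruction sequence):
//   bl   _objc_msgLookup       // arguments already set up as for objc_msgSend
//   blr  x17                   // then invoke the IMP returned in x17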
#if SUPPORT_TAGGED_POINTERS
    .data
    .align 3
    .globl _objc_debug_taggedpointer_classes
_objc_debug_taggedpointer_classes:
    .fill 16, 8, 0
    .globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
    .fill 256, 8, 0
#endif
    ENTRY _objc_msgSend
    UNWIND _objc_msgSend, NoFrame
    cmp     p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le    LNilOrTagged            //  (MSB tagged pointer looks negative)
#else
    b.eq    LReturnZero
#endif
    ldr     p13, [x0]               // p13 = isa
    GetClassFromIsa_p16 p13         // p16 = class
LGetIsaDone:
    // calls imp or objc_msgSend_uncached
    CacheLookup NORMAL, _objc_msgSend

#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
    b.eq    LReturnZero             // nil check

    // tagged
    adrp    x10, _objc_debug_taggedpointer_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
    ubfx    x11, x0, #60, #4
    ldr     x16, [x10, x11, LSL #3]
    adrp    x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
    add     x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
    cmp     x10, x16
    b.ne    LGetIsaDone

    // ext tagged
    adrp    x10, _objc_debug_taggedpointer_ext_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
    ubfx    x11, x0, #52, #8
    ldr     x16, [x10, x11, LSL #3]
    b       LGetIsaDone
#endif // SUPPORT_TAGGED_POINTERS
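// Tag decoding above, as a C sketch (index widths taken from the ubfx
// instructions; the tables are the symbols defined earlier in this file):
//   basic:    cls = objc_debug_taggedpointer_classes[((uintptr_t)obj >> 60) & 0xf];
//   extended: cls = objc_debug_taggedpointer_ext_classes[((uintptr_t)obj >> 52) & 0xff];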
LReturnZero:
    // x0 is already zero
    mov     x1, #0
    movi    d0, #0
    movi    d1, #0
    movi    d2, #0
    movi    d3, #0
    ret

    END_ENTRY _objc_msgSend
    ENTRY _objc_msgLookup
    UNWIND _objc_msgLookup, NoFrame

    cmp     p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le    LLookup_NilOrTagged     //  (MSB tagged pointer looks negative)
#else
    b.eq    LLookup_Nil
#endif
    ldr     p13, [x0]               // p13 = isa
    GetClassFromIsa_p16 p13         // p16 = class
LLookup_GetIsaDone:
    // returns imp
    CacheLookup LOOKUP, _objc_msgLookup

#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
    b.eq    LLookup_Nil             // nil check

    // tagged
    adrp    x10, _objc_debug_taggedpointer_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
    ubfx    x11, x0, #60, #4
    ldr     x16, [x10, x11, LSL #3]
    adrp    x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
    add     x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
    cmp     x10, x16
    b.ne    LLookup_GetIsaDone

    // ext tagged
    adrp    x10, _objc_debug_taggedpointer_ext_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
    ubfx    x11, x0, #52, #8
    ldr     x16, [x10, x11, LSL #3]
    b       LLookup_GetIsaDone
#endif // SUPPORT_TAGGED_POINTERS
LLookup_Nil:
    adrp    x17, __objc_msgNil@PAGE
    add     x17, x17, __objc_msgNil@PAGEOFF
    ret

    END_ENTRY _objc_msgLookup
    STATIC_ENTRY __objc_msgNil

    // x0 is already zero
    mov     x1, #0
    movi    d0, #0
    movi    d1, #0
    movi    d2, #0
    movi    d3, #0
    ret

    END_ENTRY __objc_msgNil
    ENTRY _objc_msgSendSuper
    UNWIND _objc_msgSendSuper, NoFrame

    ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
    // calls imp or objc_msgSend_uncached
    CacheLookup NORMAL, _objc_msgSendSuper

    END_ENTRY _objc_msgSendSuper

    // no _objc_msgLookupSuper

    ENTRY _objc_msgSendSuper2
    UNWIND _objc_msgSendSuper2, NoFrame

    ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
    ldr     p16, [x16, #SUPERCLASS] // p16 = class->superclass
    CacheLookup NORMAL, _objc_msgSendSuper2

    END_ENTRY _objc_msgSendSuper2

    ENTRY _objc_msgLookupSuper2
    UNWIND _objc_msgLookupSuper2, NoFrame

    ldp     p0, p16, [x0]           // p0 = real receiver, p16 = class
    ldr     p16, [x16, #SUPERCLASS] // p16 = class->superclass
    CacheLookup LOOKUP, _objc_msgLookupSuper2

    END_ENTRY _objc_msgLookupSuper2
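// For the Super variants, x0 points at a two-word objc_super structure.
// A C-level sketch matching the ldp/ldr sequences above:
//   struct objc_super { id receiver; Class cls; };
// objc_msgSendSuper searches the class stored in the struct directly, while
// objc_msgSendSuper2 is passed the *current* class and loads its superclass
// itself (the #SUPERCLASS load above).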
.macro MethodTableLookup

    // push frame
    SignLR
    stp     fp, lr, [sp, #-16]!
    mov     fp, sp

    // save parameter registers: x0..x8, q0..q7
    sub     sp, sp, #(10*8 + 8*16)
    stp     q0, q1, [sp, #(0*16)]
    stp     q2, q3, [sp, #(2*16)]
    stp     q4, q5, [sp, #(4*16)]
    stp     q6, q7, [sp, #(6*16)]
    stp     x0, x1, [sp, #(8*16+0*8)]
    stp     x2, x3, [sp, #(8*16+2*8)]
    stp     x4, x5, [sp, #(8*16+4*8)]
    stp     x6, x7, [sp, #(8*16+6*8)]
    str     x8,     [sp, #(8*16+8*8)]

    // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
    // receiver and selector already in x0 and x1
    mov     x2, x16
    mov     x3, #3                  // LOOKUP_INITIALIZE | LOOKUP_RESOLVER
    bl      _lookUpImpOrForward
    // IMP in x0
    mov     x17, x0

    // restore registers and return
    ldp     q0, q1, [sp, #(0*16)]
    ldp     q2, q3, [sp, #(2*16)]
    ldp     q4, q5, [sp, #(4*16)]
    ldp     q6, q7, [sp, #(6*16)]
    ldp     x0, x1, [sp, #(8*16+0*8)]
    ldp     x2, x3, [sp, #(8*16+2*8)]
    ldp     x4, x5, [sp, #(8*16+4*8)]
    ldp     x6, x7, [sp, #(8*16+6*8)]
    ldr     x8,     [sp, #(8*16+8*8)]

    mov     sp, fp
    ldp     fp, lr, [sp], #16
    AuthenticateLR

.endmacro
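// The C routine invoked above, for reference (signature sketch; the behavior
// flags are whatever LOOKUP_INITIALIZE | LOOKUP_RESOLVER expand to in this
// runtime version):
//   IMP lookUpImpOrForward(id obj, SEL sel, Class cls, int behavior);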
    STATIC_ENTRY __objc_msgSend_uncached
    UNWIND __objc_msgSend_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p16 is the class to search

    MethodTableLookup
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgSend_uncached
    STATIC_ENTRY __objc_msgLookup_uncached
    UNWIND __objc_msgLookup_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p16 is the class to search

    MethodTableLookup
    ret

    END_ENTRY __objc_msgLookup_uncached
    STATIC_ENTRY _cache_getImp

    GetClassFromIsa_p16 p0
    CacheLookup GETIMP, _cache_getImp

LGetImpMiss:
    mov     p0, #0
    ret

    END_ENTRY _cache_getImp
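// C-visible counterpart (signature sketch): IMP cache_getImp(Class cls, SEL sel);
// It returns the cached IMP or nil on a miss, and never falls back to the
// uncached table lookup or forwarding (see the GETIMP notes above).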
/********************************************************************
 *
 * id _objc_msgForward(id self, SEL _cmd,...);
 *
 * _objc_msgForward is the externally-callable
 * function returned by things like method_getImplementation().
 * _objc_msgForward_impcache is the function pointer actually stored in
 * method caches.
 *
 ********************************************************************/
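// The handler loaded in __objc_msgForward below is a plain C function pointer
// installed via objc_setForwardHandler() (sketch; the default handler aborts
// with an "unrecognized selector" error):
//   extern void *_objc_forward_handler;   // invoked as id (*)(id, SEL, ...)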
    STATIC_ENTRY __objc_msgForward_impcache

    // No stret specialization.
    b       __objc_msgForward

    END_ENTRY __objc_msgForward_impcache
    ENTRY __objc_msgForward

    adrp    x17, __objc_forward_handler@PAGE
    ldr     p17, [x17, __objc_forward_handler@PAGEOFF]
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgForward
    ENTRY _objc_msgSend_noarg
    b       _objc_msgSend
    END_ENTRY _objc_msgSend_noarg

    ENTRY _objc_msgSend_debug
    b       _objc_msgSend
    END_ENTRY _objc_msgSend_debug

    ENTRY _objc_msgSendSuper2_debug
    b       _objc_msgSendSuper2
    END_ENTRY _objc_msgSendSuper2_debug
    ENTRY _method_invoke

    // x1 is method triplet instead of SEL
    add     p16, p1, #METHOD_IMP
    ldr     p17, [x16]
    ldr     p1, [x1, #METHOD_NAME]
    TailCallMethodListImp x17, x16

    END_ENTRY _method_invoke
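// C-visible counterpart (signature sketch): id method_invoke(id receiver, Method m, ...);
// It behaves like calling m->imp(receiver, m->name, ...), which is what the
// METHOD_IMP / METHOD_NAME loads above implement.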