// git.saurik.com Git mirror — apple/objc4.git
// blob 595b03ee309b55b2a0883617e57cdc3c794b8203
// runtime/Messengers.subproj/objc-msg-arm64.s
1 /*
2 * @APPLE_LICENSE_HEADER_START@
3 *
4 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23 /********************************************************************
24 *
25 * objc-msg-arm64.s - ARM64 code to support objc messaging
26 *
27 ********************************************************************/
28
29 #ifdef __arm64__
30
31 #include <arm/arch.h>
32 #include "isa.h"
33 #include "arm64-asm.h"
34 #include "objc-config.h"
35
36 .data
37
38 // _objc_restartableRanges is used by method dispatch
39 // caching code to figure out whether any threads are actively
40 // in the cache for dispatching. The labels surround the asm code
41 // that do cache lookups. The tables are zero-terminated.
42
43 .macro RestartableEntry
44 #if __LP64__
45 .quad LLookupStart$0
46 #else
47 .long LLookupStart$0
48 .long 0
49 #endif
50 .short LLookupEnd$0 - LLookupStart$0
51 .short LLookupRecover$0 - LLookupStart$0
52 .long 0
53 .endmacro
54
55 .align 4
56 .private_extern _objc_restartableRanges
57 _objc_restartableRanges:
58 RestartableEntry _cache_getImp
59 RestartableEntry _objc_msgSend
60 RestartableEntry _objc_msgSendSuper
61 RestartableEntry _objc_msgSendSuper2
62 RestartableEntry _objc_msgLookup
63 RestartableEntry _objc_msgLookupSuper2
64 .fill 16, 1, 0
65
66
67 /* objc_super parameter to sendSuper */
68 #define RECEIVER 0
69 #define CLASS __SIZEOF_POINTER__
70
71 /* Selected field offsets in class structure */
72 #define SUPERCLASS __SIZEOF_POINTER__
73 #define CACHE (2 * __SIZEOF_POINTER__)
74
75 /* Selected field offsets in method structure */
76 #define METHOD_NAME 0
77 #define METHOD_TYPES __SIZEOF_POINTER__
78 #define METHOD_IMP (2 * __SIZEOF_POINTER__)
79
80 #define BUCKET_SIZE (2 * __SIZEOF_POINTER__)
81
82
83 /********************************************************************
84 * GetClassFromIsa_p16 src
85 * src is a raw isa field. Sets p16 to the corresponding class pointer.
86 * The raw isa might be an indexed isa to be decoded, or a
87 * packed isa that needs to be masked.
88 *
89 * On exit:
90 * $0 is unchanged
91 * p16 is a class pointer
92 * x10 is clobbered
93 ********************************************************************/
94
95 #if SUPPORT_INDEXED_ISA
96 .align 3
97 .globl _objc_indexed_classes
98 _objc_indexed_classes:
99 .fill ISA_INDEX_COUNT, PTRSIZE, 0
100 #endif
101
102 .macro GetClassFromIsa_p16 /* src */
103
104 #if SUPPORT_INDEXED_ISA
105 // Indexed isa
106 mov p16, $0 // optimistically set dst = src
107 tbz p16, #ISA_INDEX_IS_NPI_BIT, 1f // done if not non-pointer isa
108 // isa in p16 is indexed
109 adrp x10, _objc_indexed_classes@PAGE
110 add x10, x10, _objc_indexed_classes@PAGEOFF
111 ubfx p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS // extract index
112 ldr p16, [x10, p16, UXTP #PTRSHIFT] // load class from array
113 1:
114
115 #elif __LP64__
116 // 64-bit packed isa
117 and p16, $0, #ISA_MASK
118
119 #else
120 // 32-bit raw isa
121 mov p16, $0
122
123 #endif
124
125 .endmacro
126
127
128 /********************************************************************
129 * ENTRY functionName
130 * STATIC_ENTRY functionName
131 * END_ENTRY functionName
132 ********************************************************************/
133
134 .macro ENTRY /* name */
135 .text
136 .align 5
137 .globl $0
138 $0:
139 .endmacro
140
141 .macro STATIC_ENTRY /*name*/
142 .text
143 .align 5
144 .private_extern $0
145 $0:
146 .endmacro
147
148 .macro END_ENTRY /* name */
149 LExit$0:
150 .endmacro
151
152
153 /********************************************************************
154 * UNWIND name, flags
155 * Unwind info generation
156 ********************************************************************/
157 .macro UNWIND
158 .section __LD,__compact_unwind,regular,debug
159 PTR $0
160 .set LUnwind$0, LExit$0 - $0
161 .long LUnwind$0
162 .long $1
163 PTR 0 /* no personality */
164 PTR 0 /* no LSDA */
165 .text
166 .endmacro
167
168 #define NoFrame 0x02000000 // no frame, no SP adjustment
169 #define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves
170
171
172 //////////////////////////////////////////////////////////////////////
173 //
174 // SAVE_REGS
175 //
176 // Create a stack frame and save all argument registers in preparation
177 // for a function call.
178 //////////////////////////////////////////////////////////////////////
179
180 .macro SAVE_REGS
181
182 // push frame
183 SignLR
184 stp fp, lr, [sp, #-16]!
185 mov fp, sp
186
187 // save parameter registers: x0..x8, q0..q7
188 sub sp, sp, #(10*8 + 8*16)
189 stp q0, q1, [sp, #(0*16)]
190 stp q2, q3, [sp, #(2*16)]
191 stp q4, q5, [sp, #(4*16)]
192 stp q6, q7, [sp, #(6*16)]
193 stp x0, x1, [sp, #(8*16+0*8)]
194 stp x2, x3, [sp, #(8*16+2*8)]
195 stp x4, x5, [sp, #(8*16+4*8)]
196 stp x6, x7, [sp, #(8*16+6*8)]
197 str x8, [sp, #(8*16+8*8)]
198
199 .endmacro
200
201
202 //////////////////////////////////////////////////////////////////////
203 //
204 // RESTORE_REGS
205 //
206 // Restore all argument registers and pop the stack frame created by
207 // SAVE_REGS.
208 //////////////////////////////////////////////////////////////////////
209
210 .macro RESTORE_REGS
211
212 ldp q0, q1, [sp, #(0*16)]
213 ldp q2, q3, [sp, #(2*16)]
214 ldp q4, q5, [sp, #(4*16)]
215 ldp q6, q7, [sp, #(6*16)]
216 ldp x0, x1, [sp, #(8*16+0*8)]
217 ldp x2, x3, [sp, #(8*16+2*8)]
218 ldp x4, x5, [sp, #(8*16+4*8)]
219 ldp x6, x7, [sp, #(8*16+6*8)]
220 ldr x8, [sp, #(8*16+8*8)]
221
222 mov sp, fp
223 ldp fp, lr, [sp], #16
224 AuthenticateLR
225
226 .endmacro
227
228
229 /********************************************************************
230 *
231 * CacheLookup NORMAL|GETIMP|LOOKUP <function>
232 *
233 * Locate the implementation for a selector in a class method cache.
234 *
235 * When this is used in a function that doesn't hold the runtime lock,
236 * this represents the critical section that may access dead memory.
237 * If the kernel causes one of these functions to go down the recovery
238 * path, we pretend the lookup failed by jumping the JumpMiss branch.
239 *
240 * Takes:
241 * x1 = selector
242 * x16 = class to be searched
243 *
244 * Kills:
245 * x9,x10,x11,x12, x17
246 *
247 * On exit: (found) calls or returns IMP
248 * with x16 = class, x17 = IMP
249 * (not found) jumps to LCacheMiss
250 *
251 ********************************************************************/
252
253 #define NORMAL 0
254 #define GETIMP 1
255 #define LOOKUP 2
256
257 // CacheHit: x17 = cached IMP, x12 = address of cached IMP, x1 = SEL, x16 = isa
258 .macro CacheHit
259 .if $0 == NORMAL
260 TailCallCachedImp x17, x12, x1, x16 // authenticate and call imp
261 .elseif $0 == GETIMP
262 mov p0, p17
263 cbz p0, 9f // don't ptrauth a nil imp
264 AuthAndResignAsIMP x0, x12, x1, x16 // authenticate imp and re-sign as IMP
265 9: ret // return IMP
266 .elseif $0 == LOOKUP
267 // No nil check for ptrauth: the caller would crash anyway when they
268 // jump to a nil IMP. We don't care if that jump also fails ptrauth.
269 AuthAndResignAsIMP x17, x12, x1, x16 // authenticate imp and re-sign as IMP
270 ret // return imp via x17
271 .else
272 .abort oops
273 .endif
274 .endmacro
275
276 .macro CheckMiss
277 // miss if bucket->sel == 0
278 .if $0 == GETIMP
279 cbz p9, LGetImpMiss
280 .elseif $0 == NORMAL
281 cbz p9, __objc_msgSend_uncached
282 .elseif $0 == LOOKUP
283 cbz p9, __objc_msgLookup_uncached
284 .else
285 .abort oops
286 .endif
287 .endmacro
288
289 .macro JumpMiss
290 .if $0 == GETIMP
291 b LGetImpMiss
292 .elseif $0 == NORMAL
293 b __objc_msgSend_uncached
294 .elseif $0 == LOOKUP
295 b __objc_msgLookup_uncached
296 .else
297 .abort oops
298 .endif
299 .endmacro
300
301 .macro CacheLookup
302 //
303 // Restart protocol:
304 //
305 // As soon as we're past the LLookupStart$1 label we may have loaded
306 // an invalid cache pointer or mask.
307 //
308 // When task_restartable_ranges_synchronize() is called,
309 // (or when a signal hits us) before we're past LLookupEnd$1,
310 // then our PC will be reset to LLookupRecover$1 which forcefully
311 // jumps to the cache-miss codepath which have the following
312 // requirements:
313 //
314 // GETIMP:
315 // The cache-miss is just returning NULL (setting x0 to 0)
316 //
317 // NORMAL and LOOKUP:
318 // - x0 contains the receiver
319 // - x1 contains the selector
320 // - x16 contains the isa
321 // - other registers are set as per calling conventions
322 //
323 LLookupStart$1:
324
325 // p1 = SEL, p16 = isa
326 ldr p11, [x16, #CACHE] // p11 = mask|buckets
327
328 #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
329 and p10, p11, #0x0000ffffffffffff // p10 = buckets
330 and p12, p1, p11, LSR #48 // x12 = _cmd & mask
331 #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
332 and p10, p11, #~0xf // p10 = buckets
333 and p11, p11, #0xf // p11 = maskShift
334 mov p12, #0xffff
335 lsr p11, p12, p11 // p11 = mask = 0xffff >> p11
336 and p12, p1, p11 // x12 = _cmd & mask
337 #else
338 #error Unsupported cache mask storage for ARM64.
339 #endif
340
341
342 add p12, p10, p12, LSL #(1+PTRSHIFT)
343 // p12 = buckets + ((_cmd & mask) << (1+PTRSHIFT))
344
345 ldp p17, p9, [x12] // {imp, sel} = *bucket
346 1: cmp p9, p1 // if (bucket->sel != _cmd)
347 b.ne 2f // scan more
348 CacheHit $0 // call or return imp
349
350 2: // not hit: p12 = not-hit bucket
351 CheckMiss $0 // miss if bucket->sel == 0
352 cmp p12, p10 // wrap if bucket == buckets
353 b.eq 3f
354 ldp p17, p9, [x12, #-BUCKET_SIZE]! // {imp, sel} = *--bucket
355 b 1b // loop
356
357 3: // wrap: p12 = first bucket, w11 = mask
358 #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
359 add p12, p12, p11, LSR #(48 - (1+PTRSHIFT))
360 // p12 = buckets + (mask << 1+PTRSHIFT)
361 #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
362 add p12, p12, p11, LSL #(1+PTRSHIFT)
363 // p12 = buckets + (mask << 1+PTRSHIFT)
364 #else
365 #error Unsupported cache mask storage for ARM64.
366 #endif
367
368 // Clone scanning loop to miss instead of hang when cache is corrupt.
369 // The slow path may detect any corruption and halt later.
370
371 ldp p17, p9, [x12] // {imp, sel} = *bucket
372 1: cmp p9, p1 // if (bucket->sel != _cmd)
373 b.ne 2f // scan more
374 CacheHit $0 // call or return imp
375
376 2: // not hit: p12 = not-hit bucket
377 CheckMiss $0 // miss if bucket->sel == 0
378 cmp p12, p10 // wrap if bucket == buckets
379 b.eq 3f
380 ldp p17, p9, [x12, #-BUCKET_SIZE]! // {imp, sel} = *--bucket
381 b 1b // loop
382
383 LLookupEnd$1:
384 LLookupRecover$1:
385 3: // double wrap
386 JumpMiss $0
387
388 .endmacro
389
390
391 /********************************************************************
392 *
393 * id objc_msgSend(id self, SEL _cmd, ...);
394 * IMP objc_msgLookup(id self, SEL _cmd, ...);
395 *
396 * objc_msgLookup ABI:
397 * IMP returned in x17
398 * x16 reserved for our use but not used
399 *
400 ********************************************************************/
401
402 #if SUPPORT_TAGGED_POINTERS
403 .data
404 .align 3
405 .globl _objc_debug_taggedpointer_classes
406 _objc_debug_taggedpointer_classes:
407 .fill 16, 8, 0
408 .globl _objc_debug_taggedpointer_ext_classes
409 _objc_debug_taggedpointer_ext_classes:
410 .fill 256, 8, 0
411 #endif
412
413 ENTRY _objc_msgSend
414 UNWIND _objc_msgSend, NoFrame
415
416 cmp p0, #0 // nil check and tagged pointer check
417 #if SUPPORT_TAGGED_POINTERS
418 b.le LNilOrTagged // (MSB tagged pointer looks negative)
419 #else
420 b.eq LReturnZero
421 #endif
422 ldr p13, [x0] // p13 = isa
423 GetClassFromIsa_p16 p13 // p16 = class
424 LGetIsaDone:
425 // calls imp or objc_msgSend_uncached
426 CacheLookup NORMAL, _objc_msgSend
427
428 #if SUPPORT_TAGGED_POINTERS
429 LNilOrTagged:
430 b.eq LReturnZero // nil check
431
432 // tagged
433 adrp x10, _objc_debug_taggedpointer_classes@PAGE
434 add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
435 ubfx x11, x0, #60, #4
436 ldr x16, [x10, x11, LSL #3]
437 adrp x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
438 add x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
439 cmp x10, x16
440 b.ne LGetIsaDone
441
442 // ext tagged
443 adrp x10, _objc_debug_taggedpointer_ext_classes@PAGE
444 add x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
445 ubfx x11, x0, #52, #8
446 ldr x16, [x10, x11, LSL #3]
447 b LGetIsaDone
448 // SUPPORT_TAGGED_POINTERS
449 #endif
450
451 LReturnZero:
452 // x0 is already zero
453 mov x1, #0
454 movi d0, #0
455 movi d1, #0
456 movi d2, #0
457 movi d3, #0
458 ret
459
460 END_ENTRY _objc_msgSend
461
462
463 ENTRY _objc_msgLookup
464 UNWIND _objc_msgLookup, NoFrame
465 cmp p0, #0 // nil check and tagged pointer check
466 #if SUPPORT_TAGGED_POINTERS
467 b.le LLookup_NilOrTagged // (MSB tagged pointer looks negative)
468 #else
469 b.eq LLookup_Nil
470 #endif
471 ldr p13, [x0] // p13 = isa
472 GetClassFromIsa_p16 p13 // p16 = class
473 LLookup_GetIsaDone:
474 // returns imp
475 CacheLookup LOOKUP, _objc_msgLookup
476
477 #if SUPPORT_TAGGED_POINTERS
478 LLookup_NilOrTagged:
479 b.eq LLookup_Nil // nil check
480
481 // tagged
482 adrp x10, _objc_debug_taggedpointer_classes@PAGE
483 add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
484 ubfx x11, x0, #60, #4
485 ldr x16, [x10, x11, LSL #3]
486 adrp x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
487 add x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
488 cmp x10, x16
489 b.ne LLookup_GetIsaDone
490
491 LLookup_ExtTag:
492 adrp x10, _objc_debug_taggedpointer_ext_classes@PAGE
493 add x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
494 ubfx x11, x0, #52, #8
495 ldr x16, [x10, x11, LSL #3]
496 b LLookup_GetIsaDone
497 // SUPPORT_TAGGED_POINTERS
498 #endif
499
500 LLookup_Nil:
501 adrp x17, __objc_msgNil@PAGE
502 add x17, x17, __objc_msgNil@PAGEOFF
503 ret
504
505 END_ENTRY _objc_msgLookup
506
507
508 STATIC_ENTRY __objc_msgNil
509
510 // x0 is already zero
511 mov x1, #0
512 movi d0, #0
513 movi d1, #0
514 movi d2, #0
515 movi d3, #0
516 ret
517
518 END_ENTRY __objc_msgNil
519
520
521 ENTRY _objc_msgSendSuper
522 UNWIND _objc_msgSendSuper, NoFrame
523
524 ldp p0, p16, [x0] // p0 = real receiver, p16 = class
525 // calls imp or objc_msgSend_uncached
526 CacheLookup NORMAL, _objc_msgSendSuper
527
528 END_ENTRY _objc_msgSendSuper
529
530 // no _objc_msgLookupSuper
531
532 ENTRY _objc_msgSendSuper2
533 UNWIND _objc_msgSendSuper2, NoFrame
534
535 ldp p0, p16, [x0] // p0 = real receiver, p16 = class
536 ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
537 CacheLookup NORMAL, _objc_msgSendSuper2
538
539 END_ENTRY _objc_msgSendSuper2
540
541
542 ENTRY _objc_msgLookupSuper2
543 UNWIND _objc_msgLookupSuper2, NoFrame
544
545 ldp p0, p16, [x0] // p0 = real receiver, p16 = class
546 ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
547 CacheLookup LOOKUP, _objc_msgLookupSuper2
548
549 END_ENTRY _objc_msgLookupSuper2
550
551
552 .macro MethodTableLookup
553
554 SAVE_REGS
555
556 // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
557 // receiver and selector already in x0 and x1
558 mov x2, x16
559 mov x3, #3
560 bl _lookUpImpOrForward
561
562 // IMP in x0
563 mov x17, x0
564
565 RESTORE_REGS
566
567 .endmacro
568
569 STATIC_ENTRY __objc_msgSend_uncached
570 UNWIND __objc_msgSend_uncached, FrameWithNoSaves
571
572 // THIS IS NOT A CALLABLE C FUNCTION
573 // Out-of-band p16 is the class to search
574
575 MethodTableLookup
576 TailCallFunctionPointer x17
577
578 END_ENTRY __objc_msgSend_uncached
579
580
581 STATIC_ENTRY __objc_msgLookup_uncached
582 UNWIND __objc_msgLookup_uncached, FrameWithNoSaves
583
584 // THIS IS NOT A CALLABLE C FUNCTION
585 // Out-of-band p16 is the class to search
586
587 MethodTableLookup
588 ret
589
590 END_ENTRY __objc_msgLookup_uncached
591
592
593 STATIC_ENTRY _cache_getImp
594
595 GetClassFromIsa_p16 p0
596 CacheLookup GETIMP, _cache_getImp
597
598 LGetImpMiss:
599 mov p0, #0
600 ret
601
602 END_ENTRY _cache_getImp
603
604
605 /********************************************************************
606 *
607 * id _objc_msgForward(id self, SEL _cmd,...);
608 *
609 * _objc_msgForward is the externally-callable
610 * function returned by things like method_getImplementation().
611 * _objc_msgForward_impcache is the function pointer actually stored in
612 * method caches.
613 *
614 ********************************************************************/
615
616 STATIC_ENTRY __objc_msgForward_impcache
617
618 // No stret specialization.
619 b __objc_msgForward
620
621 END_ENTRY __objc_msgForward_impcache
622
623
624 ENTRY __objc_msgForward
625
626 adrp x17, __objc_forward_handler@PAGE
627 ldr p17, [x17, __objc_forward_handler@PAGEOFF]
628 TailCallFunctionPointer x17
629
630 END_ENTRY __objc_msgForward
631
632
633 ENTRY _objc_msgSend_noarg
634 b _objc_msgSend
635 END_ENTRY _objc_msgSend_noarg
636
637 ENTRY _objc_msgSend_debug
638 b _objc_msgSend
639 END_ENTRY _objc_msgSend_debug
640
641 ENTRY _objc_msgSendSuper2_debug
642 b _objc_msgSendSuper2
643 END_ENTRY _objc_msgSendSuper2_debug
644
645
646 ENTRY _method_invoke
647
648 // See if this is a small method.
649 tbnz p1, #0, L_method_invoke_small
650
651 // We can directly load the IMP from big methods.
652 // x1 is method triplet instead of SEL
653 add p16, p1, #METHOD_IMP
654 ldr p17, [x16]
655 ldr p1, [x1, #METHOD_NAME]
656 TailCallMethodListImp x17, x16
657
658 L_method_invoke_small:
659 // Small methods require a call to handle swizzling.
660 SAVE_REGS
661 mov p0, p1
662 bl __method_getImplementationAndName
663 // ARM64_32 packs both return values into x0, with SEL in the high bits and IMP in the low.
664 // ARM64 just returns them in x0 and x1.
665 mov x17, x0
666 #if __LP64__
667 mov x16, x1
668 #endif
669 RESTORE_REGS
670 #if __LP64__
671 mov x1, x16
672 #else
673 lsr x1, x17, #32
674 mov w17, w17
675 #endif
676 TailCallFunctionPointer x17
677
678 END_ENTRY _method_invoke
679
680 #endif