*
* @APPLE_LICENSE_HEADER_END@
*/
+
+#ifdef __x86_64__
+
/********************************************************************
********************************************************************
**
#define __OBJC2__ 1
-#undef OBJC_ASM
-#define OBJC_ASM
-#include "objc-rtp.h"
-
-
/********************************************************************
* Data used by the ObjC runtime.
*
// Substitute receiver for messages sent to nil (usually also nil)
// id _objc_nilReceiver
.align 4
-.globl __objc_nilReceiver
+.private_extern __objc_nilReceiver
__objc_nilReceiver:
.quad 0
// to get the critical regions for which method caches
// cannot be garbage collected.
-.globl _objc_entryPoints
+.private_extern _objc_entryPoints
_objc_entryPoints:
.quad __cache_getImp
.quad __cache_getMethod
.quad _objc_msgSend_stret
.quad _objc_msgSendSuper
.quad _objc_msgSendSuper_stret
+ .quad _objc_msgSendSuper2
+ .quad _objc_msgSendSuper2_stret
.quad 0
-.globl _objc_exitPoints
+.private_extern _objc_exitPoints
_objc_exitPoints:
- .quad LGetImpExit
- .quad LGetMethodExit
- .quad LMsgSendExit
- .quad LMsgSendFpretExit
- .quad LMsgSendFp2retExit
- .quad LMsgSendStretExit
- .quad LMsgSendSuperExit
- .quad LMsgSendSuperStretExit
+ .quad LExit__cache_getImp
+ .quad LExit__cache_getMethod
+ .quad LExit_objc_msgSend
+ .quad LExit_objc_msgSend_fpret
+ .quad LExit_objc_msgSend_fp2ret
+ .quad LExit_objc_msgSend_stret
+ .quad LExit_objc_msgSendSuper
+ .quad LExit_objc_msgSendSuper_stret
+ .quad LExit_objc_msgSendSuper2
+ .quad LExit_objc_msgSendSuper2_stret
.quad 0
/********************************************************************
- *
+ * Recommended multi-byte NOP instructions
+ * (Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2B)
+ ********************************************************************/
+#define nop1 .byte 0x90
+#define nop2 .byte 0x66,0x90
+#define nop3 .byte 0x0F,0x1F,0x00
+#define nop4 .byte 0x0F,0x1F,0x40,0x00
+#define nop5 .byte 0x0F,0x1F,0x44,0x00,0x00
+#define nop6 .byte 0x66,0x0F,0x1F,0x44,0x00,0x00
+#define nop7 .byte 0x0F,0x1F,0x80,0x00,0x00,0x00,0x00
+#define nop8 .byte 0x0F,0x1F,0x84,0x00,0x00,0x00,0x00,0x00
+#define nop9 .byte 0x66,0x0F,0x1F,0x84,0x00,0x00,0x00,0x00,0x00
+
+
+/********************************************************************
* Names for parameter registers.
- *
********************************************************************/
-#define a1 rdi
-#define a2 rsi
-#define a3 rdx
-#define a4 rcx
-#define a5 r8
-#define a6 r9
+#define a1 rdi
+#define a1d edi
+#define a1b dil
+#define a2 rsi
+#define a2d esi
+#define a2b sil
+#define a3 rdx
+#define a3d edx
+#define a4 rcx
+#define a4d ecx
+#define a5 r8
+#define a5d r8d
+#define a6 r9
#define a6d r9d
+/********************************************************************
+ * Names for relative labels
+ * DO NOT USE THESE LABELS ELSEWHERE
+ * Reserved labels: 5: 6: 7: 8: 9:
+ ********************************************************************/
+#define LCacheMiss 5
+#define LCacheMiss_f 5f
+#define LCacheMiss_b 5b
+#define LNilTestDone 6
+#define LNilTestDone_f 6f
+#define LNilTestDone_b 6b
+#define LNilTestSlow 7
+#define LNilTestSlow_f 7f
+#define LNilTestSlow_b 7b
+#define LGetIsaDone 8
+#define LGetIsaDone_f 8f
+#define LGetIsaDone_b 8b
+#define LGetIsaSlow 9
+#define LGetIsaSlow_f 9f
+#define LGetIsaSlow_b 9b
+
+/********************************************************************
+ * Macro parameters
+ ********************************************************************/
+
+#define STRET -1
+#define NORMAL 0
+#define FPRET 1
+#define FP2RET 2
+
+
/********************************************************************
*
* Structure definitions.
********************************************************************/
// objc_super parameter to sendSuper
- receiver = 0
- class = 8
+#define receiver 0
+#define class 8
// Selected field offsets in class structure
- isa = 0
-#if __OBJC2__
- cache = 16
-#else
- cache = 64
-#endif
+// #define isa 0 USE GetIsa INSTEAD
+#define cache 16
// Method descriptor
- method_name = 0
- method_imp = 16
+#define method_name 0
+#define method_imp 16
// Cache header
- mask = 0
- occupied = 4
- buckets = 8 // variable length array
+#define mask 0
+#define occupied 8
+#define buckets 16
// typedef struct {
// uint128_t floatingPointArgs[8]; // xmm0..xmm7
#define STACK_AREA (REG_AREA+6*8)
-#if defined(OBJC_INSTRUMENTED)
-// Cache instrumentation data, follows buckets
- hitCount = 0
- hitProbes = hitCount + 4
- maxHitProbes = hitProbes + 4
- missCount = maxHitProbes + 4
- missProbes = missCount + 4
- maxMissProbes = missProbes + 4
- flushCount = maxMissProbes + 4
- flushedEntries = flushCount + 4
-
-// Buckets in CacheHitHistogram and CacheMissHistogram
- CACHE_HISTOGRAM_SIZE = 512
-#endif
-
-
//////////////////////////////////////////////////////////////////////
//
// ENTRY functionName
.macro ENTRY
.text
.globl $0
+ .align 6, 0x90
+$0:
+.endmacro
+
+.macro STATIC_ENTRY
+ .text
+ .private_extern $0
.align 2, 0x90
$0:
.endmacro
//////////////////////////////////////////////////////////////////////
.macro END_ENTRY
+LExit$0:
.endmacro
-//////////////////////////////////////////////////////////////////////
-//
-// CALL_MCOUNTER
-//
-// Calls mcount() profiling routine. Must be called immediately on
-// function entry, before any prologue executes.
-//
-//////////////////////////////////////////////////////////////////////
-.macro CALL_MCOUNTER
-#ifdef PROFILE
- // Current stack contents: ret
- pushq %rbp
- movq %rsp,%rbp
- // Current stack contents: ret, rbp
- call mcount
- movq %rbp,%rsp
- popq %rbp
-#endif
+/* DWARF support
+ These macros work for objc_msgSend variants and others that call
+ CacheLookup/MethodTableLookup or SaveRegisters/RestoreRegisters
+ without otherwise building a frame or clobbering callee-save registers
+
+ The macros build appropriate FDEs and tie them to the CIE.
+*/
+
+#define DW_CFA_offset 0x80
+#define DW_CFA_restore 0xc0
+#define DW_CFA_advance_loc4 0x4
+#define DW_CFA_same_value 0x8
+#define DW_CFA_def_cfa 0xc
+#define DW_CFA_def_cfa_register 0xd
+#define DW_CFA_def_cfa_offset 0xe
+#define DW_CFA_offset_extended_sf 0x11
+#define DW_CFA_def_cfa_offset_sf 0x13
+#define DW_rax 0
+#define DW_rdx 1
+#define DW_rcx 2
+#define DW_rsi 4
+#define DW_rdi 5
+#define DW_rbp 6
+#define DW_rsp 7
+#define DW_r8 8
+#define DW_r9 9
+#define DW_r10 10
+#define DW_ra 16
+#define DW_xmm0 17
+#define DW_xmm1 18
+#define DW_xmm2 19
+#define DW_xmm3 20
+#define DW_xmm4 21
+#define DW_xmm5 22
+#define DW_xmm6 23
+#define DW_xmm7 24
+#define DW_a1 DW_rdi
+#define DW_a2 DW_rsi
+#define DW_a3 DW_rdx
+#define DW_a4 DW_rcx
+#define DW_a5 DW_r8
+#define DW_a6 DW_r9
+
+// CIE
+// 8-byte data multiplier
+// 1-byte insn multiplier
+// PC-relative everything
+// No prologue
+
+ .section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms+live_support
+CIE:
+ .set L$set$0,LECIE1-LSCIE1
+ .long L$set$0 # Length of Common Information Entry
+LSCIE1:
+ .long 0 # CIE Identifier Tag
+ .byte 0x3 # CIE Version
+ .ascii "zPR\0" # CIE Augmentation: size + personality + FDE encoding
+ .byte 0x1 # uleb128 0x1; CIE Code Alignment Factor
+ .byte 0x78 # sleb128 -0x8; CIE Data Alignment Factor
+ .byte 0x10 # CIE RA Column
+	.byte	0x6	# uleb128 0x6; Augmentation size
+ // Personality augmentation
+ .byte 0x9b
+ .long ___objc_personality_v0+4@GOTPCREL
+ // FDE-encoding augmentation
+ .byte 0x10
+ // Prefix instructions
+ // CFA is %rsp+8
+ .byte DW_CFA_def_cfa
+ .byte DW_rsp
+ .byte 8
+ // RA is at 0(%rsp) aka -8(CFA)
+ .byte DW_CFA_offset | DW_ra
+ .byte 1
+
+ .align 3
+LECIE1:
+
+
+.macro EMIT_FDE
+
+ .section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms+live_support
+
+// FDE header
+.globl $0.eh
+$0.eh:
+LSFDE$0:
+ .set LLENFDE$0, LEFDE$0-LASFDE$0
+ .long LLENFDE$0 # FDE Length
+LASFDE$0:
+ .long LASFDE$0-CIE # FDE CIE offset
+ .quad LF0$0-. # FDE address start
+ .quad LLEN$0 # FDE address range
+ .byte 0x0 # uleb128 0x0; Augmentation size
+
+ // DW_START: set by CIE
+
+.if $2 == 1
+
+ // pushq %rbp
+ .byte DW_CFA_advance_loc4
+ .long LFLEN0$0+1
+ .byte DW_CFA_def_cfa_offset
+ .byte 16
+ .byte DW_CFA_offset | DW_rbp
+ .byte -16/-8
+ // movq %rsp, %rbp
+ .byte DW_CFA_advance_loc4
+ .long 3
+ .byte DW_CFA_def_cfa_register
+ .byte DW_rbp
+
+.endif
+
+ .align 3
+LEFDE$0:
+ .text
+
+.endmacro
+
+
+.macro DW_START
+LF0$0:
+.endmacro
+
+.macro DW_FRAME
+LF1$0:
+ .set LFLEN0$0, LF1$0-LF0$0
+.endmacro
+
+.macro DW_END
+ .set LLEN$0, .-LF0$0
+ EMIT_FDE $0, LLEN$0, 1
+.endmacro
+
+.macro DW_END2
+ .set LLEN$0, .-LF0$0
+ EMIT_FDE $0, LLEN$0, 2
.endmacro
/////////////////////////////////////////////////////////////////////
//
-// SaveRegisters
+// SaveRegisters caller
//
// Pushes a stack frame and saves all registers that might contain
// parameter values.
//
-// On entry:
-// $0 = 0 if normal, 1 if CacheLookup already saved a4, a5, a6
-// stack = ret
+// On entry: $0 = caller's symbol name for DWARF
+// stack = ret
//
// On exit:
-// %rsp is 16-byte aligned
+// %rsp is 16-byte aligned
//
/////////////////////////////////////////////////////////////////////
-/*
- * old->ret 0 +208
- * 16 ->rbp -8 +200
- * a6 -16 +192
- * a5 -24 +184
- * a4 -32 +176
- * a3 -40 +168
- * a2 -48 +160
- * a1 -56 +152
- * rax -64 +144
- * r10 -72 +136
- * pad -80 +128
- * xmm7 -88 +112
- * xmm6 -104 +96
- * xmm5 -120 +80
- * xmm4 -136 +64
- * xmm3 -152 +48
- * xmm2 -168 +32
- * xmm1 -184 +16
- * new->xmm0 -200 +0
- */
+
.macro SaveRegisters
-.if $0 == 0
- movq %a6, -16(%rsp)
- movq %a5, -24(%rsp)
- movq %a4, -32(%rsp)
-.else
- // a4-a6 already saved by CacheLookup
-.endif
- movq %a3, -40(%rsp)
- movq %a2, -48(%rsp)
- movq %a1, -56(%rsp)
- movq %rax, -64(%rsp) // might be xmm parameter count
- movq %r10, -72(%rsp) // fixme needed?
- // movq pad, -80(%rsp)
-
- subq $$ 128+88, %rsp
-
- // stack is now 16-byte aligned
- movdqa %xmm0, 0(%rsp)
- movdqa %xmm1, 16(%rsp)
- movdqa %xmm2, 32(%rsp)
- movdqa %xmm3, 48(%rsp)
- movdqa %xmm4, 64(%rsp)
- movdqa %xmm5, 80(%rsp)
- movdqa %xmm6, 96(%rsp)
- movdqa %xmm7, 112(%rsp)
+ DW_FRAME $0
+ enter $$0x80+8, $$0 // +8 for alignment
+ movdqa %xmm0, -0x80(%rbp)
+ push %rax // might be xmm parameter count
+ movdqa %xmm1, -0x70(%rbp)
+ push %a1
+ movdqa %xmm2, -0x60(%rbp)
+ push %a2
+ movdqa %xmm3, -0x50(%rbp)
+ push %a3
+ movdqa %xmm4, -0x40(%rbp)
+ push %a4
+ movdqa %xmm5, -0x30(%rbp)
+ push %a5
+ movdqa %xmm6, -0x20(%rbp)
+ push %a6
+ movdqa %xmm7, -0x10(%rbp)
.endmacro
/////////////////////////////////////////////////////////////////////
//
// Pops a stack frame pushed by SaveRegisters
//
-// On entry:
-// %rsp is unchanged since SaveRegisters
+// On entry: $0 = caller's symbol name for DWARF
+// %rbp unchanged since SaveRegisters
//
// On exit:
-// stack = ret
+// stack = ret
//
/////////////////////////////////////////////////////////////////////
.macro RestoreRegisters
- movdqa 0(%rsp), %xmm0
- movdqa 16(%rsp), %xmm1
- movdqa 32(%rsp), %xmm2
- movdqa 48(%rsp), %xmm3
- movdqa 64(%rsp), %xmm4
- movdqa 80(%rsp), %xmm5
- movdqa 96(%rsp), %xmm6
- movdqa 112(%rsp), %xmm7
-
- addq $$ 128+88, %rsp
-
- movq -16(%rsp), %a6
- movq -24(%rsp), %a5
- movq -32(%rsp), %a4
- movq -40(%rsp), %a3
- movq -48(%rsp), %a2
- movq -56(%rsp), %a1
- movq -64(%rsp), %rax
- movq -72(%rsp), %r10
- // movq -80(%rsp), pad
+ movdqa -0x80(%rbp), %xmm0
+ pop %a6
+ movdqa -0x70(%rbp), %xmm1
+ pop %a5
+ movdqa -0x60(%rbp), %xmm2
+ pop %a4
+ movdqa -0x50(%rbp), %xmm3
+ pop %a3
+ movdqa -0x40(%rbp), %xmm4
+ pop %a2
+ movdqa -0x30(%rbp), %xmm5
+ pop %a1
+ movdqa -0x20(%rbp), %xmm6
+ pop %rax
+ movdqa -0x10(%rbp), %xmm7
+ leave
.endmacro
/////////////////////////////////////////////////////////////////////
//
//
-// CacheLookup selectorRegister, cacheMissLabel, name
+// CacheLookup return-type
//
// Locate the implementation for a selector in a class method cache.
//
// Takes:
-// $0 = register containing selector (%a1 or %a2 ONLY)
-// cacheMissLabel = label to branch to iff method is not cached
+// $0 = NORMAL, FPRET, FP2RET, STRET
+// a2 or a3 (STRET) = selector
// %r11 = class whose cache is to be searched
-// stack = ret
//
-// On exit: (found) method triplet in %r11
-// (not found) jumps to cacheMissLabel
-// stack = ret
-//
+// On exit: (found) method in %r11, stack unchanged, eq/ne set for forwarding
+// (not found) jumps to LCacheMiss, %rax on stack
+//
/////////////////////////////////////////////////////////////////////
-
.macro CacheLookup
-// load variables and save caller registers.
-
- movq %a4, -32(%rsp) // save scratch registers in red zone
- movq %a5, -24(%rsp)
- movq %a6, -16(%rsp)
-
- movq cache(%r11), %a4 // cache = class->cache
-
-#if defined(OBJC_INSTRUMENTED)
- pushl %ebx // save non-volatile register
- pushl %eax // save cache pointer
- xorl %ebx, %ebx // probeCount = 0
-#endif
-
- leaq buckets(%a4), %a5 // buckets = &cache->buckets
- movl mask(%a4), %a6d
- shlq $$3, %a6 // %a6 = cache->mask << 3
- mov $0, %a4 // bytes = sel
- andq %a6, %a4 // bytes &= (mask << 3)
+ push %rax
+ movq cache(%r11), %r10 // cache = class->cache
+.if $0 != STRET
+ mov %a2d, %eax // index = sel
+.else
+ mov %a3d, %eax // index = sel
+.endif
// search the receiver's cache
// r11 = method (soon)
-// a4 = bytes
-// a5 = buckets
-// a6 = mask << 3
-// $0 = sel
-LMsgSendProbeCache_$1:
-#if defined(OBJC_INSTRUMENTED)
- addl $$1, %ebx // probeCount += 1
-#endif
- movq (%a5, %a4, 1), %r11 // method = buckets[bytes/8]
+// eax = index
+// r10 = cache
+// a2 or a3 = sel
+1:
+ andl mask(%r10), %eax // index &= mask
+ movq buckets(%r10, %rax, 8), %r11 // method = cache->buckets[index]
+ incl %eax // index++
testq %r11, %r11 // if (method == NULL)
-#if defined(OBJC_INSTRUMENTED)
- je LMsgSendCacheMiss_$1
-#else
- je $1 // goto cacheMissLabel
-#endif
-
- addq $$8, %a4 // bytes += 8
- andq %a6, %a4 // bytes &= (mask << 3)
- cmpq method_name(%r11), $0 // if (method_name != sel)
- jne LMsgSendProbeCache_$1 // goto loop
+ je LCacheMiss_f // goto cacheMissLabel
+.if $0 != STRET
+ cmpq method_name(%r11), %a2 // if (method_name != sel)
+.else
+ cmpq method_name(%r11), %a3 // if (method_name != sel)
+.endif
+ jne 1b // goto loop
// cache hit, r11 = method triplet
-#if defined(OBJC_INSTRUMENTED)
- jmp LMsgSendInstrumentCacheHit_$1
-LMsgSendCacheHit2_$1:
-#endif
-
// restore saved registers
- movq -32(%rsp), %a4
- movq -24(%rsp), %a5
- movq -16(%rsp), %a6
+ pop %rax
- // Done. Only instrumentation follows.
-
-#if defined(OBJC_INSTRUMENTED)
- jmp LMsgSendCacheDone_$1
-
-LMsgSendInstrumentCacheHit_$1:
- popl %edx // retrieve cache pointer
- movl mask(%edx), %esi // mask = cache->mask
- testl %esi, %esi // a mask of zero is only for the...
- je LMsgSendHitInstrumentDone_$1 // ... emptyCache, do not record anything
-
- // locate and update the CacheInstrumentation structure
- addl $$1, %esi // entryCount = mask + 1
- shll $$2, %esi // tableSize = entryCount * sizeof(entry)
- addl $buckets, %esi // offset = buckets + tableSize
- addl %edx, %esi // cacheData = &cache->buckets[mask+1]
-
- movl hitCount(%esi), %edi
- addl $$1, %edi
- movl %edi, hitCount(%esi) // cacheData->hitCount += 1
- movl hitProbes(%esi), %edi
- addl %ebx, %edi
- movl %edi, hitProbes(%esi) // cacheData->hitProbes += probeCount
- movl maxHitProbes(%esi), %edi// if (cacheData->maxHitProbes < probeCount)
- cmpl %ebx, %edi
- jge LMsgSendMaxHitProbeOK_$1
- movl %ebx, maxHitProbes(%esi)// cacheData->maxHitProbes = probeCount
-LMsgSendMaxHitProbeOK_$1:
-
- // update cache hit probe histogram
- cmpl $CACHE_HISTOGRAM_SIZE, %ebx // pin probeCount to max index
- jl LMsgSendHitHistoIndexSet_$1
- movl $(CACHE_HISTOGRAM_SIZE-1), %ebx
-LMsgSendHitHistoIndexSet_$1:
- LEA_STATIC_DATA %esi, _CacheHitHistogram, EXTERNAL_SYMBOL
- shll $$2, %ebx // convert probeCount to histogram index
- addl %ebx, %esi // calculate &CacheHitHistogram[probeCount<<2]
- movl 0(%esi), %edi // get current tally
- addl $$1, %edi //
- movl %edi, 0(%esi) // tally += 1
-LMsgSendHitInstrumentDone_$1:
- popl %ebx // restore non-volatile register
- jmp LMsgSendCacheHit2_$1
-
-
-LMsgSendCacheMiss_$1:
- popl %edx // retrieve cache pointer
- movl mask(%edx), %esi // mask = cache->mask
- testl %esi, %esi // a mask of zero is only for the...
- je LMsgSendMissInstrumentDone_$1 // ... emptyCache, do not record anything
-
- // locate and update the CacheInstrumentation structure
- addl $$1, %esi // entryCount = mask + 1
- shll $$2, %esi // tableSize = entryCount * sizeof(entry)
- addl $buckets, %esi // offset = buckets + tableSize
- addl %edx, %esi // cacheData = &cache->buckets[mask+1]
-
- movl missCount(%esi), %edi //
- addl $$1, %edi //
- movl %edi, missCount(%esi) // cacheData->missCount += 1
- movl missProbes(%esi), %edi //
- addl %ebx, %edi //
- movl %edi, missProbes(%esi) // cacheData->missProbes += probeCount
- movl maxMissProbes(%esi), %edi// if (cacheData->maxMissProbes < probeCount)
- cmpl %ebx, %edi //
- jge LMsgSendMaxMissProbeOK_$1 //
- movl %ebx, maxMissProbes(%esi)// cacheData->maxMissProbes = probeCount
-LMsgSendMaxMissProbeOK_$1:
-
- // update cache miss probe histogram
- cmpl $CACHE_HISTOGRAM_SIZE, %ebx // pin probeCount to max index
- jl LMsgSendMissHistoIndexSet_$1
- movl $(CACHE_HISTOGRAM_SIZE-1), %ebx
-LMsgSendMissHistoIndexSet_$1:
- LEA_STATIC_DATA %esi, _CacheMissHistogram, EXTERNAL_SYMBOL
- shll $$2, %ebx // convert probeCount to histogram index
- addl %ebx, %esi // calculate &CacheMissHistogram[probeCount<<2]
- movl 0(%esi), %edi // get current tally
- addl $$1, %edi //
- movl %edi, 0(%esi) // tally += 1
-LMsgSendMissInstrumentDone_$1:
- popl %ebx // restore non-volatile register
- jmp $0
-
-LMsgSendCacheDone_$1:
-#endif
+.if $0 != STRET
+ // eq (non-stret) flag already set above
+.else
+ // set ne (stret) for forwarding; r11 != 0
+ test %r11, %r11
+.endif
-
.endmacro
/////////////////////////////////////////////////////////////////////
//
-// MethodTableLookup classRegister, selectorRegister
+// MethodTableLookup classRegister, selectorRegister, fn
//
-// Takes: $0 = class to search (%a1 or %a2 or %r11 ONLY)
-// $1 = selector to search for (%a2 or %a3 ONLY)
+// Takes: $0 = receiver to look up (a1 or a2 or r10 ONLY)
+// $1 = selector to search for (a2 or a3 ONLY)
+// $2 = caller's symbol name for DWARF
+// r11 = class to search
//
-// Stack: ret (%rsp+0), pad, %a4, %a5, %a6 (saved by CacheLookup)
+// Stack: ret, rax (pushed by CacheLookup)
//
-// On exit: restores registers saved by CacheLookup
+// On exit: pops registers pushed by CacheLookup
// imp in %r11
//
/////////////////////////////////////////////////////////////////////
.macro MethodTableLookup
+
+ pop %rax // saved by CacheLookup
+ SaveRegisters $2
- SaveRegisters 1
+ // _class_lookupMethodAndLoadCache3(receiver, selector, class)
- // _class_lookupMethodAndLoadCache(class, selector)
movq $0, %a1
movq $1, %a2
- call __class_lookupMethodAndLoadCache
+ movq %r11, %a3
+ call __class_lookupMethodAndLoadCache3
// IMP is now in %rax
movq %rax, %r11
- RestoreRegisters
+ RestoreRegisters $2
.endmacro
+/////////////////////////////////////////////////////////////////////
+//
+// GetIsa return-type
+// GetIsaFast return-type
+// GetIsaSupport return-type
+//
+// Sets r11 = obj->isa. Consults the tagged isa table if necessary.
+//
+// Takes: $0 = NORMAL or FPRET or FP2RET or STRET
+// a1 or a2 (STRET) = receiver
+//
+// On exit: r11 = receiver->isa
+// r10 is clobbered
+//
+/////////////////////////////////////////////////////////////////////
+
+.macro GetIsa
+
+.if $0 != STRET
+ testb $$1, %a1b
+ jnz 1f
+ movq (%a1), %r11
+ jmp 2f
+1: movl %a1d, %r10d
+.else
+ testb $$1, %a2b
+ jnz 1f
+ movq (%a2), %r11
+ jmp 2f
+1: movl %a2d, %r10d
+.endif
+ andl $$0xF, %r10d
+ leaq __objc_tagged_isa_table(%rip), %r11
+ movq (%r11, %r10, 8), %r11 // read isa from table
+2:
+.endmacro
+
+.macro GetIsaFast
+.if $0 != STRET
+ testb $$1, %a1b
+ .byte 0x2e // harmless branch hint prefix to align IFETCH blocks
+ jnz LGetIsaSlow_f
+ movq (%a1), %r11
+.else
+ testb $$1, %a2b
+ .byte 0x2e // harmless branch hint prefix to align IFETCH blocks
+ jnz LGetIsaSlow_f
+ movq (%a2), %r11
+.endif
+LGetIsaDone:
+.endmacro
+
+.macro GetIsaSupport
+LGetIsaSlow:
+ leaq __objc_tagged_isa_table(%rip), %r11
+.if $0 != STRET
+ movl %a1d, %r10d
+.else
+ movl %a2d, %r10d
+.endif
+ andl $$0xF, %r10d
+ movq (%r11, %r10, 8), %r11 // read isa from table
+ jmp LGetIsaDone_b
+.endmacro
+
+/////////////////////////////////////////////////////////////////////
+//
+// NilTest return-type
+//
+// Takes: $0 = NORMAL or FPRET or FP2RET or STRET
+// %a1 or %a2 (STRET) = receiver
+//
+// On exit: Loads non-nil receiver in %a1 or %a2 (STRET), or returns zero.
+//
+// NilTestSupport return-type
+//
+// Takes: $0 = NORMAL or FPRET or FP2RET or STRET
+// %a1 or %a2 (STRET) = receiver
+//
+// On exit: Loads non-nil receiver in %a1 or %a2 (STRET), or returns zero.
+//
+/////////////////////////////////////////////////////////////////////
+
+.macro NilTest
+.if $0 != STRET
+ testq %a1, %a1
+.else
+ testq %a2, %a2
+.endif
+ jz LNilTestSlow_f
+LNilTestDone:
+.endmacro
+
+.macro NilTestSupport
+ .align 3
+LNilTestSlow:
+.if $0 != STRET
+ movq __objc_nilReceiver(%rip), %a1
+ testq %a1, %a1 // if (receiver != nil)
+.else
+ movq __objc_nilReceiver(%rip), %a2
+ testq %a2, %a2 // if (receiver != nil)
+.endif
+ jne LNilTestDone_b // send to new receiver
+
+.if $0 == FPRET
+ fldz
+.elseif $0 == FP2RET
+ fldz
+ fldz
+.endif
+.if $0 != STRET
+ xorl %eax, %eax
+ xorl %edx, %edx
+ xorps %xmm0, %xmm0
+ xorps %xmm1, %xmm1
+.endif
+ ret
+.endmacro
+
/********************************************************************
- * Method _cache_getMethod(Class cls, SEL sel, IMP objc_msgForward_imp)
+ * Method _cache_getMethod(Class cls, SEL sel, IMP msgForward_internal_imp)
*
* On entry: a1 = class whose cache is to be searched
* a2 = selector to search for
- * a3 = _objc_msgForward IMP
+ * a3 = _objc_msgForward_internal IMP
*
* If found, returns method triplet pointer.
* If not found, returns NULL.
*
* NOTE: _cache_getMethod never returns any cache entry whose implementation
- * is _objc_msgForward. It returns NULL instead. This prevents thread-
- * safety and memory management bugs in _class_lookupMethodAndLoadCache.
+ * is _objc_msgForward_internal. It returns 1 instead. This prevents thread-
+ * safety and memory management bugs in _class_lookupMethodAndLoadCache.
* See _class_lookupMethodAndLoadCache for details.
*
- * _objc_msgForward is passed as a parameter because it's more efficient
- * to do the (PIC) lookup once in the caller than repeatedly here.
+ * _objc_msgForward_internal is passed as a parameter because it's more
+ * efficient to do the (PIC) lookup once in the caller than repeatedly here.
********************************************************************/
-
- ENTRY __cache_getMethod
+
+ STATIC_ENTRY __cache_getMethod
+ DW_START __cache_getMethod
// do lookup
movq %a1, %r11 // move class to r11 for CacheLookup
- CacheLookup %a2, LGetMethodMiss
+ CacheLookup NORMAL
// cache hit, method triplet in %r11
- cmpq method_imp(%r11), %a3 // if (imp == _objc_msgForward)
- je LGetMethodMiss // return nil
+ cmpq method_imp(%r11), %a3 // if (imp==_objc_msgForward_internal)
+ je 1f // return (Method)1
movq %r11, %rax // return method triplet address
ret
+1: movl $1, %eax
+ ret
-LGetMethodMiss:
+LCacheMiss:
// cache miss, return nil
- xorq %rax, %rax // erase %rax
+ pop %rax // pushed by CacheLookup
+ xorl %eax, %eax
ret
LGetMethodExit:
- END_ENTRY __cache_getMethod
+ DW_END2 __cache_getMethod
+ END_ENTRY __cache_getMethod
/********************************************************************
* If not found, returns NULL.
********************************************************************/
- ENTRY __cache_getImp
+ STATIC_ENTRY __cache_getImp
+ DW_START __cache_getImp
// do lookup
movq %a1, %r11 // move class to r11 for CacheLookup
- CacheLookup %a2, LGetImpMiss
+ CacheLookup NORMAL
// cache hit, method triplet in %r11
movq method_imp(%r11), %rax // return method imp address
ret
-LGetImpMiss:
+LCacheMiss:
// cache miss, return nil
- xorq %rax, %rax // erase %rax
+ pop %rax // pushed by CacheLookup
+ xorl %eax, %eax
ret
LGetImpExit:
- END_ENTRY __cache_getImp
+ DW_END2 __cache_getImp
+ END_ENTRY __cache_getImp
/********************************************************************
*
********************************************************************/
+ .data
+ .align 3
+ .private_extern __objc_tagged_isa_table
+__objc_tagged_isa_table:
+ .fill 16, 8, 0
+
ENTRY _objc_msgSend
- CALL_MCOUNTER
+ DW_START _objc_msgSend
-// check whether selector is ignored
- cmpq $ kIgnore, %a2
- je LMsgSendReturnSelf // ignore and return self
+ NilTest NORMAL
-// check whether receiver is nil
- testq %a1, %a1
- je LMsgSendNilSelf
-
-// receiver (in %a1) is non-nil: search the cache
-LMsgSendReceiverOk:
- movq isa(%a1), %r11 // class = self->isa
- CacheLookup %a2, LMsgSendCacheMiss
- // CacheLookup placed method in r11
- movq method_imp(%r11), %r11
- jmp *%r11 // goto *imp
+ GetIsaFast NORMAL // r11 = self->isa
+ CacheLookup NORMAL // r11 = method, eq set (nonstret fwd)
+ jmp *method_imp(%r11) // goto *imp
-// cache miss: go search the method lists
-LMsgSendCacheMiss:
- MethodTableLookup isa(%a1), %a2
- // MethodTableLookup placed IMP in r11
- jmp *%r11 // goto *imp
+ NilTestSupport NORMAL
-// message sent to nil: redirect to nil receiver, if any
-LMsgSendNilSelf:
- movq __objc_nilReceiver(%rip), %a1
- testq %a1, %a1 // if (receiver != nil)
- jne LMsgSendReceiverOk // send to new receiver
+ GetIsaSupport NORMAL
- // message sent to nil - return 0
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
-
-LMsgSendReturnSelf:
- movq %a1, %rax
- ret
+// cache miss: go search the method lists
+LCacheMiss:
+ GetIsa NORMAL // r11 = self->isa
+ MethodTableLookup %a1, %a2, _objc_msgSend // r11 = IMP
+ cmp %r11, %r11 // set eq (nonstret) for forwarding
+ jmp *%r11 // goto *imp
-LMsgSendExit:
+ DW_END _objc_msgSend
END_ENTRY _objc_msgSend
#if __OBJC2__
ENTRY _objc_msgSend_fixup
+ DW_START _objc_msgSend_fixup
- testq %a1, %a1
- je LMsgSendFixupNilSelf
+ NilTest NORMAL
+
+ SaveRegisters _objc_msgSend_fixup
+
+ // Dereference obj/isa/cache to crash before _objc_fixupMessageRef
+ movq 8(%a2), %a6 // selector
+ GetIsa NORMAL // r11 = isa = *receiver
+ movq cache(%r11), %a5 // cache = *isa
+ movq mask(%a5), %a4 // *cache
- SaveRegisters 0
// a1 = receiver
// a2 = address of message ref
movq %a2, %a3
- movq $0, %a2
+ xorl %a2d, %a2d
// __objc_fixupMessageRef(receiver, 0, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+
+ RestoreRegisters _objc_msgSend_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a2), %a2
+ cmp %r11, %r11 // set nonstret (eq) for forwarding
jmp *%r11
-LMsgSendFixupNilSelf:
- // message sent to nil - return 0
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
+ NilTestSupport NORMAL
- END_ENTRY _objc_msgSend_fixup
+ DW_END _objc_msgSend_fixup
+ END_ENTRY _objc_msgSend_fixup
- ENTRY _objc_msgSend_fixedup
+ STATIC_ENTRY _objc_msgSend_fixedup
// Load _cmd from the message_ref
movq 8(%a2), %a2
jmp _objc_msgSend
********************************************************************/
ENTRY _objc_msgSendSuper
- CALL_MCOUNTER
-
-// check whether selector is ignored
- cmpq $ kIgnore, %a2
- je LMsgSendSuperReturnSelf
+ DW_START _objc_msgSendSuper
// search the cache (objc_super in %a1)
movq class(%a1), %r11 // class = objc_super->class
- CacheLookup %a2, LMsgSendSuperCacheMiss
- // CacheLookup placed method in r11
- movq method_imp(%r11), %r11
+ CacheLookup NORMAL // r11 = method, eq set (nonstret fwd)
movq receiver(%a1), %a1 // load real receiver
- jmp *%r11 // goto *imp
+ jmp *method_imp(%r11) // goto *imp
// cache miss: go search the method lists
-LMsgSendSuperCacheMiss:
- MethodTableLookup class(%a1), %a2
- // MethodTableLookup placed IMP in r11
+LCacheMiss:
+ movq receiver(%a1), %r10
+ movq class(%a1), %r11
+ MethodTableLookup %r10, %a2, _objc_msgSendSuper // r11 = IMP
movq receiver(%a1), %a1 // load real receiver
+ cmp %r11, %r11 // set eq (nonstret) for forwarding
jmp *%r11 // goto *imp
-
-LMsgSendSuperReturnSelf:
- movq receiver(%a1), %rax
- ret
-LMsgSendSuperExit:
+ DW_END _objc_msgSendSuper
END_ENTRY _objc_msgSendSuper
+
+/********************************************************************
+ * id objc_msgSendSuper2
+ ********************************************************************/
+
#if __OBJC2__
ENTRY _objc_msgSendSuper2_fixup
+ DW_START _objc_msgSendSuper2_fixup
- SaveRegisters 0
+ SaveRegisters _objc_msgSendSuper2_fixup
// a1 = address of objc_super2
// a2 = address of message ref
movq %a2, %a3
// __objc_fixupMessageRef(receiver, objc_super, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+ RestoreRegisters _objc_msgSendSuper2_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a2), %a2
// Load receiver from objc_super2
movq receiver(%a1), %a1
+ cmp %r11, %r11 // set nonstret (eq) for forwarding
jmp *%r11
- END_ENTRY _objc_msgSendSuper2_fixup
+ DW_END _objc_msgSendSuper2_fixup
+ END_ENTRY _objc_msgSendSuper2_fixup
- ENTRY _objc_msgSendSuper2_fixedup
- // objc_super->class is superclass of class to search
- movq class(%a1), %r11 // cls = objc_super->class
+ STATIC_ENTRY _objc_msgSendSuper2_fixedup
movq 8(%a2), %a2 // load _cmd from message_ref
- movq 8(%r11), %r11 // cls = cls->superclass
- movq %r11, class(%a1)
- // objc_super->class is now the class to search
- jmp _objc_msgSendSuper
+ jmp _objc_msgSendSuper2
END_ENTRY _objc_msgSendSuper2_fixedup
+
+
+ ENTRY _objc_msgSendSuper2
+ DW_START _objc_msgSendSuper2
+ // objc_super->class is superclass of class to search
+
+// search the cache (objc_super in %a1)
+ movq class(%a1), %r11 // cls = objc_super->class
+ movq 8(%r11), %r11 // cls = class->superclass
+ CacheLookup NORMAL // r11 = method, eq set (nonstret fwd)
+ movq receiver(%a1), %a1 // load real receiver
+ jmp *method_imp(%r11) // goto *imp
+
+// cache miss: go search the method lists
+LCacheMiss:
+ movq receiver(%a1), %r10
+ movq class(%a1), %r11
+ movq 8(%r11), %r11
+ MethodTableLookup %r10, %a2, _objc_msgSendSuper2 // r11 = IMP
+ movq receiver(%a1), %a1 // load real receiver
+ cmp %r11, %r11 // set eq (nonstret) for forwarding
+ jmp *%r11 // goto *imp
+
+ DW_END _objc_msgSendSuper2
+ END_ENTRY _objc_msgSendSuper2
#endif
********************************************************************/
ENTRY _objc_msgSend_fpret
- CALL_MCOUNTER
+ DW_START _objc_msgSend_fpret
-// check whether selector is ignored
- cmpq $ kIgnore, %a2
- je LMsgSendFpretReturnZero
+ NilTest FPRET
-// check whether receiver is nil
- testq %a1, %a1
- je LMsgSendFpretNilSelf
-
-// receiver (in %a1) is non-nil: search the cache
-LMsgSendFpretReceiverOk:
- movq isa(%a1), %r11 // class = self->isa
- CacheLookup %a2, LMsgSendFpretCacheMiss
- // CacheLookup placed method in r11
- movq method_imp(%r11), %r11
- jmp *%r11 // goto *imp
+ GetIsaFast FPRET // r11 = self->isa
+ CacheLookup FPRET // r11 = method, eq set (nonstret fwd)
+ jmp *method_imp(%r11) // goto *imp
-// cache miss: go search the method lists
-LMsgSendFpretCacheMiss:
- MethodTableLookup isa(%a1), %a2
- // MethodTableLookup placed IMP in r11
- jmp *%r11 // goto *imp
+ NilTestSupport FPRET
-// message sent to nil: redirect to nil receiver, if any
-LMsgSendFpretNilSelf:
-1: movq __objc_nilReceiver(%rip),%a1
- testq %a1, %a1 // if (receiver != nil)
- jne LMsgSendFpretReceiverOk // send to new receiver
+ GetIsaSupport FPRET
-LMsgSendFpretReturnZero:
- // Long double return.
- fldz
- // Clear int and float/double return too.
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
+// cache miss: go search the method lists
+LCacheMiss:
+ GetIsa FPRET // r11 = self->isa
+ MethodTableLookup %a1, %a2, _objc_msgSend_fpret // r11 = IMP
+ cmp %r11, %r11 // set eq (nonstret) for forwarding
+ jmp *%r11 // goto *imp
-LMsgSendFpretExit:
+ DW_END _objc_msgSend_fpret
END_ENTRY _objc_msgSend_fpret
#if __OBJC2__
+// fpret fixup dispatch: resolves a message_ref via __objc_fixupMessageRef,
+// then tail-calls the resulting IMP (r11). The early obj/isa/cache loads
+// exist only to fault on bad objects before the C call (see comment below).
ENTRY _objc_msgSend_fpret_fixup
+ DW_START _objc_msgSend_fpret_fixup
- testq %a1, %a1
- je LMsgSendFpretFixupNilSelf
+ NilTest FPRET
+
+ SaveRegisters _objc_msgSend_fpret_fixup
+
+ // Dereference obj/isa/cache to crash before _objc_fixupMessageRef
+ movq 8(%a2), %a6 // selector
+ GetIsa FPRET // r11 = isa = *receiver
+ movq cache(%r11), %a5 // cache = *isa
+ movq mask(%a5), %a4 // *cache
- SaveRegisters 0
// a1 = receiver
// a2 = address of message ref
movq %a2, %a3
- movq $0, %a2
+ xorl %a2d, %a2d
// __objc_fixupMessageRef(receiver, 0, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+
+ RestoreRegisters _objc_msgSend_fpret_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a2), %a2
+ cmp %r11, %r11 // set nonstret (eq) for forwarding
jmp *%r11
-LMsgSendFpretFixupNilSelf:
- // Long double return.
- fldz
- // Clear int and float/double return too.
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
+ NilTestSupport FPRET
- END_ENTRY _objc_msgSend_fpret_fixup
+ DW_END _objc_msgSend_fpret_fixup
+ END_ENTRY _objc_msgSend_fpret_fixup
+// Already-fixed-up path: the message_ref holds the resolved SEL at
+// offset 8; load it into %a2 and tail-call the ordinary fpret messenger.
+// (END_ENTRY for this entry lies outside this excerpt.)
- ENTRY _objc_msgSend_fpret_fixedup
+ STATIC_ENTRY _objc_msgSend_fpret_fixedup
// Load _cmd from the message_ref
movq 8(%a2), %a2
jmp _objc_msgSend_fpret
********************************************************************/
+// _Complex long double (fp2ret) variant of objc_msgSend — the removed
+// nil path returned two x87 zeros (fldz; fldz). Same macro-based
+// structure as _objc_msgSend_fpret; eq = non-stret forwarding.
ENTRY _objc_msgSend_fp2ret
- CALL_MCOUNTER
+ DW_START _objc_msgSend_fp2ret
-// check whether selector is ignored
- cmpq $ kIgnore, %a2
- je LMsgSendFp2retReturnZero
+ NilTest FP2RET
-// check whether receiver is nil
- testq %a1, %a1
- je LMsgSendFp2retNilSelf
-
-// receiver (in %a1) is non-nil: search the cache
-LMsgSendFp2retReceiverOk:
- movq isa(%a1), %r11 // class = self->isa
- CacheLookup %a2, LMsgSendFp2retCacheMiss
- // CacheLookup placed method in r11
- movq method_imp(%r11), %r11
- jmp *%r11 // goto *imp
+ GetIsaFast FP2RET // r11 = self->isa
+ CacheLookup FP2RET // r11 = method, eq set (nonstret fwd)
+ jmp *method_imp(%r11) // goto *imp
+ NilTestSupport FP2RET
+
+ GetIsaSupport FP2RET
+
// cache miss: go search the method lists
-LMsgSendFp2retCacheMiss:
- MethodTableLookup isa(%a1), %a2
- // MethodTableLookup placed IMP in r11
+LCacheMiss:
+ GetIsa FP2RET // r11 = self->isa
+ MethodTableLookup %a1, %a2, _objc_msgSend_fp2ret // r11 = IMP
+ cmp %r11, %r11 // set eq (nonstret) for forwarding
jmp *%r11 // goto *imp
-// message sent to nil: redirect to nil receiver, if any
-LMsgSendFp2retNilSelf:
-1: movq __objc_nilReceiver(%rip),%a1
- testq %a1, %a1 // if (receiver != nil)
- jne LMsgSendFp2retReceiverOk // send to new receiver
-
-LMsgSendFp2retReturnZero:
- // complex long double return.
- fldz
- fldz
- // Clear int and float/double return too.
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
-
-LMsgSendFp2retExit:
+ DW_END _objc_msgSend_fp2ret
END_ENTRY _objc_msgSend_fp2ret
#if __OBJC2__
+// fp2ret fixup dispatch: resolves the message_ref via
+// __objc_fixupMessageRef and tail-calls the resulting IMP; the early
+// obj/isa/cache loads fault on bad objects before the C call.
ENTRY _objc_msgSend_fp2ret_fixup
+ DW_START _objc_msgSend_fp2ret_fixup
- testq %a1, %a1
- je LMsgSendFp2retFixupNilSelf
+ NilTest FP2RET
+
+ SaveRegisters _objc_msgSend_fp2ret_fixup
- SaveRegisters 0
+ // Dereference obj/isa/cache to crash before _objc_fixupMessageRef
+ movq 8(%a2), %a6 // selector
+ GetIsa FP2RET // r11 = isa = *receiver
+ movq cache(%r11), %a5 // cache = *isa
+ movq mask(%a5), %a4 // *cache
+
// a1 = receiver
// a2 = address of message ref
movq %a2, %a3
- movq $0, %a2
+ xorl %a2d, %a2d
// __objc_fixupMessageRef(receiver, 0, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+
+ RestoreRegisters _objc_msgSend_fp2ret_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a2), %a2
+ cmp %r11, %r11 // set nonstret (eq) for forwarding
jmp *%r11
-LMsgSendFp2retFixupNilSelf:
- // complex long double return.
- fldz
- fldz
- // Clear int and float/double return too.
- movq $0, %rax
- movq $0, %rdx
- xorps %xmm0, %xmm0
- xorps %xmm1, %xmm1
- ret
+ NilTestSupport FP2RET
- END_ENTRY _objc_msgSend_fp2ret_fixup
+ DW_END _objc_msgSend_fp2ret_fixup
+ END_ENTRY _objc_msgSend_fp2ret_fixup
+// Already-fixed-up path: load the resolved SEL from the message_ref and
+// tail-call the plain fp2ret messenger.
+// (END_ENTRY for this entry lies outside this excerpt.)
- ENTRY _objc_msgSend_fp2ret_fixedup
+ STATIC_ENTRY _objc_msgSend_fp2ret_fixedup
// Load _cmd from the message_ref
movq 8(%a2), %a2
jmp _objc_msgSend_fp2ret
********************************************************************/
+// Struct-return variant: %a1 holds the struct-return address, so the
+// receiver is in %a2 and _cmd in %a3. The ne flag (test of nonzero r11)
+// marks stret for the forwarding machinery, replacing the old
+// L_objc_msgForward pointer comparison.
ENTRY _objc_msgSend_stret
- CALL_MCOUNTER
+ DW_START _objc_msgSend_stret
-// check whether receiver is nil
- testq %a2, %a2
- je LMsgSendStretNilSelf
-
-// receiver (in %a2) is non-nil: search the cache
-LMsgSendStretReceiverOk:
- movq isa(%a2), %r11 // class = self->isa
- CacheLookup %a3, LMsgSendStretCacheMiss
- // CacheLookup placed method in %r11
- movq method_imp(%r11), %r11
-LMsgSendStretCallImp:
- cmpq %r11, L_objc_msgForward(%rip) // if imp == _objc_msgForward
- je __objc_msgForward_stret // call struct-returning fwd
- jmp *%r11 // else goto *imp
+ NilTest STRET
-// cache miss: go search the method lists
-LMsgSendStretCacheMiss:
- MethodTableLookup isa(%a2), %a3
- // MethodTableLookup placed IMP in r11
- jmp LMsgSendStretCallImp
+ GetIsaFast STRET // r11 = self->isa
+ CacheLookup STRET // r11 = method, ne set (stret fwd)
+ jmp *method_imp(%r11) // goto *imp
-// message sent to nil: redirect to nil receiver, if any
-LMsgSendStretNilSelf:
- movq __objc_nilReceiver(%rip), %a2
- testq %a2, %a2 // if (receiver != nil)
- jne LMsgSendStretReceiverOk // send to new receiver
- ret // else just return
+ NilTestSupport STRET
-LMsgSendStretExit:
+ GetIsaSupport STRET
+
+// cache miss: go search the method lists
+LCacheMiss:
+ GetIsa STRET // r11 = self->isa
+ MethodTableLookup %a2, %a3, _objc_msgSend_stret // r11 = IMP
+ test %r11, %r11 // set ne (stret) for forward; r11!=0
+ jmp *%r11 // goto *imp
+
+ DW_END _objc_msgSend_stret
END_ENTRY _objc_msgSend_stret
#if __OBJC2__
+// stret fixup dispatch: receiver is %a2, message_ref is %a3 (struct
+// address occupies %a1). Resolves the ref via __objc_fixupMessageRef,
+// then tail-calls the IMP with ne set (stret forwarding convention).
ENTRY _objc_msgSend_stret_fixup
+ DW_START _objc_msgSend_stret_fixup
- testq %a2, %a2
- je LMsgSendStretFixupNilSelf
+ NilTest STRET
+
+ SaveRegisters _objc_msgSend_stret_fixup
+
+ // Dereference obj/isa/cache to crash before _objc_fixupMessageRef
+ movq 8(%a3), %a6 // selector
+ GetIsa STRET // r11 = isa = *receiver
+ movq cache(%r11), %a5 // cache = *isa
+ movq mask(%a5), %a4 // *cache
- SaveRegisters 0
// a2 = receiver
// a3 = address of message ref
movq %a2, %a1
- movq $0, %a2
+ xorl %a2d, %a2d
// __objc_fixupMessageRef(receiver, 0, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+
+ RestoreRegisters _objc_msgSend_stret_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a3), %a3
- cmpq %r11, L_objc_msgForward(%rip) // if imp == _objc_msgForward
- je __objc_msgForward_stret // call struct-returning fwd
- jmp *%r11 // else goto *imp
+ test %r11, %r11 // set stret (ne) for forward; r11!=0
+ jmp *%r11 // goto *imp
-LMsgSendStretFixupNilSelf:
- ret
+ NilTestSupport STRET
- END_ENTRY _objc_msgSend_stret_fixup
+ DW_END _objc_msgSend_stret_fixup
+ END_ENTRY _objc_msgSend_stret_fixup
+// Already-fixed-up stret path: _cmd lives at offset 8 of the
+// message_ref in %a3 (not %a2, because %a1 is the struct address).
+// (END_ENTRY for this entry lies outside this excerpt.)
- ENTRY _objc_msgSend_stret_fixedup
+ STATIC_ENTRY _objc_msgSend_stret_fixedup
// Load _cmd from the message_ref
movq 8(%a3), %a3
jmp _objc_msgSend_stret
********************************************************************/
+// Struct-return msgSendSuper: %a2 is the objc_super pointer; the search
+// starts at objc_super->class and the real receiver is re-loaded into
+// %a2 before jumping to the IMP. No nil test — super sends take the
+// receiver from the objc_super struct.
ENTRY _objc_msgSendSuper_stret
- CALL_MCOUNTER
+ DW_START _objc_msgSendSuper_stret
// search the cache (objc_super in %a2)
- movq class(%a2), %r11 // class = objc_super->class
- CacheLookup %a3, LMsgSendSuperStretCacheMiss
- // CacheLookup placed method in %r11
- movq method_imp(%r11), %r11
-LMsgSendSuperStretCallImp:
- movq receiver(%a2), %a2 // load real receiver
- cmpq %r11, L_objc_msgForward(%rip) // if imp == _objc_msgForward
- je __objc_msgForward_stret // call struct-returning fwd
- jmp *%r11 // else goto *imp
+ movq class(%a2), %r11 // class = objc_super->class
+ CacheLookup STRET // r11 = method, ne set (stret fwd)
+ movq receiver(%a2), %a2 // load real receiver
+ jmp *method_imp(%r11) // goto *imp
// cache miss: go search the method lists
-LMsgSendSuperStretCacheMiss:
- MethodTableLookup class(%a2), %a3
- // MethodTableLookup placed IMP in r11
- jmp LMsgSendSuperStretCallImp
+LCacheMiss:
+ movq receiver(%a2), %r10
+ movq class(%a2), %r11
+ MethodTableLookup %r10, %a3, _objc_msgSendSuper_stret // r11 = IMP
+ movq receiver(%a2), %a2 // load real receiver
+ test %r11, %r11 // set ne (stret) for forward; r11!=0
+ jmp *%r11 // goto *imp
-LMsgSendSuperStretExit:
+ DW_END _objc_msgSendSuper_stret
END_ENTRY _objc_msgSendSuper_stret
+
+/********************************************************************
+ * id objc_msgSendSuper2_stret
+ ********************************************************************/
+
#if __OBJC2__
+// super2-stret fixup dispatch. Receiver comes from the objc_super2
+// struct (no NilTest here — matches the other super messengers).
ENTRY _objc_msgSendSuper2_stret_fixup
+ DW_START _objc_msgSendSuper2_stret_fixup
- SaveRegisters 0
+ SaveRegisters _objc_msgSendSuper2_stret_fixup
// a2 = address of objc_super2
// a3 = address of message ref
movq receiver(%a2), %a1
// __objc_fixupMessageRef(receiver, objc_super, ref)
call __objc_fixupMessageRef
movq %rax, %r11
- RestoreRegisters
+ RestoreRegisters _objc_msgSendSuper2_stret_fixup
// imp is in r11
// Load _cmd from the message_ref
movq 8(%a3), %a3
// Load receiver from objc_super2
movq receiver(%a2), %a2
- cmpq %r11, L_objc_msgForward(%rip) // if imp == _objc_msgForward
- je __objc_msgForward_stret // call struct-returning fwd
- jmp *%r11 // else goto *imp
+ test %r11, %r11 // set stret (ne) for forward; r11!=0
+ jmp *%r11 // goto *imp
- END_ENTRY _objc_msgSendSuper2_stret_fixup
+ DW_END _objc_msgSendSuper2_stret_fixup
+ END_ENTRY _objc_msgSendSuper2_stret_fixup
+// Already-fixed-up path: load _cmd and tail-call
+// _objc_msgSendSuper2_stret, which performs the superclass lookup
+// itself. The removed code mutated objc_super->class in place
+// (movq %r11, class(%a2)) before jumping to the plain super messenger —
+// delegating instead avoids writing back into the caller's struct.
- ENTRY _objc_msgSendSuper2_stret_fixedup
- // objc_super->class is superclass of class to search
- movq class(%a2), %r11 // cls = objc_super->class
+ STATIC_ENTRY _objc_msgSendSuper2_stret_fixedup
movq 8(%a3), %a3 // load _cmd from message_ref
- movq 8(%r11), %r11 // cls = cls->superclass
- movq %r11, class(%a2)
- // objc_super->class is now the class to search
- jmp _objc_msgSendSuper_stret
+ jmp _objc_msgSendSuper2_stret
END_ENTRY _objc_msgSendSuper2_stret_fixedup
+
+
+// Like _objc_msgSendSuper_stret, except objc_super->class holds the
+// *current* class: dispatch starts at its superclass, fetched from
+// offset 8 (class->super_class, per the inline comment below).
+ ENTRY _objc_msgSendSuper2_stret
+ DW_START _objc_msgSendSuper2_stret
+
+// search the cache (objc_super in %a2)
+ movq class(%a2), %r11 // class = objc_super->class
+ movq 8(%r11), %r11 // class = class->super_class
+ CacheLookup STRET // r11 = method, ne set (stret fwd)
+ movq receiver(%a2), %a2 // load real receiver
+ jmp *method_imp(%r11) // goto *imp
+
+// cache miss: go search the method lists
+LCacheMiss:
+ movq receiver(%a2), %r10
+ movq class(%a2), %r11
+ movq 8(%r11), %r11 // class = class->super_class
+ MethodTableLookup %r10, %a3, _objc_msgSendSuper2_stret // r11 = IMP
+ movq receiver(%a2), %a2 // load real receiver
+ test %r11, %r11 // set ne (stret) for forward; r11!=0
+ jmp *%r11 // goto *imp
+
+ DW_END _objc_msgSendSuper2_stret
+ END_ENTRY _objc_msgSendSuper2_stret
#endif
+// User-installable struct-return forwarding handler (0 = none; tested
+// in __objc_msgForward_stret). The L_objc_msgForward pointer slot is
+// obsolete now that stret-ness is signaled via the eq/ne flag
+// convention instead of comparing the IMP against _objc_msgForward.
.private_extern __objc_forward_stret_handler
__objc_forward_stret_handler: .quad 0
- // GrP fixme don't know how to cmpq reg, _objc_msgForward
-L_objc_msgForward: .quad __objc_msgForward
- ENTRY __objc_msgForward
+// Dispatcher reached when message lookup falls through to forwarding
+// ("Method cache version" — presumably the IMP installed in method
+// caches; confirm). Routes on the flag state set by the messengers:
+// cmp r11,r11 => EQ (non-stret), test r11,r11 => NE (stret).
+ STATIC_ENTRY __objc_msgForward_internal
+ // Method cache version
+
+ // THIS IS NOT A CALLABLE C FUNCTION
+ // Out-of-band condition register is NE for stret, EQ otherwise.
- // Non-struct return only!
+ jne __objc_msgForward_stret
+ jmp __objc_msgForward
+
+ END_ENTRY __objc_msgForward_internal
+
+
+// Non-stret forwarder (body excerpted in this view; END_ENTRY outside).
+// NOTE(review): addq (replacing subq) is the real fix here — "Pop stack
+// frame" requires growing %rsp by the 8*16 + 6*8 + (4-1)*8-byte save
+// area; the removed subq moved %rsp the wrong way.
+ ENTRY __objc_msgForward
+ // Non-stret version
// Call user handler, if any
movq __objc_forward_handler(%rip), %r11
movq %a6, 40+REG_AREA(%rsp)
// Save side parameter registers
- movq %r10, 0+LINK_AREA(%rsp) // static chain (fixme needed?)
+ // movq %r10, 0+LINK_AREA(%rsp) // static chain pointer == Pascal
movq %rax, 8+LINK_AREA(%rsp) // xmm count
// 16+LINK_AREA is return address
// Retrieve return address from linkage area
movq 16+LINK_AREA(%rsp), %r11
// Pop stack frame
- subq $ 8*16 + 6*8 + (4-1)*8, %rsp
+ addq $ 8*16 + 6*8 + (4-1)*8, %rsp
// Put return address back
movq %r11, (%rsp)
ret
+// Struct-return forwarder (body excerpted in this view). Same
+// subq->addq stack-pop fix as __objc_msgForward; dispatches to the
+// user-installed __objc_forward_stret_handler when non-NULL.
ENTRY __objc_msgForward_stret
-
+ // Struct-return version
+
// Call user handler, if any
movq __objc_forward_stret_handler(%rip), %r11
testq %r11, %r11 // if (handler == NULL)
movq %a6, 40+REG_AREA(%rsp)
// Save side parameter registers
- movq %r10, 0+LINK_AREA(%rsp) // static chain (fixme needed?)
+ // movq %r10, 0+LINK_AREA(%rsp) // static chain pointer == Pascal
movq %rax, 8+LINK_AREA(%rsp) // xmm count
// 16+LINK_AREA is return address
// Retrieve return address from linkage area
movq 16+LINK_AREA(%rsp), %r11
// Pop stack frame
- subq $ 8*16 + 6*8 + (4-1)*8, %rsp
+ addq $ 8*16 + 6*8 + (4-1)*8, %rsp
// Put return address back
movq %r11, (%rsp)
ret
END_ENTRY __objc_msgForward_stret
+// Debug and no-argument variants: each is a pure tail-call alias of
+// the corresponding messenger (identical ABI, distinct symbol).
+ ENTRY _objc_msgSend_debug
+ jmp _objc_msgSend
+ END_ENTRY _objc_msgSend_debug
+
+ ENTRY _objc_msgSendSuper2_debug
+ jmp _objc_msgSendSuper2
+ END_ENTRY _objc_msgSendSuper2_debug
+
+ ENTRY _objc_msgSend_stret_debug
+ jmp _objc_msgSend_stret
+ END_ENTRY _objc_msgSend_stret_debug
+
+ ENTRY _objc_msgSendSuper2_stret_debug
+ jmp _objc_msgSendSuper2_stret
+ END_ENTRY _objc_msgSendSuper2_stret_debug
+
+ ENTRY _objc_msgSend_fpret_debug
+ jmp _objc_msgSend_fpret
+ END_ENTRY _objc_msgSend_fpret_debug
+
+ ENTRY _objc_msgSend_fp2ret_debug
+ jmp _objc_msgSend_fp2ret
+ END_ENTRY _objc_msgSend_fp2ret_debug
+
+
+ ENTRY _objc_msgSend_noarg
+ jmp _objc_msgSend
+ END_ENTRY _objc_msgSend_noarg
+
+
+// (excerpt — _method_invoke's END_ENTRY and _method_invoke_stret's
+// ENTRY are elided from this view) Tail-calls the IMP stored in the
+// Method struct passed in %a2.
ENTRY _method_invoke
movq method_imp(%a2), %r11
jmp *%r11
END_ENTRY _method_invoke_stret
+
+
+// IMP used for ignored selectors: returns self (%a1 -> %rax) and does
+// nothing else.
+ STATIC_ENTRY __objc_ignored_method
+
+ movq %a1, %rax
+ ret
+
+ END_ENTRY __objc_ignored_method
+
+
+/********************************************************************
+ *
+ * id vtable_prototype(id self, message_ref *msg, ...)
+ *
+ * This code is copied to create vtable trampolines.
+ * The instruction following LvtableIndex is modified to
+ * insert each vtable index.
+ * The instructions following LvtableTagTable are modified to
+ * load the tagged isa table.
+ *
+ * This code is placed in its own section to prevent dtrace from
+ * instrumenting it. Otherwise, dtrace would insert an INT3, the
+ * code would be copied, and the copied INT3 would cause a crash.
+ *
+ * ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
+ * vtable_prototype steals %rax and does not clear %rdx on return
+ * in order to precisely pack instructions into ifetch and cache lines
+ * This means vtable dispatch must never be used for vararg calls
+ * or very large return values.
+ * ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
+ *
+ ********************************************************************/
+
+// $0 = vtable byte offset, $1 = trampoline name. Inside a .macro body
+// "$$" emits a literal "$" (so $$1 / $$0xF are immediates). The 0x7fff
+// sentinel offset selects the patchable-prototype form of the tag-table
+// load.
+.macro VTABLE /* byte-offset, name */
+
+ .align 6
+ .private_extern _$1
+_$1:
+ test %a1, %a1
+ je LvtableReturnZero_$1 // nil check
+ testl $$1, %a1d
+ jne LvtableTaggedPointer_$1 // tag check
+
+ movq (%a1), %rax // load isa (see ABI WARNING)
+ movq 24(%rax), %rax // load vtable
+ movq 8(%a2), %a2 // load _cmd
+LvtableIndex_$1:
+ jmpq * $0 (%rax) // load imp (DO NOT CHANGE)
+
+LvtableReturnZero_$1:
+ // integer registers only; not used for fpret / stret / etc
+ xorl %eax, %eax
+ // xorl %edx, %edx (see ABI WARNING)
+ ret
+
+ nop
+LvtableTaggedPointer_$1:
+ // extract isa (bits 1-2-3) from %a1, bit 0 is kept around for the heck of it
+ movl %a1d, %eax
+ andl $$0xF, %eax
+LvtableTagTable_$1:
+.if $0 == 0x7fff
+ movq $$0x1122334455667788, %r10 // vtable_prototype (DO NOT CHANGE)
+.else
+ leaq __objc_tagged_isa_table(%rip), %r10
+.endif
+LvtableTagTableEnd_$1:
+ movq (%r10, %rax, 8), %r10 // load isa from table (see ABI WARNING)
+ movq 24(%r10), %rax // load vtable
+ movq 8(%a2), %a2 // load _cmd
+LvtableIndex2_$1:
+ jmpq * $0 (%rax) // load imp (DO NOT CHANGE)
+
+LvtableEnd_$1:
+
+.endmacro
+
+// The prototype instance lives in its own __objc_codegen section so
+// dtrace cannot instrument it (see the block comment above the macro).
+// The .long offsets below tell the runtime where, inside each copied
+// trampoline, to patch the vtable index and tag-table address.
+ .section __TEXT,__objc_codegen,regular
+ VTABLE 0x7fff, vtable_prototype
+
+ .data
+ .align 2
+ .private_extern _vtable_prototype_size
+_vtable_prototype_size:
+ .long LvtableEnd_vtable_prototype - _vtable_prototype
+
+ .private_extern _vtable_prototype_index_offset
+_vtable_prototype_index_offset:
+ .long LvtableIndex_vtable_prototype - _vtable_prototype
+
+ .private_extern _vtable_prototype_index2_offset
+_vtable_prototype_index2_offset:
+ .long LvtableIndex2_vtable_prototype - _vtable_prototype
+
+ .private_extern _vtable_prototype_tagtable_offset
+_vtable_prototype_tagtable_offset:
+ .long LvtableTagTable_vtable_prototype - _vtable_prototype
+
+ .private_extern _vtable_prototype_tagtable_size
+_vtable_prototype_tagtable_size:
+ .long LvtableTagTableEnd_vtable_prototype - LvtableTagTable_vtable_prototype
+
+/********************************************************************
+ *
+ * id vtable_ignored(id self, message_ref *msg, ...)
+ *
+ * Vtable trampoline for GC-ignored selectors. Immediately returns self.
+ *
+ ********************************************************************/
+
+ // Trampoline for GC-ignored selectors: returns self unchanged.
+ // NOTE(review): no END_ENTRY here, unlike the other entries —
+ // confirm that ENTRY/END_ENTRY bookkeeping does not require it.
+ STATIC_ENTRY _vtable_ignored
+ movq %a1, %rax
+ ret
+
+
+/********************************************************************
+ *
+ * id objc_msgSend_vtable<n>(id self, message_ref *msg, ...)
+ *
+ * Built-in expansions of vtable_prototype for the default vtable.
+ *
+ ********************************************************************/
+
+// Default vtable: a 16-byte header, then 16 descriptors, then the 16
+// trampoline expansions. Each TDESC emits a self-relative offset to
+// its trampoline plus flag bits (MESSAGE | VTABLE); each VTABLE
+// expansion hard-codes slot n's byte offset (n*8).
+ .text
+
+ .align 4
+ .private_extern _defaultVtableTrampolineDescriptors
+_defaultVtableTrampolineDescriptors:
+ // objc_trampoline_header
+ .short 16 // headerSize
+ .short 8 // descSize
+ .long 16 // descCount
+ .quad 0 // next
+
+ // objc_trampoline_descriptor[16]
+.macro TDESC /* n */
+L_tdesc$0:
+ .long _objc_msgSend_vtable$0 - L_tdesc$0
+ .long (1<<0) + (1<<2) // MESSAGE and VTABLE
+.endmacro
+
+ TDESC 0
+ TDESC 1
+ TDESC 2
+ TDESC 3
+ TDESC 4
+ TDESC 5
+ TDESC 6
+ TDESC 7
+ TDESC 8
+ TDESC 9
+ TDESC 10
+ TDESC 11
+ TDESC 12
+ TDESC 13
+ TDESC 14
+ TDESC 15
+
+ // trampoline code
+ .align 4
+ VTABLE 0*8, objc_msgSend_vtable0
+ VTABLE 1*8, objc_msgSend_vtable1
+ VTABLE 2*8, objc_msgSend_vtable2
+ VTABLE 3*8, objc_msgSend_vtable3
+ VTABLE 4*8, objc_msgSend_vtable4
+ VTABLE 5*8, objc_msgSend_vtable5
+ VTABLE 6*8, objc_msgSend_vtable6
+ VTABLE 7*8, objc_msgSend_vtable7
+ VTABLE 8*8, objc_msgSend_vtable8
+ VTABLE 9*8, objc_msgSend_vtable9
+ VTABLE 10*8, objc_msgSend_vtable10
+ VTABLE 11*8, objc_msgSend_vtable11
+ VTABLE 12*8, objc_msgSend_vtable12
+ VTABLE 13*8, objc_msgSend_vtable13
+ VTABLE 14*8, objc_msgSend_vtable14
+ VTABLE 15*8, objc_msgSend_vtable15
+
+#endif