2 * Copyright (c) 2008 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
/*
 * dyld_stub_binder — i386 variant (fragment).
 *
 * Trampoline reached from a lazy-binding stub the first time an external
 * symbol is called.  On entry the stub has pushed two meta-parameters
 * (address of the ImageLoader cache at sp+0, lazy-binding-info offset at
 * sp+4 — see the layout comment below).  This code saves every register
 * that may carry function arguments, calls the dyld fast-bind entry point
 * to resolve the symbol, restores the registers, and jumps to the newly
 * bound target so the original call proceeds transparently.
 *
 * NOTE(review): this chunk is incomplete — the #defines for EAX_SAVE,
 * ECX_SAVE, EDX_SAVE, XMMM1..3_SAVE, MH_LOCAL and LP_LOCAL, the
 * dyld_stub_binder: entry label, and the enclosing arch #if guard are not
 * visible here; confirm all offsets against the full source file.
 */
/* Outgoing C parameter slots at the bottom of the local frame. */
28 #define MH_PARAM_OUT 0
29 #define LP_PARAM_OUT 4
30 #define XMMM0_SAVE 16 /* 16-byte align */
39 #define STACK_SIZE 100 /* must be 4 mod 16 so that stack winds up 16-byte aligned */
40 #define LP_OLD_BP_SAVE 104
43 * sp+4 lazy binding info offset
44 * sp+0 address of ImageLoader cache
48 .globl dyld_stub_binder
49 .globl _misaligned_stack_error
/* Carve out the local frame; 100 bytes on top of the 4-byte return address
   and 8 bytes of meta-parameters leaves %esp 16-byte aligned, as required
   for the movdqa stores below and for the outgoing call. */
51 subl $STACK_SIZE,%esp # makes stack 16-byte aligned
52 movl %eax,EAX_SAVE(%esp)
/* The lazy-pointer meta-parameter is relocated into a local slot so its
   original slot (LP_OLD_BP_SAVE) can be reused to hold the caller's %ebp,
   keeping the frame-pointer back chain intact for crash reporters. */
53 movl LP_OLD_BP_SAVE(%esp),%eax # get lazy-pointer meta-parameter
54 movl %eax,LP_LOCAL(%esp)
55 movl %ebp,LP_OLD_BP_SAVE(%esp) # store ebp back chain
56 movl %esp,%ebp # set ebp to be this frame
57 add $LP_OLD_BP_SAVE,%ebp
58 movl %ecx,ECX_SAVE(%esp)
59 movl %edx,EDX_SAVE(%esp)
/* Entry point used to diagnose a caller that arrived with a misaligned
   stack: the movdqa below faults on a non-16-byte-aligned %esp. */
61 _misaligned_stack_error_:
62 movdqa %xmm0,XMMM0_SAVE(%esp)
63 movdqa %xmm1,XMMM1_SAVE(%esp)
64 movdqa %xmm2,XMMM2_SAVE(%esp)
65 movdqa %xmm3,XMMM3_SAVE(%esp)
/* Marshal the two meta-parameters into the outgoing stack slots and bind. */
67 movl MH_LOCAL(%esp),%eax # call dyld::fastBindLazySymbol(loadercache, lazyinfo)
68 movl %eax,MH_PARAM_OUT(%esp)
69 movl LP_LOCAL(%esp),%eax
70 movl %eax,LP_PARAM_OUT(%esp)
71 call __Z21_dyld_fast_stub_entryPvl
/* %eax now holds the resolved target address; restore all argument
   registers before transferring control to it. */
72 movdqa XMMM0_SAVE(%esp),%xmm0 # restore registers
73 movdqa XMMM1_SAVE(%esp),%xmm1
74 movdqa XMMM2_SAVE(%esp),%xmm2
75 movdqa XMMM3_SAVE(%esp),%xmm3
76 movl ECX_SAVE(%esp),%ecx
77 movl EDX_SAVE(%esp),%edx
78 movl %eax,%ebp # move target address to ebp
79 movl EAX_SAVE(%esp),%eax # restore eax
/* Pop the frame plus one of the two meta-parameter words ... */
80 addl $STACK_SIZE+4,%esp # cut back stack
/* ... then swap %ebp (target) with the remaining word: this restores the
   caller's %ebp and leaves the target address on top of the stack, so the
   'ret' that follows (outside this fragment) jumps to the bound symbol. */
81 xchg %ebp, (%esp) # restore ebp and set target to top of stack
/*
 * dyld_stub_binder — x86_64 variant (fragment).
 *
 * Same contract as the i386 version: the lazy stub pushed two
 * meta-parameters (ImageLoader cache address at sp+0, lazy-binding-info
 * offset at sp+8 in this ABI) before jumping here.  All integer argument
 * registers (%rdi,%rsi,%rdx,%rcx,%r8,%r9), %rax (variadic vector-count),
 * and %xmm0-%xmm7 are preserved across the bind so the interrupted call
 * can continue with its arguments intact.
 *
 * NOTE(review): this chunk is incomplete — the #defines for MH_PARAM_BP,
 * RDI_SAVE, RSI_SAVE, RDX_SAVE, RCX_SAVE, R8_SAVE, R9_SAVE, RAX_SAVE,
 * the dyld_stub_binder: entry label (which presumably sets up %rbp), and
 * the enclosing arch #if guard are not visible here; confirm offsets in
 * the full source file.
 */
91 #define LP_PARAM_BP 16
/* XMM save area: 16-byte aligned, one 16-byte slot per register. */
100 #define XMMM0_SAVE 64 /* 16-byte align */
101 #define XMMM1_SAVE 80
102 #define XMMM2_SAVE 96
103 #define XMMM3_SAVE 112
104 #define XMMM4_SAVE 128
105 #define XMMM5_SAVE 144
106 #define XMMM6_SAVE 160
107 #define XMMM7_SAVE 176
108 #define STACK_SIZE 192 /* (XMMM7_SAVE+16) must be 16 byte aligned too */
112 * sp+4 lazy binding info offset
113 * sp+0 address of ImageLoader cache
116 .globl dyld_stub_binder
120 subq $STACK_SIZE,%rsp # at this point stack is 16-byte aligned because two meta-parameters were pushed
121 movq %rdi,RDI_SAVE(%rsp) # save registers that might be used as parameters
122 movq %rsi,RSI_SAVE(%rsp)
123 movq %rdx,RDX_SAVE(%rsp)
124 movq %rcx,RCX_SAVE(%rsp)
125 movq %r8,R8_SAVE(%rsp)
126 movq %r9,R9_SAVE(%rsp)
127 movq %rax,RAX_SAVE(%rsp)
/* The movdqa stores below fault if %rsp is not 16-byte aligned; this
   label gives the crash a self-describing symbol name in backtraces. */
128 misaligned_stack_error_entering_dyld_stub_binder:
129 movdqa %xmm0,XMMM0_SAVE(%rsp)
130 movdqa %xmm1,XMMM1_SAVE(%rsp)
131 movdqa %xmm2,XMMM2_SAVE(%rsp)
132 movdqa %xmm3,XMMM3_SAVE(%rsp)
133 movdqa %xmm4,XMMM4_SAVE(%rsp)
134 movdqa %xmm5,XMMM5_SAVE(%rsp)
135 movdqa %xmm6,XMMM6_SAVE(%rsp)
136 movdqa %xmm7,XMMM7_SAVE(%rsp)
/* Load the two pushed meta-parameters (addressed off %rbp) into the
   SysV argument registers and resolve the symbol. */
138 movq MH_PARAM_BP(%rbp),%rdi # call fastBindLazySymbol(loadercache, lazyinfo)
139 movq LP_PARAM_BP(%rbp),%rsi
140 call __Z21_dyld_fast_stub_entryPvl
/* %r11 is caller-saved and not an argument register, so it can carry the
   bound target address across the register restores below. */
141 movq %rax,%r11 # save target
142 movdqa XMMM0_SAVE(%rsp),%xmm0 # restore registers
143 movdqa XMMM1_SAVE(%rsp),%xmm1
144 movdqa XMMM2_SAVE(%rsp),%xmm2
145 movdqa XMMM3_SAVE(%rsp),%xmm3
146 movdqa XMMM4_SAVE(%rsp),%xmm4
147 movdqa XMMM5_SAVE(%rsp),%xmm5
148 movdqa XMMM6_SAVE(%rsp),%xmm6
149 movdqa XMMM7_SAVE(%rsp),%xmm7
150 movq RDI_SAVE(%rsp),%rdi
151 movq RSI_SAVE(%rsp),%rsi
152 movq RDX_SAVE(%rsp),%rdx
153 movq RCX_SAVE(%rsp),%rcx
154 movq R8_SAVE(%rsp),%r8
155 movq R9_SAVE(%rsp),%r9
156 movq RAX_SAVE(%rsp),%rax
157 addq $STACK_SIZE,%rsp
/* Drop the two 8-byte meta-parameters the stub pushed, then tail-jump
   to the freshly bound symbol as if it had been called directly. */
159 addq $16,%rsp # remove meta-parameters
160 jmp *%r11 # jmp to target
// dyld_stub_binder — ARM (32-bit) variant (fragment).
//
// The lazy stub pushed two meta-parameters before branching here:
// sp+0 = address of the ImageLoader cache, sp+4 = lazy-binding-info
// offset.  Argument registers r0-r3, the frame pointer r7, and lr are
// saved around the bind call, then control transfers to the resolved
// symbol via ip (r12, the AAPCS intra-procedure scratch register),
// which is the one register that need not be preserved.
//
// NOTE(review): the dyld_stub_binder: entry label and the enclosing arch
// #if guard are not visible in this chunk — confirm against the full file.
167 * sp+4 lazy binding info offset
168 * sp+0 address of ImageLoader cache
173 .globl dyld_stub_binder
175 stmfd sp!, {r0,r1,r2,r3,r7,lr} // save registers
176 add r7, sp, #16 // point FP to previous FP
// After the 24-byte push above, the meta-parameters sit at sp+24/sp+28.
178 ldr r0, [sp, #24] // move address ImageLoader cache to 1st parameter
179 ldr r1, [sp, #28] // move lazy info offset 2nd parameter
181 // call dyld::fastBindLazySymbol(loadercache, lazyinfo)
182 bl __Z21_dyld_fast_stub_entryPvl
183 mov ip, r0 // move the symbol's address into ip
// Restore argument registers, pop the two meta-parameter words, and
// branch to the bound symbol so the original call completes normally.
185 ldmfd sp!, {r0,r1,r2,r3,r7,lr} // restore registers
186 add sp, sp, #8 // remove meta-parameters
188 bx ip // jump to the symbol's address that was bound