]> git.saurik.com Git - apple/dyld.git/blob - src/dyld_stub_binder.s
dyld-195.5.tar.gz
[apple/dyld.git] / src / dyld_stub_binder.s
1 /*
2 * Copyright (c) 2008 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24
25
#ifdef __i386__

/* Frame layout: offsets from %esp after the "subl $STACK_SIZE,%esp" below.
 * Low addresses hold the two outgoing parameter slots for the C helper,
 * then four 16-byte-aligned XMM spill slots, then integer register spills.
 * Note MH_LOCAL (100) == STACK_SIZE, so MH_LOCAL(%esp) aliases the first
 * meta-parameter the stub pushed (old sp+0); LP_OLD_BP_SAVE (104) aliases
 * the second (old sp+4), which is re-used to store the caller's %ebp once
 * the lazy-pointer value has been copied down to LP_LOCAL. */
#define MH_PARAM_OUT 0
#define LP_PARAM_OUT 4
#define XMMM0_SAVE 16 /* 16-byte align */
#define XMMM1_SAVE 32
#define XMMM2_SAVE 48
#define XMMM3_SAVE 64
#define EAX_SAVE 84
#define ECX_SAVE 88
#define EDX_SAVE 92
#define LP_LOCAL 96
#define MH_LOCAL 100
#define STACK_SIZE 100 /* must be 4 mod 16 so that stack winds up 16-byte aligned */
#define LP_OLD_BP_SAVE 104

/*
 * Lazy-binding entry point (i386). The per-symbol stub jumps here (no call,
 * so there is no return address) after pushing two meta-parameters:
 * sp+4 lazy binding info offset
 * sp+0 address of ImageLoader cache
 * All registers that may carry parameters (eax, ecx, edx, xmm0-xmm3) are
 * preserved across the bind; on exit control transfers to the bound target
 * with the meta-parameters removed from the stack.
 */
.text
.align 4,0x90
.globl dyld_stub_binder
.globl _misaligned_stack_error  # NOTE(review): the label defined below is _misaligned_stack_error_ (trailing underscore) -- confirm which symbol name is intended
dyld_stub_binder:
	subl	$STACK_SIZE,%esp		# makes stack 16-byte aligned
	movl	%eax,EAX_SAVE(%esp)		# save registers that might be used as parameters
	movl	LP_OLD_BP_SAVE(%esp),%eax	# get lazy-pointer meta-parameter
	movl	%eax,LP_LOCAL(%esp)		# copy it down; its slot is about to be overwritten
	movl	%ebp,LP_OLD_BP_SAVE(%esp)	# store ebp back chain
	movl	%esp,%ebp			# set ebp to be this frame
	add	$LP_OLD_BP_SAVE,%ebp		# ...pointing at the saved-ebp slot, like a normal frame
	movl	%ecx,ECX_SAVE(%esp)
	movl	%edx,EDX_SAVE(%esp)
.align 0,0x90
_misaligned_stack_error_:			# the aligned movdqa below faults here if %esp is not 16-byte aligned
	movdqa	%xmm0,XMMM0_SAVE(%esp)
	movdqa	%xmm1,XMMM1_SAVE(%esp)
	movdqa	%xmm2,XMMM2_SAVE(%esp)
	movdqa	%xmm3,XMMM3_SAVE(%esp)
dyld_stub_binder_:
	movl	MH_LOCAL(%esp),%eax		# call dyld::fastBindLazySymbol(loadercache, lazyinfo)
	movl	%eax,MH_PARAM_OUT(%esp)
	movl	LP_LOCAL(%esp),%eax
	movl	%eax,LP_PARAM_OUT(%esp)
	call	__Z21_dyld_fast_stub_entryPvl	# returns the bound symbol's address in %eax
	movdqa	XMMM0_SAVE(%esp),%xmm0		# restore registers
	movdqa	XMMM1_SAVE(%esp),%xmm1
	movdqa	XMMM2_SAVE(%esp),%xmm2
	movdqa	XMMM3_SAVE(%esp),%xmm3
	movl	ECX_SAVE(%esp),%ecx
	movl	EDX_SAVE(%esp),%edx
	movl	%eax,%ebp			# move target address to ebp
	movl	EAX_SAVE(%esp),%eax		# restore eax
	addl	$STACK_SIZE+4,%esp		# cut back stack, leaving one meta-parameter slot
	xchg	%ebp, (%esp)			# restore ebp and set target to top of stack
	ret					# jump to target (pops it, so both meta-parameters are gone)


#endif /* __i386__ */
86
87
#if __x86_64__

/* Offsets (from %rbp after the pushq below) of the two meta-parameters
 * pushed by the stub. There is no return address on the stack -- the stub
 * jumps here -- so rbp+8 is the first pushed value, rbp+16 the second. */
#define MH_PARAM_BP 8
#define LP_PARAM_BP 16

/* Spill-slot offsets from %rsp after "subq $STACK_SIZE,%rsp": the six
 * integer argument registers plus rax, then eight 16-byte-aligned XMM
 * argument registers. */
#define RDI_SAVE 0
#define RSI_SAVE 8
#define RDX_SAVE 16
#define RCX_SAVE 24
#define R8_SAVE 32
#define R9_SAVE 40
#define RAX_SAVE 48
#define XMMM0_SAVE 64 /* 16-byte align */
#define XMMM1_SAVE 80
#define XMMM2_SAVE 96
#define XMMM3_SAVE 112
#define XMMM4_SAVE 128
#define XMMM5_SAVE 144
#define XMMM6_SAVE 160
#define XMMM7_SAVE 176
#define STACK_SIZE 192 /* (XMMM7_SAVE+16) must be 16 byte aligned too */


/*
 * Lazy-binding entry point (x86_64). The per-symbol stub jumps here (no
 * call, so there is no return address) after pushing two 8-byte
 * meta-parameters:
 * sp+8 lazy binding info offset
 * sp+0 address of ImageLoader cache
 * All parameter-passing registers (rdi, rsi, rdx, rcx, r8, r9, rax,
 * xmm0-xmm7) are preserved across the bind; on exit the meta-parameters
 * are popped and control jumps to the bound target.
 */
.align 2,0x90
.globl dyld_stub_binder
dyld_stub_binder:
	pushq	%rbp
	movq	%rsp,%rbp
	subq	$STACK_SIZE,%rsp		# at this point stack is 16-byte aligned because two meta-parameters were pushed
	movq	%rdi,RDI_SAVE(%rsp)		# save registers that might be used as parameters
	movq	%rsi,RSI_SAVE(%rsp)
	movq	%rdx,RDX_SAVE(%rsp)
	movq	%rcx,RCX_SAVE(%rsp)
	movq	%r8,R8_SAVE(%rsp)
	movq	%r9,R9_SAVE(%rsp)
	movq	%rax,RAX_SAVE(%rsp)
misaligned_stack_error_entering_dyld_stub_binder:	# the aligned movdqa below faults here if %rsp is not 16-byte aligned
	movdqa	%xmm0,XMMM0_SAVE(%rsp)
	movdqa	%xmm1,XMMM1_SAVE(%rsp)
	movdqa	%xmm2,XMMM2_SAVE(%rsp)
	movdqa	%xmm3,XMMM3_SAVE(%rsp)
	movdqa	%xmm4,XMMM4_SAVE(%rsp)
	movdqa	%xmm5,XMMM5_SAVE(%rsp)
	movdqa	%xmm6,XMMM6_SAVE(%rsp)
	movdqa	%xmm7,XMMM7_SAVE(%rsp)
dyld_stub_binder_:
	movq	MH_PARAM_BP(%rbp),%rdi		# call fastBindLazySymbol(loadercache, lazyinfo)
	movq	LP_PARAM_BP(%rbp),%rsi
	call	__Z21_dyld_fast_stub_entryPvl	# returns the bound symbol's address in %rax
	movq	%rax,%r11			# save target in a register not used for parameter passing
	movdqa	XMMM0_SAVE(%rsp),%xmm0		# restore registers
	movdqa	XMMM1_SAVE(%rsp),%xmm1
	movdqa	XMMM2_SAVE(%rsp),%xmm2
	movdqa	XMMM3_SAVE(%rsp),%xmm3
	movdqa	XMMM4_SAVE(%rsp),%xmm4
	movdqa	XMMM5_SAVE(%rsp),%xmm5
	movdqa	XMMM6_SAVE(%rsp),%xmm6
	movdqa	XMMM7_SAVE(%rsp),%xmm7
	movq	RDI_SAVE(%rsp),%rdi
	movq	RSI_SAVE(%rsp),%rsi
	movq	RDX_SAVE(%rsp),%rdx
	movq	RCX_SAVE(%rsp),%rcx
	movq	R8_SAVE(%rsp),%r8
	movq	R9_SAVE(%rsp),%r9
	movq	RAX_SAVE(%rsp),%rax
	addq	$STACK_SIZE,%rsp
	popq	%rbp
	addq	$16,%rsp			# remove meta-parameters
	jmp	*%r11				# jmp to target

#endif
163
164
#if __arm__
/*
 * Lazy-binding entry point (arm). The per-symbol stub jumps here (no call)
 * after pushing two meta-parameters:
 * sp+4 lazy binding info offset
 * sp+0 address of ImageLoader cache
 * The argument registers r0-r3 (plus r7 and lr) are preserved across the
 * bind; on exit the meta-parameters are removed and control transfers to
 * the bound target via ip, which is not used for parameter passing.
 */

.text
.align 2
.globl dyld_stub_binder
dyld_stub_binder:
	stmfd	sp!, {r0,r1,r2,r3,r7,lr}	// save registers (24 bytes pushed)
	add	r7, sp, #16			// point FP to previous FP

	ldr	r0, [sp, #24]			// move address ImageLoader cache to 1st parameter (old sp+0)
	ldr	r1, [sp, #28]			// move lazy info offset 2nd parameter (old sp+4)

	// call dyld::fastBindLazySymbol(loadercache, lazyinfo)
	bl	__Z21_dyld_fast_stub_entryPvl
	mov	ip, r0				// move the symbol's address into ip

	ldmfd	sp!, {r0,r1,r2,r3,r7,lr}	// restore registers
	add	sp, sp, #8			// remove meta-parameters

	bx	ip				// jump to the symbol's address that was bound

#endif /* __arm__ */
191