/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

	.code64
	.globl _compat_mode_trampoline
_compat_mode_trampoline:
	/*
	 * %rdi => address of far_call_t (64-bit offset, then 16-bit selector)
	 * %rsi => lowmem stack
	 * %rdx => argument to 32-bit function
	 * %rcx => address of long mode callback
	 * %r8  => 64-bit address of _thunk64
	 */
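	/*
	 * For reference, a minimal sketch of the C-side view of this entry
	 * point.  The names and exact prototype below are assumptions made
	 * here for illustration, not the declarations used by the test
	 * harness:
	 *
	 *     #include <stdint.h>
	 *
	 *     typedef struct __attribute__((packed)) {
	 *         uint64_t off;   // 64-bit offset of the compat mode target
	 *         uint16_t seg;   // LDT code-segment selector to jump through
	 *     } far_call_t;
	 *
	 *     extern uint32_t compat_mode_trampoline(far_call_t *fcp,
	 *         void *lowmem_stack, uint32_t arg,
	 *         void (*long_mode_cb)(void), void *thunk64);
	 */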
	movq	%rsp, %rax
	movq	%rsi, %rsp
	pushq	%rax			/* Save 64-bit stack pointer */
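	/*
	 * Build the far return address used to get back here from compat
	 * mode: the 32-bit offset of label 1 in the low half of %rax and the
	 * current long mode %cs in the next 16 bits.  The 32-bit argument in
	 * %rdx is parked in %r9 while %dx is used to read %cs.
	 */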
	leaq	1f(%rip), %rax
	movq	%rdx, %r9
	xorq	%rdx, %rdx
	movw	%cs, %dx
	shlq	$32, %rdx
	orq	%rdx, %rax
	movq	%r9, %rdx
	/*
	 * Save all callee-saved regs before calling down to compat mode,
	 * as there's no guarantee that the top 32 bits are preserved
	 * across compat mode/long mode switches.
	 */
	pushq	%rbp
	pushq	%rbx
	pushq	%r12
	pushq	%r13
	pushq	%r14
	pushq	%r15

	pushq	%r8			/* Push the absolute address of _thunk64 below */
	pushq	%rcx			/* Push the 64-bit fn ptr that compat mode will call */
	pushq	%rdx			/* Push arg to 32-bit code */
	pushq	%rax			/* Push the return offset + segment onto the stack */

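	/*
	 * Far jump through the far_call_t: this loads its code-segment
	 * selector and jumps to its 64-bit offset, dropping into compat
	 * (32-bit) mode.
	 */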
	ljmpq	*(%rdi)
1:
	/*
	 * lretl from compat mode pops off the first 8 bytes,
	 * so manually reclaim the remaining 24 bytes
	 */
	addq	$0x18, %rsp

	/* Restore callee-saved registers */
	popq	%r15
	popq	%r14
	popq	%r13
	popq	%r12
	popq	%rbx
	popq	%rbp

	popq	%rsp
	retq


	.code32
	.globl _code_32
	.align 12
_code_32:
	/*
	 * After the standard stack frame is established, the stack layout is as follows:
	 *
	 * (%esp)     -> old %ebp
	 * 4(%ebp)    -> return %eip
	 * 8(%ebp)    -> return %cs
	 * 0xc(%ebp)  -> function arg (value to increment and return)
	 * 0x14(%ebp) -> 8-byte long mode function pointer to call via trampoline (with 0 args)
	 * 0x1c(%ebp) -> absolute (32-bit) base address of the 64-bit thunk
	 *               (Note that the caller pushed a 64-bit value here, so the 4 bytes
	 *               at 0x20(%ebp) are zeroes.)
	 */
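	/*
	 * Each 8-byte push made by the 64-bit caller occupies two 4-byte
	 * slots here, which is why the layout above skips 0x10(%ebp) (the
	 * zero upper half of the 32-bit argument) and why the function
	 * pointer at 0x14(%ebp) spans two slots.
	 */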
	pushl	%ebp
	movl	%esp, %ebp
	pushl	%ebx
	call	1f
1:
	popl	%ebx			/* save EIP for use in PIC calculation below */
	subl	$8, %esp
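	/*
	 * The 8 bytes just reserved at -8(%ebp) hold a far call descriptor:
	 * a 32-bit offset followed by a 16-bit segment selector (the top two
	 * bytes are simply padding).
	 */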

	movl	0x1c(%ebp), %eax

	/* Populate the far call descriptor: */
	movl	%eax, -8(%ebp)
	movl	8(%ebp), %eax		/* The long-mode %cs from whence we came */
	movl	%eax, -4(%ebp)

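	/*
	 * Push the argument block that the long mode trampoline will find
	 * above its far return frame: the argument count and the 8-byte
	 * pointer to the long mode callback.
	 */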
	pushl	$0			/* number of arguments */
	pushl	0x18(%ebp)		/* high 32-bits of long mode funcptr */
	pushl	0x14(%ebp)		/* low 32-bits of long mode funcptr */

	/*
	 * The next 2 instructions are necessary because clang cannot handle
	 * a "leal offset(index_reg), dest_reg" construct here, despite the
	 * fact that this code is marked .code32 (the target is 64-bit and
	 * cannot process this uniquely-32-bit construct), so the PIC address
	 * of label 2 is computed by hand.
	 */
	leal	2f - 1b, %eax
	addl	%ebx, %eax

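	/*
	 * Push a 16-byte far return frame (zero-extended %cs, then the
	 * zero-extended 32-bit offset of label 2) so the long mode side can
	 * return here with an lretq.
	 */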
	pushl	$0
	pushl	%cs
	pushl	$0
	pushl	%eax

	/*
	 * Note that the long-mode-based function that is called will need
	 * to restore GSbase before calling into any frameworks that might
	 * access %gs-relative data.
	 */
	ljmpl	*-8(%ebp)		/* far call to the long mode trampoline */
2:
	/*
	 * lretq from long mode pops 16 bytes, so reclaim the remaining 12
	 */
	addl	$12, %esp

	/*
	 * Do a division-by-zero so the exception handler can catch it and
	 * restore execution right after.  If a signal handler is used,
	 * it must restore GSbase first if it intends to call into any
	 * frameworks / APIs that access %gs-relative data.
	 */
	xorl	%eax, %eax
	div	%eax

	.globl _first_invalid_opcode
_first_invalid_opcode:
	/*
	 * Next, try to perform a sysenter syscall -- which should result in
	 * a #UD.
	 */
	leal	3f - 1b, %edx
	addl	%ebx, %edx		/* return address is expected in %edx */
	pushl	%ecx
	movl	%esp, %ecx		/* stack ptr is expected in %ecx */
	sysenter
3:
	popl	%ecx

	/*
	 * Do the same with each of the old-style INT syscalls.
	 */
	int	$0x80
	int	$0x81
	.globl _last_invalid_opcode
_last_invalid_opcode:
	int	$0x82

	/*
	 * Discard the return value from the trampolined function, then
	 * return this function's first argument incremented by 1 so the
	 * caller can verify a successful thunk.
	 */
	movl	0xc(%ebp), %eax
	incl	%eax
	addl	$8, %esp
	popl	%ebx
	popl	%ebp
	lret

	.code64

	.globl _thunk64
_thunk64:
	/*
	 * The thunk is a very simple code fragment that uses an absolute
	 * address, modified at setup time, to call into the long mode
	 * trampoline -- i.e. back into long mode code, where %rip-relative
	 * addressing will work properly.
	 */
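	/*
	 * The movabs immediate below is only a placeholder; the setup code
	 * overwrites it (via the _thunk64_movabs label) with the real target
	 * address before this thunk is ever executed.
	 */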
	.globl _thunk64_movabs
_thunk64_movabs:
	movabs	$0xdeadbeeffeedface, %rax
	jmpq	*%rax


	.globl _compat_mode_trampoline_len
_compat_mode_trampoline_len:
	.long	(. - _compat_mode_trampoline)
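	/*
	 * Length of everything from _compat_mode_trampoline up to this
	 * point, presumably so the test harness knows how much of this code
	 * to copy into low (below 4G) memory.
	 */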


	.globl _long_mode_trampoline
_long_mode_trampoline:
	/*
	 * After creating a standard stack frame, the stack layout is:
	 *
	 * 8(%rbp)    => %eip of far return to compat mode
	 * 0x10(%rbp) => %cs of far return to compat mode
	 * 0x18(%rbp) => low 32-bits of function pointer
	 * 0x1C(%rbp) => high 32-bits of function pointer
	 * 0x20(%rbp) => number of parameters (0..4)
	 * 0x24(%rbp) => first argument [low 32-bits] (if needed)
	 * 0x28(%rbp) => first argument [high 32-bits] (if needed)
	 * 0x2c(%rbp) => second argument [low 32-bits] (if needed)
	 * 0x30(%rbp) => second argument [high 32-bits] (if needed)
	 * 0x34(%rbp) => third argument [low 32-bits] (if needed)
	 * 0x38(%rbp) => third argument [high 32-bits] (if needed)
	 * 0x3c(%rbp) => fourth argument [low 32-bits] (if needed)
	 * 0x40(%rbp) => fourth argument [high 32-bits] (if needed)
	 *
	 * Note that we continue to use the existing (<4G) stack
	 * after the call into long mode.
	 */
	pushq	%rbp
	movq	%rsp, %rbp
	pushq	%rdi
	pushq	%rsi
	pushq	%rcx
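	/*
	 * %rdi, %rsi and %rcx are saved above because the compat mode caller
	 * may expect them preserved.  Below, up to four 64-bit arguments are
	 * copied from the stack into the System V argument registers,
	 * counting the parameter count down in %eax.
	 */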
	movl	0x20(%rbp), %eax

	testl	%eax, %eax
	jz	5f

	movq	0x24(%rbp), %rdi
	decl	%eax

2:
	testl	%eax, %eax
	jz	5f

	movq	0x2c(%rbp), %rsi
	decl	%eax

3:
	testl	%eax, %eax
	jz	5f

	movq	0x34(%rbp), %rdx
	decl	%eax

4:
	testl	%eax, %eax
	jnz	1f			/* too many arguments specified -- bail out and return */

	movq	0x3c(%rbp), %rcx

5:	/* Call passed-in function */
	/* Note that the stack MUST be 16-byte aligned before we call into frameworks in long mode */

	pushq	%rbx
	movq	%rsp, %rbx		/* stash the unaligned stack pointer */
	subq	$0x10, %rsp
	andq	$0xffffffffffffffe0, %rsp	/* align down to 32 bytes, which also satisfies 16-byte alignment */

	callq	*0x18(%rbp)
	movq	%rbx, %rsp
	popq	%rbx
1:
	popq	%rcx
	popq	%rsi
	popq	%rdi
	popq	%rbp
	lretq