/*
 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
30 | ||
31 | #ifndef JITStubsX86_64_h | |
32 | #define JITStubsX86_64_h | |
33 | ||
34 | #include "JITStubsX86Common.h" | |
35 | ||
36 | #if !CPU(X86_64) | |
37 | #error "JITStubsX86_64.h should only be #included if CPU(X86_64)" | |
38 | #endif | |
39 | ||
40 | #if !USE(JSVALUE64) | |
41 | #error "JITStubsX86_64.h only implements USE(JSVALUE64)" | |
42 | #endif | |
43 | ||
44 | namespace JSC { | |
45 | ||
46 | #if COMPILER(GCC) | |
47 | ||
48 | #if USE(MASM_PROBE) | |
49 | asm ( | |
50 | ".globl " SYMBOL_STRING(ctiMasmProbeTrampoline) "\n" | |
51 | HIDE_SYMBOL(ctiMasmProbeTrampoline) "\n" | |
52 | SYMBOL_STRING(ctiMasmProbeTrampoline) ":" "\n" | |
53 | ||
54 | "pushfq" "\n" | |
55 | ||
56 | // MacroAssembler::probe() has already generated code to store some values. | |
57 | // Together with the rflags pushed above, the top of stack now looks like | |
58 | // this: | |
59 | // esp[0 * ptrSize]: rflags | |
60 | // esp[1 * ptrSize]: return address / saved rip | |
61 | // esp[2 * ptrSize]: probeFunction | |
62 | // esp[3 * ptrSize]: arg1 | |
63 | // esp[4 * ptrSize]: arg2 | |
64 | // esp[5 * ptrSize]: saved rax | |
65 | // esp[6 * ptrSize]: saved rsp | |
66 | ||
67 | "movq %rsp, %rax" "\n" | |
68 | "subq $" STRINGIZE_VALUE_OF(PROBE_SIZE) ", %rsp" "\n" | |
69 | ||
70 | // The X86_64 ABI specifies that the worse case stack alignment requirement | |
71 | // is 32 bytes. | |
72 | "andq $~0x1f, %rsp" "\n" | |
73 | ||
74 | "movq %rbp, " STRINGIZE_VALUE_OF(PROBE_CPU_EBP_OFFSET) "(%rsp)" "\n" | |
75 | "movq %rsp, %rbp" "\n" // Save the ProbeContext*. | |
76 | ||
77 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_ECX_OFFSET) "(%rbp)" "\n" | |
78 | "movq %rdx, " STRINGIZE_VALUE_OF(PROBE_CPU_EDX_OFFSET) "(%rbp)" "\n" | |
79 | "movq %rbx, " STRINGIZE_VALUE_OF(PROBE_CPU_EBX_OFFSET) "(%rbp)" "\n" | |
80 | "movq %rsi, " STRINGIZE_VALUE_OF(PROBE_CPU_ESI_OFFSET) "(%rbp)" "\n" | |
81 | "movq %rdi, " STRINGIZE_VALUE_OF(PROBE_CPU_EDI_OFFSET) "(%rbp)" "\n" | |
82 | ||
83 | "movq 0 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
84 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EFLAGS_OFFSET) "(%rbp)" "\n" | |
85 | "movq 1 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
86 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EIP_OFFSET) "(%rbp)" "\n" | |
87 | "movq 2 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
88 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_PROBE_FUNCTION_OFFSET) "(%rbp)" "\n" | |
89 | "movq 3 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
90 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_ARG1_OFFSET) "(%rbp)" "\n" | |
91 | "movq 4 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
92 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_ARG2_OFFSET) "(%rbp)" "\n" | |
93 | "movq 5 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
94 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EAX_OFFSET) "(%rbp)" "\n" | |
95 | "movq 6 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax), %rcx" "\n" | |
96 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rbp)" "\n" | |
97 | ||
98 | "movq %r8, " STRINGIZE_VALUE_OF(PROBE_CPU_R8_OFFSET) "(%rbp)" "\n" | |
99 | "movq %r9, " STRINGIZE_VALUE_OF(PROBE_CPU_R9_OFFSET) "(%rbp)" "\n" | |
100 | "movq %r10, " STRINGIZE_VALUE_OF(PROBE_CPU_R10_OFFSET) "(%rbp)" "\n" | |
101 | "movq %r11, " STRINGIZE_VALUE_OF(PROBE_CPU_R11_OFFSET) "(%rbp)" "\n" | |
102 | "movq %r12, " STRINGIZE_VALUE_OF(PROBE_CPU_R12_OFFSET) "(%rbp)" "\n" | |
103 | "movq %r13, " STRINGIZE_VALUE_OF(PROBE_CPU_R13_OFFSET) "(%rbp)" "\n" | |
104 | "movq %r14, " STRINGIZE_VALUE_OF(PROBE_CPU_R14_OFFSET) "(%rbp)" "\n" | |
105 | "movq %r15, " STRINGIZE_VALUE_OF(PROBE_CPU_R15_OFFSET) "(%rbp)" "\n" | |
106 | ||
107 | "movdqa %xmm0, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp)" "\n" | |
108 | "movdqa %xmm1, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp)" "\n" | |
109 | "movdqa %xmm2, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp)" "\n" | |
110 | "movdqa %xmm3, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp)" "\n" | |
111 | "movdqa %xmm4, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp)" "\n" | |
112 | "movdqa %xmm5, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp)" "\n" | |
113 | "movdqa %xmm6, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp)" "\n" | |
114 | "movdqa %xmm7, " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp)" "\n" | |
115 | ||
116 | "movq %rbp, %rdi" "\n" // the ProbeContext* arg. | |
117 | "call *" STRINGIZE_VALUE_OF(PROBE_PROBE_FUNCTION_OFFSET) "(%rbp)" "\n" | |
118 | ||
119 | // To enable probes to modify register state, we copy all registers | |
120 | // out of the ProbeContext before returning. | |
121 | ||
122 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EDX_OFFSET) "(%rbp), %rdx" "\n" | |
123 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EBX_OFFSET) "(%rbp), %rbx" "\n" | |
124 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ESI_OFFSET) "(%rbp), %rsi" "\n" | |
125 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EDI_OFFSET) "(%rbp), %rdi" "\n" | |
126 | ||
127 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R8_OFFSET) "(%rbp), %r8" "\n" | |
128 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R9_OFFSET) "(%rbp), %r9" "\n" | |
129 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R10_OFFSET) "(%rbp), %r10" "\n" | |
130 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R11_OFFSET) "(%rbp), %r11" "\n" | |
131 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R12_OFFSET) "(%rbp), %r12" "\n" | |
132 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R13_OFFSET) "(%rbp), %r13" "\n" | |
133 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R14_OFFSET) "(%rbp), %r14" "\n" | |
134 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_R15_OFFSET) "(%rbp), %r15" "\n" | |
135 | ||
136 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM0_OFFSET) "(%rbp), %xmm0" "\n" | |
137 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM1_OFFSET) "(%rbp), %xmm1" "\n" | |
138 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM2_OFFSET) "(%rbp), %xmm2" "\n" | |
139 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM3_OFFSET) "(%rbp), %xmm3" "\n" | |
140 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM4_OFFSET) "(%rbp), %xmm4" "\n" | |
141 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM5_OFFSET) "(%rbp), %xmm5" "\n" | |
142 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM6_OFFSET) "(%rbp), %xmm6" "\n" | |
143 | "movdqa " STRINGIZE_VALUE_OF(PROBE_CPU_XMM7_OFFSET) "(%rbp), %xmm7" "\n" | |
144 | ||
145 | // There are 6 more registers left to restore: | |
146 | // rax, rcx, rbp, rsp, rip, and rflags. | |
147 | // We need to handle these last few restores carefully because: | |
148 | // | |
149 | // 1. We need to push the return address on the stack for ret to use | |
150 | // That means we need to write to the stack. | |
151 | // 2. The user probe function may have altered the restore value of esp to | |
152 | // point to the vicinity of one of the restore values for the remaining | |
153 | // registers left to be restored. | |
154 | // That means, for requirement 1, we may end up writing over some of the | |
155 | // restore values. We can check for this, and first copy the restore | |
156 | // values to a "safe area" on the stack before commencing with the action | |
157 | // for requirement 1. | |
158 | // 3. For both requirement 2, we need to ensure that the "safe area" is | |
159 | // protected from interrupt handlers overwriting it. Hence, the esp needs | |
160 | // to be adjusted to include the "safe area" before we start copying the | |
161 | // the restore values. | |
162 | ||
163 | "movq %rbp, %rax" "\n" | |
164 | "addq $" STRINGIZE_VALUE_OF(PROBE_CPU_EFLAGS_OFFSET) ", %rax" "\n" | |
165 | "cmpq %rax, " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rbp)" "\n" | |
166 | "jg " SYMBOL_STRING(ctiMasmProbeTrampolineEnd) "\n" | |
167 | ||
168 | // Locate the "safe area" at 2x sizeof(ProbeContext) below where the new | |
169 | // rsp will be. This time we don't have to 32-byte align it because we're | |
170 | // not using to store any xmm regs. | |
171 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rbp), %rax" "\n" | |
172 | "subq $2 * " STRINGIZE_VALUE_OF(PROBE_SIZE) ", %rax" "\n" | |
173 | "movq %rax, %rsp" "\n" | |
174 | ||
175 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EAX_OFFSET) "(%rbp), %rcx" "\n" | |
176 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EAX_OFFSET) "(%rax)" "\n" | |
177 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ECX_OFFSET) "(%rbp), %rcx" "\n" | |
178 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_ECX_OFFSET) "(%rax)" "\n" | |
179 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EBP_OFFSET) "(%rbp), %rcx" "\n" | |
180 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EBP_OFFSET) "(%rax)" "\n" | |
181 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rbp), %rcx" "\n" | |
182 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rax)" "\n" | |
183 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EIP_OFFSET) "(%rbp), %rcx" "\n" | |
184 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EIP_OFFSET) "(%rax)" "\n" | |
185 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EFLAGS_OFFSET) "(%rbp), %rcx" "\n" | |
186 | "movq %rcx, " STRINGIZE_VALUE_OF(PROBE_CPU_EFLAGS_OFFSET) "(%rax)" "\n" | |
187 | "movq %rax, %rbp" "\n" | |
188 | ||
189 | SYMBOL_STRING(ctiMasmProbeTrampolineEnd) ":" "\n" | |
190 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ESP_OFFSET) "(%rbp), %rax" "\n" | |
191 | "subq $5 * " STRINGIZE_VALUE_OF(PTR_SIZE) ", %rax" "\n" | |
192 | // At this point, %rsp should be < %rax. | |
193 | ||
194 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EFLAGS_OFFSET) "(%rbp), %rcx" "\n" | |
195 | "movq %rcx, 0 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax)" "\n" | |
196 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EAX_OFFSET) "(%rbp), %rcx" "\n" | |
197 | "movq %rcx, 1 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax)" "\n" | |
198 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_ECX_OFFSET) "(%rbp), %rcx" "\n" | |
199 | "movq %rcx, 2 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax)" "\n" | |
200 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EBP_OFFSET) "(%rbp), %rcx" "\n" | |
201 | "movq %rcx, 3 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax)" "\n" | |
202 | "movq " STRINGIZE_VALUE_OF(PROBE_CPU_EIP_OFFSET) "(%rbp), %rcx" "\n" | |
203 | "movq %rcx, 4 * " STRINGIZE_VALUE_OF(PTR_SIZE) "(%rax)" "\n" | |
204 | "movq %rax, %rsp" "\n" | |
205 | ||
206 | "popfq" "\n" | |
207 | "popq %rax" "\n" | |
208 | "popq %rcx" "\n" | |
209 | "popq %rbp" "\n" | |
210 | "ret" "\n" | |
211 | ); | |
212 | #endif // USE(MASM_PROBE) | |
213 | ||
214 | #endif // COMPILER(GCC) | |
215 | ||
216 | } // namespace JSC | |
217 | ||
218 | #endif // JITStubsX86_64_h |