/*
 * Copyright (c) 2018 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#ifndef _PEXPERT_ARM_BOARD_CONFIG_H
#include <pexpert/arm64/board_config.h>
#endif

#if XNU_MONITOR
/* Exit path defines; for controlling PPL -> kernel transitions. */
#define PPL_EXIT_DISPATCH   0 /* This is a clean exit after a PPL request. */
#define PPL_EXIT_PANIC_CALL 1 /* The PPL has called panic. */
#define PPL_EXIT_BAD_CALL   2 /* The PPL request failed. */
#define PPL_EXIT_EXCEPTION  3 /* The PPL took an exception. */
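/*
 * Illustrative sketch only (not part of the original header): a PPL exit
 * sequence might select one of the codes above before handing control back
 * to the kernel. The register choice (x15) and label name below are
 * assumptions for illustration, not the actual xnu exit path.
 */
//	mov		x15, PPL_EXIT_BAD_CALL		// report that the PPL request failed
//	b		ppl_exit					// hypothetical exit routine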

#define KERNEL_MODE_ELR      ELR_GL11
#define KERNEL_MODE_FAR      FAR_GL11
#define KERNEL_MODE_ESR      ESR_GL11
#define KERNEL_MODE_SPSR     SPSR_GL11
#define KERNEL_MODE_ASPSR    ASPSR_GL11
#define KERNEL_MODE_VBAR     VBAR_GL11
#define KERNEL_MODE_TPIDR    TPIDR_GL11

#define GUARDED_MODE_ELR     ELR_EL1
#define GUARDED_MODE_FAR     FAR_EL1
#define GUARDED_MODE_ESR     ESR_EL1
#define GUARDED_MODE_SPSR    SPSR_EL1
#define GUARDED_MODE_ASPSR   ASPSR_EL1
#define GUARDED_MODE_VBAR    VBAR_EL1
#define GUARDED_MODE_TPIDR   TPIDR_EL1
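
/*
 * Illustrative sketch only (not from the original header): exception-handling
 * code can name the per-mode register banks through these aliases rather than
 * hard-coding ELR_GL11 or ELR_EL1 directly. Destination registers here are
 * arbitrary choices for illustration.
 */
//	mrs		x1, KERNEL_MODE_ELR			// read the kernel-mode exception link register
//	mrs		x2, GUARDED_MODE_ESR		// read the guarded-mode exception syndrome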

/*
 * GET_PMAP_CPU_DATA
 *
 * Retrieves the PPL per-CPU data for the current CPU.
 *   arg0 - Address of the PPL per-CPU data is returned through this
 *   arg1 - Scratch register
 *   arg2 - Scratch register
 */
.macro GET_PMAP_CPU_DATA
	/* Get the CPU ID. */
	mrs		$0, MPIDR_EL1
#ifdef CPU_CLUSTER_OFFSETS
	ubfx	$1, $0, MPIDR_AFF1_SHIFT, MPIDR_AFF1_WIDTH
	cmp		$1, __ARM_CLUSTER_COUNT__
	b.hs	.
	adrp	$2, EXT(pmap_cluster_offsets)@page
	add		$2, $2, EXT(pmap_cluster_offsets)@pageoff
	ldr		$1, [$2, $1, lsl #3]
	and		$0, $0, MPIDR_AFF0_MASK
	add		$0, $0, $1
#else
	and		$0, $0, MPIDR_AFF0_MASK
#endif

	/* Get the PPL CPU data array. */
	adrp	$1, EXT(pmap_cpu_data_array)@page
	add		$1, $1, EXT(pmap_cpu_data_array)@pageoff

	/*
	 * Sanity check the CPU ID (this is not a panic because this pertains to
	 * the hardware configuration; this should only fail if our
	 * understanding of the hardware is incorrect).
	 */
	cmp		$0, MAX_CPUS
	b.hs	.

	mov		$2, PMAP_CPU_DATA_ARRAY_ENTRY_SIZE
	/* Get the PPL per-CPU data. */
	madd	$0, $0, $2, $1
.endmacro
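
/*
 * Illustrative invocation (assumed caller, not defined in this header):
 * load the current CPU's PPL per-CPU data pointer into x12, using x13 and
 * x14 as scratch. Register choices are arbitrary.
 */
//	GET_PMAP_CPU_DATA	x12, x13, x14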
#endif /* XNU_MONITOR */

/*
 * INIT_SAVED_STATE_FLAVORS
 *
 * Initializes the saved state flavors of a new saved state structure
 *   arg0 - saved state pointer
 *   arg1 - 32-bit scratch reg
 *   arg2 - 32-bit scratch reg
 */
.macro INIT_SAVED_STATE_FLAVORS
	mov		$1, ARM_SAVED_STATE64			// Set saved state to 64-bit flavor
	mov		$2, ARM_SAVED_STATE64_COUNT
	stp		$1, $2, [$0, SS_FLAVOR]
	mov		$1, ARM_NEON_SAVED_STATE64		// Set neon state to 64-bit flavor
	str		$1, [$0, NS_FLAVOR]
	mov		$1, ARM_NEON_SAVED_STATE64_COUNT
	str		$1, [$0, NS_COUNT]
.endmacro
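
/*
 * Illustrative invocation (assumed caller, not defined in this header):
 * mark a freshly allocated save area pointed to by x21 as 64-bit flavored,
 * using w9 and w10 as 32-bit scratch. Register choices are arbitrary.
 */
//	INIT_SAVED_STATE_FLAVORS	x21, w9, w10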

/*
 * SPILL_REGISTERS
 *
 * Spills the current set of registers (excluding x0, x1, sp) to the specified
 * save area.
 *   x0 - Address of the save area
 */
.macro SPILL_REGISTERS
	stp		x2, x3, [x0, SS64_X2]			// Save remaining GPRs
	stp		x4, x5, [x0, SS64_X4]
	stp		x6, x7, [x0, SS64_X6]
	stp		x8, x9, [x0, SS64_X8]
	stp		x10, x11, [x0, SS64_X10]
	stp		x12, x13, [x0, SS64_X12]
	stp		x14, x15, [x0, SS64_X14]
	stp		x16, x17, [x0, SS64_X16]
	stp		x18, x19, [x0, SS64_X18]
	stp		x20, x21, [x0, SS64_X20]
	stp		x22, x23, [x0, SS64_X22]
	stp		x24, x25, [x0, SS64_X24]
	stp		x26, x27, [x0, SS64_X26]
	stp		x28, fp, [x0, SS64_X28]
	str		lr, [x0, SS64_LR]

	/* Save arm_neon_saved_state64 */

	stp		q0, q1, [x0, NS64_Q0]
	stp		q2, q3, [x0, NS64_Q2]
	stp		q4, q5, [x0, NS64_Q4]
	stp		q6, q7, [x0, NS64_Q6]
	stp		q8, q9, [x0, NS64_Q8]
	stp		q10, q11, [x0, NS64_Q10]
	stp		q12, q13, [x0, NS64_Q12]
	stp		q14, q15, [x0, NS64_Q14]
	stp		q16, q17, [x0, NS64_Q16]
	stp		q18, q19, [x0, NS64_Q18]
	stp		q20, q21, [x0, NS64_Q20]
	stp		q22, q23, [x0, NS64_Q22]
	stp		q24, q25, [x0, NS64_Q24]
	stp		q26, q27, [x0, NS64_Q26]
	stp		q28, q29, [x0, NS64_Q28]
	stp		q30, q31, [x0, NS64_Q30]

	mrs		x22, ELR_EL1					// Get exception link register
	mrs		x23, SPSR_EL1					// Load CPSR into var reg x23
	mrs		x24, FPSR
	mrs		x25, FPCR

#if defined(HAS_APPLE_PAC)
	/* Save x1 and LR to preserve across call */
	mov		x21, x1
	mov		x20, lr

	/*
	 * Create thread state signature
	 *
	 * Arg0: The ARM context pointer
	 * Arg1: The PC value to sign
	 * Arg2: The CPSR value to sign
	 * Arg3: The LR value to sign
	 * Arg4: The X16 value to sign
	 * Arg5: The X17 value to sign
	 */
	mov		x1, x22
	mov		w2, w23
	mov		x3, x20
	mov		x4, x16
	mov		x5, x17
	bl		_ml_sign_thread_state

	mov		lr, x20
	mov		x1, x21
#endif /* defined(HAS_APPLE_PAC) */

	str		x22, [x0, SS64_PC]				// Save ELR to PCB
	str		w23, [x0, SS64_CPSR]			// Save CPSR to PCB
	str		w24, [x0, NS64_FPSR]
	str		w25, [x0, NS64_FPCR]

	mrs		x20, FAR_EL1
	mrs		x21, ESR_EL1

	str		x20, [x0, SS64_FAR]
	str		w21, [x0, SS64_ESR]
.endmacro
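
/*
 * Illustrative use (assumed exception-vector caller, not defined in this
 * header): x0 must already point at the save area, and x0/x1/sp must be
 * stashed separately beforehand since this macro does not spill them.
 */
//	SPILL_REGISTERS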

.macro DEADLOOP
	b		.
.endmacro