apple/xnu (xnu-7195.50.7.100.1) / osfmk / arm64 / pgtrace_decoder.c
1 /*
2 * Copyright (c) 2015 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28
29 #if CONFIG_PGTRACE
30 #include <kern/debug.h>
31 #include <kern/clock.h>
32 #include <pexpert/pexpert.h>
33 #include <arm/pmap.h>
34 #include "pgtrace_decoder.h"
35
36 //-------------------------------------------------------------------
37 // Macros
38 //
39 #define DBG 1
40 #if DBG == 1
41 #define INLINE __attribute__((noinline))
42 #else
43 #define INLINE inline
44 #endif
45
46 #define BITS(v, msb, lsb) ((v) << (31-(msb)) >> (31-(msb)) >> (lsb))
47 #define READ_GPR_X(ss, n, v) { \
48 if (__builtin_expect(n < 31, 1)) (v) = (ss)->ss_64.x[(n)]; \
49 else if (n == 31) (v) = 0; \
50 else { panic("Invalid GPR x%d", n); __builtin_unreachable(); } \
51 }
52 #define READ_GPR_W(ss, n, v) { \
53 if (__builtin_expect(n < 31, 1)) (v) = *(uint32_t*)&((ss)->ss_64.x[(n)]); \
54 else if (n == 31) (v) = 0; \
55 else { panic("Invalid GPR w%d", n); __builtin_unreachable(); } \
56 }
57 #define WRITE_GPR_X(ss, n, v) { \
58 if (__builtin_expect(n < 31, 1)) (ss)->ss_64.x[(n)] = (v); \
59 else if (n == 31) {} \
60 else { panic("Invalid GPR x%d", n); __builtin_unreachable(); } \
61 }
62 #define WRITE_GPR_W(ss, n, v) { \
63 if (__builtin_expect(n < 31, 1)) *(uint32_t*)&((ss)->ss_64.x[(n)]) = (v); \
64 else if (n == 31) {} \
65 else { panic("Invalid GPR w%d", n); __builtin_unreachable(); } \
66 }
67 #define SIGN_EXTEND_64(val, width) (((int64_t)(val) << (64 - (width)) >> (64 - (width))))
68 #define ZERO_EXTEND_64(val, width) (((uint64_t)(val) << (64 - (width))) >> (64 - (width)))
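/*
 * Worked example: LDR X0, [X19] encodes as 0xF9400260, so BITS(inst, 31, 30) == 3
 * (the size field), BITS(inst, 9, 5) == 19 (Rn) and BITS(inst, 4, 0) == 0 (Rt);
 * SIGN_EXTEND_64(0x1ff, 9) == -1.
 */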
69
70 //-------------------------------------------------------------------
71 // Types
72 //
73 typedef int (*run_t)(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
74
75 typedef struct {
76 vm_offset_t addr;
77 uint64_t bytes;
78 } instruction_info_t;
79
80 typedef bool (*get_info_t)(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
81
82 typedef struct {
83 uint32_t mask;
84 uint32_t value;
85 run_t run;
86 get_info_t get_info;
87 } type_entry_t;
88
89 //-------------------------------------------------------------------
90 // Statics
91 //
92 static int run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
93 static int run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
94 static int run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
95 static int run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
96 static int run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
97 static int run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
98 static int run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
99 static int run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
100 static int run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
101 static int run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
102 static int run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
103 static int run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
104 static int run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
105 static bool get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
106 static bool get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
107 static bool get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
108 static bool get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
109 static bool get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
110 static bool get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
111 static bool get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
112 static bool get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
113 static bool get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
114 static bool get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
115 static bool get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
116 static bool get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
117 static bool get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
118
119 // Table from ARM DDI 0487A.a C3.3
120 static type_entry_t typetbl[] = {
121 { 0x3f000000, 0x08000000, run_c336, get_info_c336 }, // Load/store exclusive
122 { 0x3b000000, 0x18000000, run_c335, get_info_c335 }, // Load register (literal)
123 { 0x3b800000, 0x28000000, run_c337, get_info_c337 }, // Load/store no-allocate pair (offset)
124 { 0x3b800000, 0x28800000, run_c3315, get_info_c3315 }, // Load/store register pair (post-indexed)
125 { 0x3b800000, 0x29000000, run_c3314, get_info_c3314 }, // Load/store register pair (offset)
126 { 0x3b800000, 0x29800000, run_c3316, get_info_c3316 }, // Load/store register pair (pre-indexed)
127 { 0x3b200c00, 0x38000000, run_c3312, get_info_c3312 }, // Load/store register (unscaled immediate)
128 { 0x3b200c00, 0x38000400, run_c338, get_info_c338 }, // Load/store register (immediate post-indexed)
129 { 0x3b200c00, 0x38000800, run_c3311, get_info_c3311 }, // Load/store register (unprivileged)
130 { 0x3b200c00, 0x38000c00, run_c339, get_info_c339 }, // Load/store register (immediate pre-indexed)
131 { 0x3b200c00, 0x38200800, run_c3310, get_info_c3310 }, // Load/store register (register offset)
132 { 0x3b000000, 0x39000000, run_c3313, get_info_c3313 }, // Load/store register (unsigned immediate)
133
134 { 0xbfbf0000, 0x0c000000, run_simd, get_info_simd }, // AdvSIMD load/store multiple structures
135 { 0xbfa00000, 0x0c800000, run_simd, get_info_simd }, // AdvSIMD load/store multiple structures (post-indexed)
136 { 0xbf980000, 0x0d000000, run_simd, get_info_simd }, // AdvSIMD load/store single structure
137 { 0xbf800000, 0x0d800000, run_simd, get_info_simd } // AdvSIMD load/store single structure (post-indexed)
138 };
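/*
 * A trapped instruction matches entry i when (inst & typetbl[i].mask) == typetbl[i].value;
 * run() re-executes the access against the supplied mapping and fills pgtrace_run_result_t,
 * while get_info() only computes the target address and width of the access.
 */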
139
140 static pgtrace_stats_t stats;
141
142 INLINE static void
143 do_str(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
144 {
145 uint32_t wt;
146 uint64_t xt;
147
148 res->rr_rw = PGTRACE_RW_STORE;
149
150 if (size == 8) {
151 READ_GPR_X(ss, Rt, xt);
152 res->rr_addrdata[0].ad_data = xt;
153 } else {
154 READ_GPR_W(ss, Rt, wt);
155 res->rr_addrdata[0].ad_data = wt;
156 }
157
158 if (size == 1) {
159 __asm__ volatile ("strb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
160 } else if (size == 2) {
161 __asm__ volatile ("strh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
162 } else if (size == 4) {
163 __asm__ volatile ("str %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
164 } else if (size == 8) {
165 __asm__ volatile ("str %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
166 } else {
167 panic("%s Invalid size %d\n", __func__, size);
168 }
169
170 stats.stat_decoder.sd_str++;
171 }
172
173 INLINE static void
174 do_ldr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
175 {
176 uint32_t wt;
177 uint64_t xt;
178
179 res->rr_rw = PGTRACE_RW_LOAD;
180
181 if (size == 1) {
182 __asm__ volatile ("ldrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
183 } else if (size == 2) {
184 __asm__ volatile ("ldrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
185 } else if (size == 4) {
186 __asm__ volatile ("ldr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
187 } else if (size == 8) {
188 __asm__ volatile ("ldr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
189 } else {
190 panic("%s Invalid size %d\n", __func__, size);
191 }
192
193 if (size == 8) {
194 WRITE_GPR_X(ss, Rt, xt);
195 res->rr_addrdata[0].ad_data = xt;
196 } else {
197 WRITE_GPR_W(ss, Rt, wt);
198 res->rr_addrdata[0].ad_data = wt;
199 }
200
201 stats.stat_decoder.sd_ldr++;
202 }
203
204 INLINE static void
205 do_stp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
206 {
207 uint32_t wt1, wt2;
208 uint64_t xt1, xt2;
209
210 if (size == 4) {
211 READ_GPR_W(ss, Rt, wt1);
212 READ_GPR_W(ss, Rt2, wt2);
213 __asm__ volatile ("stp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
214 res->rr_rw = PGTRACE_RW_STORE;
215 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
216 res->rr_addrdata[0].ad_data = wt1;
217 res->rr_addrdata[1].ad_data = wt2;
218 } else if (size == 8) {
219 READ_GPR_X(ss, Rt, xt1);
220 READ_GPR_X(ss, Rt2, xt2);
221 __asm__ volatile ("stp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
222 res->rr_rw = PGTRACE_RW_STORE;
223 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
224 res->rr_addrdata[0].ad_data = xt1;
225 res->rr_addrdata[1].ad_data = xt2;
226 } else {
227 panic("%s Invalid size %d\n", __func__, size);
228 }
229
230 stats.stat_decoder.sd_stp++;
231 }
232
233 INLINE static void
234 do_ldp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
235 {
236 uint32_t wt1, wt2;
237 uint64_t xt1, xt2;
238
239 if (size == 4) {
240 __asm__ volatile ("ldp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
241 WRITE_GPR_W(ss, Rt, wt1);
242 WRITE_GPR_W(ss, Rt2, wt2);
243 res->rr_rw = PGTRACE_RW_LOAD;
244 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
245 res->rr_addrdata[0].ad_data = wt1;
246 res->rr_addrdata[1].ad_data = wt2;
247 } else if (size == 8) {
248 __asm__ volatile ("ldp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
249 WRITE_GPR_X(ss, Rt, xt1);
250 WRITE_GPR_X(ss, Rt2, xt2);
251 res->rr_rw = PGTRACE_RW_LOAD;
252 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
253 res->rr_addrdata[0].ad_data = xt1;
254 res->rr_addrdata[1].ad_data = xt2;
255 } else {
256 panic("%s Invalid size %d\n", __func__, size);
257 }
258
259 stats.stat_decoder.sd_ldp++;
260 }
261
262 INLINE static void
263 do_ldpsw(uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
264 {
265 uint64_t xt1, xt2;
266
267 __asm__ volatile ("ldpsw %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
268 WRITE_GPR_X(ss, Rt, xt1);
269 WRITE_GPR_X(ss, Rt2, xt2);
270 res->rr_rw = PGTRACE_RW_LOAD;
271 res->rr_addrdata[1].ad_addr = va + sizeof(uint32_t);
272 res->rr_addrdata[0].ad_data = xt1;
273 res->rr_addrdata[1].ad_data = xt2;
274
275 stats.stat_decoder.sd_ldpsw++;
276 }
277
278 INLINE static void
279 do_ldrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
280 {
281 uint32_t wt;
282 uint64_t xt;
283
284 res->rr_rw = PGTRACE_RW_LOAD;
285
286 if (size == 1 && extsize == 4) {
287 __asm__ volatile ("ldrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
288 } else if (size == 1 && extsize == 8) {
289 __asm__ volatile ("ldrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
290 } else if (size == 2 && extsize == 4) {
291 __asm__ volatile ("ldrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
292 } else if (size == 2 && extsize == 8) {
293 __asm__ volatile ("ldrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
294 } else if (size == 4 && extsize == 8) {
295 __asm__ volatile ("ldrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
296 } else {
297 panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);
298 }
299
300 if (extsize == 8) {
301 WRITE_GPR_X(ss, Rt, xt);
302 res->rr_addrdata[0].ad_data = xt;
303 } else {
304 WRITE_GPR_W(ss, Rt, wt);
305 res->rr_addrdata[0].ad_data = wt;
306 }
307
308 stats.stat_decoder.sd_ldrs++;
309 }
310
311 INLINE static void
312 do_ldtrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
313 {
314 uint32_t wt;
315 uint64_t xt;
316
317 res->rr_rw = PGTRACE_RW_LOAD;
318
319 if (size == 1 && extsize == 4) {
320 __asm__ volatile ("ldtrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
321 } else if (size == 1 && extsize == 8) {
322 __asm__ volatile ("ldtrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
323 } else if (size == 2 && extsize == 4) {
324 __asm__ volatile ("ldtrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
325 } else if (size == 2 && extsize == 8) {
326 __asm__ volatile ("ldtrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
327 } else if (size == 4 && extsize == 8) {
328 __asm__ volatile ("ldtrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
329 } else {
330 panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);
331 }
332
333 if (extsize == 8) {
334 WRITE_GPR_X(ss, Rt, xt);
335 res->rr_addrdata[0].ad_data = xt;
336 } else {
337 WRITE_GPR_W(ss, Rt, wt);
338 res->rr_addrdata[0].ad_data = wt;
339 }
340
341 stats.stat_decoder.sd_ldtrs++;
342 }
343
344 INLINE static void
345 do_ldtr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
346 {
347 uint32_t wt;
348 uint64_t xt;
349
350 res->rr_rw = PGTRACE_RW_LOAD;
351
352 if (size == 1) {
353 __asm__ volatile ("ldtrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
354 } else if (size == 2) {
355 __asm__ volatile ("ldtrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
356 } else if (size == 4) {
357 __asm__ volatile ("ldtr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
358 } else if (size == 8) {
359 __asm__ volatile ("ldtr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
360 } else {
361 panic("%s Invalid size %d\n", __func__, size);
362 }
363
364 if (size == 8) {
365 WRITE_GPR_X(ss, Rt, xt);
366 res->rr_addrdata[0].ad_data = xt;
367 } else {
368 WRITE_GPR_W(ss, Rt, wt);
369 res->rr_addrdata[0].ad_data = wt;
370 }
371
372 stats.stat_decoder.sd_ldtr++;
373 }
374
375 INLINE static void
376 do_sttr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
377 {
378 uint32_t wt;
379 uint64_t xt;
380
381 res->rr_rw = PGTRACE_RW_STORE;
382
383 if (size == 8) {
384 READ_GPR_X(ss, Rt, xt);
385 res->rr_addrdata[0].ad_data = xt;
386 } else {
387 READ_GPR_W(ss, Rt, wt);
388 res->rr_addrdata[0].ad_data = wt;
389 }
390
391 if (size == 1) {
392 __asm__ volatile ("sttrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
393 } else if (size == 2) {
394 __asm__ volatile ("sttrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
395 } else if (size == 4) {
396 __asm__ volatile ("sttr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
397 } else if (size == 8) {
398 __asm__ volatile ("sttr %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
399 } else {
400 panic("%s Invalid size %d\n", __func__, size);
401 }
402
403 stats.stat_decoder.sd_sttr++;
404 }
405
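/*
 * For PRFM the Rt field carries the prefetch operation (prfop = type:target:policy),
 * e.g. 0 == PLDL1KEEP and 16 == PSTL1KEEP; reserved values are emitted as raw
 * immediates and are treated as hints (effectively NOPs).
 */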
406 INLINE static void
407 do_prfm(uint8_t Rt, vm_offset_t va, pgtrace_run_result_t *res)
408 {
409 if (Rt == 0) {
410 __asm__ volatile ("prfm pldl1keep, [%[va]]\n" : : [va] "r"(va));
411 } else if (Rt == 1) {
412 __asm__ volatile ("prfm pldl1strm, [%[va]]\n" : : [va] "r"(va));
413 } else if (Rt == 2) {
414 __asm__ volatile ("prfm pldl2keep, [%[va]]\n" : : [va] "r"(va));
415 } else if (Rt == 3) {
416 __asm__ volatile ("prfm pldl2strm, [%[va]]\n" : : [va] "r"(va));
417 } else if (Rt == 4) {
418 __asm__ volatile ("prfm pldl3keep, [%[va]]\n" : : [va] "r"(va));
419 } else if (Rt == 5) {
420 __asm__ volatile ("prfm pldl3strm, [%[va]]\n" : : [va] "r"(va));
421 } else if (Rt == 6) {
422 __asm__ volatile ("prfm #6, [%[va]]\n" : : [va] "r"(va));
423 } else if (Rt == 7) {
424 __asm__ volatile ("prfm #7, [%[va]]\n" : : [va] "r"(va));
425 } else if (Rt == 8) {
426 __asm__ volatile ("prfm #8, [%[va]]\n" : : [va] "r"(va));
427 } else if (Rt == 9) {
428 __asm__ volatile ("prfm #9, [%[va]]\n" : : [va] "r"(va));
429 } else if (Rt == 10) {
430 __asm__ volatile ("prfm #10, [%[va]]\n" : : [va] "r"(va));
431 } else if (Rt == 11) {
432 __asm__ volatile ("prfm #11, [%[va]]\n" : : [va] "r"(va));
433 } else if (Rt == 12) {
434 __asm__ volatile ("prfm #12, [%[va]]\n" : : [va] "r"(va));
435 } else if (Rt == 13) {
436 __asm__ volatile ("prfm #13, [%[va]]\n" : : [va] "r"(va));
437 } else if (Rt == 14) {
438 __asm__ volatile ("prfm #14, [%[va]]\n" : : [va] "r"(va));
439 } else if (Rt == 15) {
440 __asm__ volatile ("prfm #15, [%[va]]\n" : : [va] "r"(va));
441 } else if (Rt == 16) {
442 __asm__ volatile ("prfm pstl1keep, [%[va]]\n" : : [va] "r"(va));
443 } else if (Rt == 17) {
444 __asm__ volatile ("prfm pstl1strm, [%[va]]\n" : : [va] "r"(va));
445 } else if (Rt == 18) {
446 __asm__ volatile ("prfm pstl2keep, [%[va]]\n" : : [va] "r"(va));
447 } else if (Rt == 19) {
448 __asm__ volatile ("prfm pstl2strm, [%[va]]\n" : : [va] "r"(va));
449 } else if (Rt == 20) {
450 __asm__ volatile ("prfm pstl3keep, [%[va]]\n" : : [va] "r"(va));
451 } else if (Rt == 21) {
452 __asm__ volatile ("prfm pstl3strm, [%[va]]\n" : : [va] "r"(va));
453 } else if (Rt == 22) {
454 __asm__ volatile ("prfm #22, [%[va]]\n" : : [va] "r"(va));
455 } else if (Rt == 23) {
456 __asm__ volatile ("prfm #23, [%[va]]\n" : : [va] "r"(va));
457 } else if (Rt == 24) {
458 __asm__ volatile ("prfm #24, [%[va]]\n" : : [va] "r"(va));
459 } else if (Rt == 25) {
460 __asm__ volatile ("prfm #25, [%[va]]\n" : : [va] "r"(va));
461 } else if (Rt == 26) {
462 __asm__ volatile ("prfm #26, [%[va]]\n" : : [va] "r"(va));
463 } else if (Rt == 27) {
464 __asm__ volatile ("prfm #27, [%[va]]\n" : : [va] "r"(va));
465 } else if (Rt == 28) {
466 __asm__ volatile ("prfm #28, [%[va]]\n" : : [va] "r"(va));
467 } else if (Rt == 29) {
468 __asm__ volatile ("prfm #29, [%[va]]\n" : : [va] "r"(va));
469 } else if (Rt == 30) {
470 __asm__ volatile ("prfm #30, [%[va]]\n" : : [va] "r"(va));
471 } else if (Rt == 31) {
472 __asm__ volatile ("prfm #31, [%[va]]\n" : : [va] "r"(va));
473 } else {
474 panic("%s Invalid Rt %d\n", __func__, Rt);
475 }
476
477 res->rr_num = 0;
478 res->rr_rw = PGTRACE_RW_PREFETCH;
479
480 stats.stat_decoder.sd_prfm++;
481 }
482
483 #define CANNOTDECODE(msg, inst) do {\
484 panic("%s: " msg " inst=%x not supported yet\n", __func__, inst);\
485 } while (0)
486
487 static int
488 run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
489 {
490 #pragma unused(pa,va,ss,res)
491 CANNOTDECODE("simd", inst);
492 return 0;
493 }
494
495 static int
496 run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
497 {
498 uint32_t opc = BITS(inst, 31, 30),
499 v = BITS(inst, 26, 26),
500 Rt = BITS(inst, 4, 0);
501 uint8_t fields = (opc << 1) | v;
502
503 res->rr_num = 1;
504 res->rr_addrdata[0].ad_addr = pa;
505
506 if (fields == 0) {
507 do_ldr(4, Rt, va, ss, res);
508 } else if ((fields == 1) ||
509 (fields == 3) ||
510 (fields == 5)) {
511 CANNOTDECODE("simd", inst);
512 } else if (fields == 2) {
513 do_ldr(8, Rt, va, ss, res);
514 } else if (fields == 4) {
515 do_ldrs(4, 8, Rt, va, ss, res);
516 } else if (fields == 6) {
517 do_prfm(Rt, va, res);
518 } else {
519 CANNOTDECODE("unknown", inst);
520 }
521
522 stats.stat_decoder.sd_c335++;
523
524 return 0;
525 }
526
527 static int
528 run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
529 {
530 uint32_t ws, wt, wt1, wt2;
531 uint64_t xt, xt1, xt2;
532 uint32_t size = BITS(inst, 31, 30),
533 o2 = BITS(inst, 23, 23),
534 L = BITS(inst, 22, 22),
535 o1 = BITS(inst, 21, 21),
536 Rs = BITS(inst, 20, 16),
537 o0 = BITS(inst, 15, 15),
538 Rt2 = BITS(inst, 14, 10),
539 Rt = BITS(inst, 4, 0);
540 uint8_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;
541
542 kprintf("%s Load/store exclusive on device memory???\n", __func__);
543
544 res->rr_num = 1;
545 res->rr_addrdata[0].ad_addr = pa;
546
547 switch (fields) {
548 case 0:
549 READ_GPR_W(ss, Rt, wt);
550 __asm__ volatile ("stxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
551 WRITE_GPR_W(ss, Rs, ws);
552 res->rr_rw = PGTRACE_RW_STORE;
553 res->rr_addrdata[0].ad_data = wt;
554 break;
555 case 1:
556 READ_GPR_W(ss, Rt, wt);
557 __asm__ volatile ("stlxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
558 WRITE_GPR_W(ss, Rs, ws);
559 res->rr_rw = PGTRACE_RW_STORE;
560 res->rr_addrdata[0].ad_data = wt;
561 break;
562 case 4:
563 __asm__ volatile ("ldxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
564 WRITE_GPR_W(ss, Rt, wt);
565 res->rr_rw = PGTRACE_RW_LOAD;
566 res->rr_addrdata[0].ad_data = wt;
567 break;
568 case 5:
569 __asm__ volatile ("ldaxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
570 WRITE_GPR_W(ss, Rt, wt);
571 res->rr_rw = PGTRACE_RW_LOAD;
572 res->rr_addrdata[0].ad_data = wt;
573 break;
574 case 9:
575 READ_GPR_W(ss, Rt, wt);
576 __asm__ volatile ("stlrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
577 res->rr_rw = PGTRACE_RW_STORE;
578 res->rr_addrdata[0].ad_data = wt;
579 break;
580 case 0xd:
581 __asm__ volatile ("ldarb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
582 WRITE_GPR_W(ss, Rt, wt);
583 res->rr_rw = PGTRACE_RW_LOAD;
584 res->rr_addrdata[0].ad_data = wt;
585 break;
586 case 0x10:
587 READ_GPR_W(ss, Rt, wt);
588 __asm__ volatile ("stxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
589 WRITE_GPR_W(ss, Rs, ws);
590 res->rr_rw = PGTRACE_RW_STORE;
591 res->rr_addrdata[0].ad_data = wt;
592 break;
593 case 0x11:
594 READ_GPR_W(ss, Rt, wt);
595 __asm__ volatile ("stlxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
596 WRITE_GPR_W(ss, Rs, ws);
597 res->rr_rw = PGTRACE_RW_STORE;
598 res->rr_addrdata[0].ad_data = wt;
599 break;
600 case 0x14:
601 __asm__ volatile ("ldxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
602 WRITE_GPR_W(ss, Rt, wt);
603 res->rr_rw = PGTRACE_RW_LOAD;
604 res->rr_addrdata[0].ad_data = wt;
605 break;
606 case 0x15:
607 __asm__ volatile ("ldaxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
608 WRITE_GPR_W(ss, Rt, wt);
609 res->rr_rw = PGTRACE_RW_LOAD;
610 res->rr_addrdata[0].ad_data = wt;
611 break;
612 case 0x19:
613 READ_GPR_W(ss, Rt, wt);
614 __asm__ volatile ("stlrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
615 res->rr_rw = PGTRACE_RW_STORE;
616 res->rr_addrdata[0].ad_data = wt;
617 break;
618 case 0x1d:
619 __asm__ volatile ("ldarh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
620 WRITE_GPR_W(ss, Rt, wt);
621 res->rr_rw = PGTRACE_RW_LOAD;
622 res->rr_addrdata[0].ad_data = wt;
623 break;
624 case 0x20:
625 READ_GPR_W(ss, Rt, wt);
626 __asm__ volatile ("stxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
627 WRITE_GPR_W(ss, Rs, ws);
628 res->rr_rw = PGTRACE_RW_STORE;
629 res->rr_addrdata[0].ad_data = wt;
630 break;
631 case 0x21:
632 READ_GPR_W(ss, Rt, wt);
633 __asm__ volatile ("stlxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
634 WRITE_GPR_W(ss, Rs, ws);
635 res->rr_rw = PGTRACE_RW_STORE;
636 res->rr_addrdata[0].ad_data = wt;
637 break;
638 case 0x22:
639 READ_GPR_W(ss, Rt, wt1);
640 READ_GPR_W(ss, Rt2, wt2);
641 __asm__ volatile ("stxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
642 WRITE_GPR_W(ss, Rs, ws);
643 res->rr_rw = PGTRACE_RW_STORE;
644 res->rr_num = 2;
645 res->rr_addrdata[0].ad_addr = va;
646 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
647 res->rr_addrdata[0].ad_data = wt1;
648 res->rr_addrdata[1].ad_data = wt2;
649 break;
650 case 0x23:
651 READ_GPR_W(ss, Rt, wt1);
652 READ_GPR_W(ss, Rt2, wt2);
653 __asm__ volatile ("stlxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
654 WRITE_GPR_W(ss, Rs, ws);
655 res->rr_rw = PGTRACE_RW_STORE;
656 res->rr_num = 2;
657 res->rr_addrdata[0].ad_addr = va;
658 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
659 res->rr_addrdata[0].ad_data = wt1;
660 res->rr_addrdata[1].ad_data = wt2;
661 break;
662 case 0x24:
663 __asm__ volatile ("ldxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
664 WRITE_GPR_W(ss, Rt, wt);
665 res->rr_rw = PGTRACE_RW_LOAD;
666 res->rr_addrdata[0].ad_data = wt;
667 break;
668 case 0x25:
669 __asm__ volatile ("ldaxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
670 WRITE_GPR_W(ss, Rt, wt);
671 res->rr_rw = PGTRACE_RW_LOAD;
672 res->rr_addrdata[0].ad_data = wt;
673 break;
674 case 0x26:
675 __asm__ volatile ("ldxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
676 WRITE_GPR_W(ss, Rt, wt1);
677 WRITE_GPR_W(ss, Rt2, wt2);
678 res->rr_rw = PGTRACE_RW_LOAD;
679 res->rr_num = 2;
680 res->rr_addrdata[0].ad_addr = va;
681 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
682 res->rr_addrdata[0].ad_data = wt1;
683 res->rr_addrdata[1].ad_data = wt2;
684 break;
685 case 0x27:
686 __asm__ volatile ("ldaxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
687 WRITE_GPR_W(ss, Rt, wt1);
688 WRITE_GPR_W(ss, Rt2, wt2);
689 res->rr_rw = PGTRACE_RW_LOAD;
690 res->rr_num = 2;
691 res->rr_addrdata[0].ad_addr = va;
692 res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
693 res->rr_addrdata[0].ad_data = wt1;
694 res->rr_addrdata[1].ad_data = wt2;
695 break;
696 case 0x29:
697 READ_GPR_W(ss, Rt, wt);
698 __asm__ volatile ("stlr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
699 res->rr_rw = PGTRACE_RW_STORE;
700 res->rr_addrdata[0].ad_data = wt;
701 break;
702 case 0x2d:
703 __asm__ volatile ("ldar %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
704 WRITE_GPR_W(ss, Rt, wt);
705 res->rr_rw = PGTRACE_RW_LOAD;
706 res->rr_addrdata[0].ad_data = wt;
707 break;
708 case 0x30:
709 READ_GPR_X(ss, Rt, xt);
710 __asm__ volatile ("stxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
711 WRITE_GPR_W(ss, Rs, ws);
712 res->rr_rw = PGTRACE_RW_STORE;
713 res->rr_addrdata[0].ad_data = xt;
714 break;
715 case 0x31:
716 READ_GPR_X(ss, Rt, xt);
717 __asm__ volatile ("stlxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
718 WRITE_GPR_W(ss, Rs, ws);
719 res->rr_rw = PGTRACE_RW_STORE;
720 res->rr_addrdata[0].ad_data = xt;
721 break;
722 case 0x32:
723 READ_GPR_X(ss, Rt, xt1);
724 READ_GPR_X(ss, Rt2, xt2);
725 __asm__ volatile ("stxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
726 WRITE_GPR_W(ss, Rs, ws);
727 res->rr_rw = PGTRACE_RW_STORE;
728 res->rr_num = 2;
729 res->rr_addrdata[0].ad_addr = va;
730 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
731 res->rr_addrdata[0].ad_data = xt1;
732 res->rr_addrdata[1].ad_data = xt2;
733 break;
734 case 0x33:
735 READ_GPR_X(ss, Rt, xt1);
736 READ_GPR_X(ss, Rt2, xt2);
737 __asm__ volatile ("stlxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
738 WRITE_GPR_W(ss, Rs, ws);
739 res->rr_rw = PGTRACE_RW_STORE;
740 res->rr_num = 2;
741 res->rr_addrdata[0].ad_addr = va;
742 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
743 res->rr_addrdata[0].ad_data = xt1;
744 res->rr_addrdata[1].ad_data = xt2;
745 break;
746 case 0x34:
747 __asm__ volatile ("ldxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
748 WRITE_GPR_X(ss, Rt, xt);
749 res->rr_rw = PGTRACE_RW_LOAD;
750 res->rr_addrdata[0].ad_data = xt;
751 break;
752 case 0x35:
753 __asm__ volatile ("ldaxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
754 WRITE_GPR_X(ss, Rt, xt);
755 res->rr_rw = PGTRACE_RW_LOAD;
756 res->rr_addrdata[0].ad_data = xt;
757 break;
758 case 0x36:
759 __asm__ volatile ("ldxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
760 WRITE_GPR_X(ss, Rt, xt1);
761 WRITE_GPR_X(ss, Rt2, xt2);
762 res->rr_rw = PGTRACE_RW_LOAD;
763 res->rr_num = 2;
764 res->rr_addrdata[0].ad_addr = va;
765 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
766 res->rr_addrdata[0].ad_data = xt1;
767 res->rr_addrdata[1].ad_data = xt2;
768 break;
769 case 0x37:
770 __asm__ volatile ("ldaxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
771 WRITE_GPR_X(ss, Rt, xt1);
772 WRITE_GPR_X(ss, Rt2, xt2);
773 res->rr_rw = PGTRACE_RW_LOAD;
774 res->rr_num = 2;
775 res->rr_addrdata[0].ad_addr = va;
776 res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
777 res->rr_addrdata[0].ad_data = xt1;
778 res->rr_addrdata[1].ad_data = xt2;
779 break;
780 case 0x39:
781 READ_GPR_X(ss, Rt, xt);
782 __asm__ volatile ("stlr %[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
783 res->rr_rw = PGTRACE_RW_STORE;
784 res->rr_addrdata[0].ad_data = xt;
785 break;
786 case 0x3d:
787 __asm__ volatile ("ldar %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
788 WRITE_GPR_X(ss, Rt, xt);
789 res->rr_rw = PGTRACE_RW_LOAD;
790 res->rr_addrdata[0].ad_data = xt;
791 break;
792 default:
793 CANNOTDECODE("unknown", inst);
794 }
795
796 stats.stat_decoder.sd_c336++;
797
798 return 0;
799 }
800
801 static int
802 run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
803 {
804 uint32_t wt1, wt2;
805 uint64_t xt1, xt2;
806 uint32_t opc = BITS(inst, 31, 30),
807 V = BITS(inst, 26, 26),
808 L = BITS(inst, 22, 22),
809 Rt = BITS(inst, 4, 0),
810 Rt2 = BITS(inst, 14, 10);
811 uint8_t fields = (opc << 2) | (V << 1) | L;
812
813 switch (fields) {
814 case 0:
815 READ_GPR_W(ss, Rt, wt1);
816 READ_GPR_W(ss, Rt2, wt2);
817 __asm__ volatile ("stnp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
818 res->rr_rw = PGTRACE_RW_STORE;
819 res->rr_num = 2;
820 res->rr_addrdata[0].ad_addr = pa;
821 res->rr_addrdata[1].ad_addr = pa + sizeof(wt1);
822 res->rr_addrdata[0].ad_data = wt1;
823 res->rr_addrdata[1].ad_data = wt2;
824 break;
825 case 1:
826 __asm__ volatile ("ldnp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
827 WRITE_GPR_W(ss, Rt, wt1);
828 WRITE_GPR_W(ss, Rt2, wt2);
829 res->rr_rw = PGTRACE_RW_LOAD;
830 res->rr_num = 2;
831 res->rr_addrdata[0].ad_addr = pa;
832 res->rr_addrdata[1].ad_addr = pa + sizeof(wt1);
833 res->rr_addrdata[0].ad_data = wt1;
834 res->rr_addrdata[1].ad_data = wt2;
835 break;
836 case 2:
837 case 3:
838 case 6:
839 case 7:
840 case 10:
841 case 11:
842 CANNOTDECODE("simd", inst);
843 case 8:
844 READ_GPR_X(ss, Rt, xt1);
845 READ_GPR_X(ss, Rt2, xt2);
846 __asm__ volatile ("stnp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
847 res->rr_rw = PGTRACE_RW_STORE;
848 res->rr_num = 2;
849 res->rr_addrdata[0].ad_addr = pa;
850 res->rr_addrdata[1].ad_addr = pa + sizeof(xt1);
851 res->rr_addrdata[0].ad_data = xt1;
852 res->rr_addrdata[1].ad_data = xt2;
853 break;
854 case 9:
855 __asm__ volatile ("ldnp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
856 WRITE_GPR_X(ss, Rt, xt1);
857 WRITE_GPR_X(ss, Rt2, xt2);
858 res->rr_rw = PGTRACE_RW_LOAD;
859 res->rr_num = 2;
860 res->rr_addrdata[0].ad_addr = pa;
861 res->rr_addrdata[1].ad_addr = pa + sizeof(xt1);
862 res->rr_addrdata[0].ad_data = xt1;
863 res->rr_addrdata[1].ad_data = xt2;
864 break;
865 default:
866 CANNOTDECODE("simd", inst);
867 }
868
869 stats.stat_decoder.sd_c337++;
870
871 return 0;
872 }
873
874 static int
875 run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
876 {
877 uint32_t size = BITS(inst, 31, 30),
878 V = BITS(inst, 26, 26),
879 opc = BITS(inst, 23, 22),
880 Rt = BITS(inst, 4, 0);
881 uint8_t fields = (size << 3) | (V << 2) | opc;
882
883 res->rr_num = 1;
884 res->rr_addrdata[0].ad_addr = pa;
885
886 if (fields == 0) {
887 do_str(1, Rt, va, ss, res);
888 } else if (fields == 1) {
889 do_ldr(1, Rt, va, ss, res);
890 } else if (fields == 2) {
891 do_ldrs(1, 8, Rt, va, ss, res);
892 } else if (fields == 3) {
893 do_ldrs(1, 4, Rt, va, ss, res);
894 } else if ((fields == 4) ||
895 (fields == 5) ||
896 (fields == 6) ||
897 (fields == 7) ||
898 (fields == 12) ||
899 (fields == 13) ||
900 (fields == 0x14) ||
901 (fields == 0x15) ||
902 (fields == 0x1c) ||
903 (fields == 0x1d)) {
904 CANNOTDECODE("simd", inst);
905 } else if (fields == 8) {
906 do_str(2, Rt, va, ss, res);
907 } else if (fields == 9) {
908 do_ldr(2, Rt, va, ss, res);
909 } else if (fields == 10) {
910 do_ldrs(2, 8, Rt, va, ss, res);
911 } else if (fields == 11) {
912 do_ldrs(2, 4, Rt, va, ss, res);
913 } else if (fields == 0x10) {
914 do_str(4, Rt, va, ss, res);
915 } else if (fields == 0x11) {
916 do_ldr(4, Rt, va, ss, res);
917 } else if (fields == 0x12) {
918 do_ldrs(4, 8, Rt, va, ss, res);
919 } else if (fields == 0x18) {
920 do_str(8, Rt, va, ss, res);
921 } else if (fields == 0x19) {
922 do_ldr(8, Rt, va, ss, res);
923 } else {
924 CANNOTDECODE("unknown", inst);
925 }
926
927 stats.stat_decoder.sd_c338++;
928
929 return 0;
930 }
931
932 static int
933 run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
934 {
935 uint32_t size = BITS(inst, 31, 30),
936 V = BITS(inst, 26, 26),
937 opc = BITS(inst, 23, 22),
938 Rt = BITS(inst, 4, 0);
939 uint8_t fields = (size << 3) | (V << 2) | opc;
940
941 res->rr_num = 1;
942 res->rr_addrdata[0].ad_addr = pa;
943
944 if (fields == 0) {
945 do_str(1, Rt, va, ss, res);
946 } else if (fields == 1) {
947 do_ldr(1, Rt, va, ss, res);
948 } else if (fields == 2) {
949 do_ldrs(1, 8, Rt, va, ss, res);
950 } else if (fields == 3) {
951 do_ldrs(1, 4, Rt, va, ss, res);
952 } else if ((fields == 4) ||
953 (fields == 5) ||
954 (fields == 6) ||
955 (fields == 7) ||
956 (fields == 12) ||
957 (fields == 13) ||
958 (fields == 0x14) ||
959 (fields == 0x15) ||
960 (fields == 0x1c) ||
961 (fields == 0x1d)) {
962 CANNOTDECODE("simd", inst);
963 } else if (fields == 8) {
964 do_str(2, Rt, va, ss, res);
965 } else if (fields == 9) {
966 do_ldr(2, Rt, va, ss, res);
967 } else if (fields == 10) {
968 do_ldrs(2, 8, Rt, va, ss, res);
969 } else if (fields == 11) {
970 do_ldrs(2, 4, Rt, va, ss, res);
971 } else if (fields == 0x10) {
972 do_str(4, Rt, va, ss, res);
973 } else if (fields == 0x11) {
974 do_ldr(4, Rt, va, ss, res);
975 } else if (fields == 0x12) {
976 do_ldrs(4, 8, Rt, va, ss, res);
977 } else if (fields == 0x18) {
978 do_str(8, Rt, va, ss, res);
979 } else if (fields == 0x19) {
980 do_ldr(8, Rt, va, ss, res);
981 } else {
982 CANNOTDECODE("unknown", inst);
983 }
984
985 stats.stat_decoder.sd_c339++;
986
987 return 0;
988 }
989
990 static int
991 run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
992 {
993 uint32_t size = BITS(inst, 31, 30),
994 V = BITS(inst, 26, 26),
995 opc = BITS(inst, 23, 22),
996 Rt = BITS(inst, 4, 0);
997 uint8_t fields = (size << 3) | (V << 2) | opc;
998
999 res->rr_num = 1;
1000 res->rr_addrdata[0].ad_addr = pa;
1001
1002 if (fields == 0) {
1003 do_str(1, Rt, va, ss, res);
1004 } else if (fields == 1) {
1005 do_ldr(1, Rt, va, ss, res);
1006 } else if (fields == 2) {
1007 do_ldrs(1, 8, Rt, va, ss, res);
1008 } else if (fields == 3) {
1009 do_ldrs(1, 4, Rt, va, ss, res);
1010 } else if ((fields == 4) ||
1011 (fields == 5) ||
1012 (fields == 6) ||
1013 (fields == 7) ||
1014 (fields == 12) ||
1015 (fields == 13) ||
1016 (fields == 0x14) ||
1017 (fields == 0x15) ||
1018 (fields == 0x1c) ||
1019 (fields == 0x1d)) {
1020 CANNOTDECODE("simd", inst);
1021 } else if (fields == 8) {
1022 do_str(2, Rt, va, ss, res);
1023 } else if (fields == 9) {
1024 do_ldr(2, Rt, va, ss, res);
1025 } else if (fields == 10) {
1026 do_ldrs(2, 8, Rt, va, ss, res);
1027 } else if (fields == 11) {
1028 do_ldrs(2, 4, Rt, va, ss, res);
1029 } else if (fields == 0x10) {
1030 do_str(4, Rt, va, ss, res);
1031 } else if (fields == 0x11) {
1032 do_ldr(4, Rt, va, ss, res);
1033 } else if (fields == 0x12) {
1034 do_ldrs(4, 8, Rt, va, ss, res);
1035 } else if (fields == 0x18) {
1036 do_str(8, Rt, va, ss, res);
1037 } else if (fields == 0x19) {
1038 do_ldr(8, Rt, va, ss, res);
1039 } else if (fields == 0x1a) {
1040 do_prfm(Rt, va, res);
1041 } else {
1042 CANNOTDECODE("unknown", inst);
1043 }
1044
1045 stats.stat_decoder.sd_c3310++;
1046
1047 return 0;
1048 }
1049
1050 static int
1051 run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1052 {
1053 uint32_t size = BITS(inst, 31, 30),
1054 V = BITS(inst, 26, 26),
1055 opc = BITS(inst, 23, 22),
1056 Rt = BITS(inst, 4, 0);
1057 uint8_t fields = (size << 3) | (V << 2) | opc;
1058
1059 res->rr_num = 1;
1060 res->rr_addrdata[0].ad_addr = pa;
1061
1062 if (fields == 0) {
1063 do_sttr(1, Rt, va, ss, res);
1064 } else if (fields == 1) {
1065 do_ldtr(1, Rt, va, ss, res);
1066 } else if (fields == 2) {
1067 do_ldtrs(1, 8, Rt, va, ss, res);
1068 } else if (fields == 3) {
1069 do_ldtrs(1, 4, Rt, va, ss, res);
1070 } else if (fields == 8) {
1071 do_sttr(2, Rt, va, ss, res);
1072 } else if (fields == 9) {
1073 do_ldtr(2, Rt, va, ss, res);
1074 } else if (fields == 10) {
1075 do_ldtrs(2, 8, Rt, va, ss, res);
1076 } else if (fields == 11) {
1077 do_ldtrs(2, 4, Rt, va, ss, res);
1078 } else if (fields == 0x10) {
1079 do_sttr(4, Rt, va, ss, res);
1080 } else if (fields == 0x11) {
1081 do_ldtr(4, Rt, va, ss, res);
1082 } else if (fields == 0x12) {
1083 do_ldtrs(4, 8, Rt, va, ss, res);
1084 } else if (fields == 0x18) {
1085 do_sttr(8, Rt, va, ss, res);
1086 } else if (fields == 0x19) {
1087 do_ldtr(8, Rt, va, ss, res);
1088 } else {
1089 CANNOTDECODE("unknown", inst);
1090 }
1091
1092 stats.stat_decoder.sd_c3311++;
1093
1094 return 0;
1095 }
1096
1097 static int
1098 run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1099 {
1100 uint32_t size = BITS(inst, 31, 30),
1101 V = BITS(inst, 26, 26),
1102 opc = BITS(inst, 23, 22),
1103 Rt = BITS(inst, 4, 0);
1104 uint8_t fields = (size << 3) | (V << 2) | opc;
1105
1106 res->rr_num = 1;
1107 res->rr_addrdata[0].ad_addr = pa;
1108
1109 if (fields == 0) {
1110 do_str(1, Rt, va, ss, res);
1111 } else if (fields == 1) {
1112 do_ldr(1, Rt, va, ss, res);
1113 } else if (fields == 2) {
1114 do_ldrs(1, 8, Rt, va, ss, res);
1115 } else if (fields == 3) {
1116 do_ldrs(1, 4, Rt, va, ss, res);
1117 } else if ((fields == 4) ||
1118 (fields == 5) ||
1119 (fields == 6) ||
1120 (fields == 7) ||
1121 (fields == 12) ||
1122 (fields == 13) ||
1123 (fields == 0x14) ||
1124 (fields == 0x15) ||
1125 (fields == 0x1c) ||
1126 (fields == 0x1d)) {
1127 CANNOTDECODE("simd", inst);
1128 } else if (fields == 8) {
1129 do_str(2, Rt, va, ss, res);
1130 } else if (fields == 9) {
1131 do_ldr(2, Rt, va, ss, res);
1132 } else if (fields == 10) {
1133 do_ldrs(2, 8, Rt, va, ss, res);
1134 } else if (fields == 11) {
1135 do_ldrs(2, 4, Rt, va, ss, res);
1136 } else if (fields == 0x10) {
1137 do_str(4, Rt, va, ss, res);
1138 } else if (fields == 0x11) {
1139 do_ldr(4, Rt, va, ss, res);
1140 } else if (fields == 0x12) {
1141 do_ldrs(4, 8, Rt, va, ss, res);
1142 } else if (fields == 0x18) {
1143 do_str(8, Rt, va, ss, res);
1144 } else if (fields == 0x19) {
1145 do_ldr(8, Rt, va, ss, res);
1146 } else if (fields == 0x1a) {
1147 do_prfm(Rt, va, res);
1148 } else {
1149 CANNOTDECODE("unknown", inst);
1150 }
1151
1152 stats.stat_decoder.sd_c3312++;
1153
1154 return 0;
1155 }
1156
1157 static int
1158 run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1159 {
1160 uint32_t size = BITS(inst, 31, 30),
1161 V = BITS(inst, 26, 26),
1162 opc = BITS(inst, 23, 22),
1163 Rt = BITS(inst, 4, 0);
1164 uint8_t fields = (size << 3) | (V << 2) | opc;
1165
1166 res->rr_num = 1;
1167 res->rr_addrdata[0].ad_addr = pa;
1168
1169 if (fields == 0) {
1170 do_str(1, Rt, va, ss, res);
1171 } else if (fields == 1) {
1172 do_ldr(1, Rt, va, ss, res);
1173 } else if (fields == 2) {
1174 do_ldrs(1, 8, Rt, va, ss, res);
1175 } else if (fields == 3) {
1176 do_ldrs(1, 4, Rt, va, ss, res);
1177 } else if ((fields == 4) ||
1178 (fields == 5) ||
1179 (fields == 6) ||
1180 (fields == 7) ||
1181 (fields == 12) ||
1182 (fields == 13) ||
1183 (fields == 0x14) ||
1184 (fields == 0x15) ||
1185 (fields == 0x1c) ||
1186 (fields == 0x1d)) {
1187 CANNOTDECODE("simd", inst);
1188 } else if (fields == 8) {
1189 do_str(2, Rt, va, ss, res);
1190 } else if (fields == 9) {
1191 do_ldr(2, Rt, va, ss, res);
1192 } else if (fields == 10) {
1193 do_ldrs(2, 8, Rt, va, ss, res);
1194 } else if (fields == 11) {
1195 do_ldrs(2, 4, Rt, va, ss, res);
1196 } else if (fields == 0x10) {
1197 do_str(4, Rt, va, ss, res);
1198 } else if (fields == 0x11) {
1199 do_ldr(4, Rt, va, ss, res);
1200 } else if (fields == 0x12) {
1201 do_ldrs(4, 8, Rt, va, ss, res);
1202 } else if (fields == 0x18) {
1203 do_str(8, Rt, va, ss, res);
1204 } else if (fields == 0x19) {
1205 do_ldr(8, Rt, va, ss, res);
1206 } else if (fields == 0x1a) {
1207 do_prfm(Rt, va, res);
1208 } else {
1209 CANNOTDECODE("unknown", inst);
1210 }
1211
1212 stats.stat_decoder.sd_c3313++;
1213
1214 return 0;
1215 }
1216
1217 static int
1218 run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1219 {
1220 uint32_t opc = BITS(inst, 31, 30),
1221 V = BITS(inst, 26, 26),
1222 L = BITS(inst, 22, 22),
1223 Rt = BITS(inst, 4, 0),
1224 Rt2 = BITS(inst, 14, 10);
1225 uint8_t fields = (opc << 2) | (V << 1) | L;
1226
1227 res->rr_num = 2;
1228 res->rr_addrdata[0].ad_addr = pa;
1229
1230 if (fields == 0) {
1231 do_stp(4, Rt, Rt2, va, ss, res);
1232 } else if (fields == 1) {
1233 do_ldp(4, Rt, Rt2, va, ss, res);
1234 } else if ((fields == 2) ||
1235 (fields == 3) ||
1236 (fields == 6) ||
1237 (fields == 7) ||
1238 (fields == 10) ||
1239 (fields == 11)) {
1240 CANNOTDECODE("simd", inst);
1241 } else if (fields == 5) {
1242 do_ldpsw(Rt, Rt2, va, ss, res);
1243 } else if (fields == 8) {
1244 do_stp(8, Rt, Rt2, va, ss, res);
1245 } else if (fields == 9) {
1246 do_ldp(8, Rt, Rt2, va, ss, res);
1247 } else {
1248 CANNOTDECODE("unknown", inst);
1249 }
1250
1251 stats.stat_decoder.sd_c3314++;
1252
1253 return 0;
1254 }
1255
1256 static int
1257 run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1258 {
1259 uint32_t opc = BITS(inst, 31, 30),
1260 V = BITS(inst, 26, 26),
1261 L = BITS(inst, 22, 22),
1262 Rt = BITS(inst, 4, 0),
1263 Rt2 = BITS(inst, 14, 10);
1264 uint8_t fields = (opc << 2) | (V << 1) | L;
1265
1266 res->rr_num = 2;
1267 res->rr_addrdata[0].ad_addr = pa;
1268
1269 if (fields == 0) {
1270 do_stp(4, Rt, Rt2, va, ss, res);
1271 } else if (fields == 1) {
1272 do_ldp(4, Rt, Rt2, va, ss, res);
1273 } else if ((fields == 2) ||
1274 (fields == 3) ||
1275 (fields == 6) ||
1276 (fields == 7) ||
1277 (fields == 10) ||
1278 (fields == 11)) {
1279 CANNOTDECODE("simd", inst);
1280 } else if (fields == 5) {
1281 do_ldpsw(Rt, Rt2, va, ss, res);
1282 } else if (fields == 8) {
1283 do_stp(8, Rt, Rt2, va, ss, res);
1284 } else if (fields == 9) {
1285 do_ldp(8, Rt, Rt2, va, ss, res);
1286 } else {
1287 CANNOTDECODE("unknown", inst);
1288 }
1289
1290 stats.stat_decoder.sd_c3315++;
1291
1292 return 0;
1293 }
1294
1295 static int
1296 run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1297 {
1298 uint32_t opc = BITS(inst, 31, 30),
1299 V = BITS(inst, 26, 26),
1300 L = BITS(inst, 22, 22),
1301 Rt = BITS(inst, 4, 0),
1302 Rt2 = BITS(inst, 14, 10);
1303 uint8_t fields = (opc << 2) | (V << 1) | L;
1304
1305 res->rr_num = 2;
1306 res->rr_addrdata[0].ad_addr = pa;
1307
1308 if (fields == 0) {
1309 do_stp(4, Rt, Rt2, va, ss, res);
1310 } else if (fields == 1) {
1311 do_ldp(4, Rt, Rt2, va, ss, res);
1312 } else if ((fields == 2) ||
1313 (fields == 3) ||
1314 (fields == 6) ||
1315 (fields == 7) ||
1316 (fields == 10) ||
1317 (fields == 11)) {
1318 CANNOTDECODE("simd", inst);
1319 } else if (fields == 5) {
1320 do_ldpsw(Rt, Rt2, va, ss, res);
1321 } else if (fields == 8) {
1322 do_stp(8, Rt, Rt2, va, ss, res);
1323 } else if (fields == 9) {
1324 do_ldp(8, Rt, Rt2, va, ss, res);
1325 } else {
1326 CANNOTDECODE("unknown", inst);
1327 }
1328
1329 stats.stat_decoder.sd_c3316++;
1330
1331 return 0;
1332 }
1333
1334 static bool
1335 get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1336 {
1337 #pragma unused(inst, ss, info)
1338 CANNOTDECODE("simd", inst);
1339 return false;
1340 }
1341
1342 // load register (literal)
1343 static bool
1344 get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1345 {
1346 uint32_t opc = BITS(inst, 31, 30);
1347 uint32_t V = BITS(inst, 26, 26);
1348 uint32_t imm19 = BITS(inst, 23, 5);
1349 uint32_t fields = (opc << 1) | V;
1350 uint8_t scale;
1351
1352 if (__builtin_expect(fields > 6, false)) {
1353 CANNOTDECODE("invalid", inst);
1354 return false;
1355 }
1356
1357 assert(fields <= 6);
1358
1359 if (V == 1) {
1360 scale = 2 + opc;
1361 } else {
1362 switch (opc) {
1363 case 0 ... 1:
1364 scale = 2 + opc;
1365 break;
1366 case 2:
1367 scale = 2;
1368 break;
1369 default:
1370 CANNOTDECODE("invalid", inst);
1371 return false;
1372 }
1373 }
1374
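// PC-relative literal: target = PC + SignExtend(imm19:'00'), so a literal
// 16 bytes ahead of the instruction encodes imm19 == 4.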
1375 info->bytes = 1 << scale;
1376 info->addr = ss->ss_64.pc + (SIGN_EXTEND_64(imm19, 19) << 2);
1377
1378 return true;
1379 }
1380
1381 // load/store exclusive
1382 static bool
1383 get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1384 {
1385 uint32_t size = BITS(inst, 31, 30);
1386 uint32_t o2 = BITS(inst, 23, 23);
1387 uint32_t L = BITS(inst, 22, 22);
1388 uint32_t o1 = BITS(inst, 21, 21);
1389 uint32_t o0 = BITS(inst, 15, 15);
1390 uint32_t Rn = BITS(inst, 9, 5);
1391 uint32_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;
1392
1393 if (__builtin_expect((2 <= fields && fields <= 3) ||
1394 (6 <= fields && fields <= 8) ||
1395 (10 <= fields && fields <= 12) ||
1396 (14 <= fields && fields <= 15) ||
1397 (18 <= fields && fields <= 19) ||
1398 (22 <= fields && fields <= 24) ||
1399 (26 <= fields && fields <= 28) ||
1400 (30 <= fields && fields <= 31) ||
1401 (40 == fields) ||
1402 (42 <= fields && fields <= 44) ||
1403 (46 <= fields && fields <= 47) ||
1404 (56 == fields) ||
1405 (58 <= fields && fields <= 60) ||
1406 (62 <= fields), false)) {
1407 CANNOTDECODE("invalid", inst);
1408 return false;
1409 }
1410
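// o1 == 1 selects the pair forms (LDXP/STXP/LDAXP/STLXP), which transfer twice the width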
1411 info->bytes = (1 << size) << o1;
1412 info->addr = ss->ss_64.x[Rn];
1413
1414 return true;
1415 }
1416
1417 // load/store no-allocate pair (offset)
1418 static bool
1419 get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1420 {
1421 uint32_t opc = BITS(inst, 31, 30);
1422 uint32_t V = BITS(inst, 26, 26);
1423 uint32_t L = BITS(inst, 22, 22);
1424 uint32_t imm7 = BITS(inst, 21, 15);
1425 uint32_t Rn = BITS(inst, 9, 5);
1426 uint32_t fields = (opc << 2) | (V << 1) | L;
1427 uint8_t scale;
1428
1429 if (__builtin_expect((4 <= fields && fields <= 5) ||
1430 (12 <= fields), false)) {
1431 CANNOTDECODE("invalid", inst);
1432 return false;
1433 }
1434
1435 if (V == 1) {
1436 scale = opc + 2;
1437 } else {
1438 scale = BITS(opc, 1, 1) + 2;
1439 }
1440
1441 // doubled since it's a pair
1442 info->bytes = 2 * (1 << scale);
1443 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1444
1445 return true;
1446 }
1447
1448 // load/store register (immediate post-indexed)
1449 static bool
1450 get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1451 {
1452 uint32_t size = BITS(inst, 31, 30);
1453 uint32_t V = BITS(inst, 26, 26);
1454 uint32_t opc = BITS(inst, 23, 22);
1455 uint32_t Rn = BITS(inst, 9, 5);
1456 uint32_t fields = (size << 3) | (V << 2) | opc;
1457 uint8_t scale;
1458
1459 if (__builtin_expect((14 <= fields && fields <= 15) ||
1460 (19 == fields) ||
1461 (22 <= fields && fields <= 23) ||
1462 (26 <= fields && fields <= 27) ||
1463 (30 <= fields), false)) {
1464 CANNOTDECODE("invalid", inst);
1465 return false;
1466 }
1467
1468 if (V == 1) {
1469 scale = BITS(opc, 1, 1) << 2 | size;
1470 } else {
1471 scale = size;
1472 }
1473
1474 info->bytes = 1 << scale;
1475 // post-indexed
1476 info->addr = ss->ss_64.x[Rn];
1477
1478 return true;
1479 }
1480
1481 // load/store register (immediate pre-indexed)
1482 static bool
1483 get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1484 {
1485 uint32_t size = BITS(inst, 31, 30);
1486 uint32_t V = BITS(inst, 26, 26);
1487 uint32_t opc = BITS(inst, 23, 22);
1488 uint32_t imm9 = BITS(inst, 20, 12);
1489 uint32_t Rn = BITS(inst, 9, 5);
1490 uint32_t fields = (size << 3) | (V << 2) | opc;
1491 uint8_t scale;
1492
1493 if (__builtin_expect((14 <= fields && fields <= 15) ||
1494 (19 == fields) ||
1495 (22 <= fields && fields <= 23) ||
1496 (26 <= fields && fields <= 27) ||
1497 (30 <= fields), false)) {
1498 CANNOTDECODE("invalid", inst);
1499 return false;
1500 }
1501
1502 if (V == 1) {
1503 scale = BITS(opc, 1, 1) << 2 | size;
1504 } else {
1505 scale = size;
1506 }
1507
1508 info->bytes = 1 << scale;
1509 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1510
1511 return true;
1512 }
1513
1514 // load/store register (register offset)
1515 static bool
1516 get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1517 {
1518 uint32_t size = BITS(inst, 31, 30);
1519 uint32_t V = BITS(inst, 26, 26);
1520 uint32_t opc = BITS(inst, 23, 22);
1521 uint32_t Rm = BITS(inst, 20, 16);
1522 uint32_t option = BITS(inst, 15, 13);
1523 uint32_t S = BITS(inst, 12, 12);
1524 uint32_t Rn = BITS(inst, 9, 5);
1525 uint32_t fields = (size << 3) | (V << 2) | opc;
1526 uint32_t scale;
1527
1528 if (__builtin_expect((14 <= fields && fields <= 15) ||
1529 (19 == fields) ||
1530 (22 <= fields && fields <= 23) ||
1531 (27 == fields) ||
1532 (30 <= fields), false)) {
1533 CANNOTDECODE("invalid", inst);
1534 return false;
1535 }
1536
1537 if (V == 1) {
1538 scale = BITS(opc, 1, 1) << 2 | size;
1539 } else {
1540 scale = size;
1541 }
1542
1543 info->bytes = 1 << scale;
1544
1545 uint64_t m = ss->ss_64.x[Rm];
1546 uint8_t shift = (S == 1 ? scale : 0);
1547
1548 switch (option) {
1549 case 0 ... 3:
1550 info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(m, 8 << option) << shift);
1551 break;
1552 case 4 ... 7:
1553 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(m, 8 << BITS(option, 1, 0)) << shift);
1554 break;
1555 default:
1556 CANNOTDECODE("invalid", inst);
1557 return false;
1558 }
1559
1560 return true;
1561 }
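/*
 * Worked example: LDR W1, [X2, W3, UXTW #2] has size == 2 (so scale == 2),
 * option == 2 (UXTW) and S == 1, giving addr = X2 + (ZeroExtend(W3) << 2).
 */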
1562
1563 // load/store register (unprivileged)
1564 static bool
1565 get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1566 {
1567 uint32_t size = BITS(inst, 31, 30);
1568 uint32_t V = BITS(inst, 26, 26);
1569 uint32_t opc = BITS(inst, 23, 22);
1570 uint32_t imm9 = BITS(inst, 20, 12);
1571 uint32_t Rn = BITS(inst, 9, 5);
1572 uint32_t fields = (size << 3) | (V << 2) | opc;
1573
1574 if (__builtin_expect((4 <= fields && fields <= 7) ||
1575 (12 <= fields && fields <= 15) ||
1576 (19 <= fields && fields <= 23) ||
1577 (26 <= fields), false)) {
1578 CANNOTDECODE("invalid", inst);
1579 return false;
1580 }
1581
1582 info->bytes = 1 << size;
1583 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1584
1585 return true;
1586 }
1587
1588 // load/store register (unscaled immediate)
1589 static bool
1590 get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1591 {
1592 uint32_t size = BITS(inst, 31, 30);
1593 uint32_t V = BITS(inst, 26, 26);
1594 uint32_t opc = BITS(inst, 23, 22);
1595 uint32_t imm9 = BITS(inst, 20, 12);
1596 uint32_t Rn = BITS(inst, 9, 5);
1597 uint32_t fields = (size << 3) | (V << 2) | opc;
1598 uint32_t scale;
1599
1600 if (__builtin_expect((14 <= fields && fields <= 15) ||
1601 (19 == fields) ||
1602 (22 <= fields && fields <= 23) ||
1603 (27 == fields) ||
1604 (30 <= fields), false)) {
1605 CANNOTDECODE("invalid", inst);
1606 return false;
1607 }
1608
1609 if (V == 1) {
1610 scale = BITS(opc, 1, 1) << 2 | size;
1611 } else {
1612 scale = size;
1613 }
1614
1615 info->bytes = 1 << scale;
1616 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1617
1618 return true;
1619 }
1620
1621 // load/store register (unsigned immediate)
1622 static bool
1623 get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1624 {
1625 uint32_t size = BITS(inst, 31, 30);
1626 uint32_t V = BITS(inst, 26, 26);
1627 uint32_t opc = BITS(inst, 23, 22);
1628 uint32_t imm12 = BITS(inst, 21, 10);
1629 uint32_t Rn = BITS(inst, 9, 5);
1630 uint32_t fields = (size << 3) | (V << 2) | opc;
1631 uint32_t scale;
1632
1633 if (__builtin_expect((14 <= fields && fields <= 15) ||
1634 (19 == fields) ||
1635 (22 <= fields && fields <= 23) ||
1636 (27 == fields) ||
1637 (30 <= fields), false)) {
1638 CANNOTDECODE("invalid", inst);
1639 return false;
1640 }
1641
1642 if (V == 1) {
1643 scale = BITS(opc, 1, 1) << 2 | size;
1644 } else {
1645 scale = size;
1646 }
1647
1648 info->bytes = 1 << scale;
1649 info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(imm12, 12) << scale);
1650
1651 return true;
1652 }
1653
1654 // load/store register pair (offset)
1655 static bool
1656 get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1657 {
1658 uint32_t opc = BITS(inst, 31, 30);
1659 uint32_t V = BITS(inst, 26, 26);
1660 uint32_t L = BITS(inst, 22, 22);
1661 uint32_t imm7 = BITS(inst, 21, 15);
1662 uint32_t Rn = BITS(inst, 9, 5);
1663 uint32_t fields = (opc << 2) | (V << 1) | L;
1664 uint8_t scale = 2 + (opc >> 1);
1665
1666 if (__builtin_expect((4 == fields) ||
1667 (12 <= fields), false)) {
1668 CANNOTDECODE("invalid", inst);
1669 return false;
1670 }
1671
1672 if (V == 1) {
1673 scale = 2 + opc;
1674 } else {
1675 scale = 2 + BITS(opc, 1, 1);
1676 }
1677
1678 info->bytes = 2 * (1 << scale);
1679 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1680
1681 return true;
1682 }
1683
1684 // load/store register pair (post-indexed)
1685 static bool
1686 get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1687 {
1688 uint32_t opc = BITS(inst, 31, 30);
1689 uint32_t V = BITS(inst, 26, 26);
1690 uint32_t L = BITS(inst, 22, 22);
1691 uint32_t Rn = BITS(inst, 9, 5);
1692 uint32_t fields = (opc << 2) | (V << 1) | L;
1693 uint8_t scale = 2 + (opc >> 1);
1694
1695 if (__builtin_expect((4 == fields) ||
1696 (12 <= fields), false)) {
1697 CANNOTDECODE("invalid", inst);
1698 return false;
1699 }
1700
1701 if (V == 1) {
1702 scale = 2 + opc;
1703 } else {
1704 scale = 2 + BITS(opc, 1, 1);
1705 }
1706
1707 info->bytes = 2 * (1 << scale);
1708 // post-indexed
1709 info->addr = ss->ss_64.x[Rn];
1710
1711 return true;
1712 }
1713
1714 // load/store register pair (pre-indexed)
1715 static bool
1716 get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1717 {
1718 uint32_t opc = BITS(inst, 31, 30);
1719 uint32_t V = BITS(inst, 26, 26);
1720 uint32_t L = BITS(inst, 22, 22);
1721 uint32_t imm7 = BITS(inst, 21, 15);
1722 uint32_t Rn = BITS(inst, 9, 5);
1723 uint32_t fields = (opc << 2) | (V << 1) | L;
1724 uint8_t scale = 2 + (opc >> 1);
1725
1726 if (__builtin_expect((4 == fields) ||
1727 (12 <= fields), false)) {
1728 CANNOTDECODE("invalid", inst);
1729 return false;
1730 }
1731
1732 if (V == 1) {
1733 scale = 2 + opc;
1734 } else {
1735 scale = 2 + BITS(opc, 1, 1);
1736 }
1737
1738 info->bytes = 2 * (1 << scale);
1739 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1740
1741 return true;
1742 }
1743
1744
1745 //-------------------------------------------------------------------
1746 // Globals
1747 //
1748 int
1749 pgtrace_decode_and_run(uint32_t inst, vm_offset_t fva, vm_map_offset_t *cva_page, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1750 {
1751 uint8_t len = sizeof(typetbl) / sizeof(type_entry_t);
1752 run_t run = NULL;
1753 get_info_t get_info = NULL;
1754 vm_offset_t pa, cva;
1755 vm_offset_t cva_front_page = cva_page[0];
1756 vm_offset_t cva_cur_page = cva_page[1];
1757 instruction_info_t info;
1758
1759 for (uint8_t i = 0; i < len; i++) {
1760 if ((typetbl[i].mask & inst) == typetbl[i].value) {
1761 run = typetbl[i].run;
1762 get_info = typetbl[i].get_info;
1763 break;
1764 }
1765 }
1766
1767 assert(run != NULL && get_info != NULL);
1768
1769 get_info(inst, ss, &info);
1770
1771 if (info.addr == fva) {
1772 cva = cva_cur_page + (fva & ARM_PGMASK);
1773 } else {
1774 // which means the front page is not a tracing page
1775 cva = cva_front_page + (fva & ARM_PGMASK);
1776 }
1777
1778 pa = mmu_kvtop(cva);
1779 if (!pa) {
1780 panic("%s: invalid address cva=%lx fva=%lx info.addr=%lx inst=%x", __func__, cva, fva, info.addr, inst);
1781 }
1782
1783 absolutetime_to_nanoseconds(mach_absolute_time(), &res->rr_time);
1784 run(inst, pa, cva, ss, res);
1785
1786 return 0;
1787 }
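/*
 * Illustrative call-site sketch (hypothetical variable names; the real caller is the
 * data-abort path that owns the two trace mappings):
 *
 *     pgtrace_run_result_t res;
 *     vm_map_offset_t cva_pages[2] = { front_page_cva, traced_page_cva };
 *     pgtrace_decode_and_run(instr, fault_va, cva_pages, state, &res);
 */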
1788
1789 void
1790 pgtrace_decoder_get_stats(pgtrace_stats_t *s)
1791 {
1792 memcpy((void *)&(s->stat_decoder), &(stats.stat_decoder), sizeof(stats.stat_decoder));
1793 }
1794 #endif