[apple/xnu.git] / osfmk / arm64 / pgtrace_decoder.c
1/*
2 * Copyright (c) 2015 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28
29#if CONFIG_PGTRACE
30#include <kern/debug.h>
31#include <kern/clock.h>
32#include <pexpert/pexpert.h>
33#include <arm/pmap.h>
34#include "pgtrace_decoder.h"
35
36//-------------------------------------------------------------------
37// Macros
38//
39#define DBG 1
40#if DBG == 1
41#define INLINE __attribute__((noinline))
42#else
43#define INLINE inline
44#endif
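// With DBG set, INLINE expands to noinline, presumably so that each emulation
// helper below stays a distinct symbol in backtraces while debugging.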
45
46#define BITS(v, msb, lsb) ((v) << (31-(msb)) >> (31-(msb)) >> (lsb))
47#define READ_GPR_X(ss, n, v) { \
48 if (__builtin_expect(n < 31, 1)) (v) = (ss)->ss_64.x[(n)]; \
49 else if (n == 31) (v) = 0; \
50 else { panic("Invalid GPR x%d", n); __builtin_unreachable(); } \
51}
52#define READ_GPR_W(ss, n, v) { \
53 if (__builtin_expect(n < 31, 1)) (v) = *(uint32_t*)&((ss)->ss_64.x[(n)]); \
54 else if (n == 31) (v) = 0; \
55 else { panic("Invalid GPR w%d", n); __builtin_unreachable(); } \
56}
57#define WRITE_GPR_X(ss, n, v) { \
58 if (__builtin_expect(n < 31, 1)) (ss)->ss_64.x[(n)] = (v); \
59 else if (n == 31) {} \
60 else { panic("Invalid GPR x%d", n); __builtin_unreachable(); } \
61}
62#define WRITE_GPR_W(ss, n, v) { \
63 if (__builtin_expect(n < 31, 1)) *(uint32_t*)&((ss)->ss_64.x[(n)]) = (v); \
64 else if (n == 31) {} \
65 else { panic("Invalid GPR w%d", n); __builtin_unreachable(); } \
66}
67#define SIGN_EXTEND_64(val, width) (((int64_t)(val) << (64 - (width)) >> (64 - (width))))
68#define ZERO_EXTEND_64(val, width) (((uint64_t)(val) << (64 - (width))) >> (64 - (width)))
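// Field-extraction notes: BITS(v, msb, lsb) returns bits [msb:lsb] of v, e.g. for
// 0xF9400261 ("ldr x1, [x19]") BITS(inst, 31, 30) == 3 and BITS(inst, 4, 0) == 1.
// The GPR macros treat register number 31 as the zero register (reads return 0,
// writes are dropped). SIGN_EXTEND_64(0x1FF, 9) yields -1, while ZERO_EXTEND_64
// keeps only the low 'width' bits of its argument.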
69
70//-------------------------------------------------------------------
71// Types
72//
73typedef int (*run_t)(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
74
75typedef struct {
76 vm_offset_t addr;
77 uint64_t bytes;
78} instruction_info_t;
79
80typedef bool (*get_info_t)(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
81
82typedef struct {
83 uint32_t mask;
84 uint32_t value;
85 run_t run;
86 get_info_t get_info;
87} type_entry_t;
88
89//-------------------------------------------------------------------
90// Statics
91//
92static int run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
93static int run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
94static int run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
95static int run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
96static int run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
97static int run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
98static int run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
99static int run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
100static int run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
101static int run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
102static int run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
103static int run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
104static int run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
105static bool get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
106static bool get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
107static bool get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
108static bool get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
109static bool get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
110static bool get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
111static bool get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
112static bool get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
113static bool get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
114static bool get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
115static bool get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
116static bool get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
117static bool get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
118
119// Table from ARM DDI 0487A.a C3.3
120static type_entry_t typetbl[] = {
121 { 0x3f000000, 0x08000000, run_c336, get_info_c336 }, // Load/store exclusive
122 { 0x3b000000, 0x18000000, run_c335, get_info_c335 }, // Load register (literal)
123 { 0x3b800000, 0x28000000, run_c337, get_info_c337 }, // Load/store no-allocate pair (offset)
124 { 0x3b800000, 0x28800000, run_c3315, get_info_c3315 }, // Load/store register pair (post-indexed)
125 { 0x3b800000, 0x29000000, run_c3314, get_info_c3314 }, // Load/store register pair (offset)
126 { 0x3b800000, 0x29800000, run_c3316, get_info_c3316 }, // Load/store register pair (pre-indexed)
127 { 0x3b200c00, 0x38000000, run_c3312, get_info_c3312 }, // Load/store register (unscaled immediate)
128 { 0x3b200c00, 0x38000400, run_c338, get_info_c338 }, // Load/store register (immediate post-indexed)
129 { 0x3b200c00, 0x38000800, run_c3311, get_info_c3311 }, // Load/store register (unprivileged)
130 { 0x3b200c00, 0x38000c00, run_c339, get_info_c339 }, // Load/store register (immediate pre-indexed)
131 { 0x3b200c00, 0x38200800, run_c3310, get_info_c3310 }, // Load/store register (register offset)
132 { 0x3b000000, 0x39000000, run_c3313, get_info_c3313 }, // Load/store register (unsigned immediate)
133
134 { 0xbfbf0000, 0x0c000000, run_simd, get_info_simd }, // AdvSIMD load/store multiple structures
135 { 0xbfa00000, 0x0c800000, run_simd, get_info_simd }, // AdvSIMD load/store multiple structures (post-indexed)
136 { 0xbf980000, 0x0d000000, run_simd, get_info_simd }, // AdvSIMD load/store single structure
137 { 0xbf800000, 0x0d800000, run_simd, get_info_simd } // AdvSIMD load/store single structure (post-indexed)
138};
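// An instruction word matches a row of the table above when (inst & mask) == value;
// pgtrace_decode_and_run() walks the table in order and dispatches to the first
// matching entry's run()/get_info() handlers.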
139
140static pgtrace_stats_t stats;
141
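// The do_* helpers below emulate the faulting access by re-executing it against the
// kernel virtual address 'va' supplied by the caller, writing back to the saved
// register state when the instruction loads a GPR, and recording the direction and
// data of the access in 'res'.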
142INLINE static void do_str(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
143{
144 uint32_t wt;
145 uint64_t xt;
146
147 res->rr_rw = PGTRACE_RW_STORE;
148
149 if (size == 8) {
150 READ_GPR_X(ss, Rt, xt);
151 res->rr_addrdata[0].ad_data = xt;
152 } else {
153 READ_GPR_W(ss, Rt, wt);
154 res->rr_addrdata[0].ad_data = wt;
155 }
156
157 if (size == 1) __asm__ volatile("strb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
158 else if (size == 2) __asm__ volatile("strh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
159 else if (size == 4) __asm__ volatile("str %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
160 else if (size == 8) __asm__ volatile("str %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
161 else panic("%s Invalid size %d\n", __func__, size);
162
163 stats.stat_decoder.sd_str++;
164}
165
166INLINE static void do_ldr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
167{
168 uint32_t wt;
169 uint64_t xt;
170
171 res->rr_rw = PGTRACE_RW_LOAD;
172
173 if (size == 1) __asm__ volatile("ldrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
174 else if (size == 2) __asm__ volatile("ldrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
175 else if (size == 4) __asm__ volatile("ldr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
176 else if (size == 8) __asm__ volatile("ldr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
177 else panic("%s Invalid size %d\n", __func__, size);
178
179 if (size == 8) {
180 WRITE_GPR_X(ss, Rt, xt);
181 res->rr_addrdata[0].ad_data = xt;
182 } else {
183 WRITE_GPR_W(ss, Rt, wt);
184 res->rr_addrdata[0].ad_data = wt;
185 }
186
187 stats.stat_decoder.sd_ldr++;
188}
189
190INLINE static void do_stp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
191{
192 uint32_t wt1, wt2;
193 uint64_t xt1, xt2;
194
195 if (size == 4) {
196 READ_GPR_W(ss, Rt, wt1);
197 READ_GPR_W(ss, Rt2, wt2);
198 __asm__ volatile("stp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
199 res->rr_rw = PGTRACE_RW_STORE;
200 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
201 res->rr_addrdata[0].ad_data = wt1;
202 res->rr_addrdata[1].ad_data = wt2;
203 } else if (size == 8) {
204 READ_GPR_X(ss, Rt, xt1);
205 READ_GPR_X(ss, Rt2, xt2);
206 __asm__ volatile("stp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
207 res->rr_rw = PGTRACE_RW_STORE;
208 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
209 res->rr_addrdata[0].ad_data = xt1;
210 res->rr_addrdata[1].ad_data = xt2;
211 } else panic("%s Invalid size %d\n", __func__, size);
212
213 stats.stat_decoder.sd_stp++;
214}
215
216INLINE static void do_ldp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
217{
218 uint32_t wt1, wt2;
219 uint64_t xt1, xt2;
220
221 if (size == 4) {
222 __asm__ volatile("ldp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
223 WRITE_GPR_W(ss, Rt, wt1);
224 WRITE_GPR_W(ss, Rt2, wt2);
225 res->rr_rw = PGTRACE_RW_LOAD;
226 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
227 res->rr_addrdata[0].ad_data = wt1;
228 res->rr_addrdata[1].ad_data = wt2;
229 } else if (size == 8) {
230 __asm__ volatile("ldp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
231 WRITE_GPR_X(ss, Rt, xt1);
232 WRITE_GPR_X(ss, Rt2, xt2);
233 res->rr_rw = PGTRACE_RW_LOAD;
234 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
235 res->rr_addrdata[0].ad_data = xt1;
236 res->rr_addrdata[1].ad_data = xt2;
237 } else panic("%s Invalid size %d\n", __func__, size);
238
239 stats.stat_decoder.sd_ldp++;
240}
241
242INLINE static void do_ldpsw(uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
243{
244 uint64_t xt1, xt2;
245
246 __asm__ volatile("ldpsw %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
247 WRITE_GPR_X(ss, Rt, xt1);
248 WRITE_GPR_X(ss, Rt2, xt2);
249 res->rr_rw = PGTRACE_RW_LOAD;
250 res->rr_addrdata[1].ad_addr = va+sizeof(uint32_t);
251 res->rr_addrdata[0].ad_data = xt1;
252 res->rr_addrdata[1].ad_data = xt2;
253
254 stats.stat_decoder.sd_ldpsw++;
255}
256
257INLINE static void do_ldrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
258{
259 uint32_t wt;
260 uint64_t xt;
261
262 res->rr_rw = PGTRACE_RW_LOAD;
263
264 if (size == 1 && extsize == 4) __asm__ volatile("ldrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
265 else if (size == 1 && extsize == 8) __asm__ volatile("ldrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
266 else if (size == 2 && extsize == 4) __asm__ volatile("ldrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
267 else if (size == 2 && extsize == 8) __asm__ volatile("ldrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
268 else if (size == 4 && extsize == 8) __asm__ volatile("ldrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
269 else panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);
270
271 if (extsize == 8) {
272 WRITE_GPR_X(ss, Rt, xt);
273 res->rr_addrdata[0].ad_data = xt;
274 } else {
275 WRITE_GPR_W(ss, Rt, wt);
276 res->rr_addrdata[0].ad_data = wt;
277 }
278
279 stats.stat_decoder.sd_ldrs++;
280}
281
282INLINE static void do_ldtrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
283{
284 uint32_t wt;
285 uint64_t xt;
286
287 res->rr_rw = PGTRACE_RW_LOAD;
288
289 if (size == 1 && extsize == 4) __asm__ volatile("ldtrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
290 else if (size == 1 && extsize == 8) __asm__ volatile("ldtrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
291 else if (size == 2 && extsize == 4) __asm__ volatile("ldtrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
292 else if (size == 2 && extsize == 8) __asm__ volatile("ldtrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
293 else if (size == 4 && extsize == 8) __asm__ volatile("ldtrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
294 else panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);
295
296 if (extsize == 8) {
297 WRITE_GPR_X(ss, Rt, xt);
298 res->rr_addrdata[0].ad_data = xt;
299 } else {
300 WRITE_GPR_W(ss, Rt, wt);
301 res->rr_addrdata[0].ad_data = wt;
302 }
303
304 stats.stat_decoder.sd_ldtrs++;
305}
306
307INLINE static void do_ldtr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
308{
309 uint32_t wt;
310 uint64_t xt;
311
312 res->rr_rw = PGTRACE_RW_LOAD;
313
314 if (size == 1) __asm__ volatile("ldtrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
315 else if (size == 2) __asm__ volatile("ldtrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
316 else if (size == 4) __asm__ volatile("ldtr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
317 else if (size == 8) __asm__ volatile("ldtr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
318 else panic("%s Invalid size %d\n", __func__, size);
319
320 if (size == 8) {
321 WRITE_GPR_X(ss, Rt, xt);
322 res->rr_addrdata[0].ad_data = xt;
323 } else {
324 WRITE_GPR_W(ss, Rt, wt);
325 res->rr_addrdata[0].ad_data = wt;
326 }
327
328 stats.stat_decoder.sd_ldtr++;
329}
330
331INLINE static void do_sttr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
332{
333 uint32_t wt;
334 uint64_t xt;
335
336 res->rr_rw = PGTRACE_RW_STORE;
337
338 if (size == 8) {
339 READ_GPR_X(ss, Rt, xt);
340 res->rr_addrdata[0].ad_data = xt;
341 } else {
342 READ_GPR_W(ss, Rt, wt);
343 res->rr_addrdata[0].ad_data = wt;
344 }
345
346 if (size == 1) __asm__ volatile("sttrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
347 else if (size == 2) __asm__ volatile("sttrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
348 else if (size == 4) __asm__ volatile("sttr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
349 else if (size == 8) __asm__ volatile("sttr %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
350 else panic("%s Invalid size %d\n", __func__, size);
351
352 stats.stat_decoder.sd_sttr++;
353}
354
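// For PRFM the Rt field encodes the prefetch operation (<type><target><policy>)
// rather than a destination register, so each of the 32 possible values gets its
// own case; unallocated hints are emitted with immediate syntax (e.g. "prfm #6").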
355INLINE static void do_prfm(uint8_t Rt, vm_offset_t va, pgtrace_run_result_t *res)
356{
357 if (Rt == 0) __asm__ volatile("prfm pldl1keep, [%[va]]\n" : : [va] "r"(va));
358 else if (Rt == 1) __asm__ volatile("prfm pldl1strm, [%[va]]\n" : : [va] "r"(va));
359 else if (Rt == 2) __asm__ volatile("prfm pldl2keep, [%[va]]\n" : : [va] "r"(va));
360 else if (Rt == 3) __asm__ volatile("prfm pldl2strm, [%[va]]\n" : : [va] "r"(va));
361 else if (Rt == 4) __asm__ volatile("prfm pldl3keep, [%[va]]\n" : : [va] "r"(va));
362 else if (Rt == 5) __asm__ volatile("prfm pldl3strm, [%[va]]\n" : : [va] "r"(va));
363 else if (Rt == 6) __asm__ volatile("prfm #6, [%[va]]\n" : : [va] "r"(va));
364 else if (Rt == 7) __asm__ volatile("prfm #7, [%[va]]\n" : : [va] "r"(va));
365 else if (Rt == 8) __asm__ volatile("prfm #8, [%[va]]\n" : : [va] "r"(va));
366 else if (Rt == 9) __asm__ volatile("prfm #9, [%[va]]\n" : : [va] "r"(va));
367 else if (Rt == 10) __asm__ volatile("prfm #10, [%[va]]\n" : : [va] "r"(va));
368 else if (Rt == 11) __asm__ volatile("prfm #11, [%[va]]\n" : : [va] "r"(va));
369 else if (Rt == 12) __asm__ volatile("prfm #12, [%[va]]\n" : : [va] "r"(va));
370 else if (Rt == 13) __asm__ volatile("prfm #13, [%[va]]\n" : : [va] "r"(va));
371 else if (Rt == 14) __asm__ volatile("prfm #14, [%[va]]\n" : : [va] "r"(va));
372 else if (Rt == 15) __asm__ volatile("prfm #15, [%[va]]\n" : : [va] "r"(va));
373 else if (Rt == 16) __asm__ volatile("prfm pstl1keep, [%[va]]\n" : : [va] "r"(va));
374 else if (Rt == 17) __asm__ volatile("prfm pstl1strm, [%[va]]\n" : : [va] "r"(va));
375 else if (Rt == 18) __asm__ volatile("prfm pstl2keep, [%[va]]\n" : : [va] "r"(va));
376 else if (Rt == 19) __asm__ volatile("prfm pstl2strm, [%[va]]\n" : : [va] "r"(va));
377 else if (Rt == 20) __asm__ volatile("prfm pstl3keep, [%[va]]\n" : : [va] "r"(va));
378 else if (Rt == 21) __asm__ volatile("prfm pstl3strm, [%[va]]\n" : : [va] "r"(va));
379 else if (Rt == 22) __asm__ volatile("prfm #22, [%[va]]\n" : : [va] "r"(va));
380 else if (Rt == 23) __asm__ volatile("prfm #23, [%[va]]\n" : : [va] "r"(va));
381 else if (Rt == 24) __asm__ volatile("prfm #24, [%[va]]\n" : : [va] "r"(va));
382 else if (Rt == 25) __asm__ volatile("prfm #25, [%[va]]\n" : : [va] "r"(va));
383 else if (Rt == 26) __asm__ volatile("prfm #26, [%[va]]\n" : : [va] "r"(va));
384 else if (Rt == 27) __asm__ volatile("prfm #27, [%[va]]\n" : : [va] "r"(va));
385 else if (Rt == 28) __asm__ volatile("prfm #28, [%[va]]\n" : : [va] "r"(va));
386 else if (Rt == 29) __asm__ volatile("prfm #29, [%[va]]\n" : : [va] "r"(va));
387 else if (Rt == 30) __asm__ volatile("prfm #30, [%[va]]\n" : : [va] "r"(va));
388 else if (Rt == 31) __asm__ volatile("prfm #31, [%[va]]\n" : : [va] "r"(va));
389 else panic("%s Invalid Rt %d\n", __func__, Rt);
390
391 res->rr_num = 0;
392 res->rr_rw = PGTRACE_RW_PREFETCH;
393
394 stats.stat_decoder.sd_prfm++;
395}
396
397#define CANNOTDECODE(msg, inst) do {\
398 panic("%s: " msg " inst=%x not supported yet\n", __func__, inst);\
399} while (0)
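// Encodings the decoder does not handle (notably all AdvSIMD/FP forms) panic through
// CANNOTDECODE instead of being silently mistraced.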
400
401static int run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
402{
403#pragma unused(pa,va,ss,res)
404 CANNOTDECODE("simd", inst);
405 return 0;
406}
407
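// The run_c33NN()/get_info_c33NN() pairs below mirror the encoding-class tables of
// ARM DDI 0487A.a section C3.3 (the digits in each name track the subsection, per
// the table comment above); 'fields' packs a table's selector bits so each row
// becomes one branch.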
408static int run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
409{
410 uint32_t opc = BITS(inst, 31, 30),
411 v = BITS(inst, 26, 26),
412 Rt = BITS(inst, 4, 0);
413 uint8_t fields = (opc << 1) | v;
414
415 res->rr_num = 1;
416 res->rr_addrdata[0].ad_addr = pa;
417
418 if (fields == 0) do_ldr(4, Rt, va, ss, res);
419 else if ((fields == 1) ||
420 (fields == 3) ||
421 (fields == 5)) CANNOTDECODE("simd", inst);
422 else if (fields == 2) do_ldr(8, Rt, va, ss, res);
423 else if (fields == 4) do_ldrs(4, 8, Rt, va, ss, res);
424 else if (fields == 6) do_prfm(Rt, va, res);
425 else CANNOTDECODE("unknown", inst);
426
427 stats.stat_decoder.sd_c335++;
428
429 return 0;
430}
431
432static int run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
433{
434 uint32_t ws, wt, wt1, wt2;
435 uint64_t xt, xt1, xt2;
436 uint32_t size = BITS(inst, 31, 30),
437 o2 = BITS(inst, 23, 23),
438 L = BITS(inst, 22, 22),
439 o1 = BITS(inst, 21, 21),
440 Rs = BITS(inst, 20, 16),
441 o0 = BITS(inst, 15, 15),
442 Rt2 = BITS(inst, 14, 10),
443 Rt = BITS(inst, 4, 0);
444 uint8_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;
445
446 kprintf("%s Load/store exclusive on device memory???\n", __func__);
447
448 res->rr_num = 1;
449 res->rr_addrdata[0].ad_addr = pa;
450
451 switch (fields) {
452 case 0:
453 READ_GPR_W(ss, Rt, wt);
454 __asm__ volatile("stxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
455 WRITE_GPR_W(ss, Rs, ws);
456 res->rr_rw = PGTRACE_RW_STORE;
457 res->rr_addrdata[0].ad_data = wt;
458 break;
459 case 1:
460 READ_GPR_W(ss, Rt, wt);
461 __asm__ volatile("stlxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
462 WRITE_GPR_W(ss, Rs, ws);
463 res->rr_rw = PGTRACE_RW_STORE;
464 res->rr_addrdata[0].ad_data = wt;
465 break;
466 case 4:
467 __asm__ volatile("ldxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
468 WRITE_GPR_W(ss, Rt, wt);
469 res->rr_rw = PGTRACE_RW_LOAD;
470 res->rr_addrdata[0].ad_data = wt;
471 break;
472 case 5:
473 __asm__ volatile("ldaxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
474 WRITE_GPR_W(ss, Rt, wt);
475 res->rr_rw = PGTRACE_RW_LOAD;
476 res->rr_addrdata[0].ad_data = wt;
477 break;
478 case 9:
479 READ_GPR_W(ss, Rt, wt);
480 __asm__ volatile("stlrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
481 res->rr_rw = PGTRACE_RW_STORE;
482 res->rr_addrdata[0].ad_data = wt;
483 break;
484 case 0xd:
485 __asm__ volatile("ldarb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
486 WRITE_GPR_W(ss, Rt, wt);
487 res->rr_rw = PGTRACE_RW_LOAD;
488 res->rr_addrdata[0].ad_data = wt;
489 break;
490 case 0x10:
491 READ_GPR_W(ss, Rt, wt);
492 __asm__ volatile("stxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
493 WRITE_GPR_W(ss, Rs, ws);
494 res->rr_rw = PGTRACE_RW_STORE;
495 res->rr_addrdata[0].ad_data = wt;
496 break;
497 case 0x11:
498 READ_GPR_W(ss, Rt, wt);
499 __asm__ volatile("stlxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
500 WRITE_GPR_W(ss, Rs, ws);
501 res->rr_rw = PGTRACE_RW_STORE;
502 res->rr_addrdata[0].ad_data = wt;
503 break;
504 case 0x14:
505 __asm__ volatile("ldxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
506 WRITE_GPR_W(ss, Rt, wt);
507 res->rr_rw = PGTRACE_RW_LOAD;
508 res->rr_addrdata[0].ad_data = wt;
509 break;
510 case 0x15:
511 __asm__ volatile("ldaxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
512 WRITE_GPR_W(ss, Rt, wt);
513 res->rr_rw = PGTRACE_RW_LOAD;
514 res->rr_addrdata[0].ad_data = wt;
515 break;
516 case 0x19:
517 READ_GPR_W(ss, Rt, wt);
518 __asm__ volatile("stlrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
519 res->rr_rw = PGTRACE_RW_STORE;
520 res->rr_addrdata[0].ad_data = wt;
521 break;
522 case 0x1d:
523 __asm__ volatile("ldarh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
524 WRITE_GPR_W(ss, Rt, wt);
525 res->rr_rw = PGTRACE_RW_LOAD;
526 res->rr_addrdata[0].ad_data = wt;
527 break;
528 case 0x20:
529 READ_GPR_W(ss, Rt, wt);
530 __asm__ volatile("stxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
531 WRITE_GPR_W(ss, Rs, ws);
532 res->rr_rw = PGTRACE_RW_STORE;
533 res->rr_addrdata[0].ad_data = wt;
534 break;
535 case 0x21:
536 READ_GPR_W(ss, Rt, wt);
537 __asm__ volatile("stlxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
538 WRITE_GPR_W(ss, Rs, ws);
539 res->rr_rw = PGTRACE_RW_STORE;
540 res->rr_addrdata[0].ad_data = wt;
541 break;
542 case 0x22:
543 READ_GPR_W(ss, Rt, wt1);
544 READ_GPR_W(ss, Rt2, wt2);
545 __asm__ volatile("stxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
546 WRITE_GPR_W(ss, Rs, ws);
547 res->rr_rw = PGTRACE_RW_STORE;
548 res->rr_num = 2;
549 res->rr_addrdata[0].ad_addr = va;
550 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
551 res->rr_addrdata[0].ad_data = wt1;
552 res->rr_addrdata[1].ad_data = wt2;
553 break;
554 case 0x23:
555 READ_GPR_W(ss, Rt, wt1);
556 READ_GPR_W(ss, Rt2, wt2);
557 __asm__ volatile("stlxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
558 WRITE_GPR_W(ss, Rs, ws);
559 res->rr_rw = PGTRACE_RW_STORE;
560 res->rr_num = 2;
561 res->rr_addrdata[0].ad_addr = va;
562 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
563 res->rr_addrdata[0].ad_data = wt1;
564 res->rr_addrdata[1].ad_data = wt2;
565 break;
566 case 0x24:
567 __asm__ volatile("ldxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
568 WRITE_GPR_W(ss, Rt, wt);
569 res->rr_rw = PGTRACE_RW_LOAD;
570 res->rr_addrdata[0].ad_data = wt;
571 break;
572 case 0x25:
573 __asm__ volatile("ldaxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
574 WRITE_GPR_W(ss, Rt, wt);
575 res->rr_rw = PGTRACE_RW_LOAD;
576 res->rr_addrdata[0].ad_data = wt;
577 break;
578 case 0x26:
579 __asm__ volatile("ldxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
580 WRITE_GPR_W(ss, Rt, wt1);
581 WRITE_GPR_W(ss, Rt2, wt2);
582 res->rr_rw = PGTRACE_RW_LOAD;
583 res->rr_num = 2;
584 res->rr_addrdata[0].ad_addr = va;
585 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
586 res->rr_addrdata[0].ad_data = wt1;
587 res->rr_addrdata[1].ad_data = wt2;
588 break;
589 case 0x27:
590 __asm__ volatile("ldaxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
591 WRITE_GPR_W(ss, Rt, wt1);
592 WRITE_GPR_W(ss, Rt2, wt2);
593 res->rr_rw = PGTRACE_RW_LOAD;
594 res->rr_num = 2;
595 res->rr_addrdata[0].ad_addr = va;
596 res->rr_addrdata[1].ad_addr = va+sizeof(wt1);
597 res->rr_addrdata[0].ad_data = wt1;
598 res->rr_addrdata[1].ad_data = wt2;
599 break;
600 case 0x29:
601 READ_GPR_W(ss, Rt, wt);
602 __asm__ volatile("stlr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
603 res->rr_rw = PGTRACE_RW_STORE;
604 res->rr_addrdata[0].ad_data = wt;
605 break;
606 case 0x2d:
607 __asm__ volatile("ldar %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
608 WRITE_GPR_W(ss, Rt, wt);
609 res->rr_rw = PGTRACE_RW_LOAD;
610 res->rr_addrdata[0].ad_data = wt;
611 break;
612 case 0x30:
613 READ_GPR_X(ss, Rt, xt);
614 __asm__ volatile("stxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
615 WRITE_GPR_W(ss, Rs, ws);
616 res->rr_rw = PGTRACE_RW_STORE;
617 res->rr_addrdata[0].ad_data = xt;
618 break;
619 case 0x31:
620 READ_GPR_X(ss, Rt, xt);
621 __asm__ volatile("stlxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
622 WRITE_GPR_W(ss, Rs, ws);
623 res->rr_rw = PGTRACE_RW_STORE;
624 res->rr_addrdata[0].ad_data = xt;
625 break;
626 case 0x32:
627 READ_GPR_X(ss, Rt, xt1);
628 READ_GPR_X(ss, Rt2, xt2);
629 __asm__ volatile("stxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
630 WRITE_GPR_W(ss, Rs, ws);
631 res->rr_rw = PGTRACE_RW_STORE;
632 res->rr_num = 2;
633 res->rr_addrdata[0].ad_addr = va;
634 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
635 res->rr_addrdata[0].ad_data = xt1;
636 res->rr_addrdata[1].ad_data = xt2;
637 break;
638 case 0x33:
639 READ_GPR_X(ss, Rt, xt1);
640 READ_GPR_X(ss, Rt2, xt2);
641 __asm__ volatile("stlxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
642 WRITE_GPR_W(ss, Rs, ws);
643 res->rr_rw = PGTRACE_RW_STORE;
644 res->rr_num = 2;
645 res->rr_addrdata[0].ad_addr = va;
646 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
647 res->rr_addrdata[0].ad_data = xt1;
648 res->rr_addrdata[1].ad_data = xt2;
649 break;
650 case 0x34:
651 __asm__ volatile("ldxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
652 WRITE_GPR_X(ss, Rt, xt);
653 res->rr_rw = PGTRACE_RW_LOAD;
654 res->rr_addrdata[0].ad_data = xt;
655 break;
656 case 0x35:
657 __asm__ volatile("ldaxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
658 WRITE_GPR_X(ss, Rt, xt);
659 res->rr_rw = PGTRACE_RW_LOAD;
660 res->rr_addrdata[0].ad_data = xt;
661 break;
662 case 0x36:
663 __asm__ volatile("ldxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
664 WRITE_GPR_X(ss, Rt, xt1);
665 WRITE_GPR_X(ss, Rt2, xt2);
666 res->rr_rw = PGTRACE_RW_LOAD;
667 res->rr_num = 2;
668 res->rr_addrdata[0].ad_addr = va;
669 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
670 res->rr_addrdata[0].ad_data = xt1;
671 res->rr_addrdata[1].ad_data = xt2;
672 break;
673 case 0x37:
674 __asm__ volatile("ldaxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
675 WRITE_GPR_X(ss, Rt, xt1);
676 WRITE_GPR_X(ss, Rt2, xt2);
677 res->rr_rw = PGTRACE_RW_LOAD;
678 res->rr_num = 2;
679 res->rr_addrdata[0].ad_addr = va;
680 res->rr_addrdata[1].ad_addr = va+sizeof(xt1);
681 res->rr_addrdata[0].ad_data = xt1;
682 res->rr_addrdata[1].ad_data = xt2;
683 break;
684 case 0x39:
685 READ_GPR_X(ss, Rt, xt);
686 __asm__ volatile("stlr %[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
687 res->rr_rw = PGTRACE_RW_STORE;
688 res->rr_addrdata[0].ad_data = xt;
689 break;
690 case 0x3d:
691 __asm__ volatile("ldar %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
692 WRITE_GPR_X(ss, Rt, xt);
693 res->rr_rw = PGTRACE_RW_LOAD;
694 res->rr_addrdata[0].ad_data = xt;
695 break;
696 default:
697 CANNOTDECODE("unknown", inst);
698 }
699
700 stats.stat_decoder.sd_c336++;
701
702 return 0;
703}
704
705static int run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
706{
707 uint32_t wt1, wt2;
708 uint64_t xt1, xt2;
709 uint32_t opc = BITS(inst, 31, 30),
710 V = BITS(inst, 26, 26),
711 L = BITS(inst, 22, 22),
712 Rt = BITS(inst, 4, 0),
713 Rt2 = BITS(inst, 14, 10);
714 uint8_t fields = (opc << 2) | (V << 1) | L;
715
716 switch (fields) {
717 case 0:
718 READ_GPR_W(ss, Rt, wt1);
719 READ_GPR_W(ss, Rt2, wt2);
720 __asm__ volatile("stnp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
721 res->rr_rw = PGTRACE_RW_STORE;
722 res->rr_num = 2;
723 res->rr_addrdata[0].ad_addr = pa;
724 res->rr_addrdata[1].ad_addr = pa+sizeof(wt1);
725 res->rr_addrdata[0].ad_data = wt1;
726 res->rr_addrdata[1].ad_data = wt2;
727 break;
728 case 1:
729 __asm__ volatile("ldnp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
730 WRITE_GPR_W(ss, Rt, wt1);
731 WRITE_GPR_W(ss, Rt2, wt2);
732 res->rr_rw = PGTRACE_RW_LOAD;
733 res->rr_num = 2;
734 res->rr_addrdata[0].ad_addr = pa;
735 res->rr_addrdata[1].ad_addr = pa+sizeof(wt1);
736 res->rr_addrdata[0].ad_data = wt1;
737 res->rr_addrdata[1].ad_data = wt2;
738 break;
739 case 2:
740 case 3:
741 case 6:
742 case 7:
743 case 10:
744 case 11:
745 CANNOTDECODE("simd", inst);
746 case 8:
747 READ_GPR_X(ss, Rt, xt1);
748 READ_GPR_X(ss, Rt2, xt2);
749 __asm__ volatile("stnp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
750 res->rr_rw = PGTRACE_RW_STORE;
751 res->rr_num = 2;
752 res->rr_addrdata[0].ad_addr = pa;
753 res->rr_addrdata[1].ad_addr = pa+sizeof(xt1);
754 res->rr_addrdata[0].ad_data = xt1;
755 res->rr_addrdata[1].ad_data = xt2;
756 break;
757 case 9:
758 __asm__ volatile("ldnp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
759 WRITE_GPR_X(ss, Rt, xt1);
760 WRITE_GPR_X(ss, Rt2, xt2);
761 res->rr_rw = PGTRACE_RW_LOAD;
762 res->rr_num = 2;
763 res->rr_addrdata[0].ad_addr = pa;
764 res->rr_addrdata[1].ad_addr = pa+sizeof(xt1);
765 res->rr_addrdata[0].ad_data = xt1;
766 res->rr_addrdata[1].ad_data = xt2;
767 break;
768 default:
769 CANNOTDECODE("simd", inst);
770 }
771
772 stats.stat_decoder.sd_c337++;
773
774 return 0;
775}
776
777static int run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
778{
779 uint32_t size = BITS(inst, 31, 30),
780 V = BITS(inst, 26, 26),
781 opc = BITS(inst, 23, 22),
782 Rt = BITS(inst, 4, 0);
783 uint8_t fields = (size << 3) | (V << 2) | opc;
784
785 res->rr_num = 1;
786 res->rr_addrdata[0].ad_addr = pa;
787
788 if (fields == 0) do_str(1, Rt, va, ss, res);
789 else if (fields == 1) do_ldr(1, Rt, va, ss, res);
790 else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
791 else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
792 else if ((fields == 4) ||
793 (fields == 5) ||
794 (fields == 6) ||
795 (fields == 7) ||
796 (fields == 12) ||
797 (fields == 13) ||
798 (fields == 0x14) ||
799 (fields == 0x15) ||
800 (fields == 0x1c) ||
801 (fields == 0x1d)) CANNOTDECODE("simd", inst);
802 else if (fields == 8) do_str(2, Rt, va, ss, res);
803 else if (fields == 9) do_ldr(2, Rt, va, ss, res);
804 else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
805 else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
806 else if (fields == 0x10) do_str(4, Rt, va, ss, res);
807 else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
808 else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
809 else if (fields == 0x18) do_str(8, Rt, va, ss, res);
810 else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
811 else CANNOTDECODE("unknown", inst);
812
813 stats.stat_decoder.sd_c338++;
814
815 return 0;
816}
817
818static int run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
819{
820 uint32_t size = BITS(inst, 31, 30),
821 V = BITS(inst, 26, 26),
822 opc = BITS(inst, 23, 22),
823 Rt = BITS(inst, 4, 0);
824 uint8_t fields = (size << 3) | (V << 2) | opc;
825
826 res->rr_num = 1;
827 res->rr_addrdata[0].ad_addr = pa;
828
829 if (fields == 0) do_str(1, Rt, va, ss, res);
830 else if (fields == 1) do_ldr(1, Rt, va, ss, res);
831 else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
832 else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
833 else if ((fields == 4) ||
834 (fields == 5) ||
835 (fields == 6) ||
836 (fields == 7) ||
837 (fields == 12) ||
838 (fields == 13) ||
839 (fields == 0x14) ||
840 (fields == 0x15) ||
841 (fields == 0x1c) ||
842 (fields == 0x1d)) CANNOTDECODE("simd", inst);
843 else if (fields == 8) do_str(2, Rt, va, ss, res);
844 else if (fields == 9) do_ldr(2, Rt, va, ss, res);
845 else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
846 else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
847 else if (fields == 0x10) do_str(4, Rt, va, ss, res);
848 else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
849 else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
850 else if (fields == 0x18) do_str(8, Rt, va, ss, res);
851 else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
852 else CANNOTDECODE("unknown", inst);
853
854 stats.stat_decoder.sd_c339++;
855
856 return 0;
857}
858
859static int run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
860{
861 uint32_t size = BITS(inst, 31, 30),
862 V = BITS(inst, 26, 26),
863 opc = BITS(inst, 23, 22),
864 Rt = BITS(inst, 4, 0);
865 uint8_t fields = (size << 3) | (V << 2) | opc;
866
867 res->rr_num = 1;
868 res->rr_addrdata[0].ad_addr = pa;
869
870 if (fields == 0) do_str(1, Rt, va, ss, res);
871 else if (fields == 1) do_ldr(1, Rt, va, ss, res);
872 else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
873 else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
874 else if ((fields == 4) ||
875 (fields == 5) ||
876 (fields == 6) ||
877 (fields == 7) ||
878 (fields == 12) ||
879 (fields == 13) ||
880 (fields == 0x14) ||
881 (fields == 0x15) ||
882 (fields == 0x1c) ||
883 (fields == 0x1d)) CANNOTDECODE("simd", inst);
884 else if (fields == 8) do_str(2, Rt, va, ss, res);
885 else if (fields == 9) do_ldr(2, Rt, va, ss, res);
886 else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
887 else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
888 else if (fields == 0x10) do_str(4, Rt, va, ss, res);
889 else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
890 else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
891 else if (fields == 0x18) do_str(8, Rt, va, ss, res);
892 else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
893 else if (fields == 0x1a) do_prfm(Rt, va, res);
894 else CANNOTDECODE("unknown", inst);
895
896 stats.stat_decoder.sd_c3310++;
897
898 return 0;
899}
900
901static int run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
902{
903 uint32_t size = BITS(inst, 31, 30),
904 V = BITS(inst, 26, 26),
905 opc = BITS(inst, 23, 22),
906 Rt = BITS(inst, 4, 0);
907 uint8_t fields = (size << 3) | (V << 2) | opc;
908
909 res->rr_num = 1;
910 res->rr_addrdata[0].ad_addr = pa;
911
912 if (fields == 0) do_sttr(1, Rt, va, ss, res);
913 else if (fields == 1) do_ldtr(1, Rt, va, ss, res);
914 else if (fields == 2) do_ldtrs(1, 8, Rt, va, ss, res);
915 else if (fields == 3) do_ldtrs(1, 4, Rt, va, ss, res);
916 else if (fields == 8) do_sttr(2, Rt, va, ss, res);
917 else if (fields == 9) do_ldtr(2, Rt, va, ss, res);
918 else if (fields == 10) do_ldtrs(2, 8, Rt, va, ss, res);
919 else if (fields == 11) do_ldtrs(2, 4, Rt, va, ss, res);
920 else if (fields == 0x10) do_sttr(4, Rt, va, ss, res);
921 else if (fields == 0x11) do_ldtr(4, Rt, va, ss, res);
922 else if (fields == 0x12) do_ldtrs(4, 8, Rt, va, ss, res);
923 else if (fields == 0x18) do_sttr(8, Rt, va, ss, res);
924 else if (fields == 0x19) do_ldtr(8, Rt, va, ss, res);
925 else CANNOTDECODE("unknown", inst);
926
927 stats.stat_decoder.sd_c3311++;
928
929 return 0;
930}
931
932static int run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
933{
934 uint32_t size = BITS(inst, 31, 30),
935 V = BITS(inst, 26, 26),
936 opc = BITS(inst, 23, 22),
937 Rt = BITS(inst, 4, 0);
938 uint8_t fields = (size << 3) | (V << 2) | opc;
939
940 res->rr_num = 1;
941 res->rr_addrdata[0].ad_addr = pa;
942
943 if (fields == 0) do_str(1, Rt, va, ss, res);
944 else if (fields == 1) do_ldr(1, Rt, va, ss, res);
945 else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
946 else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
947 else if ((fields == 4) ||
948 (fields == 5) ||
949 (fields == 6) ||
950 (fields == 7) ||
951 (fields == 12) ||
952 (fields == 13) ||
953 (fields == 0x14) ||
954 (fields == 0x15) ||
955 (fields == 0x1c) ||
956 (fields == 0x1d)) CANNOTDECODE("simd", inst);
957 else if (fields == 8) do_str(2, Rt, va, ss, res);
958 else if (fields == 9) do_ldr(2, Rt, va, ss, res);
959 else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
960 else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
961 else if (fields == 0x10) do_str(4, Rt, va, ss, res);
962 else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
963 else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
964 else if (fields == 0x18) do_str(8, Rt, va, ss, res);
965 else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
966 else if (fields == 0x1a) do_prfm(Rt, va, res);
967 else CANNOTDECODE("unknown", inst);
968
969 stats.stat_decoder.sd_c3312++;
970
971 return 0;
972}
973
974static int run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
975{
976 uint32_t size = BITS(inst, 31, 30),
977 V = BITS(inst, 26, 26),
978 opc = BITS(inst, 23, 22),
979 Rt = BITS(inst, 4, 0);
980 uint8_t fields = (size << 3) | (V << 2) | opc;
981
982 res->rr_num = 1;
983 res->rr_addrdata[0].ad_addr = pa;
984
985 if (fields == 0) do_str(1, Rt, va, ss, res);
986 else if (fields == 1) do_ldr(1, Rt, va, ss, res);
987 else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
988 else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
989 else if ((fields == 4) ||
990 (fields == 5) ||
991 (fields == 6) ||
992 (fields == 7) ||
993 (fields == 12) ||
994 (fields == 13) ||
995 (fields == 0x14) ||
996 (fields == 0x15) ||
997 (fields == 0x1c) ||
998 (fields == 0x1d)) CANNOTDECODE("simd", inst);
999 else if (fields == 8) do_str(2, Rt, va, ss, res);
1000 else if (fields == 9) do_ldr(2, Rt, va, ss, res);
1001 else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
1002 else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
1003 else if (fields == 0x10) do_str(4, Rt, va, ss, res);
1004 else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
1005 else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
1006 else if (fields == 0x18) do_str(8, Rt, va, ss, res);
1007 else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
1008 else if (fields == 0x1a) do_prfm(Rt, va, res);
1009 else CANNOTDECODE("unknown", inst);
1010
1011 stats.stat_decoder.sd_c3313++;
1012
1013 return 0;
1014}
1015
1016static int run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1017{
1018 uint32_t opc = BITS(inst, 31, 30),
1019 V = BITS(inst, 26, 26),
1020 L = BITS(inst, 22, 22),
1021 Rt = BITS(inst, 4, 0),
1022 Rt2 = BITS(inst, 14, 10);
1023 uint8_t fields = (opc << 2) | (V << 1) | L;
1024
1025 res->rr_num = 2;
1026 res->rr_addrdata[0].ad_addr = pa;
1027
1028 if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
1029 else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
1030 else if ((fields == 2) ||
1031 (fields == 3) ||
1032 (fields == 6) ||
1033 (fields == 7) ||
1034 (fields == 10) ||
1035 (fields == 11)) CANNOTDECODE("simd", inst);
1036 else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
1037 else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
1038 else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
1039 else CANNOTDECODE("unknown", inst);
1040
1041 stats.stat_decoder.sd_c3314++;
1042
1043 return 0;
1044}
1045
1046static int run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1047{
1048 uint32_t opc = BITS(inst, 31, 30),
1049 V = BITS(inst, 26, 26),
1050 L = BITS(inst, 22, 22),
1051 Rt = BITS(inst, 4, 0),
1052 Rt2 = BITS(inst, 14, 10);
1053 uint8_t fields = (opc << 2) | (V << 1) | L;
1054
1055 res->rr_num = 2;
1056 res->rr_addrdata[0].ad_addr = pa;
1057
1058 if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
1059 else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
1060 else if ((fields == 2) ||
1061 (fields == 3) ||
1062 (fields == 6) ||
1063 (fields == 7) ||
1064 (fields == 10) ||
1065 (fields == 11)) CANNOTDECODE("simd", inst);
1066 else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
1067 else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
1068 else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
1069 else CANNOTDECODE("unknown", inst);
1070
1071 stats.stat_decoder.sd_c3315++;
1072
1073 return 0;
1074}
1075
1076static int run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1077{
1078 uint32_t opc = BITS(inst, 31, 30),
1079 V = BITS(inst, 26, 26),
1080 L = BITS(inst, 22, 22),
1081 Rt = BITS(inst, 4, 0),
1082 Rt2 = BITS(inst, 14, 10);
1083 uint8_t fields = (opc << 2) | (V << 1) | L;
1084
1085 res->rr_num = 2;
1086 res->rr_addrdata[0].ad_addr = pa;
1087
1088 if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
1089 else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
1090 else if ((fields == 2) ||
1091 (fields == 3) ||
1092 (fields == 6) ||
1093 (fields == 7) ||
1094 (fields == 10) ||
1095 (fields == 11)) CANNOTDECODE("simd", inst);
1096 else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
1097 else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
1098 else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
1099 else CANNOTDECODE("unknown", inst);
1100
1101 stats.stat_decoder.sd_c3316++;
1102
1103 return 0;
1104}
1105
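// The get_info_* helpers compute an access's effective address and size from the
// saved register state without executing it; pgtrace_decode_and_run() compares the
// address against the fault VA to pick which pre-mapped page to run against.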
1106static bool get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1107{
1108#pragma unused(inst, ss, info)
1109 CANNOTDECODE("simd", inst);
1110 return false;
1111}
1112
1113// load register (literal)
1114static bool get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1115{
1116 uint32_t opc = BITS(inst, 31, 30);
1117 uint32_t V = BITS(inst, 26, 26);
1118 uint32_t imm19 = BITS(inst, 23, 5);
1119 uint32_t fields = (opc << 1) | V;
1120 uint8_t scale;
1121
1122 if (__builtin_expect(fields > 6, false)) {
1123 CANNOTDECODE("invalid", inst);
1124 return false;
1125 }
1126
1127 assert(fields <= 6);
1128
1129 if (V == 1) {
1130 scale = 2 + opc;
1131 } else {
1132 switch (opc) {
1133 case 0 ... 1:
1134 scale = 2 + opc;
1135 break;
1136 case 2:
1137 scale = 2;
1138 break;
1139 default:
1140 CANNOTDECODE("invalid", inst);
1141 return false;
1142 }
1143 }
1144
1145 info->bytes = 1 << scale;
1146 info->addr = ss->ss_64.pc + (SIGN_EXTEND_64(imm19, 19) << 2);
1147
1148 return true;
1149}
1150
1151// load/store exclusive
1152static bool get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1153{
1154 uint32_t size = BITS(inst, 31, 30);
1155 uint32_t o2 = BITS(inst, 23, 23);
1156 uint32_t L = BITS(inst, 22, 22);
1157 uint32_t o1 = BITS(inst, 21, 21);
1158 uint32_t o0 = BITS(inst, 15, 15);
1159 uint32_t Rn = BITS(inst, 9, 5);
1160 uint32_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;
1161
1162 if (__builtin_expect((2 <= fields && fields <= 3) ||
1163 (6 <= fields && fields <= 8) ||
1164 (10 <= fields && fields <= 12) ||
1165 (14 <= fields && fields <= 15) ||
1166 (18 <= fields && fields <= 19) ||
1167 (22 <= fields && fields <= 24) ||
1168 (26 <= fields && fields <= 28) ||
1169 (30 <= fields && fields <= 31) ||
1170 (40 == fields) ||
1171 (42 <= fields && fields <= 44) ||
1172 (46 <= fields && fields <= 47) ||
1173 (56 == fields) ||
1174 (58 <= fields && fields <= 60) ||
1175 (62 <= fields), false)) {
1176 CANNOTDECODE("invalid", inst);
1177 return false;
1178 }
1179
1180 info->bytes = (1 << size) << o1;
1181 info->addr = ss->ss_64.x[Rn];
1182
1183 return true;
1184}
1185
1186// load/store no-allocate pair (offset)
1187static bool get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1188{
1189 uint32_t opc = BITS(inst, 31, 30);
1190 uint32_t V = BITS(inst, 26, 26);
1191 uint32_t L = BITS(inst, 22, 22);
1192 uint32_t imm7 = BITS(inst, 21, 15);
1193 uint32_t Rn = BITS(inst, 9, 5);
1194 uint32_t fields = (opc << 2) | (V << 1) | L;
1195 uint8_t scale;
1196
1197 if (__builtin_expect((4 <= fields && fields <= 5) ||
1198 (12 <= fields), false)) {
1199 CANNOTDECODE("invalid", inst);
1200 return false;
1201 }
1202
1203 if (V == 1) {
1204 scale = opc + 2;
1205 } else {
1206 scale = BITS(opc, 1, 1) + 2;
1207 }
1208
1209 // double since it's pair
1210 info->bytes = 2 * (1 << scale);
1211 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1212
1213 return true;
1214}
1215
1216// load/store register (immediate post-indexed)
1217static bool get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1218{
1219 uint32_t size = BITS(inst, 31, 30);
1220 uint32_t V = BITS(inst, 26, 26);
1221 uint32_t opc = BITS(inst, 23, 22);
1222 uint32_t Rn = BITS(inst, 9, 5);
1223 uint32_t fields = (size << 3) | (V << 2) | opc;
1224 uint8_t scale;
1225
1226 if (__builtin_expect((14 <= fields && fields <= 15) ||
1227 (19 == fields) ||
1228 (22 <= fields && fields <= 23) ||
1229 (26 <= fields && fields <= 27) ||
1230 (30 <= fields), false)) {
1231 CANNOTDECODE("invalid", inst);
1232 return false;
1233 }
1234
1235 if (V == 1) {
1236 scale = BITS(opc, 1, 1) << 2 | size;
1237 } else {
1238 scale = size;
1239 }
1240
1241 info->bytes = 1 << scale;
1242 // post-indexed
1243 info->addr = ss->ss_64.x[Rn];
1244
1245 return true;
1246}
1247
1248// load/store register (immediate pre-indexed)
1249static bool get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1250{
1251 uint32_t size = BITS(inst, 31, 30);
1252 uint32_t V = BITS(inst, 26, 26);
1253 uint32_t opc = BITS(inst, 23, 22);
1254 uint32_t imm9 = BITS(inst, 20, 12);
1255 uint32_t Rn = BITS(inst, 9, 5);
1256 uint32_t fields = (size << 3) | (V << 2) | opc;
1257 uint8_t scale;
1258
1259 if (__builtin_expect((14 <= fields && fields <= 15) ||
1260 (19 == fields) ||
1261 (22 <= fields && fields <= 23) ||
1262 (26 <= fields && fields <= 27) ||
1263 (30 <= fields), false)) {
1264 CANNOTDECODE("invalid", inst);
1265 return false;
1266 }
1267
1268 if (V == 1) {
1269 scale = BITS(opc, 1, 1) << 2 | size;
1270 } else {
1271 scale = size;
1272 }
1273
1274 info->bytes = 1 << scale;
1275 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1276
1277 return true;
1278}
1279
1280// load/store register (register offset)
1281static bool get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1282{
1283 uint32_t size = BITS(inst, 31, 30);
1284 uint32_t V = BITS(inst, 26, 26);
1285 uint32_t opc = BITS(inst, 23, 22);
1286 uint32_t Rm = BITS(inst, 20, 16);
1287 uint32_t option = BITS(inst, 15, 13);
1288 uint32_t S = BITS(inst, 12, 12);
1289 uint32_t Rn = BITS(inst, 9, 5);
1290 uint32_t fields = (size << 3) | (V << 2) | opc;
1291 uint32_t scale;
1292
1293 if (__builtin_expect((14 <= fields && fields <= 15) ||
1294 (19 == fields) ||
1295 (22 <= fields && fields <= 23) ||
1296 (27 == fields) ||
1297 (30 <= fields), false)) {
1298 CANNOTDECODE("invalid", inst);
1299 return false;
1300 }
1301
1302 if (V == 1) {
1303 scale = BITS(opc, 1, 1) << 2 | size;
1304 } else {
1305 scale = size;
1306 }
1307
1308 info->bytes = 1 << scale;
1309
1310 uint64_t m = ss->ss_64.x[Rm];
1311 uint8_t shift = (S == 1 ? scale : 0);
1312
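    // option<2> selects sign- versus zero-extension of the offset register and
    // option<1:0> its source width (8 << option<1:0> bits), matching the two arms below.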
1313 switch (option) {
1314 case 0 ... 3:
1315 info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(m, 8 << option) << shift);
1316 break;
1317 case 4 ... 7:
1318 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(m, 8 << BITS(option, 1, 0)) << shift);
1319 break;
1320 default:
1321 CANNOTDECODE("invalid", inst);
1322 return false;
1323 }
1324
1325 return true;
1326}
1327
1328// load/store register (unprivileged)
1329static bool get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1330{
1331 uint32_t size = BITS(inst, 31, 30);
1332 uint32_t V = BITS(inst, 26, 26);
1333 uint32_t opc = BITS(inst, 23, 22);
1334 uint32_t imm9 = BITS(inst, 20, 12);
1335 uint32_t Rn = BITS(inst, 9, 5);
1336 uint32_t fields = (size << 3) | (V << 2) | opc;
1337
1338 if (__builtin_expect((4 <= fields && fields <= 7) ||
1339 (12 <= fields && fields <= 15) ||
1340 (19 <= fields && fields <= 23) ||
1341 (26 <= fields), false)) {
1342 CANNOTDECODE("invalid", inst);
1343 return false;
1344 }
1345
1346 info->bytes = 1 << size;
1347 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1348
1349 return true;
1350}
1351
1352// load/store register (unscaled immediate)
1353static bool get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1354{
1355 uint32_t size = BITS(inst, 31, 30);
1356 uint32_t V = BITS(inst, 26, 26);
1357 uint32_t opc = BITS(inst, 23, 22);
1358 uint32_t imm9 = BITS(inst, 20, 12);
1359 uint32_t Rn = BITS(inst, 9, 5);
1360 uint32_t fields = (size << 3) | (V << 2) | opc;
1361 uint32_t scale;
1362
1363 if (__builtin_expect((14 <= fields && fields <= 15) ||
1364 (19 == fields) ||
1365 (22 <= fields && fields <= 23) ||
1366 (27 == fields) ||
1367 (30 <= fields), false)) {
1368 CANNOTDECODE("invalid", inst);
1369 return false;
1370 }
1371
1372 if (V == 1) {
1373 scale = BITS(opc, 1, 1) << 2 | size;
1374 } else {
1375 scale = size;
1376 }
1377
1378 info->bytes = 1 << scale;
1379 info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);
1380
1381 return true;
1382}
1383
1384// load/store register (unsigned immediate)
1385static bool get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1386{
1387 uint32_t size = BITS(inst, 31, 30);
1388 uint32_t V = BITS(inst, 26, 26);
1389 uint32_t opc = BITS(inst, 23, 22);
1390 uint32_t imm12 = BITS(inst, 21, 10);
1391 uint32_t Rn = BITS(inst, 9, 5);
1392 uint32_t fields = (size << 3) | (V << 2) | opc;
1393 uint32_t scale;
1394
1395 if (__builtin_expect((14 <= fields && fields <= 15) ||
1396 (19 == fields) ||
1397 (22 <= fields && fields <= 23) ||
1398 (27 == fields) ||
1399 (30 <= fields), false)) {
1400 CANNOTDECODE("invalid", inst);
1401 return false;
1402 }
1403
1404 if (V == 1) {
1405 scale = BITS(opc, 1, 1) << 2 | size;
1406 } else {
1407 scale = size;
1408 }
1409
1410 info->bytes = 1 << scale;
1411 info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(imm12, 12) << scale);
1412
1413 return true;
1414}
1415
1416// load/store register pair (offset)
1417static bool get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1418{
1419 uint32_t opc = BITS(inst, 31, 30);
1420 uint32_t V = BITS(inst, 26, 26);
1421 uint32_t L = BITS(inst, 22, 22);
1422 uint32_t imm7 = BITS(inst, 21, 15);
1423 uint32_t Rn = BITS(inst, 9, 5);
1424 uint32_t fields = (opc << 2) | (V << 1) | L;
1425 uint8_t scale = 2 + (opc >> 1);
1426
1427 if (__builtin_expect((4 == fields) ||
1428 (12 <= fields), false)) {
1429 CANNOTDECODE("invalid", inst);
1430 return false;
1431 }
1432
1433 if (V == 1) {
1434 scale = 2 + opc;
1435 } else {
1436 scale = 2 + BITS(opc, 1, 1);
1437 }
1438
1439 info->bytes = 2 * (1 << scale);
1440 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1441
1442 return true;
1443}
1444
1445// load/store register pair (post-indexed)
1446static bool get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1447{
1448 uint32_t opc = BITS(inst, 31, 30);
1449 uint32_t V = BITS(inst, 26, 26);
1450 uint32_t L = BITS(inst, 22, 22);
1451 uint32_t Rn = BITS(inst, 9, 5);
1452 uint32_t fields = (opc << 2) | (V << 1) | L;
1453 uint8_t scale = 2 + (opc >> 1);
1454
1455 if (__builtin_expect((4 == fields) ||
1456 (12 <= fields), false)) {
1457 CANNOTDECODE("invalid", inst);
1458 return false;
1459 }
1460
1461 if (V == 1) {
1462 scale = 2 + opc;
1463 } else {
1464 scale = 2 + BITS(opc, 1, 1);
1465 }
1466
1467 info->bytes = 2 * (1 << scale);
1468 // post-indexed
1469 info->addr = ss->ss_64.x[Rn];
1470
1471 return true;
1472}
1473
1474// load/store register pair (pre-indexed)
1475static bool get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
1476{
1477 uint32_t opc = BITS(inst, 31, 30);
1478 uint32_t V = BITS(inst, 26, 26);
1479 uint32_t L = BITS(inst, 22, 22);
1480 uint32_t imm7 = BITS(inst, 21, 15);
1481 uint32_t Rn = BITS(inst, 9, 5);
1482 uint32_t fields = (opc << 2) | (V << 1) | L;
1483 uint8_t scale = 2 + (opc >> 1);
1484
1485 if (__builtin_expect((4 == fields) ||
1486 (12 <= fields), false)) {
1487 CANNOTDECODE("invalid", inst);
1488 return false;
1489 }
1490
1491 if (V == 1) {
1492 scale = 2 + opc;
1493 } else {
1494 scale = 2 + BITS(opc, 1, 1);
1495 }
1496
1497 info->bytes = 2 * (1 << scale);
1498 info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);
1499
1500 return true;
1501}
1502
1503
1504//-------------------------------------------------------------------
1505// Globals
1506//
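// Decode 'inst' against typetbl, use the matching class's get_info() to compute the
// effective address, pick whichever pre-mapped page (cva_page[0] front / cva_page[1]
// current) covers the access, translate it to a physical address, timestamp the
// result, and re-execute the instruction through the class's run() handler.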
1507int pgtrace_decode_and_run(uint32_t inst, vm_offset_t fva, vm_map_offset_t *cva_page, arm_saved_state_t *ss, pgtrace_run_result_t *res)
1508{
1509 uint8_t len = sizeof(typetbl)/sizeof(type_entry_t);
1510 run_t run = NULL;
1511 get_info_t get_info = NULL;
1512 vm_offset_t pa, cva;
1513 vm_offset_t cva_front_page = cva_page[0];
1514 vm_offset_t cva_cur_page = cva_page[1];
1515 instruction_info_t info;
1516
1517 for (uint8_t i = 0; i < len; i++) {
1518 if ((typetbl[i].mask & inst) == typetbl[i].value) {
1519 run = typetbl[i].run;
1520 get_info = typetbl[i].get_info;
1521 break;
1522 }
1523 }
1524
1525 assert(run != NULL && get_info != NULL);
1526
1527 get_info(inst, ss, &info);
1528
1529 if (info.addr == fva) {
1530 cva = cva_cur_page + (fva & ARM_PGMASK);
1531 } else {
1532 // which means the front page is not a tracing page
1533 cva = cva_front_page + (fva & ARM_PGMASK);
1534 }
1535
1536 pa = mmu_kvtop(cva);
1537 if (!pa) {
1538 panic("%s: invalid address cva=%lx fva=%lx info.addr=%lx inst=%x", __func__, cva, fva, info.addr, inst);
1539 }
1540
1541 absolutetime_to_nanoseconds(mach_absolute_time(), &res->rr_time);
1542 run(inst, pa, cva, ss, res);
1543
1544 return 0;
1545}
1546
1547void pgtrace_decoder_get_stats(pgtrace_stats_t *s)
1548{
1549 memcpy((void *)&(s->stat_decoder), &(stats.stat_decoder), sizeof(stats.stat_decoder));
1550}
1551#endif