/*
 * Copyright (c) 2015 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
#include <kern/debug.h>
#include <kern/clock.h>
#include <pexpert/pexpert.h>

#include "pgtrace_decoder.h"

//-------------------------------------------------------------------
// Macros
//-------------------------------------------------------------------
#define INLINE  __attribute__((noinline))
#define BITS(v, msb, lsb)   ((v) << (31-msb) >> (31-msb) >> (lsb))

#define READ_GPR_X(ss, n, v) {                                                  \
    if (__builtin_expect(n < 31, 1)) (v) = (ss)->ss_64.x[(n)];                  \
    else if (n == 31) (v) = 0;                                                  \
    else { panic("Invalid GPR x%d", n); __builtin_unreachable(); }              \
}

#define READ_GPR_W(ss, n, v) {                                                  \
    if (__builtin_expect(n < 31, 1)) (v) = *(uint32_t*)&((ss)->ss_64.x[(n)]);   \
    else if (n == 31) (v) = 0;                                                  \
    else { panic("Invalid GPR w%d", n); __builtin_unreachable(); }              \
}

#define WRITE_GPR_X(ss, n, v) {                                                 \
    if (__builtin_expect(n < 31, 1)) (ss)->ss_64.x[(n)] = (v);                  \
    else if (n == 31) {}                                                        \
    else { panic("Invalid GPR x%d", n); __builtin_unreachable(); }              \
}

#define WRITE_GPR_W(ss, n, v) {                                                 \
    if (__builtin_expect(n < 31, 1)) *(uint32_t*)&((ss)->ss_64.x[(n)]) = (v);   \
    else if (n == 31) {}                                                        \
    else { panic("Invalid GPR w%d", n); __builtin_unreachable(); }              \
}

#define SIGN_EXTEND_64(val, width)  (((int64_t)(val) << (64 - (width))) >> (64 - (width)))
#define ZERO_EXTEND_64(val, width)  (((uint64_t)(val) << (64 - (width))) >> (64 - (width)))
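
/*
 * Bit-field helpers used throughout the decoders below.  BITS() extracts an
 * inclusive bit range from a 32-bit operand, e.g. BITS(0xABCD1234, 15, 8) == 0x12;
 * it relies on the value being at most 32 bits wide.  The extend macros widen an
 * immediate to 64 bits, e.g. SIGN_EXTEND_64(0x1FF, 9) == (uint64_t)-1 while
 * ZERO_EXTEND_64(0x1FF, 8) == 0xFF.  Register number 31 is treated as WZR/XZR by
 * the GPR accessors: reads return zero and writes are discarded.
 */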
//-------------------------------------------------------------------
// Types
//-------------------------------------------------------------------
typedef struct {
    vm_offset_t addr;       // decoded target address of the access
    uint64_t    bytes;      // access width in bytes
} instruction_info_t;

typedef int (*run_t)(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
typedef bool (*get_info_t)(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);

typedef struct {
    uint32_t   mask;        // bits that identify the instruction class
    uint32_t   value;       // required value of the masked bits
    run_t      run;         // emulate the instruction and record the access
    get_info_t get_info;    // compute target address and size without side effects
} type_entry_t;
//-------------------------------------------------------------------
// Static functions
//-------------------------------------------------------------------
static int run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static int run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res);
static bool get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
static bool get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info);
// Table from ARM DDI 0487A.a C3.3
static type_entry_t typetbl[] = {
    { 0x3f000000, 0x08000000, run_c336, get_info_c336 },       // Load/store exclusive
    { 0x3b000000, 0x18000000, run_c335, get_info_c335 },       // Load register (literal)
    { 0x3b800000, 0x28000000, run_c337, get_info_c337 },       // Load/store no-allocate pair (offset)
    { 0x3b800000, 0x28800000, run_c3315, get_info_c3315 },     // Load/store register pair (post-indexed)
    { 0x3b800000, 0x29000000, run_c3314, get_info_c3314 },     // Load/store register pair (offset)
    { 0x3b800000, 0x29800000, run_c3316, get_info_c3316 },     // Load/store register pair (pre-indexed)
    { 0x3b200c00, 0x38000000, run_c3312, get_info_c3312 },     // Load/store register (unscaled immediate)
    { 0x3b200c00, 0x38000400, run_c338, get_info_c338 },       // Load/store register (immediate post-indexed)
    { 0x3b200c00, 0x38000800, run_c3311, get_info_c3311 },     // Load/store register (unprivileged)
    { 0x3b200c00, 0x38000c00, run_c339, get_info_c339 },       // Load/store register (immediate pre-indexed)
    { 0x3b200c00, 0x38200800, run_c3310, get_info_c3310 },     // Load/store register (register offset)
    { 0x3b000000, 0x39000000, run_c3313, get_info_c3313 },     // Load/store register (unsigned immediate)

    { 0xbfbf0000, 0x0c000000, run_simd, get_info_simd },       // AdvSIMD load/store multiple structures
    { 0xbfa00000, 0x0c800000, run_simd, get_info_simd },       // AdvSIMD load/store multiple structures (post-indexed)
    { 0xbf980000, 0x0d000000, run_simd, get_info_simd },       // AdvSIMD load/store single structure
    { 0xbf800000, 0x0d800000, run_simd, get_info_simd }        // AdvSIMD load/store single structure (post-indexed)
};

static pgtrace_stats_t stats;
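
/*
 * pgtrace_decode_and_run() walks typetbl[] in order and dispatches on the first
 * entry whose mask/value pair matches the faulting instruction, i.e.
 * (inst & mask) == value.  For example, a 32-bit LDR (immediate post-indexed)
 * such as 0xB8408425 (ldr w5, [x1], #8) satisfies
 * (inst & 0x3b200c00) == 0x38000400 and is therefore handled by
 * run_c338/get_info_c338.
 */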
INLINE
static void do_str(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_STORE;

    if (size == 8) {
        READ_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        READ_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    if (size == 1) __asm__ volatile("strb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 2) __asm__ volatile("strh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 4) __asm__ volatile("str %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 8) __asm__ volatile("str %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
    else panic("%s Invalid size %d\n", __func__, size);

    stats.stat_decoder.sd_str++;
}
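
/*
 * The do_ld* helpers below re-execute the faulting load against the supplied
 * virtual address and write the result back into the saved thread state with
 * WRITE_GPR_X/WRITE_GPR_W, so the interrupted thread observes the loaded value
 * when it resumes.  The value is also recorded in res->rr_addrdata for the
 * trace log.
 */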
INLINE
static void do_ldr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_LOAD;

    if (size == 1) __asm__ volatile("ldrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 2) __asm__ volatile("ldrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 4) __asm__ volatile("ldr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 8) __asm__ volatile("ldr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else panic("%s Invalid size %d\n", __func__, size);

    if (size == 8) {
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    stats.stat_decoder.sd_ldr++;
}
INLINE
static void do_stp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt1, wt2;
    uint64_t xt1, xt2;

    if (size == 4) {
        READ_GPR_W(ss, Rt, wt1);
        READ_GPR_W(ss, Rt2, wt2);
        __asm__ volatile("stp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
    } else if (size == 8) {
        READ_GPR_X(ss, Rt, xt1);
        READ_GPR_X(ss, Rt2, xt2);
        __asm__ volatile("stp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
    } else panic("%s Invalid size %d\n", __func__, size);

    stats.stat_decoder.sd_stp++;
}
INLINE
static void do_ldp(uint8_t size, uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt1, wt2;
    uint64_t xt1, xt2;

    if (size == 4) {
        __asm__ volatile("ldp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt1);
        WRITE_GPR_W(ss, Rt2, wt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
    } else if (size == 8) {
        __asm__ volatile("ldp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt1);
        WRITE_GPR_X(ss, Rt2, xt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
    } else panic("%s Invalid size %d\n", __func__, size);

    stats.stat_decoder.sd_ldp++;
}
INLINE
static void do_ldpsw(uint8_t Rt, uint8_t Rt2, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint64_t xt1, xt2;

    __asm__ volatile("ldpsw %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
    WRITE_GPR_X(ss, Rt, xt1);
    WRITE_GPR_X(ss, Rt2, xt2);
    res->rr_rw = PGTRACE_RW_LOAD;
    res->rr_addrdata[1].ad_addr = va + sizeof(uint32_t);
    res->rr_addrdata[0].ad_data = xt1;
    res->rr_addrdata[1].ad_data = xt2;

    stats.stat_decoder.sd_ldpsw++;
}
INLINE
static void do_ldrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_LOAD;

    if (size == 1 && extsize == 4) __asm__ volatile("ldrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 1 && extsize == 8) __asm__ volatile("ldrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else if (size == 2 && extsize == 4) __asm__ volatile("ldrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 2 && extsize == 8) __asm__ volatile("ldrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else if (size == 4 && extsize == 8) __asm__ volatile("ldrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);

    if (extsize == 8) {
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    stats.stat_decoder.sd_ldrs++;
}
INLINE
static void do_ldtrs(uint8_t size, uint8_t extsize, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_LOAD;

    if (size == 1 && extsize == 4) __asm__ volatile("ldtrsb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 1 && extsize == 8) __asm__ volatile("ldtrsb %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else if (size == 2 && extsize == 4) __asm__ volatile("ldtrsh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 2 && extsize == 8) __asm__ volatile("ldtrsh %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else if (size == 4 && extsize == 8) __asm__ volatile("ldtrsw %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else panic("%s Invalid size %d extsize=%d\n", __func__, size, extsize);

    if (extsize == 8) {
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    stats.stat_decoder.sd_ldtrs++;
}
INLINE
static void do_ldtr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_LOAD;

    if (size == 1) __asm__ volatile("ldtrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 2) __asm__ volatile("ldtrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 4) __asm__ volatile("ldtr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
    else if (size == 8) __asm__ volatile("ldtr %x[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
    else panic("%s Invalid size %d\n", __func__, size);

    if (size == 8) {
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    stats.stat_decoder.sd_ldtr++;
}
INLINE
static void do_sttr(uint8_t size, uint8_t Rt, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt;
    uint64_t xt;

    res->rr_rw = PGTRACE_RW_STORE;

    if (size == 8) {
        READ_GPR_X(ss, Rt, xt);
        res->rr_addrdata[0].ad_data = xt;
    } else {
        READ_GPR_W(ss, Rt, wt);
        res->rr_addrdata[0].ad_data = wt;
    }

    if (size == 1) __asm__ volatile("sttrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 2) __asm__ volatile("sttrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 4) __asm__ volatile("sttr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
    else if (size == 8) __asm__ volatile("sttr %x[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
    else panic("%s Invalid size %d\n", __func__, size);

    stats.stat_decoder.sd_sttr++;
}
INLINE
static void do_prfm(uint8_t Rt, vm_offset_t va, pgtrace_run_result_t *res)
{
    if (Rt == 0) __asm__ volatile("prfm pldl1keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 1) __asm__ volatile("prfm pldl1strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 2) __asm__ volatile("prfm pldl2keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 3) __asm__ volatile("prfm pldl2strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 4) __asm__ volatile("prfm pldl3keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 5) __asm__ volatile("prfm pldl3strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 6) __asm__ volatile("prfm #6, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 7) __asm__ volatile("prfm #7, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 8) __asm__ volatile("prfm #8, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 9) __asm__ volatile("prfm #9, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 10) __asm__ volatile("prfm #10, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 11) __asm__ volatile("prfm #11, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 12) __asm__ volatile("prfm #12, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 13) __asm__ volatile("prfm #13, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 14) __asm__ volatile("prfm #14, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 15) __asm__ volatile("prfm #15, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 16) __asm__ volatile("prfm pstl1keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 17) __asm__ volatile("prfm pstl1strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 18) __asm__ volatile("prfm pstl2keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 19) __asm__ volatile("prfm pstl2strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 20) __asm__ volatile("prfm pstl3keep, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 21) __asm__ volatile("prfm pstl3strm, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 22) __asm__ volatile("prfm #22, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 23) __asm__ volatile("prfm #23, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 24) __asm__ volatile("prfm #24, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 25) __asm__ volatile("prfm #25, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 26) __asm__ volatile("prfm #26, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 27) __asm__ volatile("prfm #27, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 28) __asm__ volatile("prfm #28, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 29) __asm__ volatile("prfm #29, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 30) __asm__ volatile("prfm #30, [%[va]]\n" : : [va] "r"(va));
    else if (Rt == 31) __asm__ volatile("prfm #31, [%[va]]\n" : : [va] "r"(va));
    else panic("%s Invalid Rt %d\n", __func__, Rt);

    res->rr_rw = PGTRACE_RW_PREFETCH;

    stats.stat_decoder.sd_prfm++;
}
#define CANNOTDECODE(msg, inst) do {\
    panic("%s: " msg " inst=%x not supported yet\n", __func__, inst);\
} while (0)
static int run_simd(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
#pragma unused(pa,va,ss,res)
    CANNOTDECODE("simd", inst);
    return 0;
}
static int run_c335(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t opc = BITS(inst, 31, 30),
             v = BITS(inst, 26, 26),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (opc << 1) | v;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_ldr(4, Rt, va, ss, res);
    else if ((fields == 1) ||
             (fields == 3) ||
             (fields == 5)) CANNOTDECODE("simd", inst);
    else if (fields == 2) do_ldr(8, Rt, va, ss, res);
    else if (fields == 4) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 6) do_prfm(Rt, va, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c335++;

    return 0;
}
static int run_c336(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t ws, wt, wt1, wt2;
    uint64_t xt, xt1, xt2;
    uint32_t size = BITS(inst, 31, 30),
             o2 = BITS(inst, 23, 23),
             L = BITS(inst, 22, 22),
             o1 = BITS(inst, 21, 21),
             Rs = BITS(inst, 20, 16),
             o0 = BITS(inst, 15, 15),
             Rt2 = BITS(inst, 14, 10),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;

    kprintf("%s Load/store exclusive on device memory???\n", __func__);

    res->rr_addrdata[0].ad_addr = pa;

    switch (fields) {
    case 0x00:  // stxrb
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x01:  // stlxrb
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlxrb %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x04:  // ldxrb
        __asm__ volatile("ldxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x05:  // ldaxrb
        __asm__ volatile("ldaxrb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x09:  // stlrb
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlrb %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x0d:  // ldarb
        __asm__ volatile("ldarb %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x10:  // stxrh
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x11:  // stlxrh
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlxrh %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x14:  // ldxrh
        __asm__ volatile("ldxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x15:  // ldaxrh
        __asm__ volatile("ldaxrh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x19:  // stlrh
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlrh %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x1d:  // ldarh
        __asm__ volatile("ldarh %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x20:  // stxr (32-bit)
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x21:  // stlxr (32-bit)
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlxr %w[ws], %w[wt], [%[va]]\n" : [ws] "=r"(ws) : [wt] "r"(wt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x22:  // stxp (32-bit)
        READ_GPR_W(ss, Rt, wt1);
        READ_GPR_W(ss, Rt2, wt2);
        __asm__ volatile("stxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
        break;
    case 0x23:  // stlxp (32-bit)
        READ_GPR_W(ss, Rt, wt1);
        READ_GPR_W(ss, Rt2, wt2);
        __asm__ volatile("stlxp %w[ws], %w[wt1], %w[wt2], [%[va]]\n" : [ws] "=r"(ws) : [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
        break;
    case 0x24:  // ldxr (32-bit)
        __asm__ volatile("ldxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x25:  // ldaxr (32-bit)
        __asm__ volatile("ldaxr %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x26:  // ldxp (32-bit)
        __asm__ volatile("ldxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt1);
        WRITE_GPR_W(ss, Rt2, wt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
        break;
    case 0x27:  // ldaxp (32-bit)
        __asm__ volatile("ldaxp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt1);
        WRITE_GPR_W(ss, Rt2, wt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
        break;
    case 0x29:  // stlr (32-bit)
        READ_GPR_W(ss, Rt, wt);
        __asm__ volatile("stlr %w[wt], [%[va]]\n" :: [wt] "r"(wt), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x2d:  // ldar (32-bit)
        __asm__ volatile("ldar %w[wt], [%[va]]\n" : [wt] "=r"(wt) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = wt;
        break;
    case 0x30:  // stxr (64-bit)
        READ_GPR_X(ss, Rt, xt);
        __asm__ volatile("stxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = xt;
        break;
    case 0x31:  // stlxr (64-bit)
        READ_GPR_X(ss, Rt, xt);
        __asm__ volatile("stlxr %w[ws], %[xt], [%[va]]\n" : [ws] "=r"(ws) : [xt] "r"(xt), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = xt;
        break;
    case 0x32:  // stxp (64-bit)
        READ_GPR_X(ss, Rt, xt1);
        READ_GPR_X(ss, Rt2, xt2);
        __asm__ volatile("stxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
        break;
    case 0x33:  // stlxp (64-bit)
        READ_GPR_X(ss, Rt, xt1);
        READ_GPR_X(ss, Rt2, xt2);
        __asm__ volatile("stlxp %w[ws], %[xt1], %[xt2], [%[va]]\n" : [ws] "=r"(ws) : [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
        WRITE_GPR_W(ss, Rs, ws);
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
        break;
    case 0x34:  // ldxr (64-bit)
        __asm__ volatile("ldxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = xt;
        break;
    case 0x35:  // ldaxr (64-bit)
        __asm__ volatile("ldaxr %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = xt;
        break;
    case 0x36:  // ldxp (64-bit)
        __asm__ volatile("ldxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt1);
        WRITE_GPR_X(ss, Rt2, xt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
        break;
    case 0x37:  // ldaxp (64-bit)
        __asm__ volatile("ldaxp %[xt1], %[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt1);
        WRITE_GPR_X(ss, Rt2, xt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = va;
        res->rr_addrdata[1].ad_addr = va + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
        break;
    case 0x39:  // stlr (64-bit)
        READ_GPR_X(ss, Rt, xt);
        __asm__ volatile("stlr %[xt], [%[va]]\n" :: [xt] "r"(xt), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_data = xt;
        break;
    case 0x3d:  // ldar (64-bit)
        __asm__ volatile("ldar %[xt], [%[va]]\n" : [xt] "=r"(xt) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_data = xt;
        break;
    default:
        CANNOTDECODE("unknown", inst);
    }

    stats.stat_decoder.sd_c336++;

    return 0;
}
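
/*
 * Each run_* routine packs the class-specific opcode bits into a small
 * `fields' value and dispatches on it.  For the pair classes below,
 * fields = (opc << 2) | (V << 1) | L, so a 64-bit STNP (opc=10, V=0, L=0)
 * yields fields == 8 and a 64-bit LDNP (L=1) yields fields == 9.
 */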
static int run_c337(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t wt1, wt2;
    uint64_t xt1, xt2;
    uint32_t opc = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             L = BITS(inst, 22, 22),
             Rt = BITS(inst, 4, 0),
             Rt2 = BITS(inst, 14, 10);
    uint8_t fields = (opc << 2) | (V << 1) | L;

    if (fields == 0) {              // stnp (32-bit)
        READ_GPR_W(ss, Rt, wt1);
        READ_GPR_W(ss, Rt2, wt2);
        __asm__ volatile("stnp %w[wt1], %w[wt2], [%[va]]\n" :: [wt1] "r"(wt1), [wt2] "r"(wt2), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = pa;
        res->rr_addrdata[1].ad_addr = pa + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
    } else if (fields == 1) {       // ldnp (32-bit)
        __asm__ volatile("ldnp %w[wt1], %w[wt2], [%[va]]\n" : [wt1] "=r"(wt1), [wt2] "=r"(wt2) : [va] "r"(va));
        WRITE_GPR_W(ss, Rt, wt1);
        WRITE_GPR_W(ss, Rt2, wt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = pa;
        res->rr_addrdata[1].ad_addr = pa + sizeof(wt1);
        res->rr_addrdata[0].ad_data = wt1;
        res->rr_addrdata[1].ad_data = wt2;
    } else if (V == 1) {            // SIMD/FP pair
        CANNOTDECODE("simd", inst);
    } else if (fields == 8) {       // stnp (64-bit)
        READ_GPR_X(ss, Rt, xt1);
        READ_GPR_X(ss, Rt2, xt2);
        __asm__ volatile("stnp %x[xt1], %x[xt2], [%[va]]\n" :: [xt1] "r"(xt1), [xt2] "r"(xt2), [va] "r"(va));
        res->rr_rw = PGTRACE_RW_STORE;
        res->rr_addrdata[0].ad_addr = pa;
        res->rr_addrdata[1].ad_addr = pa + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
    } else if (fields == 9) {       // ldnp (64-bit)
        __asm__ volatile("ldnp %x[xt1], %x[xt2], [%[va]]\n" : [xt1] "=r"(xt1), [xt2] "=r"(xt2) : [va] "r"(va));
        WRITE_GPR_X(ss, Rt, xt1);
        WRITE_GPR_X(ss, Rt2, xt2);
        res->rr_rw = PGTRACE_RW_LOAD;
        res->rr_addrdata[0].ad_addr = pa;
        res->rr_addrdata[1].ad_addr = pa + sizeof(xt1);
        res->rr_addrdata[0].ad_data = xt1;
        res->rr_addrdata[1].ad_data = xt2;
    } else {
        CANNOTDECODE("simd", inst);
    }

    stats.stat_decoder.sd_c337++;

    return 0;
}
static int run_c338(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_str(1, Rt, va, ss, res);
    else if (fields == 1) do_ldr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
    else if ((fields == 4) ||
             (fields == 5) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 0xc) ||
             (fields == 0xd) ||
             (fields == 0x14) ||
             (fields == 0x15) ||
             (fields == 0x1c) ||
             (fields == 0x1d)) CANNOTDECODE("simd", inst);
    else if (fields == 8) do_str(2, Rt, va, ss, res);
    else if (fields == 9) do_ldr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_str(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_str(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c338++;

    return 0;
}
static int run_c339(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_str(1, Rt, va, ss, res);
    else if (fields == 1) do_ldr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
    else if ((fields == 4) ||
             (fields == 5) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 0xc) ||
             (fields == 0xd) ||
             (fields == 0x14) ||
             (fields == 0x15) ||
             (fields == 0x1c) ||
             (fields == 0x1d)) CANNOTDECODE("simd", inst);
    else if (fields == 8) do_str(2, Rt, va, ss, res);
    else if (fields == 9) do_ldr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_str(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_str(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c339++;

    return 0;
}
static int run_c3310(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_str(1, Rt, va, ss, res);
    else if (fields == 1) do_ldr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
    else if ((fields == 4) ||
             (fields == 5) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 0xc) ||
             (fields == 0xd) ||
             (fields == 0x14) ||
             (fields == 0x15) ||
             (fields == 0x1c) ||
             (fields == 0x1d)) CANNOTDECODE("simd", inst);
    else if (fields == 8) do_str(2, Rt, va, ss, res);
    else if (fields == 9) do_ldr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_str(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_str(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
    else if (fields == 0x1a) do_prfm(Rt, va, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3310++;

    return 0;
}
static int run_c3311(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_sttr(1, Rt, va, ss, res);
    else if (fields == 1) do_ldtr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldtrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldtrs(1, 4, Rt, va, ss, res);
    else if (fields == 8) do_sttr(2, Rt, va, ss, res);
    else if (fields == 9) do_ldtr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldtrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldtrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_sttr(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldtr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldtrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_sttr(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldtr(8, Rt, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3311++;

    return 0;
}
static int run_c3312(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_str(1, Rt, va, ss, res);
    else if (fields == 1) do_ldr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
    else if ((fields == 4) ||
             (fields == 5) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 0xc) ||
             (fields == 0xd) ||
             (fields == 0x14) ||
             (fields == 0x15) ||
             (fields == 0x1c) ||
             (fields == 0x1d)) CANNOTDECODE("simd", inst);
    else if (fields == 8) do_str(2, Rt, va, ss, res);
    else if (fields == 9) do_ldr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_str(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_str(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
    else if (fields == 0x1a) do_prfm(Rt, va, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3312++;

    return 0;
}
static int run_c3313(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t size = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             opc = BITS(inst, 23, 22),
             Rt = BITS(inst, 4, 0);
    uint8_t fields = (size << 3) | (V << 2) | opc;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_str(1, Rt, va, ss, res);
    else if (fields == 1) do_ldr(1, Rt, va, ss, res);
    else if (fields == 2) do_ldrs(1, 8, Rt, va, ss, res);
    else if (fields == 3) do_ldrs(1, 4, Rt, va, ss, res);
    else if ((fields == 4) ||
             (fields == 5) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 0xc) ||
             (fields == 0xd) ||
             (fields == 0x14) ||
             (fields == 0x15) ||
             (fields == 0x1c) ||
             (fields == 0x1d)) CANNOTDECODE("simd", inst);
    else if (fields == 8) do_str(2, Rt, va, ss, res);
    else if (fields == 9) do_ldr(2, Rt, va, ss, res);
    else if (fields == 10) do_ldrs(2, 8, Rt, va, ss, res);
    else if (fields == 11) do_ldrs(2, 4, Rt, va, ss, res);
    else if (fields == 0x10) do_str(4, Rt, va, ss, res);
    else if (fields == 0x11) do_ldr(4, Rt, va, ss, res);
    else if (fields == 0x12) do_ldrs(4, 8, Rt, va, ss, res);
    else if (fields == 0x18) do_str(8, Rt, va, ss, res);
    else if (fields == 0x19) do_ldr(8, Rt, va, ss, res);
    else if (fields == 0x1a) do_prfm(Rt, va, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3313++;

    return 0;
}
static int run_c3314(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t opc = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             L = BITS(inst, 22, 22),
             Rt = BITS(inst, 4, 0),
             Rt2 = BITS(inst, 14, 10);
    uint8_t fields = (opc << 2) | (V << 1) | L;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
    else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
    else if ((fields == 2) ||
             (fields == 3) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 10) ||
             (fields == 11)) CANNOTDECODE("simd", inst);
    else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
    else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
    else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3314++;

    return 0;
}
static int run_c3315(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t opc = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             L = BITS(inst, 22, 22),
             Rt = BITS(inst, 4, 0),
             Rt2 = BITS(inst, 14, 10);
    uint8_t fields = (opc << 2) | (V << 1) | L;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
    else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
    else if ((fields == 2) ||
             (fields == 3) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 10) ||
             (fields == 11)) CANNOTDECODE("simd", inst);
    else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
    else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
    else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3315++;

    return 0;
}
static int run_c3316(uint32_t inst, vm_offset_t pa, vm_offset_t va, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint32_t opc = BITS(inst, 31, 30),
             V = BITS(inst, 26, 26),
             L = BITS(inst, 22, 22),
             Rt = BITS(inst, 4, 0),
             Rt2 = BITS(inst, 14, 10);
    uint8_t fields = (opc << 2) | (V << 1) | L;

    res->rr_addrdata[0].ad_addr = pa;

    if (fields == 0) do_stp(4, Rt, Rt2, va, ss, res);
    else if (fields == 1) do_ldp(4, Rt, Rt2, va, ss, res);
    else if ((fields == 2) ||
             (fields == 3) ||
             (fields == 6) ||
             (fields == 7) ||
             (fields == 10) ||
             (fields == 11)) CANNOTDECODE("simd", inst);
    else if (fields == 5) do_ldpsw(Rt, Rt2, va, ss, res);
    else if (fields == 8) do_stp(8, Rt, Rt2, va, ss, res);
    else if (fields == 9) do_ldp(8, Rt, Rt2, va, ss, res);
    else CANNOTDECODE("unknown", inst);

    stats.stat_decoder.sd_c3316++;

    return 0;
}
static bool get_info_simd(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
#pragma unused(inst, ss, info)
    CANNOTDECODE("simd", inst);
    return false;
}
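
/*
 * The get_info_* routines mirror the run_* decoders but have no side effects:
 * they only compute the access target address and width from the saved
 * register state.  pgtrace_decode_and_run() compares the reported address with
 * the fault address to decide which page mapping to use before emulating.
 */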
// load register (literal)
static bool get_info_c335(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t opc = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t imm19 = BITS(inst, 23, 5);
    uint32_t fields = (opc << 1) | V;
    uint8_t scale;

    if (__builtin_expect(fields > 6, false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    assert(fields <= 6);

    switch (fields) {
    case 0: case 1: case 4:     // LDR Wt / LDR St / LDRSW (literal)
        scale = 2;
        break;
    case 2: case 3: case 6:     // LDR Xt / LDR Dt / PRFM (literal)
        scale = 3;
        break;
    case 5:                     // LDR Qt (literal)
        scale = 4;
        break;
    default:
        CANNOTDECODE("invalid", inst);
        return false;
    }

    info->bytes = 1 << scale;
    info->addr = ss->ss_64.pc + (SIGN_EXTEND_64(imm19, 19) << 2);

    return true;
}
// load/store exclusive
static bool get_info_c336(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t o2 = BITS(inst, 23, 23);
    uint32_t L = BITS(inst, 22, 22);
    uint32_t o1 = BITS(inst, 21, 21);
    uint32_t o0 = BITS(inst, 15, 15);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 4) | (o2 << 3) | (L << 2) | (o1 << 1) | o0;

    if (__builtin_expect((2 <= fields && fields <= 3) ||
                         (6 <= fields && fields <= 8) ||
                         (10 <= fields && fields <= 12) ||
                         (14 <= fields && fields <= 15) ||
                         (18 <= fields && fields <= 19) ||
                         (22 <= fields && fields <= 24) ||
                         (26 <= fields && fields <= 28) ||
                         (30 <= fields && fields <= 31) ||
                         (fields == 40) ||
                         (42 <= fields && fields <= 44) ||
                         (46 <= fields && fields <= 47) ||
                         (fields == 56) ||
                         (58 <= fields && fields <= 60) ||
                         (62 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    info->bytes = (1 << size) << o1;
    info->addr = ss->ss_64.x[Rn];

    return true;
}
// load/store no-allocate pair (offset)
static bool get_info_c337(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t opc = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t L = BITS(inst, 22, 22);
    uint32_t imm7 = BITS(inst, 21, 15);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (opc << 2) | (V << 1) | L;
    uint8_t scale;

    if (__builtin_expect((4 <= fields && fields <= 5) ||
                         (12 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = 2 + opc;                // SIMD/FP pair
    } else {
        scale = BITS(opc, 1, 1) + 2;    // 32-bit or 64-bit GPR pair
    }

    // double since it's pair
    info->bytes = 2 * (1 << scale);
    info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);

    return true;
}
// load/store register (immediate post-indexed)
static bool get_info_c338(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;
    uint8_t scale;

    if (__builtin_expect((14 <= fields && fields <= 15) ||
                         (fields == 19) ||
                         (22 <= fields && fields <= 23) ||
                         (26 <= fields && fields <= 27) ||
                         (30 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = BITS(opc, 1, 1) << 2 | size;
    } else {
        scale = size;
    }

    info->bytes = 1 << scale;
    // post-indexed: the access uses the base register before writeback
    info->addr = ss->ss_64.x[Rn];

    return true;
}
// load/store register (immediate pre-indexed)
static bool get_info_c339(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t imm9 = BITS(inst, 20, 12);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;
    uint8_t scale;

    if (__builtin_expect((14 <= fields && fields <= 15) ||
                         (fields == 19) ||
                         (22 <= fields && fields <= 23) ||
                         (26 <= fields && fields <= 27) ||
                         (30 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = BITS(opc, 1, 1) << 2 | size;
    } else {
        scale = size;
    }

    info->bytes = 1 << scale;
    info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);

    return true;
}
// load/store register (register offset)
static bool get_info_c3310(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t Rm = BITS(inst, 20, 16);
    uint32_t option = BITS(inst, 15, 13);
    uint32_t S = BITS(inst, 12, 12);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;
    uint8_t scale;

    if (__builtin_expect((14 <= fields && fields <= 15) ||
                         (fields == 19) ||
                         (22 <= fields && fields <= 23) ||
                         (fields == 27) ||
                         (30 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = BITS(opc, 1, 1) | size;
    } else {
        scale = size;
    }

    info->bytes = 1 << scale;

    uint64_t m = ss->ss_64.x[Rm];
    uint8_t shift = (S == 1 ? scale : 0);

    if (option == 2 || option == 3) {
        // UXTW / LSL
        info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(m, 8 << option) << shift);
    } else if (option == 6 || option == 7) {
        // SXTW / SXTX
        info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(m, 8 << BITS(option, 1, 0)) << shift);
    } else {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    return true;
}
// load/store register (unprivileged)
static bool get_info_c3311(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t imm9 = BITS(inst, 20, 12);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;

    if (__builtin_expect((4 <= fields && fields <= 7) ||
                         (12 <= fields && fields <= 15) ||
                         (19 <= fields && fields <= 23) ||
                         (26 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    info->bytes = 1 << size;
    info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);

    return true;
}
// load/store register (unscaled immediate)
static bool get_info_c3312(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t imm9 = BITS(inst, 20, 12);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;
    uint8_t scale;

    if (__builtin_expect((14 <= fields && fields <= 15) ||
                         (fields == 19) ||
                         (22 <= fields && fields <= 23) ||
                         (fields == 27) ||
                         (30 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = BITS(opc, 1, 1) << 2 | size;
    } else {
        scale = size;
    }

    info->bytes = 1 << scale;
    info->addr = ss->ss_64.x[Rn] + SIGN_EXTEND_64(imm9, 9);

    return true;
}
// load/store register (unsigned immediate)
static bool get_info_c3313(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t size = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t opc = BITS(inst, 23, 22);
    uint32_t imm12 = BITS(inst, 21, 10);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (size << 3) | (V << 2) | opc;
    uint8_t scale;

    if (__builtin_expect((14 <= fields && fields <= 15) ||
                         (fields == 19) ||
                         (22 <= fields && fields <= 23) ||
                         (fields == 27) ||
                         (30 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = BITS(opc, 1, 1) << 2 | size;
    } else {
        scale = size;
    }

    info->bytes = 1 << scale;
    info->addr = ss->ss_64.x[Rn] + (ZERO_EXTEND_64(imm12, 12) << scale);

    return true;
}
// load/store register pair (offset)
static bool get_info_c3314(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t opc = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t L = BITS(inst, 22, 22);
    uint32_t imm7 = BITS(inst, 21, 15);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (opc << 2) | (V << 1) | L;
    uint8_t scale = 2 + (opc >> 1);

    if (__builtin_expect((4 == fields) ||
                         (12 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = 2 + opc;                // SIMD/FP pair
    } else {
        scale = 2 + BITS(opc, 1, 1);
    }

    info->bytes = 2 * (1 << scale);
    info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);

    return true;
}
// load/store register pair (post-indexed)
static bool get_info_c3315(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t opc = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t L = BITS(inst, 22, 22);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (opc << 2) | (V << 1) | L;
    uint8_t scale = 2 + (opc >> 1);

    if (__builtin_expect((4 == fields) ||
                         (12 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = 2 + opc;                // SIMD/FP pair
    } else {
        scale = 2 + BITS(opc, 1, 1);
    }

    info->bytes = 2 * (1 << scale);
    // post-indexed: the access uses the base register before writeback
    info->addr = ss->ss_64.x[Rn];

    return true;
}
// load/store register pair (pre-indexed)
static bool get_info_c3316(uint32_t inst, arm_saved_state_t *ss, instruction_info_t *info)
{
    uint32_t opc = BITS(inst, 31, 30);
    uint32_t V = BITS(inst, 26, 26);
    uint32_t L = BITS(inst, 22, 22);
    uint32_t imm7 = BITS(inst, 21, 15);
    uint32_t Rn = BITS(inst, 9, 5);
    uint32_t fields = (opc << 2) | (V << 1) | L;
    uint8_t scale = 2 + (opc >> 1);

    if (__builtin_expect((4 == fields) ||
                         (12 <= fields), false)) {
        CANNOTDECODE("invalid", inst);
        return false;
    }

    if (V == 1) {
        scale = 2 + opc;                // SIMD/FP pair
    } else {
        scale = 2 + BITS(opc, 1, 1);
    }

    info->bytes = 2 * (1 << scale);
    info->addr = ss->ss_64.x[Rn] + (SIGN_EXTEND_64(imm7, 7) << scale);

    return true;
}
//-------------------------------------------------------------------
// Functions
//-------------------------------------------------------------------
int pgtrace_decode_and_run(uint32_t inst, vm_offset_t fva, vm_map_offset_t *cva_page, arm_saved_state_t *ss, pgtrace_run_result_t *res)
{
    uint8_t len = sizeof(typetbl) / sizeof(type_entry_t);
    run_t run = NULL;
    get_info_t get_info = NULL;
    vm_offset_t pa, cva;
    vm_offset_t cva_front_page = cva_page[0];
    vm_offset_t cva_cur_page = cva_page[1];
    instruction_info_t info;

    for (uint8_t i = 0; i < len; i++) {
        if ((typetbl[i].mask & inst) == typetbl[i].value) {
            run = typetbl[i].run;
            get_info = typetbl[i].get_info;
            break;
        }
    }

    assert(run != NULL && get_info != NULL);

    get_info(inst, ss, &info);

    if (info.addr == fva) {
        cva = cva_cur_page + (fva & ARM_PGMASK);
    } else {
        // which means a front page is not a tracing page
        cva = cva_front_page + (fva & ARM_PGMASK);
    }

    pa = mmu_kvtop(cva);
    if (!pa) {
        panic("%s: invalid address cva=%lx fva=%lx info.addr=%lx inst=%x", __func__, cva, fva, info.addr, inst);
    }

    absolutetime_to_nanoseconds(mach_absolute_time(), &res->rr_time);

    run(inst, pa, cva, ss, res);

    return 0;
}
void pgtrace_decoder_get_stats(pgtrace_stats_t *s)
{
    memcpy((void *)&(s->stat_decoder), &(stats.stat_decoder), sizeof(stats.stat_decoder));
}