/*
 * Copyright (c) 2017 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm/cpuid.h>
#include <arm/cpuid_internal.h>
#include <machine/atomic.h>
#include <machine/machine_cpuid.h>
#include <arm/cpu_data_internal.h>

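/*
 * Cached feature state: cpuid_debug_info is filled in by machine_do_debugid()
 * and cpuid_mvfp_info by machine_do_mvfpid(); callers read the results back
 * through machine_arm_debug_info() and machine_arm_mvfp_info().
 */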
static arm_mvfp_info_t cpuid_mvfp_info;
static arm_debug_info_t cpuid_debug_info;

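/* Read the Main ID Register (MIDR / MIDR_EL1), which identifies the
 * implementer, part number, and revision of the CPU. */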
uint32_t
machine_read_midr(void)
{
#if __arm__
	uint32_t midr = __builtin_arm_mrc(15, 0, 0, 0, 0);
#else
	uint64_t midr;
	__asm__ volatile ("mrs %0, MIDR_EL1" : "=r" (midr));
#endif
	return (uint32_t)midr;
}

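/* Read the Cache Level ID Register (CLIDR / CLIDR_EL1), which reports the
 * types of caches implemented at each level of the cache hierarchy. */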
uint32_t
machine_read_clidr(void)
{
#if __arm__
	uint32_t clidr = __builtin_arm_mrc(15, 1, 0, 0, 1);
#else
	uint64_t clidr;
	__asm__ volatile ("mrs %0, CLIDR_EL1" : "=r" (clidr));
#endif
	return (uint32_t)clidr;
}

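/* Read the Cache Size ID Register (CCSIDR / CCSIDR_EL1), which describes the
 * geometry of the cache currently selected by CSSELR (see machine_write_csselr). */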
uint32_t
machine_read_ccsidr(void)
{
#if __arm__
	uint32_t ccsidr = __builtin_arm_mrc(15, 1, 0, 0, 0);
#else
	uint64_t ccsidr;
	__asm__ volatile ("mrs %0, CCSIDR_EL1" : "=r" (ccsidr));
#endif
	return (uint32_t)ccsidr;
}

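/* 32-bit ARM only: read ID_ISAR1 (ISA feature register 1) into an
 * arm_isa_feat1_reg and return it by value. */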
#if __arm__
arm_isa_feat1_reg
machine_read_isa_feat1(void)
{
	arm_isa_feat1_reg isa;
	isa.value = __builtin_arm_mrc(15, 0, 0, 2, 1);
	return isa;
}
#endif /* __arm__ */

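/*
 * Select a cache (level and instruction/data type) in the Cache Size Selection
 * Register (CSSELR / CSSELR_EL1). The usual architectural sequence, not spelled
 * out at any call site in this file, is: write CSSELR to pick a cache, barrier,
 * then read CCSIDR via machine_read_ccsidr() for that cache's geometry.
 */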
void
machine_write_csselr(csselr_cache_level level, csselr_cache_type type)
{
#if __arm__
	uint32_t csselr = (level | type);
	__builtin_arm_mcr(15, 2, csselr, 0, 0, 0);
#else
	uint64_t csselr = (level | type);
	__asm__ volatile ("msr CSSELR_EL1, %0" : : "r" (csselr));
#endif
	__builtin_arm_isb(ISB_SY);
}

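/*
 * Populate cpuid_debug_info from the debug ID registers: ID_DFR0 and DBGDIDR on
 * 32-bit ARM, ID_AA64DFR0_EL1 on 64-bit. Records whether a debug interface is
 * present and how many breakpoint/watchpoint register pairs it provides.
 */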
void
machine_do_debugid(void)
{
#if __arm__
	arm_cpuid_id_dfr0 id_dfr0;
	arm_debug_dbgdidr dbgdidr;

	/* read CPUID ID_DFR0 */
	id_dfr0.value = __builtin_arm_mrc(15, 0, 0, 1, 2);
	/* read DBGDIDR */
	dbgdidr.value = __builtin_arm_mrc(14, 0, 0, 0, 0);

	cpuid_debug_info.coprocessor_core_debug = id_dfr0.debug_feature.coprocessor_core_debug != 0;
	cpuid_debug_info.memory_mapped_core_debug = (id_dfr0.debug_feature.memory_mapped_core_debug != 0)
	    && (getCpuDatap()->cpu_debug_interface_map != 0);

	if (cpuid_debug_info.coprocessor_core_debug || cpuid_debug_info.memory_mapped_core_debug) {
		cpuid_debug_info.num_watchpoint_pairs = dbgdidr.debug_id.wrps + 1;
		cpuid_debug_info.num_breakpoint_pairs = dbgdidr.debug_id.brps + 1;
	}
#else
	arm_cpuid_id_aa64dfr0_el1 id_dfr0;

	/* read ID_AA64DFR0_EL1 */
	__asm__ volatile ("mrs %0, ID_AA64DFR0_EL1" : "=r" (id_dfr0.value));

	if (id_dfr0.debug_feature.debug_arch_version) {
		cpuid_debug_info.num_watchpoint_pairs = id_dfr0.debug_feature.wrps + 1;
		cpuid_debug_info.num_breakpoint_pairs = id_dfr0.debug_feature.brps + 1;
	}
#endif
}

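/* Return the cached debug feature information gathered by machine_do_debugid(). */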
arm_debug_info_t *
machine_arm_debug_info(void)
{
	return &cpuid_debug_info;
}

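/*
 * Populate cpuid_mvfp_info. On 32-bit ARM the MVFR0/MVFR1 media and VFP feature
 * registers are consulted; on 64-bit ARM these features are assumed present and
 * both flags are simply set.
 */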
void
machine_do_mvfpid(void)
{
#if __arm__
	arm_mvfr0_info_t arm_mvfr0_info;
	arm_mvfr1_info_t arm_mvfr1_info;

	__asm__ volatile ("vmrs %0, mvfr0" : "=r" (arm_mvfr0_info.value));
	__asm__ volatile ("vmrs %0, mvfr1" : "=r" (arm_mvfr1_info.value));

	cpuid_mvfp_info.neon = arm_mvfr1_info.bits.SP;
	cpuid_mvfp_info.neon_hpfp = arm_mvfr1_info.bits.HPFP;
#else
	cpuid_mvfp_info.neon = 1;
	cpuid_mvfp_info.neon_hpfp = 1;
#endif
}

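/* Return the cached VFP/NEON feature information gathered by machine_do_mvfpid(). */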
arm_mvfp_info_t *
machine_arm_mvfp_info(void)
{
	return &cpuid_mvfp_info;
}