/*
 * Copyright (c) 2000-2010 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 */
/* CMU_ENDHIST */
/*
 * Mach Operating System
 * Copyright (c) 1991,1990 Carnegie Mellon University
 * All Rights Reserved.
 *
 * Permission to use, copy, modify and distribute this software and its
 * documentation is hereby granted, provided that both the copyright
 * notice and this permission notice appear in all copies of the
 * software, derivative works or modified versions, and any portions
 * thereof, and that both notices appear in supporting documentation.
 *
 * CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
 * CONDITION.  CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
 * ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
 *
 * Carnegie Mellon requests users of this software to return to
 *
 *  Software Distribution Coordinator  or  Software.Distribution@CS.CMU.EDU
 *  School of Computer Science
 *  Carnegie Mellon University
 *  Pittsburgh PA 15213-3890
 *
 * any improvements or extensions that they make and grant Carnegie Mellon
 * the rights to redistribute these changes.
 */

/*
 */

/*
 * Processor registers for i386 and i486.
 */
#ifndef _I386_PROC_REG_H_
#define _I386_PROC_REG_H_

/*
 * Model Specific Registers
 */
#define MSR_P5_TSC              0x10    /* Time Stamp Register */
#define MSR_P5_CESR             0x11    /* Control and Event Select Register */
#define MSR_P5_CTR0             0x12    /* Counter #0 */
#define MSR_P5_CTR1             0x13    /* Counter #1 */

#define MSR_P5_CESR_PC          0x0200  /* Pin Control */
#define MSR_P5_CESR_CC          0x01C0  /* Counter Control mask */
#define MSR_P5_CESR_ES          0x003F  /* Event Control mask */

#define MSR_P5_CESR_SHIFT       16      /* Shift to get Counter 1 */
#define MSR_P5_CESR_MASK        (MSR_P5_CESR_PC|\
                                 MSR_P5_CESR_CC|\
                                 MSR_P5_CESR_ES) /* Mask Counter */

#define MSR_P5_CESR_CC_CLOCK    0x0100  /* Clock Counting (otherwise Event) */
#define MSR_P5_CESR_CC_DISABLE  0x0000  /* Disable counter */
#define MSR_P5_CESR_CC_CPL012   0x0040  /* Count if the CPL == 0, 1, 2 */
#define MSR_P5_CESR_CC_CPL3     0x0080  /* Count if the CPL == 3 */
#define MSR_P5_CESR_CC_CPL      0x00C0  /* Count regardless of the CPL */

#define MSR_P5_CESR_ES_DATA_READ        0x000000        /* Data Read */
#define MSR_P5_CESR_ES_DATA_WRITE       0x000001        /* Data Write */
#define MSR_P5_CESR_ES_DATA_RW          0x101000        /* Data Read or Write */
#define MSR_P5_CESR_ES_DATA_TLB_MISS    0x000010        /* Data TLB Miss */
#define MSR_P5_CESR_ES_DATA_READ_MISS   0x000011        /* Data Read Miss */
#define MSR_P5_CESR_ES_DATA_WRITE_MISS  0x000100        /* Data Write Miss */
#define MSR_P5_CESR_ES_DATA_RW_MISS     0x101001        /* Data Read or Write Miss */
#define MSR_P5_CESR_ES_HIT_EM           0x000101        /* Write (hit) to M|E state */
#define MSR_P5_CESR_ES_DATA_CACHE_WB    0x000110        /* Cache lines written back */
#define MSR_P5_CESR_ES_EXTERNAL_SNOOP   0x000111        /* External Snoop */
#define MSR_P5_CESR_ES_CACHE_SNOOP_HIT  0x001000        /* Data cache snoop hits */
#define MSR_P5_CESR_ES_MEM_ACCESS_PIPE  0x001001        /* Mem. access in both pipes */
#define MSR_P5_CESR_ES_BANK_CONFLICTS   0x001010        /* Bank conflicts */
#define MSR_P5_CESR_ES_MISALIGNED       0x001011        /* Misaligned Memory or I/O */
#define MSR_P5_CESR_ES_CODE_READ        0x001100        /* Code Read */
#define MSR_P5_CESR_ES_CODE_TLB_MISS    0x001101        /* Code TLB miss */
#define MSR_P5_CESR_ES_CODE_CACHE_MISS  0x001110        /* Code Cache miss */
#define MSR_P5_CESR_ES_SEGMENT_LOADED   0x001111        /* Any segment reg. loaded */
#define MSR_P5_CESR_ES_BRANCHE          0x010010        /* Branches */
#define MSR_P5_CESR_ES_BTB_HIT          0x010011        /* BTB Hits */
#define MSR_P5_CESR_ES_BRANCHE_BTB      0x010100        /* Taken branch or BTB Hit */
#define MSR_P5_CESR_ES_PIPELINE_FLUSH   0x010101        /* Pipeline Flushes */
#define MSR_P5_CESR_ES_INSTRUCTION      0x010110        /* Instruction executed */
#define MSR_P5_CESR_ES_INSTRUCTION_V    0x010111        /* Inst. executed (v-pipe) */
#define MSR_P5_CESR_ES_BUS_CYCLE        0x011000        /* Clocks while bus cycle */
#define MSR_P5_CESR_ES_FULL_WRITE_BUF   0x011001        /* Clocks while full wrt buf. */
#define MSR_P5_CESR_ES_DATA_MEM_READ    0x011010        /* Pipeline waiting for read */
#define MSR_P5_CESR_ES_WRITE_EM         0x011011        /* Stall on write E|M state */
#define MSR_P5_CESR_ES_LOCKED_CYCLE     0x011100        /* Locked bus cycles */
#define MSR_P5_CESR_ES_IO_CYCLE         0x011101        /* I/O Read or Write cycles */
#define MSR_P5_CESR_ES_NON_CACHEABLE    0x011110        /* Non-cacheable Mem. read */
#define MSR_P5_CESR_ES_AGI              0x011111        /* Stall because of AGI */
#define MSR_P5_CESR_ES_FLOP             0x100010        /* Floating Point operations */
#define MSR_P5_CESR_ES_BREAK_DR0        0x100011        /* Breakpoint matches on DR0 */
#define MSR_P5_CESR_ES_BREAK_DR1        0x100100        /* Breakpoint matches on DR1 */
#define MSR_P5_CESR_ES_BREAK_DR2        0x100101        /* Breakpoint matches on DR2 */
#define MSR_P5_CESR_ES_BREAK_DR3        0x100110        /* Breakpoint matches on DR3 */
#define MSR_P5_CESR_ES_HARDWARE_IT      0x100111        /* Hardware interrupts */
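
/*
 * Counter 0 is programmed in the low 16 bits of MSR_P5_CESR (the ES, CC and
 * PC fields above); the same layout repeats for counter 1 after
 * MSR_P5_CESR_SHIFT.  Illustrative sketch only (not part of the original
 * header), using the wrmsr() macro defined later in this file:
 *
 *      uint32_t cesr0 = MSR_P5_CESR_ES_DATA_READ   | MSR_P5_CESR_CC_CPL;    // counter 0: data reads, any CPL
 *      uint32_t cesr1 = MSR_P5_CESR_ES_INSTRUCTION | MSR_P5_CESR_CC_CPL012; // counter 1: instructions, CPL 0-2
 *      wrmsr(MSR_P5_CESR, cesr0 | (cesr1 << MSR_P5_CESR_SHIFT), 0);
 */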

/*
 * CR0
 */
#define CR0_PG  0x80000000      /* Enable paging */
#define CR0_CD  0x40000000      /* i486: Cache disable */
#define CR0_NW  0x20000000      /* i486: No write-through */
#define CR0_AM  0x00040000      /* i486: Alignment check mask */
#define CR0_WP  0x00010000      /* i486: Write-protect kernel access */
#define CR0_NE  0x00000020      /* i486: Handle numeric exceptions */
#define CR0_ET  0x00000010      /* Extension type is 80387 */
                                /*   (not official) */
#define CR0_TS  0x00000008      /* Task switch */
#define CR0_EM  0x00000004      /* Emulate coprocessor */
#define CR0_MP  0x00000002      /* Monitor coprocessor */
#define CR0_PE  0x00000001      /* Enable protected mode */
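
/*
 * CR0 is read and written with the get_cr0()/set_cr0() inlines declared
 * below; set_ts() and clear_ts() toggle CR0_TS for the usual lazy-FPU
 * pattern.  Illustrative sketch only (not part of the original header):
 *
 *      set_ts();                          // next FP/SSE instruction traps (#NM)
 *      ...
 *      clear_ts();                        // in the handler, after restoring FPU state
 *
 *      set_cr0(get_cr0() & ~CR0_WP);      // temporarily allow kernel writes to read-only pages
 *      ...
 *      set_cr0(get_cr0() | CR0_WP);
 */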

/*
 * CR4
 */
#define CR4_SMEP        0x00100000      /* Supervisor-Mode Execute Protect */
#define CR4_OSXSAVE     0x00040000      /* OS supports XSAVE */
#define CR4_PCIDE       0x00020000      /* PCID Enable */
#define CR4_RDWRFSGS    0x00010000      /* RDWRFSGS Enable */
#define CR4_SMXE        0x00004000      /* Enable SMX operation */
#define CR4_VMXE        0x00002000      /* Enable VMX operation */
#define CR4_OSXMM       0x00000400      /* SSE/SSE2 exception support in OS */
#define CR4_OSFXS       0x00000200      /* SSE/SSE2 OS supports FXSave */
#define CR4_PCE         0x00000100      /* Performance-Monitor Count Enable */
#define CR4_PGE         0x00000080      /* Page Global Enable */
#define CR4_MCE         0x00000040      /* Machine Check Exceptions */
#define CR4_PAE         0x00000020      /* Physical Address Extensions */
#define CR4_PSE         0x00000010      /* Page Size Extensions */
#define CR4_DE          0x00000008      /* Debugging Extensions */
#define CR4_TSD         0x00000004      /* Time Stamp Disable */
#define CR4_PVI         0x00000002      /* Protected-mode Virtual Interrupts */
#define CR4_VME         0x00000001      /* Virtual-8086 Mode Extensions */
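
/*
 * CR4 bits are normally set with a read-modify-write through the
 * get_cr4()/set_cr4() inlines below.  Illustrative sketch only;
 * cpu_supports_smep is a hypothetical flag derived from CPUID leaf 7:
 *
 *      if (cpu_supports_smep)
 *              set_cr4(get_cr4() | CR4_SMEP);
 */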

/*
 * XCR0 - XFEATURE_ENABLED_MASK (a.k.a. XFEM) register
 */
#define XCR0_YMM        0x0000000000000004ULL   /* YMM state available */
#define XFEM_YMM        XCR0_YMM
#define XCR0_SSE        0x0000000000000002ULL   /* SSE supported by XSAVE/XRESTORE */
#define XCR0_X87        0x0000000000000001ULL   /* x87, FPU/MMX (always set) */
#define XFEM_SSE        XCR0_SSE
#define XFEM_X87        XCR0_X87
#define XCR0            (0)
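
/*
 * XCR0 is read with the xgetbv instruction; ECX selects the extended control
 * register and XCR0 above is that selector.  This header provides no xgetbv
 * wrapper, so the asm below is an illustrative sketch only, assuming
 * CR4_OSXSAVE has already been set:
 *
 *      uint32_t lo, hi;
 *      __asm__ volatile("xgetbv" : "=a" (lo), "=d" (hi) : "c" (XCR0));
 *      uint64_t xcr0 = ((uint64_t)hi << 32) | lo;
 *      int ymm_state_enabled = (xcr0 & XFEM_YMM) != 0;
 */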

#define PMAP_PCID_PRESERVE      (1ULL << 63)
#define PMAP_PCID_MASK          (0xFFF)

#ifndef ASSEMBLER

#include <sys/cdefs.h>
#include <stdint.h>

__BEGIN_DECLS

#define set_ts() set_cr0(get_cr0() | CR0_TS)

static inline uint16_t get_es(void)
{
        uint16_t es;
        __asm__ volatile("mov %%es, %0" : "=r" (es));
        return es;
}

static inline void set_es(uint16_t es)
{
        __asm__ volatile("mov %0, %%es" : : "r" (es));
}

static inline uint16_t get_ds(void)
{
        uint16_t ds;
        __asm__ volatile("mov %%ds, %0" : "=r" (ds));
        return ds;
}

static inline void set_ds(uint16_t ds)
{
        __asm__ volatile("mov %0, %%ds" : : "r" (ds));
}

static inline uint16_t get_fs(void)
{
        uint16_t fs;
        __asm__ volatile("mov %%fs, %0" : "=r" (fs));
        return fs;
}

static inline void set_fs(uint16_t fs)
{
        __asm__ volatile("mov %0, %%fs" : : "r" (fs));
}

static inline uint16_t get_gs(void)
{
        uint16_t gs;
        __asm__ volatile("mov %%gs, %0" : "=r" (gs));
        return gs;
}

static inline void set_gs(uint16_t gs)
{
        __asm__ volatile("mov %0, %%gs" : : "r" (gs));
}

static inline uint16_t get_ss(void)
{
        uint16_t ss;
        __asm__ volatile("mov %%ss, %0" : "=r" (ss));
        return ss;
}

static inline void set_ss(uint16_t ss)
{
        __asm__ volatile("mov %0, %%ss" : : "r" (ss));
}

static inline uintptr_t get_cr0(void)
{
        uintptr_t cr0;
        __asm__ volatile("mov %%cr0, %0" : "=r" (cr0));
        return(cr0);
}

static inline void set_cr0(uintptr_t value)
{
        __asm__ volatile("mov %0, %%cr0" : : "r" (value));
}

static inline uintptr_t get_cr2(void)
{
        uintptr_t cr2;
        __asm__ volatile("mov %%cr2, %0" : "=r" (cr2));
        return(cr2);
}

static inline uintptr_t get_cr3_raw(void)
{
        register uintptr_t cr3;
        __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
        return(cr3);
}

static inline void set_cr3_raw(uintptr_t value)
{
        __asm__ volatile("mov %0, %%cr3" : : "r" (value));
}

#if defined(__i386__)
static inline uintptr_t get_cr3(void)
{
        register uintptr_t cr3;
        __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
        return(cr3);
}

static inline void set_cr3(uintptr_t value)
{
        __asm__ volatile("mov %0, %%cr3" : : "r" (value));
}
#else
static inline uintptr_t get_cr3_base(void)
{
        register uintptr_t cr3;
        __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
        return(cr3 & ~(0xFFFULL));
}

static inline void set_cr3_composed(uintptr_t base, uint16_t pcid, uint32_t preserve)
{
        __asm__ volatile("mov %0, %%cr3" : : "r" (base | pcid | (((uint64_t)preserve) << 63)));
}

#endif
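
/*
 * On x86_64, set_cr3_composed() builds a PCID-tagged CR3 value: the low 12
 * bits carry the PCID (PMAP_PCID_MASK) and the preserve argument is shifted
 * to bit 63 (PMAP_PCID_PRESERVE), which asks the CPU not to flush that
 * PCID's cached translations.  Illustrative sketch only; pml4_phys and pcid
 * are hypothetical values supplied by the caller:
 *
 *      set_cr3_composed(pml4_phys,                 // page-aligned physical base of the top-level table
 *                       pcid & PMAP_PCID_MASK,     // 12-bit PCID
 *                       1);                        // becomes PMAP_PCID_PRESERVE; pass 0 to flush
 */
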
static inline uintptr_t get_cr4(void)
{
        uintptr_t cr4;
        __asm__ volatile("mov %%cr4, %0" : "=r" (cr4));
        return(cr4);
}

static inline void set_cr4(uintptr_t value)
{
        __asm__ volatile("mov %0, %%cr4" : : "r" (value));
}

static inline uintptr_t x86_get_flags(void)
{
        uintptr_t erflags;
        __asm__ volatile("pushf; pop %0" : "=r" (erflags));
        return erflags;
}

static inline void clear_ts(void)
{
        __asm__ volatile("clts");
}

static inline unsigned short get_tr(void)
{
        unsigned short seg;
        __asm__ volatile("str %0" : "=rm" (seg));
        return(seg);
}

static inline void set_tr(unsigned int seg)
{
        __asm__ volatile("ltr %0" : : "rm" ((unsigned short)(seg)));
}

static inline unsigned short sldt(void)
{
        unsigned short seg;
        __asm__ volatile("sldt %0" : "=rm" (seg));
        return(seg);
}

static inline void lldt(unsigned int seg)
{
        __asm__ volatile("lldt %0" : : "rm" ((unsigned short)(seg)));
}

static inline void lgdt(uintptr_t *desc)
{
        __asm__ volatile("lgdt %0" : : "m" (*desc));
}

static inline void lidt(uintptr_t *desc)
{
        __asm__ volatile("lidt %0" : : "m" (*desc));
}

static inline void swapgs(void)
{
        __asm__ volatile("swapgs");
}

#ifdef MACH_KERNEL_PRIVATE

#ifdef __i386__

#include <i386/cpu_data.h>

extern void cpuid64(uint32_t);
extern void flush_tlb64(void);
extern uint64_t get64_cr3(void);
extern void set64_cr3(uint64_t);
static inline void flush_tlb(void)
{
        if (cpu_mode_is64bit()) {
                flush_tlb64();
        } else {
                set_cr3(get_cr3());
        }
}
static inline void flush_tlb_raw(void)
{
        flush_tlb();
}

#elif defined(__x86_64__)
static inline void flush_tlb_raw(void)
{
        set_cr3_raw(get_cr3_raw());
}
#endif
#endif /* MACH_KERNEL_PRIVATE */

static inline void wbinvd(void)
{
        __asm__ volatile("wbinvd");
}

static inline void invlpg(uintptr_t addr)
{
        __asm__ volatile("invlpg (%0)" :: "r" (addr) : "memory");
}

/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.
 */

#define rdmsr(msr,lo,hi) \
        __asm__ volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (msr))

#define wrmsr(msr,lo,hi) \
        __asm__ volatile("wrmsr" : : "c" (msr), "a" (lo), "d" (hi))

#define rdtsc(lo,hi) \
        __asm__ volatile("lfence; rdtsc; lfence" : "=a" (lo), "=d" (hi))

#define write_tsc(lo,hi)        wrmsr(0x10, lo, hi)

#define rdpmc(counter,lo,hi) \
        __asm__ volatile("rdpmc" : "=a" (lo), "=d" (hi) : "c" (counter))
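
/*
 * Illustrative sketch (not part of the original header) of how these macros
 * are used: the lo/hi arguments are lvalues that the asm writes in place.
 * MSR_IA32_APIC_BASE is defined further down in this file.
 *
 *      uint32_t lo, hi;
 *      rdmsr(MSR_IA32_APIC_BASE, lo, hi);
 *      uint64_t apic_base_msr = ((uint64_t)hi << 32) | lo;
 *
 *      rdtsc(lo, hi);                     // fenced, per the definition above
 *      uint64_t tsc = ((uint64_t)hi << 32) | lo;
 */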

#ifdef __i386__

static inline uint64_t rdmsr64(uint32_t msr)
{
        uint64_t ret;
        __asm__ volatile("rdmsr" : "=A" (ret) : "c" (msr));
        return ret;
}

static inline void wrmsr64(uint32_t msr, uint64_t val)
{
        __asm__ volatile("wrmsr" : : "c" (msr), "A" (val));
}

static inline uint64_t rdtsc64(void)
{
        uint64_t ret;
        __asm__ volatile("lfence; rdtsc; lfence" : "=A" (ret));
        return ret;
}

static inline uint64_t rdtscp64(uint32_t *aux)
{
        uint64_t ret;
        __asm__ volatile("rdtscp; mov %%ecx, %1"
                         : "=A" (ret), "=m" (*aux)
                         :
                         : "ecx");
        return ret;
}

#elif defined(__x86_64__)

static inline uint64_t rdmsr64(uint32_t msr)
{
        uint32_t lo = 0, hi = 0;
        rdmsr(msr, lo, hi);
        return (((uint64_t)hi) << 32) | ((uint64_t)lo);
}

static inline void wrmsr64(uint32_t msr, uint64_t val)
{
        wrmsr(msr, (val & 0xFFFFFFFFUL), ((val >> 32) & 0xFFFFFFFFUL));
}

static inline uint64_t rdtsc64(void)
{
        uint64_t lo, hi;
        rdtsc(lo, hi);
        return ((hi) << 32) | (lo);
}

static inline uint64_t rdtscp64(uint32_t *aux)
{
        uint64_t lo, hi;
        /* store TSC_AUX (%ecx) into *aux, which is operand %2 */
        __asm__ volatile("rdtscp; mov %%ecx, %2"
                         : "=a" (lo), "=d" (hi), "=m" (*aux)
                         :
                         : "ecx");
        return ((hi) << 32) | (lo);
}

#else
#error Unsupported architecture
#endif

/*
 * rdmsr_carefully() returns 0 when the MSR has been read successfully,
 * or non-zero (1) if the MSR does not exist.
 * The implementation is in locore.s.
 */
extern int rdmsr_carefully(uint32_t msr, uint32_t *lo, uint32_t *hi);
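
/*
 * Illustrative sketch only: unlike the rdmsr() macro, rdmsr_carefully()
 * takes pointers and reports whether the MSR exists.
 *
 *      uint32_t lo, hi;
 *      if (rdmsr_carefully(MSR_PLATFORM_INFO, &lo, &hi) == 0) {
 *              // MSR present: use lo/hi
 *      } else {
 *              // MSR not implemented on this CPU: fall back
 *      }
 */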

__END_DECLS

#endif /* ASSEMBLER */

#define MSR_IA32_P5_MC_ADDR             0
#define MSR_IA32_P5_MC_TYPE             1
#define MSR_IA32_PLATFORM_ID            0x17
#define MSR_IA32_EBL_CR_POWERON         0x2a

#define MSR_IA32_APIC_BASE              0x1b
#define MSR_IA32_APIC_BASE_BSP          (1<<8)
#define MSR_IA32_APIC_BASE_EXTENDED     (1<<10)
#define MSR_IA32_APIC_BASE_ENABLE       (1<<11)
#define MSR_IA32_APIC_BASE_BASE         (0xfffff<<12)
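
/*
 * Illustrative sketch only: decoding MSR_IA32_APIC_BASE with the rdmsr64()
 * inline above.  Note that MSR_IA32_APIC_BASE_BASE as defined here masks
 * only the low 32 bits of the APIC base physical address.
 *
 *      uint64_t v = rdmsr64(MSR_IA32_APIC_BASE);
 *      int is_bsp  = (v & MSR_IA32_APIC_BASE_BSP) != 0;
 *      int enabled = (v & MSR_IA32_APIC_BASE_ENABLE) != 0;
 *      uint32_t base = (uint32_t)(v & MSR_IA32_APIC_BASE_BASE);
 */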

#define MSR_CORE_THREAD_COUNT           0x35

#define MSR_IA32_FEATURE_CONTROL        0x3a
#define MSR_IA32_FEATCTL_LOCK           (1<<0)
#define MSR_IA32_FEATCTL_VMXON_SMX      (1<<1)
#define MSR_IA32_FEATCTL_VMXON          (1<<2)
#define MSR_IA32_FEATCTL_CSTATE_SMI     (1<<16)

#define MSR_IA32_UPDT_TRIG              0x79
#define MSR_IA32_BIOS_SIGN_ID           0x8b
#define MSR_IA32_UCODE_WRITE            MSR_IA32_UPDT_TRIG
#define MSR_IA32_UCODE_REV              MSR_IA32_BIOS_SIGN_ID

#define MSR_IA32_PERFCTR0               0xc1
#define MSR_IA32_PERFCTR1               0xc2

#define MSR_PLATFORM_INFO               0xce

#define MSR_PMG_CST_CONFIG_CONTROL      0xe2

#define MSR_IA32_BBL_CR_CTL             0x119

#define MSR_IA32_SYSENTER_CS            0x174
#define MSR_IA32_SYSENTER_ESP           0x175
#define MSR_IA32_SYSENTER_EIP           0x176

#define MSR_IA32_MCG_CAP                0x179
#define MSR_IA32_MCG_STATUS             0x17a
#define MSR_IA32_MCG_CTL                0x17b

#define MSR_IA32_EVNTSEL0               0x186
#define MSR_IA32_EVNTSEL1               0x187

#define MSR_FLEX_RATIO                  0x194
#define MSR_IA32_PERF_STS               0x198
#define MSR_IA32_PERF_CTL               0x199
#define MSR_IA32_CLOCK_MODULATION       0x19a

#define MSR_IA32_MISC_ENABLE            0x1a0

#define MSR_IA32_ENERGY_PERFORMANCE_BIAS        0x1b0
#define MSR_IA32_PACKAGE_THERM_STATUS           0x1b1
#define MSR_IA32_PACKAGE_THERM_INTERRUPT        0x1b2

#define MSR_IA32_DEBUGCTLMSR            0x1d9
#define MSR_IA32_LASTBRANCHFROMIP       0x1db
#define MSR_IA32_LASTBRANCHTOIP         0x1dc
#define MSR_IA32_LASTINTFROMIP          0x1dd
#define MSR_IA32_LASTINTTOIP            0x1de

#define MSR_IA32_CR_PAT                 0x277

#define MSR_IA32_MTRRCAP                0xfe
#define MSR_IA32_MTRR_DEF_TYPE          0x2ff
#define MSR_IA32_MTRR_PHYSBASE(n)       (0x200 + 2*(n))
#define MSR_IA32_MTRR_PHYSMASK(n)       (0x200 + 2*(n) + 1)
#define MSR_IA32_MTRR_FIX64K_00000      0x250
#define MSR_IA32_MTRR_FIX16K_80000      0x258
#define MSR_IA32_MTRR_FIX16K_A0000      0x259
#define MSR_IA32_MTRR_FIX4K_C0000       0x268
#define MSR_IA32_MTRR_FIX4K_C8000       0x269
#define MSR_IA32_MTRR_FIX4K_D0000       0x26a
#define MSR_IA32_MTRR_FIX4K_D8000       0x26b
#define MSR_IA32_MTRR_FIX4K_E0000       0x26c
#define MSR_IA32_MTRR_FIX4K_E8000       0x26d
#define MSR_IA32_MTRR_FIX4K_F0000       0x26e
#define MSR_IA32_MTRR_FIX4K_F8000       0x26f
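
/*
 * Illustrative sketch only: reading one variable-range MTRR pair, where n is
 * a hypothetical index below the count reported in MSR_IA32_MTRRCAP.  Per
 * the Intel SDM, PHYSBASE[7:0] holds the memory type and PHYSMASK bit 11 is
 * the valid bit.
 *
 *      uint64_t base = rdmsr64(MSR_IA32_MTRR_PHYSBASE(n));
 *      uint64_t mask = rdmsr64(MSR_IA32_MTRR_PHYSMASK(n));
 *      int     valid = (mask >> 11) & 1;
 *      uint8_t type  = base & 0xFF;
 */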

#define MSR_IA32_MC0_CTL                0x400
#define MSR_IA32_MC0_STATUS             0x401
#define MSR_IA32_MC0_ADDR               0x402
#define MSR_IA32_MC0_MISC               0x403

#define MSR_IA32_VMX_BASE               0x480
#define MSR_IA32_VMX_BASIC              MSR_IA32_VMX_BASE
#define MSR_IA32_VMXPINBASED_CTLS       MSR_IA32_VMX_BASE+1
#define MSR_IA32_PROCBASED_CTLS         MSR_IA32_VMX_BASE+2
#define MSR_IA32_VMX_EXIT_CTLS          MSR_IA32_VMX_BASE+3
#define MSR_IA32_VMX_ENTRY_CTLS         MSR_IA32_VMX_BASE+4
#define MSR_IA32_VMX_MISC               MSR_IA32_VMX_BASE+5
#define MSR_IA32_VMX_CR0_FIXED0         MSR_IA32_VMX_BASE+6
#define MSR_IA32_VMX_CR0_FIXED1         MSR_IA32_VMX_BASE+7
#define MSR_IA32_VMX_CR4_FIXED0         MSR_IA32_VMX_BASE+8
#define MSR_IA32_VMX_CR4_FIXED1         MSR_IA32_VMX_BASE+9

#define MSR_IA32_DS_AREA                0x600

#define MSR_IA32_PACKAGE_POWER_SKU_UNIT         0x606
#define MSR_IA32_PACKAGE_ENERY_STATUS           0x611
#define MSR_IA32_PRIMARY_PLANE_ENERY_STATUS     0x639
#define MSR_IA32_SECONDARY_PLANE_ENERY_STATUS   0x641
#define MSR_IA32_TSC_DEADLINE           0x6e0

#define MSR_IA32_EFER                   0xC0000080
#define MSR_IA32_EFER_SCE               0x00000001
#define MSR_IA32_EFER_LME               0x00000100
#define MSR_IA32_EFER_LMA               0x00000400
#define MSR_IA32_EFER_NXE               0x00000800
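
/*
 * Illustrative sketch only: enabling no-execute support via a
 * read-modify-write of EFER with the rdmsr64()/wrmsr64() inlines above.
 * cpuid_has_nx is a hypothetical flag taken from CPUID 0x80000001
 * (EDX bit 20).
 *
 *      uint64_t efer = rdmsr64(MSR_IA32_EFER);
 *      if (cpuid_has_nx)
 *              wrmsr64(MSR_IA32_EFER, efer | MSR_IA32_EFER_NXE);
 */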

#define MSR_IA32_STAR                   0xC0000081
#define MSR_IA32_LSTAR                  0xC0000082
#define MSR_IA32_CSTAR                  0xC0000083
#define MSR_IA32_FMASK                  0xC0000084

#define MSR_IA32_FS_BASE                0xC0000100
#define MSR_IA32_GS_BASE                0xC0000101
#define MSR_IA32_KERNEL_GS_BASE         0xC0000102
#define MSR_IA32_TSC_AUX                0xC0000103

#endif /* _I386_PROC_REG_H_ */