[apple/xnu.git] / osfmk / i386 / proc_reg.h
1c79356b 1/*
6d2010ae 2 * Copyright (c) 2000-2010 Apple Inc. All rights reserved.
1c79356b 3 *
2d21ac55 4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
1c79356b 5 *
2d21ac55 6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
8f6c56a5 14 *
2d21ac55 15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
8f6c56a5 20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
2d21ac55 22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
8f6c56a5 25 *
2d21ac55 26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
1c79356b 27 */
28/*
29 * @OSF_COPYRIGHT@
30 */
31/* CMU_ENDHIST */
32/*
33 * Mach Operating System
34 * Copyright (c) 1991,1990 Carnegie Mellon University
35 * All Rights Reserved.
36 *
37 * Permission to use, copy, modify and distribute this software and its
38 * documentation is hereby granted, provided that both the copyright
39 * notice and this permission notice appear in all copies of the
40 * software, derivative works or modified versions, and any portions
41 * thereof, and that both notices appear in supporting documentation.
42 *
43 * CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
44 * CONDITION. CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
45 * ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
46 *
47 * Carnegie Mellon requests users of this software to return to
48 *
49 * Software Distribution Coordinator or Software.Distribution@CS.CMU.EDU
50 * School of Computer Science
51 * Carnegie Mellon University
52 * Pittsburgh PA 15213-3890
53 *
54 * any improvements or extensions that they make and grant Carnegie Mellon
55 * the rights to redistribute these changes.
56 */
57
58/*
59 */
60
61/*
62 * Processor registers for i386 and i486.
63 */
64#ifndef _I386_PROC_REG_H_
65#define _I386_PROC_REG_H_
66
67/*
68 * Model Specific Registers
69 */
70#define MSR_P5_TSC 0x10 /* Time Stamp Register */
71#define MSR_P5_CESR 0x11 /* Control and Event Select Register */
72#define MSR_P5_CTR0 0x12 /* Counter #0 */
73#define MSR_P5_CTR1 0x13 /* Counter #1 */
74
75#define MSR_P5_CESR_PC 0x0200 /* Pin Control */
76#define MSR_P5_CESR_CC 0x01C0 /* Counter Control mask */
77#define MSR_P5_CESR_ES 0x003F /* Event Control mask */
78
79#define MSR_P5_CESR_SHIFT 16 /* Shift to get Counter 1 */
80#define MSR_P5_CESR_MASK (MSR_P5_CESR_PC|\
81 MSR_P5_CESR_CC|\
82 MSR_P5_CESR_ES) /* Mask Counter */
83
84#define MSR_P5_CESR_CC_CLOCK 0x0100 /* Clock Counting (otherwise Event) */
85#define MSR_P5_CESR_CC_DISABLE 0x0000 /* Disable counter */
86#define MSR_P5_CESR_CC_CPL012 0x0040 /* Count if the CPL == 0, 1, 2 */
87#define MSR_P5_CESR_CC_CPL3 0x0080 /* Count if the CPL == 3 */
88#define MSR_P5_CESR_CC_CPL 0x00C0 /* Count regardless of the CPL */
89
90#define MSR_P5_CESR_ES_DATA_READ 0x000000 /* Data Read */
91#define MSR_P5_CESR_ES_DATA_WRITE 0x000001 /* Data Write */
92#define MSR_P5_CESR_ES_DATA_RW 0x101000 /* Data Read or Write */
93#define MSR_P5_CESR_ES_DATA_TLB_MISS 0x000010 /* Data TLB Miss */
94#define MSR_P5_CESR_ES_DATA_READ_MISS 0x000011 /* Data Read Miss */
95#define MSR_P5_CESR_ES_DATA_WRITE_MISS 0x000100 /* Data Write Miss */
96#define MSR_P5_CESR_ES_DATA_RW_MISS 0x101001 /* Data Read or Write Miss */
97#define MSR_P5_CESR_ES_HIT_EM 0x000101 /* Write (hit) to M|E state */
98#define MSR_P5_CESR_ES_DATA_CACHE_WB 0x000110 /* Cache lines written back */
99#define MSR_P5_CESR_ES_EXTERNAL_SNOOP 0x000111 /* External Snoop */
100#define MSR_P5_CESR_ES_CACHE_SNOOP_HIT 0x001000 /* Data cache snoop hits */
101#define MSR_P5_CESR_ES_MEM_ACCESS_PIPE 0x001001 /* Mem. access in both pipes */
102#define MSR_P5_CESR_ES_BANK_CONFLICTS 0x001010 /* Bank conflicts */
103#define MSR_P5_CESR_ES_MISALIGNED 0x001011 /* Misaligned Memory or I/O */
104#define MSR_P5_CESR_ES_CODE_READ 0x001100 /* Code Read */
105#define MSR_P5_CESR_ES_CODE_TLB_MISS 0x001101 /* Code TLB miss */
106#define MSR_P5_CESR_ES_CODE_CACHE_MISS 0x001110 /* Code Cache miss */
107#define MSR_P5_CESR_ES_SEGMENT_LOADED 0x001111 /* Any segment reg. loaded */
108#define MSR_P5_CESR_ES_BRANCHE 0x010010 /* Branches */
109#define MSR_P5_CESR_ES_BTB_HIT 0x010011 /* BTB Hits */
110#define MSR_P5_CESR_ES_BRANCHE_BTB 0x010100 /* Taken branch or BTB Hit */
111#define MSR_P5_CESR_ES_PIPELINE_FLUSH 0x010101 /* Pipeline Flushes */
112#define MSR_P5_CESR_ES_INSTRUCTION 0x010110 /* Instruction executed */
113#define MSR_P5_CESR_ES_INSTRUCTION_V 0x010111 /* Inst. executed (v-pipe) */
114#define MSR_P5_CESR_ES_BUS_CYCLE 0x011000 /* Clocks while bus cycle */
115#define MSR_P5_CESR_ES_FULL_WRITE_BUF 0x011001 /* Clocks while full wrt buf. */
116#define MSR_P5_CESR_ES_DATA_MEM_READ 0x011010 /* Pipeline waiting for read */
117#define MSR_P5_CESR_ES_WRITE_EM 0x011011 /* Stall on write E|M state */
118#define MSR_P5_CESR_ES_LOCKED_CYCLE 0x011100 /* Locked bus cycles */
119#define MSR_P5_CESR_ES_IO_CYCLE 0x011101 /* I/O Read or Write cycles */
120#define MSR_P5_CESR_ES_NON_CACHEABLE 0x011110 /* Non-cacheable Mem. read */
121#define MSR_P5_CESR_ES_AGI 0x011111 /* Stall because of AGI */
122#define MSR_P5_CESR_ES_FLOP 0x100010 /* Floating Point operations */
123#define MSR_P5_CESR_ES_BREAK_DR0 0x100011 /* Breakpoint matches on DR0 */
124#define MSR_P5_CESR_ES_BREAK_DR1 0x100100 /* Breakpoint matches on DR1 */
125#define MSR_P5_CESR_ES_BREAK_DR2 0x100101 /* Breakpoint matches on DR2 */
126#define MSR_P5_CESR_ES_BREAK_DR3 0x100110 /* Breakpoint matches on DR3 */
127#define MSR_P5_CESR_ES_HARDWARE_IT 0x100111 /* Hardware interrupts */
128
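/*
 * Illustrative example (not part of the original header): composing a CESR
 * value from the fields above.  Counter 0 counts data TLB misses at CPL 0-2;
 * counter 1 counts clocks at any CPL.  Counter 1's fields sit
 * MSR_P5_CESR_SHIFT bits above counter 0's.  The macro name is hypothetical.
 */
#define EXAMPLE_P5_CESR_SETUP \
	((MSR_P5_CESR_ES_DATA_TLB_MISS | MSR_P5_CESR_CC_CPL012) | \
	 ((MSR_P5_CESR_CC_CLOCK | MSR_P5_CESR_CC_CPL) << MSR_P5_CESR_SHIFT))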
129/*
130 * CR0
131 */
132#define CR0_PG 0x80000000 /* Enable paging */
133#define CR0_CD 0x40000000 /* i486: Cache disable */
134#define CR0_NW 0x20000000 /* i486: No write-through */
135#define CR0_AM 0x00040000 /* i486: Alignment check mask */
136#define CR0_WP 0x00010000 /* i486: Write-protect kernel access */
137#define CR0_NE 0x00000020 /* i486: Handle numeric exceptions */
138#define CR0_ET 0x00000010 /* Extension type is 80387 */
139 /* (not official) */
140#define CR0_TS 0x00000008 /* Task switch */
141#define CR0_EM 0x00000004 /* Emulate coprocessor */
142#define CR0_MP 0x00000002 /* Monitor coprocessor */
143#define CR0_PE 0x00000001 /* Enable protected mode */
144
145/*
146 * CR4
147 */
060df5ea 148#define CR4_OSXSAVE 0x00040000 /* OS supports XSAVE */
6d2010ae 149#define CR4_PCIDE 0x00020000 /* PCID Enable */
060df5ea 150#define CR4_SMXE 0x00004000 /* Enable SMX operation */
151#define CR4_VMXE 0x00002000 /* Enable VMX operation */
152#define CR4_OSXMM 0x00000400 /* SSE/SSE2 exceptions supported in OS */
153#define CR4_OSFXS 0x00000200 /* SSE/SSE2 OS supports FXSave */
154#define CR4_PCE 0x00000100 /* Performance-Monitor Count Enable */
155#define CR4_PGE 0x00000080 /* Page Global Enable */
156#define CR4_MCE 0x00000040 /* Machine Check Exceptions */
157#define CR4_PAE 0x00000020 /* Physical Address Extensions */
158#define CR4_PSE 0x00000010 /* Page Size Extensions */
159#define CR4_DE 0x00000008 /* Debugging Extensions */
160#define CR4_TSD 0x00000004 /* Time Stamp Disable */
161#define CR4_PVI 0x00000002 /* Protected-mode Virtual Interrupts */
162#define CR4_VME 0x00000001 /* Virtual-8086 Mode Extensions */
1c79356b 163
060df5ea 164/*
165 * XCR0 - XFEATURE_ENABLED_MASK (a.k.a. XFEM) register
166 */
167#define XCR0_YMM 0x0000000000000004ULL /* YMM state available */
168#define XFEM_YMM XCR0_YMM
169#define XCR0_SSE 0x0000000000000002ULL /* SSE supported by XSAVE/XRESTORE */
170#define XCR0_X87 0x0000000000000001ULL /* x87, FPU/MMX (always set) */
171#define XFEM_SSE XCR0_SSE
172#define XFEM_X87 XCR0_X87
173#define XCR0 (0)
6d2010ae 174
175#define PMAP_PCID_PRESERVE (1ULL << 63)
176#define PMAP_PCID_MASK (0xFFF)
1c79356b 177#ifndef ASSEMBLER
91447636 178
179#include <sys/cdefs.h>
b0d623f7 180#include <stdint.h>
181
91447636 182__BEGIN_DECLS
1c79356b 183
b0d623f7 184#define set_ts() set_cr0(get_cr0() | CR0_TS)
1c79356b 185
6d2010ae 186static inline uint16_t get_es(void)
187{
188 uint16_t es;
189 __asm__ volatile("mov %%es, %0" : "=r" (es));
190 return es;
191}
192
193static inline void set_es(uint16_t es)
194{
195 __asm__ volatile("mov %0, %%es" : : "r" (es));
196}
197
198static inline uint16_t get_ds(void)
199{
200 uint16_t ds;
201 __asm__ volatile("mov %%ds, %0" : "=r" (ds));
202 return ds;
203}
204
205static inline void set_ds(uint16_t ds)
206{
207 __asm__ volatile("mov %0, %%ds" : : "r" (ds));
208}
209
210static inline uint16_t get_fs(void)
211{
212 uint16_t fs;
213 __asm__ volatile("mov %%fs, %0" : "=r" (fs));
214 return fs;
215}
216
217static inline void set_fs(uint16_t fs)
218{
219 __asm__ volatile("mov %0, %%fs" : : "r" (fs));
220}
221
222static inline uint16_t get_gs(void)
223{
224 uint16_t gs;
225 __asm__ volatile("mov %%gs, %0" : "=r" (gs));
226 return gs;
227}
228
229static inline void set_gs(uint16_t gs)
230{
231 __asm__ volatile("mov %0, %%gs" : : "r" (gs));
232}
233
234static inline uint16_t get_ss(void)
235{
236 uint16_t ss;
237 __asm__ volatile("mov %%ss, %0" : "=r" (ss));
238 return ss;
239}
240
241static inline void set_ss(uint16_t ss)
242{
243 __asm__ volatile("mov %0, %%ss" : : "r" (ss));
244}
245
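/*
 * Illustrative sketch (not part of the original header): the selector
 * accessors above are plain moves to/from the segment registers, e.g.
 * reloading the data selectors with a caller-supplied value.  The function
 * name is hypothetical.
 */
static inline void example_reload_data_selectors(uint16_t sel)
{
	set_ds(sel);
	set_es(sel);
}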
b0d623f7 246static inline uintptr_t get_cr0(void)
1c79356b 247{
b0d623f7 248 uintptr_t cr0;
1c79356b 249 __asm__ volatile("mov %%cr0, %0" : "=r" (cr0));
250 return(cr0);
251}
252
b0d623f7 253static inline void set_cr0(uintptr_t value)
1c79356b 254{
255 __asm__ volatile("mov %0, %%cr0" : : "r" (value));
256}
257
b0d623f7 258static inline uintptr_t get_cr2(void)
1c79356b 259{
b0d623f7 260 uintptr_t cr2;
1c79356b 261 __asm__ volatile("mov %%cr2, %0" : "=r" (cr2));
262 return(cr2);
263}
264
6d2010ae 265static inline uintptr_t get_cr3_raw(void)
266{
267 register uintptr_t cr3;
268 __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
269 return(cr3);
270}
271
272static inline void set_cr3_raw(uintptr_t value)
273{
274 __asm__ volatile("mov %0, %%cr3" : : "r" (value));
275}
276
277#if defined(__i386__)
b0d623f7 278static inline uintptr_t get_cr3(void)
1c79356b 279{
b0d623f7 280 register uintptr_t cr3;
1c79356b 281 __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
282 return(cr3);
283}
284
b0d623f7 285static inline void set_cr3(uintptr_t value)
1c79356b 286{
287 __asm__ volatile("mov %0, %%cr3" : : "r" (value));
288}
6d2010ae 289#else
290static inline uintptr_t get_cr3_base(void)
291{
292 register uintptr_t cr3;
293 __asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
294 return(cr3 & ~(0xFFFULL));
295}
296
297static inline void set_cr3_composed(uintptr_t base, uint16_t pcid, uint32_t preserve)
298{
299 __asm__ volatile("mov %0, %%cr3" : : "r" (base | pcid | ( ( (uint64_t)preserve) << 63) ) );
300}
1c79356b 301
6d2010ae 302#endif
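#if defined(__x86_64__)
/*
 * Illustrative sketch (not part of the original header): loading a new
 * address space whose top-level table is at 'pml4_phys', tagged with 'pcid',
 * while asking the CPU to preserve that PCID's cached translations (bit 63,
 * PMAP_PCID_PRESERVE).  Function and parameter names are hypothetical.
 */
static inline void example_switch_address_space(uintptr_t pml4_phys, uint16_t pcid)
{
	set_cr3_composed(pml4_phys, pcid & PMAP_PCID_MASK, 1);
}
#endif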
b0d623f7 303static inline uintptr_t get_cr4(void)
0c530ab8 304{
b0d623f7 305 uintptr_t cr4;
0c530ab8 306 __asm__ volatile("mov %%cr4, %0" : "=r" (cr4));
307 return(cr4);
308}
309
b0d623f7 310static inline void set_cr4(uintptr_t value)
0c530ab8 311{
312 __asm__ volatile("mov %0, %%cr4" : : "r" (value));
313}
91447636 314
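/*
 * Illustrative sketch (not part of the original header): CR4 feature bits are
 * enabled read-modify-write style, e.g. turning on FXSAVE/FXRSTOR support and
 * unmasked SSE exceptions.  The function name is hypothetical.
 */
static inline void example_enable_sse_bits(void)
{
	set_cr4(get_cr4() | CR4_OSFXS | CR4_OSXMM);
}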
6d2010ae 315static inline uintptr_t x86_get_flags(void)
316{
317 uintptr_t erflags;
318 __asm__ volatile("pushf; pop %0" : "=r" (erflags));
319 return erflags;
320}
321
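/*
 * Illustrative sketch (not part of the original header): bit 9 (0x200) of
 * EFLAGS/RFLAGS is IF, so the saved flags tell whether interrupts were
 * enabled.  The function name is hypothetical.
 */
static inline int example_interrupts_enabled(void)
{
	return (x86_get_flags() & 0x200) != 0;	/* EFLAGS.IF */
}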
91447636 322static inline void clear_ts(void)
1c79356b 323{
324 __asm__ volatile("clts");
325}
326
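/*
 * Illustrative sketch (not part of the original header): set_ts()/clear_ts()
 * support lazy floating-point context switching; with CR0.TS set the next
 * FP/SSE instruction traps, and clts re-enables FP use once the new thread's
 * state has been restored.  The function name is hypothetical.
 */
static inline void example_clear_ts_if_set(void)
{
	if (get_cr0() & CR0_TS)
		clear_ts();
}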
91447636 327static inline unsigned short get_tr(void)
1c79356b 328{
329 unsigned short seg;
330 __asm__ volatile("str %0" : "=rm" (seg));
331 return(seg);
332}
333
91447636 334static inline void set_tr(unsigned int seg)
1c79356b 335{
336 __asm__ volatile("ltr %0" : : "rm" ((unsigned short)(seg)));
337}
338
0c530ab8 339static inline unsigned short sldt(void)
1c79356b 340{
341 unsigned short seg;
342 __asm__ volatile("sldt %0" : "=rm" (seg));
343 return(seg);
344}
345
0c530ab8 346static inline void lldt(unsigned int seg)
1c79356b 347{
348 __asm__ volatile("lldt %0" : : "rm" ((unsigned short)(seg)));
349}
350
b0d623f7 351static inline void lgdt(uintptr_t *desc)
352{
353 __asm__ volatile("lgdt %0" : : "m" (*desc));
354}
355
356static inline void lidt(uintptr_t *desc)
357{
358 __asm__ volatile("lidt %0" : : "m" (*desc));
359}
360
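/*
 * Illustrative sketch (not part of the original header): lgdt()/lidt() expect
 * a pointer to a pseudo-descriptor, a 16-bit limit followed by the linear
 * base address.  The struct name below is hypothetical; it would be loaded
 * with lgdt((uintptr_t *)&desc).
 */
struct example_pseudo_descriptor {
	uint16_t	limit;		/* size of the table in bytes, minus one */
	uintptr_t	base;		/* linear address of the table */
} __attribute__((packed));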
361static inline void swapgs(void)
362{
363 __asm__ volatile("swapgs");
364}
365
0c530ab8 366#ifdef MACH_KERNEL_PRIVATE
b0d623f7 367
b0d623f7 368#ifdef __i386__
369
370#include <i386/cpu_data.h>
371
372extern void cpuid64(uint32_t);
0c530ab8 373extern void flush_tlb64(void);
2d21ac55 374extern uint64_t get64_cr3(void);
375extern void set64_cr3(uint64_t);
91447636 376static inline void flush_tlb(void)
1c79356b 377{
0c530ab8 378 if (cpu_mode_is64bit()) {
379 flush_tlb64();
b0d623f7 380 } else {
381 set_cr3(get_cr3());
0c530ab8 382 }
1c79356b 383}
6d2010ae 384static inline void flush_tlb_raw(void)
385{
386 flush_tlb();
387}
388
b0d623f7 389#elif defined(__x86_64__)
6d2010ae 390static inline void flush_tlb_raw(void)
b0d623f7 391{
6d2010ae 392 set_cr3_raw(get_cr3_raw());
b0d623f7 393}
b0d623f7 394#endif
0c530ab8 395#endif /* MACH_KERNEL_PRIVATE */
1c79356b 396
91447636 397static inline void wbinvd(void)
398{
399 __asm__ volatile("wbinvd");
400}
401
b0d623f7 402static inline void invlpg(uintptr_t addr)
1c79356b 403{
404 __asm__ volatile("invlpg (%0)" :: "r" (addr) : "memory");
405}
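/*
 * Illustrative sketch (not part of the original header): after changing a
 * single page mapping, invalidating just that page is cheaper than reloading
 * CR3.  The function name is hypothetical.
 */
static inline void example_invalidate_mapping(uintptr_t vaddr)
{
	invlpg(vaddr);		/* drops only the TLB entry covering vaddr */
}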
55e303ae 406
407/*
408 * Access to machine-specific registers (available on 586 and better only)
409 * Note: the rd* operations modify the parameters directly (without using
410 * pointer indirection), which allows gcc to optimize better.
411 */
412
413#define rdmsr(msr,lo,hi) \
414 __asm__ volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (msr))
415
416#define wrmsr(msr,lo,hi) \
417 __asm__ volatile("wrmsr" : : "c" (msr), "a" (lo), "d" (hi))
418
419#define rdtsc(lo,hi) \
c910b4d9 420 __asm__ volatile("lfence; rdtsc; lfence" : "=a" (lo), "=d" (hi))
55e303ae 421
422#define write_tsc(lo,hi) wrmsr(0x10, lo, hi)
423
424#define rdpmc(counter,lo,hi) \
425 __asm__ volatile("rdpmc" : "=a" (lo), "=d" (hi) : "c" (counter))
426
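/*
 * Illustrative sketch (not part of the original header): the macros above
 * write straight into the named lvalues, so a read-modify-write of an MSR's
 * low half looks like this.  The function name is hypothetical.
 */
static inline void example_set_msr_bit(uint32_t msr, uint32_t bit)
{
	uint32_t lo, hi;

	rdmsr(msr, lo, hi);
	lo |= (1U << bit);		/* bit 0..31 of the low half */
	wrmsr(msr, lo, hi);
}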
b0d623f7 427#ifdef __i386__
428
91447636 429static inline uint64_t rdmsr64(uint32_t msr)
55e303ae 430{
431 uint64_t ret;
432 __asm__ volatile("rdmsr" : "=A" (ret) : "c" (msr));
433 return ret;
434}
435
91447636 436static inline void wrmsr64(uint32_t msr, uint64_t val)
55e303ae 437{
438 __asm__ volatile("wrmsr" : : "c" (msr), "A" (val));
439}
440
91447636 441static inline uint64_t rdtsc64(void)
55e303ae 442{
443 uint64_t ret;
c910b4d9 444 __asm__ volatile("lfence; rdtsc; lfence" : "=A" (ret));
445 return ret;
446}
447
448static inline uint64_t rdtscp64(uint32_t *aux)
449{
450 uint64_t ret;
451 __asm__ volatile("rdtscp; mov %%ecx, %1"
452 : "=A" (ret), "=m" (*aux)
453 :
454 : "ecx");
55e303ae 455 return ret;
456}
91447636 457
b0d623f7 458#elif defined(__x86_64__)
459
460static inline uint64_t rdmsr64(uint32_t msr)
461{
462 uint32_t lo=0, hi=0;
463 rdmsr(msr, lo, hi);
464 return (((uint64_t)hi) << 32) | ((uint64_t)lo);
465}
466
467static inline void wrmsr64(uint32_t msr, uint64_t val)
468{
469 wrmsr(msr, (val & 0xFFFFFFFFUL), ((val >> 32) & 0xFFFFFFFFUL));
470}
471
472static inline uint64_t rdtsc64(void)
473{
6d2010ae 474 uint64_t lo, hi;
b0d623f7 475 rdtsc(lo, hi);
6d2010ae 476 return ((hi) << 32) | (lo);
b0d623f7 477}
478
479static inline uint64_t rdtscp64(uint32_t *aux)
480{
6d2010ae 481 uint64_t lo, hi;
b0d623f7 482 __asm__ volatile("rdtscp; mov %%ecx, %1"
483 : "=a" (lo), "=d" (hi), "=m" (*aux)
484 :
485 : "ecx");
6d2010ae 486 return ((hi) << 32) | (lo);
b0d623f7 487}
488
489#else
490#error Unsupported architecture
491#endif
492
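/*
 * Illustrative sketch (not part of the original header): measuring a short
 * code region in TSC ticks, e.g. uint64_t t0 = rdtsc64(); ... ;
 * ticks = example_tsc_delta(t0);.  The lfences in rdtsc()/rdtsc64() keep the
 * reads from drifting into the measured region.  The function name is
 * hypothetical.
 */
static inline uint64_t example_tsc_delta(uint64_t start)
{
	return rdtsc64() - start;
}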
91447636 493/*
494 * rdmsr_carefully() returns 0 when the MSR has been read successfully,
495 * or non-zero (1) if the MSR does not exist.
496 * The implementation is in locore.s.
497 */
498extern int rdmsr_carefully(uint32_t msr, uint32_t *lo, uint32_t *hi);
499
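/*
 * Illustrative sketch (not part of the original header): probing for an MSR
 * that may not exist on every CPU; rdmsr_carefully() returns non-zero instead
 * of faulting when the MSR is absent.  The function name is hypothetical.
 */
static inline int example_msr_is_present(uint32_t msr)
{
	uint32_t lo, hi;

	return rdmsr_carefully(msr, &lo, &hi) == 0;
}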
500__END_DECLS
501
1c79356b 502#endif /* ASSEMBLER */
503
060df5ea 504#define MSR_IA32_P5_MC_ADDR 0
505#define MSR_IA32_P5_MC_TYPE 1
506#define MSR_IA32_PLATFORM_ID 0x17
507#define MSR_IA32_EBL_CR_POWERON 0x2a
508
509#define MSR_IA32_APIC_BASE 0x1b
510#define MSR_IA32_APIC_BASE_BSP (1<<8)
511#define MSR_IA32_APIC_BASE_EXTENDED (1<<10)
512#define MSR_IA32_APIC_BASE_ENABLE (1<<11)
513#define MSR_IA32_APIC_BASE_BASE (0xfffff<<12)
514
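#ifndef ASSEMBLER
/*
 * Illustrative sketch (not part of the original header): extracting the local
 * APIC's physical base from IA32_APIC_BASE, using only the masks defined
 * above.  The function name is hypothetical.
 */
static inline uint64_t example_lapic_base(void)
{
	uint64_t v = rdmsr64(MSR_IA32_APIC_BASE);

	return (v & MSR_IA32_APIC_BASE_ENABLE) ? (v & MSR_IA32_APIC_BASE_BASE) : 0;
}
#endif /* ASSEMBLER */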
515#define MSR_CORE_THREAD_COUNT 0x35
516
517#define MSR_IA32_FEATURE_CONTROL 0x3a
518#define MSR_IA32_FEATCTL_LOCK (1<<0)
519#define MSR_IA32_FEATCTL_VMXON_SMX (1<<1)
520#define MSR_IA32_FEATCTL_VMXON (1<<2)
521#define MSR_IA32_FEATCTL_CSTATE_SMI (1<<16)
522
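#ifndef ASSEMBLER
/*
 * Illustrative sketch (not part of the original header): VMXON outside SMX is
 * only permitted once firmware has set (and locked) the corresponding bits in
 * IA32_FEATURE_CONTROL.  The function name is hypothetical.
 */
static inline int example_vmx_enabled_by_firmware(void)
{
	uint64_t fc = rdmsr64(MSR_IA32_FEATURE_CONTROL);

	return (fc & MSR_IA32_FEATCTL_LOCK) && (fc & MSR_IA32_FEATCTL_VMXON);
}
#endif /* ASSEMBLER */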
523#define MSR_IA32_UPDT_TRIG 0x79
524#define MSR_IA32_BIOS_SIGN_ID 0x8b
525#define MSR_IA32_UCODE_WRITE MSR_IA32_UPDT_TRIG
526#define MSR_IA32_UCODE_REV MSR_IA32_BIOS_SIGN_ID
527
528#define MSR_IA32_PERFCTR0 0xc1
529#define MSR_IA32_PERFCTR1 0xc2
530
531#define MSR_PLATFORM_INFO 0xce
532
533#define MSR_PMG_CST_CONFIG_CONTROL 0xe2
534
535#define MSR_IA32_BBL_CR_CTL 0x119
536
537#define MSR_IA32_SYSENTER_CS 0x174
538#define MSR_IA32_SYSENTER_ESP 0x175
539#define MSR_IA32_SYSENTER_EIP 0x176
540
541#define MSR_IA32_MCG_CAP 0x179
542#define MSR_IA32_MCG_STATUS 0x17a
543#define MSR_IA32_MCG_CTL 0x17b
544
545#define MSR_IA32_EVNTSEL0 0x186
546#define MSR_IA32_EVNTSEL1 0x187
547
548#define MSR_FLEX_RATIO 0x194
549#define MSR_IA32_PERF_STS 0x198
550#define MSR_IA32_PERF_CTL 0x199
551#define MSR_IA32_CLOCK_MODULATION 0x19a
552
553#define MSR_IA32_MISC_ENABLE 0x1a0
554
555#define MSR_IA32_ENERGY_PERFORMANCE_BIAS 0x1b0
556#define MSR_IA32_PACKAGE_THERM_STATUS 0x1b1
557#define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x1b2
558
559#define MSR_IA32_DEBUGCTLMSR 0x1d9
560#define MSR_IA32_LASTBRANCHFROMIP 0x1db
561#define MSR_IA32_LASTBRANCHTOIP 0x1dc
562#define MSR_IA32_LASTINTFROMIP 0x1dd
563#define MSR_IA32_LASTINTTOIP 0x1de
564
565#define MSR_IA32_CR_PAT 0x277
566
567#define MSR_IA32_MTRRCAP 0xfe
568#define MSR_IA32_MTRR_DEF_TYPE 0x2ff
569#define MSR_IA32_MTRR_PHYSBASE(n) (0x200 + 2*(n))
570#define MSR_IA32_MTRR_PHYSMASK(n) (0x200 + 2*(n) + 1)
571#define MSR_IA32_MTRR_FIX64K_00000 0x250
572#define MSR_IA32_MTRR_FIX16K_80000 0x258
573#define MSR_IA32_MTRR_FIX16K_A0000 0x259
574#define MSR_IA32_MTRR_FIX4K_C0000 0x268
575#define MSR_IA32_MTRR_FIX4K_C8000 0x269
576#define MSR_IA32_MTRR_FIX4K_D0000 0x26a
577#define MSR_IA32_MTRR_FIX4K_D8000 0x26b
578#define MSR_IA32_MTRR_FIX4K_E0000 0x26c
579#define MSR_IA32_MTRR_FIX4K_E8000 0x26d
580#define MSR_IA32_MTRR_FIX4K_F0000 0x26e
581#define MSR_IA32_MTRR_FIX4K_F8000 0x26f
582
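#ifndef ASSEMBLER
/*
 * Illustrative sketch (not part of the original header): reading one
 * variable-range MTRR pair; MSR_IA32_MTRR_PHYSBASE(n)/PHYSMASK(n) expand to
 * consecutive MSR numbers starting at 0x200.  The function name is
 * hypothetical.
 */
static inline void example_read_var_mtrr(unsigned int n, uint64_t *base, uint64_t *mask)
{
	*base = rdmsr64(MSR_IA32_MTRR_PHYSBASE(n));
	*mask = rdmsr64(MSR_IA32_MTRR_PHYSMASK(n));
}
#endif /* ASSEMBLER */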
583#define MSR_IA32_MC0_CTL 0x400
584#define MSR_IA32_MC0_STATUS 0x401
585#define MSR_IA32_MC0_ADDR 0x402
586#define MSR_IA32_MC0_MISC 0x403
587
588#define MSR_IA32_VMX_BASE 0x480
589#define MSR_IA32_VMX_BASIC MSR_IA32_VMX_BASE
2d21ac55 590#define MSR_IA32_VMXPINBASED_CTLS MSR_IA32_VMX_BASE+1
060df5ea 591#define MSR_IA32_PROCBASED_CTLS MSR_IA32_VMX_BASE+2
592#define MSR_IA32_VMX_EXIT_CTLS MSR_IA32_VMX_BASE+3
593#define MSR_IA32_VMX_ENTRY_CTLS MSR_IA32_VMX_BASE+4
594#define MSR_IA32_VMX_MISC MSR_IA32_VMX_BASE+5
595#define MSR_IA32_VMX_CR0_FIXED0 MSR_IA32_VMX_BASE+6
596#define MSR_IA32_VMX_CR0_FIXED1 MSR_IA32_VMX_BASE+7
597#define MSR_IA32_VMX_CR4_FIXED0 MSR_IA32_VMX_BASE+8
598#define MSR_IA32_VMX_CR4_FIXED1 MSR_IA32_VMX_BASE+9
599
600#define MSR_IA32_DS_AREA 0x600
601
602#define MSR_IA32_PACKAGE_POWER_SKU_UNIT 0x606
603#define MSR_IA32_PACKAGE_ENERY_STATUS 0x611
604#define MSR_IA32_PRIMARY_PLANE_ENERY_STATUS 0x639
605#define MSR_IA32_SECONDARY_PLANE_ENERY_STATUS 0x641
606#define MSR_IA32_TSC_DEADLINE 0x6e0
607
608#define MSR_IA32_EFER 0xC0000080
609#define MSR_IA32_EFER_SCE 0x00000001
610#define MSR_IA32_EFER_LME 0x00000100
611#define MSR_IA32_EFER_LMA 0x00000400
612#define MSR_IA32_EFER_NXE 0x00000800
613
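#ifndef ASSEMBLER
/*
 * Illustrative sketch (not part of the original header): checking whether
 * no-execute page protection is active by reading IA32_EFER.  The function
 * name is hypothetical.
 */
static inline int example_nx_enabled(void)
{
	return (rdmsr64(MSR_IA32_EFER) & MSR_IA32_EFER_NXE) != 0;
}
#endif /* ASSEMBLER */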
614#define MSR_IA32_STAR 0xC0000081
615#define MSR_IA32_LSTAR 0xC0000082
616#define MSR_IA32_CSTAR 0xC0000083
617#define MSR_IA32_FMASK 0xC0000084
618
619#define MSR_IA32_FS_BASE 0xC0000100
620#define MSR_IA32_GS_BASE 0xC0000101
621#define MSR_IA32_KERNEL_GS_BASE 0xC0000102
622#define MSR_IA32_TSC_AUX 0xC0000103
c910b4d9 623
1c79356b 624#endif /* _I386_PROC_REG_H_ */