• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * tools/testing/selftests/kvm/include/x86_64/processor.h
 *
 * Copyright (C) 2018, Google LLC.
 */
7 
8 #ifndef SELFTEST_KVM_PROCESSOR_H
9 #define SELFTEST_KVM_PROCESSOR_H
10 
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
13 
/* RFLAGS bit 1 is reserved and always reads as 1. */
#define X86_EFLAGS_FIXED	 (1u << 1)

/* CR4 feature-enable bits. */
#define X86_CR4_VME		(1ul << 0)
#define X86_CR4_PVI		(1ul << 1)
#define X86_CR4_TSD		(1ul << 2)
#define X86_CR4_DE		(1ul << 3)
#define X86_CR4_PSE		(1ul << 4)
#define X86_CR4_PAE		(1ul << 5)
#define X86_CR4_MCE		(1ul << 6)
#define X86_CR4_PGE		(1ul << 7)
#define X86_CR4_PCE		(1ul << 8)
#define X86_CR4_OSFXSR		(1ul << 9)
#define X86_CR4_OSXMMEXCPT	(1ul << 10)
#define X86_CR4_UMIP		(1ul << 11)
#define X86_CR4_VMXE		(1ul << 13)
#define X86_CR4_SMXE		(1ul << 14)
#define X86_CR4_FSGSBASE	(1ul << 16)
#define X86_CR4_PCIDE		(1ul << 17)
#define X86_CR4_OSXSAVE		(1ul << 18)
#define X86_CR4_SMEP		(1ul << 20)
#define X86_CR4_SMAP		(1ul << 21)
#define X86_CR4_PKE		(1ul << 22)
36 
/* The enum values match the instruction encoding of each register */
enum x86_register {
	RAX = 0,
	RCX,
	RDX,
	RBX,
	RSP,
	RBP,
	RSI,
	RDI,
	R8,
	R9,
	R10,
	R11,
	R12,
	R13,
	R14,
	R15,
};
56 
/* 64-bit segment descriptor (GDT/LDT entry); field layout per the SDM. */
struct desc64 {
	uint16_t limit0;
	uint16_t base0;
	unsigned base1:8, s:1, type:4, dpl:2, p:1;
	unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8;
	uint32_t base3;
	uint32_t zero1;
} __attribute__((packed));

/* Operand of lgdt/lidt/sgdt/sidt: 16-bit limit + 64-bit linear base. */
struct desc_ptr {
	uint16_t size;
	uint64_t address;
} __attribute__((packed));

/*
 * Reassemble the descriptor's 64-bit base address from its scattered
 * base0..base3 fields.
 *
 * Each piece is widened to uint64_t before shifting: the base1/base2
 * bitfields otherwise promote to signed int, so a base2 value >= 0x80
 * shifted left by 24 would set the sign bit and sign-extend into bits
 * 63:32 when merged with base3, corrupting the result.
 */
static inline uint64_t get_desc64_base(const struct desc64 *desc)
{
	return ((uint64_t)desc->base3 << 32) |
	       ((uint64_t)desc->base0 |
		((uint64_t)desc->base1 << 16) |
		((uint64_t)desc->base2 << 24));
}
76 
/*
 * Read the time-stamp counter.
 *
 * The lfence is to wait (on Intel CPUs) until all previous
 * instructions have been executed, so rdtsc is not reordered
 * ahead of them.
 */
static inline uint64_t rdtsc(void)
{
	uint32_t eax, edx;

	__asm__ __volatile__("lfence; rdtsc" : "=a"(eax), "=d"(edx));
	return ((uint64_t)edx) << 32 | eax;
}
88 
/*
 * Read the TSC; the IA32_TSC_AUX value (which rdtscp loads into ECX)
 * is returned through @aux.
 */
static inline uint64_t rdtscp(uint32_t *aux)
{
	uint32_t eax, edx;

	__asm__ __volatile__("rdtscp" : "=a"(eax), "=d"(edx), "=c"(*aux));
	return ((uint64_t)edx) << 32 | eax;
}
96 
/* Read MSR @msr. Privileged: intended to run inside a guest (CPL0). */
static inline uint64_t rdmsr(uint32_t msr)
{
	uint32_t a, d;

	__asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");

	return a | ((uint64_t) d << 32);
}
105 
/* Write @value to MSR @msr. Privileged: intended to run inside a guest. */
static inline void wrmsr(uint32_t msr, uint64_t value)
{
	uint32_t a = value;		/* low 32 bits -> EAX */
	uint32_t d = value >> 32;	/* high 32 bits -> EDX */

	__asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
}
113 
114 
/* Read a 16-bit value from I/O port @port. */
static inline uint16_t inw(uint16_t port)
{
	uint16_t tmp;

	__asm__ __volatile__("in %%dx, %%ax"
		: /* output */ "=a" (tmp)
		: /* input */ "d" (port));

	return tmp;
}
125 
/* Return the current ES segment selector. */
static inline uint16_t get_es(void)
{
	uint16_t es;

	__asm__ __volatile__("mov %%es, %[es]"
			     : /* output */ [es]"=rm"(es));
	return es;
}
134 
/* Return the current CS segment selector. */
static inline uint16_t get_cs(void)
{
	uint16_t cs;

	__asm__ __volatile__("mov %%cs, %[cs]"
			     : /* output */ [cs]"=rm"(cs));
	return cs;
}
143 
/* Return the current SS segment selector. */
static inline uint16_t get_ss(void)
{
	uint16_t ss;

	__asm__ __volatile__("mov %%ss, %[ss]"
			     : /* output */ [ss]"=rm"(ss));
	return ss;
}
152 
/* Return the current DS segment selector. */
static inline uint16_t get_ds(void)
{
	uint16_t ds;

	__asm__ __volatile__("mov %%ds, %[ds]"
			     : /* output */ [ds]"=rm"(ds));
	return ds;
}
161 
/* Return the current FS segment selector. */
static inline uint16_t get_fs(void)
{
	uint16_t fs;

	__asm__ __volatile__("mov %%fs, %[fs]"
			     : /* output */ [fs]"=rm"(fs));
	return fs;
}
170 
/* Return the current GS segment selector. */
static inline uint16_t get_gs(void)
{
	uint16_t gs;

	__asm__ __volatile__("mov %%gs, %[gs]"
			     : /* output */ [gs]"=rm"(gs));
	return gs;
}
179 
/*
 * Return the task register (TR) selector via str.
 * NOTE: faults in user mode when CR4.UMIP is set.
 */
static inline uint16_t get_tr(void)
{
	uint16_t tr;

	__asm__ __volatile__("str %[tr]"
			     : /* output */ [tr]"=rm"(tr));
	return tr;
}
188 
/* Read CR0. Privileged: intended to run inside a guest (CPL0). */
static inline uint64_t get_cr0(void)
{
	uint64_t cr0;

	__asm__ __volatile__("mov %%cr0, %[cr0]"
			     : /* output */ [cr0]"=r"(cr0));
	return cr0;
}
197 
/* Read CR3 (page-table root). Privileged: guest CPL0 only. */
static inline uint64_t get_cr3(void)
{
	uint64_t cr3;

	__asm__ __volatile__("mov %%cr3, %[cr3]"
			     : /* output */ [cr3]"=r"(cr3));
	return cr3;
}
206 
/* Read CR4. Privileged: guest CPL0 only. */
static inline uint64_t get_cr4(void)
{
	uint64_t cr4;

	__asm__ __volatile__("mov %%cr4, %[cr4]"
			     : /* output */ [cr4]"=r"(cr4));
	return cr4;
}
215 
/* Write @val to CR4. Privileged: guest CPL0 only. */
static inline void set_cr4(uint64_t val)
{
	__asm__ __volatile__("mov %0, %%cr4" : : "r" (val) : "memory");
}
220 
get_gdt_base(void)221 static inline uint64_t get_gdt_base(void)
222 {
223 	struct desc_ptr gdt;
224 	__asm__ __volatile__("sgdt %[gdt]"
225 			     : /* output */ [gdt]"=m"(gdt));
226 	return gdt.address;
227 }
228 
get_idt_base(void)229 static inline uint64_t get_idt_base(void)
230 {
231 	struct desc_ptr idt;
232 	__asm__ __volatile__("sidt %[idt]"
233 			     : /* output */ [idt]"=m"(idt));
234 	return idt.address;
235 }
236 
237 #define SET_XMM(__var, __xmm) \
238 	asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)
239 
set_xmm(int n,unsigned long val)240 static inline void set_xmm(int n, unsigned long val)
241 {
242 	switch (n) {
243 	case 0:
244 		SET_XMM(val, xmm0);
245 		break;
246 	case 1:
247 		SET_XMM(val, xmm1);
248 		break;
249 	case 2:
250 		SET_XMM(val, xmm2);
251 		break;
252 	case 3:
253 		SET_XMM(val, xmm3);
254 		break;
255 	case 4:
256 		SET_XMM(val, xmm4);
257 		break;
258 	case 5:
259 		SET_XMM(val, xmm5);
260 		break;
261 	case 6:
262 		SET_XMM(val, xmm6);
263 		break;
264 	case 7:
265 		SET_XMM(val, xmm7);
266 		break;
267 	}
268 }
269 
/* One 64-bit lane vector type, used to bind a variable to an XMM register. */
typedef unsigned long v1di __attribute__ ((vector_size (8)));

/*
 * Return the low 64 bits of XMM register @n (0-7).
 *
 * NOTE(review): the register variables are never written by this
 * function; this relies on the compiler leaving the physical XMM
 * registers untouched between a prior set_xmm()/guest event and this
 * read — fragile outside the controlled selftest guest environment.
 */
static inline unsigned long get_xmm(int n)
{
	assert(n >= 0 && n <= 7);

	register v1di xmm0 __asm__("%xmm0");
	register v1di xmm1 __asm__("%xmm1");
	register v1di xmm2 __asm__("%xmm2");
	register v1di xmm3 __asm__("%xmm3");
	register v1di xmm4 __asm__("%xmm4");
	register v1di xmm5 __asm__("%xmm5");
	register v1di xmm6 __asm__("%xmm6");
	register v1di xmm7 __asm__("%xmm7");
	switch (n) {
	case 0:
		return (unsigned long)xmm0;
	case 1:
		return (unsigned long)xmm1;
	case 2:
		return (unsigned long)xmm2;
	case 3:
		return (unsigned long)xmm3;
	case 4:
		return (unsigned long)xmm4;
	case 5:
		return (unsigned long)xmm5;
	case 6:
		return (unsigned long)xmm6;
	case 7:
		return (unsigned long)xmm7;
	}
	return 0;
}
303 
bool is_intel_cpu(void);

/* Opaque vCPU register/MSR snapshot; defined in the library. */
struct kvm_x86_state;
struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);
void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid,
		     struct kvm_x86_state *state);

struct kvm_msr_list *kvm_get_msr_index_list(void);

struct kvm_cpuid2 *kvm_get_supported_cpuid(void);
void vcpu_set_cpuid(struct kvm_vm *vm, uint32_t vcpuid,
		    struct kvm_cpuid2 *cpuid);

struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_index(uint32_t function, uint32_t index);
319 
/* Convenience wrapper: look up CPUID leaf @function with index (subleaf) 0. */
static inline struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_entry(uint32_t function)
{
	return kvm_get_supported_cpuid_index(function, 0);
}
325 
uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index);
/* _vcpu_set_msr returns the ioctl result; vcpu_set_msr asserts success. */
int _vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index,
		  uint64_t msr_value);
void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index,
		  uint64_t msr_value);

uint32_t kvm_get_cpuid_max_basic(void);
uint32_t kvm_get_cpuid_max_extended(void);
void kvm_get_cpu_address_width(unsigned int *pa_bits, unsigned int *va_bits);
335 
/*
 * Basic CPU control in CR0
 */
#define X86_CR0_PE          (1UL<<0) /* Protection Enable */
#define X86_CR0_MP          (1UL<<1) /* Monitor Coprocessor */
#define X86_CR0_EM          (1UL<<2) /* Emulation */
#define X86_CR0_TS          (1UL<<3) /* Task Switched */
#define X86_CR0_ET          (1UL<<4) /* Extension Type */
#define X86_CR0_NE          (1UL<<5) /* Numeric Error */
#define X86_CR0_WP          (1UL<<16) /* Write Protect */
#define X86_CR0_AM          (1UL<<18) /* Alignment Mask */
#define X86_CR0_NW          (1UL<<29) /* Not Write-through */
#define X86_CR0_CD          (1UL<<30) /* Cache Disable */
#define X86_CR0_PG          (1UL<<31) /* Paging */

/*
 * CPU model specific register (MSR) numbers.
 */

/* x86-64 specific MSRs */
#define MSR_EFER		0xc0000080 /* extended feature register */
#define MSR_STAR		0xc0000081 /* legacy mode SYSCALL target */
#define MSR_LSTAR		0xc0000082 /* long mode SYSCALL target */
#define MSR_CSTAR		0xc0000083 /* compat mode SYSCALL target */
#define MSR_SYSCALL_MASK	0xc0000084 /* EFLAGS mask for syscall */
#define MSR_FS_BASE		0xc0000100 /* 64bit FS base */
#define MSR_GS_BASE		0xc0000101 /* 64bit GS base */
#define MSR_KERNEL_GS_BASE	0xc0000102 /* SwapGS GS shadow */
#define MSR_TSC_AUX		0xc0000103 /* Auxiliary TSC */

/* EFER bits: */
#define EFER_SCE		(1<<0)  /* SYSCALL/SYSRET */
#define EFER_LME		(1<<8)  /* Long mode enable */
#define EFER_LMA		(1<<10) /* Long mode active (read-only) */
#define EFER_NX			(1<<11) /* No execute enable */
#define EFER_SVME		(1<<12) /* Enable virtualization */
#define EFER_LMSLE		(1<<13) /* Long Mode Segment Limit Enable */
#define EFER_FFXSR		(1<<14) /* Enable Fast FXSAVE/FXRSTOR */
374 
/* Intel MSRs. Some also available on other CPUs */

#define MSR_PPIN_CTL			0x0000004e
#define MSR_PPIN			0x0000004f

#define MSR_IA32_PERFCTR0		0x000000c1
#define MSR_IA32_PERFCTR1		0x000000c2
#define MSR_FSB_FREQ			0x000000cd
#define MSR_PLATFORM_INFO		0x000000ce
#define MSR_PLATFORM_INFO_CPUID_FAULT_BIT	31
#define MSR_PLATFORM_INFO_CPUID_FAULT		BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT)

#define MSR_PKG_CST_CONFIG_CONTROL	0x000000e2
#define NHM_C3_AUTO_DEMOTE		(1UL << 25)
#define NHM_C1_AUTO_DEMOTE		(1UL << 26)
#define ATM_LNC_C6_AUTO_DEMOTE		(1UL << 25)
#define SNB_C1_AUTO_UNDEMOTE		(1UL << 27)
#define SNB_C3_AUTO_UNDEMOTE		(1UL << 28)

#define MSR_MTRRcap			0x000000fe
#define MSR_IA32_BBL_CR_CTL		0x00000119
#define MSR_IA32_BBL_CR_CTL3		0x0000011e

#define MSR_IA32_SYSENTER_CS		0x00000174
#define MSR_IA32_SYSENTER_ESP		0x00000175
#define MSR_IA32_SYSENTER_EIP		0x00000176

#define MSR_IA32_MCG_CAP		0x00000179
#define MSR_IA32_MCG_STATUS		0x0000017a
#define MSR_IA32_MCG_CTL		0x0000017b
#define MSR_IA32_MCG_EXT_CTL		0x000004d0

#define MSR_OFFCORE_RSP_0		0x000001a6
#define MSR_OFFCORE_RSP_1		0x000001a7
#define MSR_TURBO_RATIO_LIMIT		0x000001ad
#define MSR_TURBO_RATIO_LIMIT1		0x000001ae
#define MSR_TURBO_RATIO_LIMIT2		0x000001af

#define MSR_LBR_SELECT			0x000001c8
#define MSR_LBR_TOS			0x000001c9
#define MSR_LBR_NHM_FROM		0x00000680
#define MSR_LBR_NHM_TO			0x000006c0
#define MSR_LBR_CORE_FROM		0x00000040
#define MSR_LBR_CORE_TO			0x00000060

#define MSR_LBR_INFO_0			0x00000dc0 /* ... 0xddf for _31 */
#define LBR_INFO_MISPRED		BIT_ULL(63)
#define LBR_INFO_IN_TX			BIT_ULL(62)
#define LBR_INFO_ABORT			BIT_ULL(61)
#define LBR_INFO_CYCLES			0xffff

#define MSR_IA32_PEBS_ENABLE		0x000003f1
#define MSR_IA32_DS_AREA		0x00000600
#define MSR_IA32_PERF_CAPABILITIES	0x00000345
#define MSR_PEBS_LD_LAT_THRESHOLD	0x000003f6

#define MSR_IA32_RTIT_CTL		0x00000570
#define MSR_IA32_RTIT_STATUS		0x00000571
#define MSR_IA32_RTIT_ADDR0_A		0x00000580
#define MSR_IA32_RTIT_ADDR0_B		0x00000581
#define MSR_IA32_RTIT_ADDR1_A		0x00000582
#define MSR_IA32_RTIT_ADDR1_B		0x00000583
#define MSR_IA32_RTIT_ADDR2_A		0x00000584
#define MSR_IA32_RTIT_ADDR2_B		0x00000585
#define MSR_IA32_RTIT_ADDR3_A		0x00000586
#define MSR_IA32_RTIT_ADDR3_B		0x00000587
#define MSR_IA32_RTIT_CR3_MATCH		0x00000572
#define MSR_IA32_RTIT_OUTPUT_BASE	0x00000560
#define MSR_IA32_RTIT_OUTPUT_MASK	0x00000561

#define MSR_MTRRfix64K_00000		0x00000250
#define MSR_MTRRfix16K_80000		0x00000258
#define MSR_MTRRfix16K_A0000		0x00000259
#define MSR_MTRRfix4K_C0000		0x00000268
#define MSR_MTRRfix4K_C8000		0x00000269
#define MSR_MTRRfix4K_D0000		0x0000026a
#define MSR_MTRRfix4K_D8000		0x0000026b
#define MSR_MTRRfix4K_E0000		0x0000026c
#define MSR_MTRRfix4K_E8000		0x0000026d
#define MSR_MTRRfix4K_F0000		0x0000026e
#define MSR_MTRRfix4K_F8000		0x0000026f
#define MSR_MTRRdefType			0x000002ff

#define MSR_IA32_CR_PAT			0x00000277
459 
#define MSR_IA32_DEBUGCTLMSR		0x000001d9
#define MSR_IA32_LASTBRANCHFROMIP	0x000001db
#define MSR_IA32_LASTBRANCHTOIP		0x000001dc
#define MSR_IA32_LASTINTFROMIP		0x000001dd
#define MSR_IA32_LASTINTTOIP		0x000001de

/* DEBUGCTLMSR bits (others vary by model): */
#define DEBUGCTLMSR_LBR			(1UL <<  0) /* last branch recording */
#define DEBUGCTLMSR_BTF_SHIFT		1
#define DEBUGCTLMSR_BTF			(1UL <<  1) /* single-step on branches */
#define DEBUGCTLMSR_TR			(1UL <<  6)
#define DEBUGCTLMSR_BTS			(1UL <<  7)
#define DEBUGCTLMSR_BTINT		(1UL <<  8)
#define DEBUGCTLMSR_BTS_OFF_OS		(1UL <<  9)
#define DEBUGCTLMSR_BTS_OFF_USR		(1UL << 10)
#define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI	(1UL << 11)
#define DEBUGCTLMSR_FREEZE_IN_SMM_BIT	14
#define DEBUGCTLMSR_FREEZE_IN_SMM	(1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT)

#define MSR_PEBS_FRONTEND		0x000003f7

#define MSR_IA32_POWER_CTL		0x000001fc

#define MSR_IA32_MC0_CTL		0x00000400
#define MSR_IA32_MC0_STATUS		0x00000401
#define MSR_IA32_MC0_ADDR		0x00000402
#define MSR_IA32_MC0_MISC		0x00000403

/* C-state Residency Counters */
#define MSR_PKG_C3_RESIDENCY		0x000003f8
#define MSR_PKG_C6_RESIDENCY		0x000003f9
#define MSR_ATOM_PKG_C6_RESIDENCY	0x000003fa
#define MSR_PKG_C7_RESIDENCY		0x000003fa
#define MSR_CORE_C3_RESIDENCY		0x000003fc
#define MSR_CORE_C6_RESIDENCY		0x000003fd
#define MSR_CORE_C7_RESIDENCY		0x000003fe
#define MSR_KNL_CORE_C6_RESIDENCY	0x000003ff
#define MSR_PKG_C2_RESIDENCY		0x0000060d
#define MSR_PKG_C8_RESIDENCY		0x00000630
#define MSR_PKG_C9_RESIDENCY		0x00000631
#define MSR_PKG_C10_RESIDENCY		0x00000632

/* Interrupt Response Limit */
#define MSR_PKGC3_IRTL			0x0000060a
#define MSR_PKGC6_IRTL			0x0000060b
#define MSR_PKGC7_IRTL			0x0000060c
#define MSR_PKGC8_IRTL			0x00000633
#define MSR_PKGC9_IRTL			0x00000634
#define MSR_PKGC10_IRTL			0x00000635
509 
/* Run Time Average Power Limiting (RAPL) Interface */

#define MSR_RAPL_POWER_UNIT		0x00000606

#define MSR_PKG_POWER_LIMIT		0x00000610
#define MSR_PKG_ENERGY_STATUS		0x00000611
#define MSR_PKG_PERF_STATUS		0x00000613
#define MSR_PKG_POWER_INFO		0x00000614

#define MSR_DRAM_POWER_LIMIT		0x00000618
#define MSR_DRAM_ENERGY_STATUS		0x00000619
#define MSR_DRAM_PERF_STATUS		0x0000061b
#define MSR_DRAM_POWER_INFO		0x0000061c

#define MSR_PP0_POWER_LIMIT		0x00000638
#define MSR_PP0_ENERGY_STATUS		0x00000639
#define MSR_PP0_POLICY			0x0000063a
#define MSR_PP0_PERF_STATUS		0x0000063b

#define MSR_PP1_POWER_LIMIT		0x00000640
#define MSR_PP1_ENERGY_STATUS		0x00000641
#define MSR_PP1_POLICY			0x00000642

/* Config TDP MSRs */
#define MSR_CONFIG_TDP_NOMINAL		0x00000648
#define MSR_CONFIG_TDP_LEVEL_1		0x00000649
#define MSR_CONFIG_TDP_LEVEL_2		0x0000064A
#define MSR_CONFIG_TDP_CONTROL		0x0000064B
#define MSR_TURBO_ACTIVATION_RATIO	0x0000064C

#define MSR_PLATFORM_ENERGY_STATUS	0x0000064D

#define MSR_PKG_WEIGHTED_CORE_C0_RES	0x00000658
#define MSR_PKG_ANY_CORE_C0_RES		0x00000659
#define MSR_PKG_ANY_GFXE_C0_RES		0x0000065A
#define MSR_PKG_BOTH_CORE_GFXE_C0_RES	0x0000065B

#define MSR_CORE_C1_RES			0x00000660
#define MSR_MODULE_C6_RES_MS		0x00000664

#define MSR_CC6_DEMOTION_POLICY_CONFIG	0x00000668
#define MSR_MC6_DEMOTION_POLICY_CONFIG	0x00000669

#define MSR_ATOM_CORE_RATIOS		0x0000066a
#define MSR_ATOM_CORE_VIDS		0x0000066b
#define MSR_ATOM_CORE_TURBO_RATIOS	0x0000066c
#define MSR_ATOM_CORE_TURBO_VIDS	0x0000066d


#define MSR_CORE_PERF_LIMIT_REASONS	0x00000690
#define MSR_GFX_PERF_LIMIT_REASONS	0x000006B0
#define MSR_RING_PERF_LIMIT_REASONS	0x000006B1

/* Hardware P state interface */
#define MSR_PPERF			0x0000064e
#define MSR_PERF_LIMIT_REASONS		0x0000064f
#define MSR_PM_ENABLE			0x00000770
#define MSR_HWP_CAPABILITIES		0x00000771
#define MSR_HWP_REQUEST_PKG		0x00000772
#define MSR_HWP_INTERRUPT		0x00000773
#define MSR_HWP_REQUEST			0x00000774
#define MSR_HWP_STATUS			0x00000777

/* CPUID.6.EAX */
#define HWP_BASE_BIT			(1<<7)
#define HWP_NOTIFICATIONS_BIT		(1<<8)
#define HWP_ACTIVITY_WINDOW_BIT		(1<<9)
#define HWP_ENERGY_PERF_PREFERENCE_BIT	(1<<10)
#define HWP_PACKAGE_LEVEL_REQUEST_BIT	(1<<11)

/* IA32_HWP_CAPABILITIES */
#define HWP_HIGHEST_PERF(x)		(((x) >> 0) & 0xff)
#define HWP_GUARANTEED_PERF(x)		(((x) >> 8) & 0xff)
#define HWP_MOSTEFFICIENT_PERF(x)	(((x) >> 16) & 0xff)
#define HWP_LOWEST_PERF(x)		(((x) >> 24) & 0xff)

/* IA32_HWP_REQUEST */
#define HWP_MIN_PERF(x)			(x & 0xff)
#define HWP_MAX_PERF(x)			((x & 0xff) << 8)
#define HWP_DESIRED_PERF(x)		((x & 0xff) << 16)
#define HWP_ENERGY_PERF_PREFERENCE(x)	(((unsigned long long) x & 0xff) << 24)
#define HWP_EPP_PERFORMANCE		0x00
#define HWP_EPP_BALANCE_PERFORMANCE	0x80
#define HWP_EPP_BALANCE_POWERSAVE	0xC0
#define HWP_EPP_POWERSAVE		0xFF
#define HWP_ACTIVITY_WINDOW(x)		((unsigned long long)(x & 0xff3) << 32)
#define HWP_PACKAGE_CONTROL(x)		((unsigned long long)(x & 0x1) << 42)

/* IA32_HWP_STATUS */
#define HWP_GUARANTEED_CHANGE(x)	(x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM(x)	(x & 0x4)

/* IA32_HWP_INTERRUPT */
#define HWP_CHANGE_TO_GUARANTEED_INT(x)	(x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM_INT(x)	(x & 0x2)
605 
#define MSR_AMD64_MC0_MASK		0xc0010044

#define MSR_IA32_MCx_CTL(x)		(MSR_IA32_MC0_CTL + 4*(x))
#define MSR_IA32_MCx_STATUS(x)		(MSR_IA32_MC0_STATUS + 4*(x))
#define MSR_IA32_MCx_ADDR(x)		(MSR_IA32_MC0_ADDR + 4*(x))
#define MSR_IA32_MCx_MISC(x)		(MSR_IA32_MC0_MISC + 4*(x))

#define MSR_AMD64_MCx_MASK(x)		(MSR_AMD64_MC0_MASK + (x))

/* These are consecutive and not in the normal 4er MCE bank block */
#define MSR_IA32_MC0_CTL2		0x00000280
#define MSR_IA32_MCx_CTL2(x)		(MSR_IA32_MC0_CTL2 + (x))

#define MSR_P6_PERFCTR0			0x000000c1
#define MSR_P6_PERFCTR1			0x000000c2
#define MSR_P6_EVNTSEL0			0x00000186
#define MSR_P6_EVNTSEL1			0x00000187

#define MSR_KNC_PERFCTR0               0x00000020
#define MSR_KNC_PERFCTR1               0x00000021
#define MSR_KNC_EVNTSEL0               0x00000028
#define MSR_KNC_EVNTSEL1               0x00000029

/* Alternative perfctr range with full access. */
#define MSR_IA32_PMC0			0x000004c1

/* AMD64 MSRs. Not complete. See the architecture manual for a more
   complete list. */

#define MSR_AMD64_PATCH_LEVEL		0x0000008b
#define MSR_AMD64_TSC_RATIO		0xc0000104
#define MSR_AMD64_NB_CFG		0xc001001f
#define MSR_AMD64_PATCH_LOADER		0xc0010020
#define MSR_AMD64_OSVW_ID_LENGTH	0xc0010140
#define MSR_AMD64_OSVW_STATUS		0xc0010141
#define MSR_AMD64_LS_CFG		0xc0011020
#define MSR_AMD64_DC_CFG		0xc0011022
#define MSR_AMD64_BU_CFG2		0xc001102a
#define MSR_AMD64_IBSFETCHCTL		0xc0011030
#define MSR_AMD64_IBSFETCHLINAD		0xc0011031
#define MSR_AMD64_IBSFETCHPHYSAD	0xc0011032
#define MSR_AMD64_IBSFETCH_REG_COUNT	3
#define MSR_AMD64_IBSFETCH_REG_MASK	((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1)
#define MSR_AMD64_IBSOPCTL		0xc0011033
#define MSR_AMD64_IBSOPRIP		0xc0011034
#define MSR_AMD64_IBSOPDATA		0xc0011035
#define MSR_AMD64_IBSOPDATA2		0xc0011036
#define MSR_AMD64_IBSOPDATA3		0xc0011037
#define MSR_AMD64_IBSDCLINAD		0xc0011038
#define MSR_AMD64_IBSDCPHYSAD		0xc0011039
#define MSR_AMD64_IBSOP_REG_COUNT	7
#define MSR_AMD64_IBSOP_REG_MASK	((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1)
#define MSR_AMD64_IBSCTL		0xc001103a
#define MSR_AMD64_IBSBRTARGET		0xc001103b
#define MSR_AMD64_IBSOPDATA4		0xc001103d
#define MSR_AMD64_IBS_REG_COUNT_MAX	8 /* includes MSR_AMD64_IBSBRTARGET */
#define MSR_AMD64_SEV			0xc0010131
#define MSR_AMD64_SEV_ENABLED_BIT	0
#define MSR_AMD64_SEV_ENABLED		BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT)
665 
/* Fam 17h MSRs */
#define MSR_F17H_IRPERF			0xc00000e9

/* Fam 16h MSRs */
#define MSR_F16H_L2I_PERF_CTL		0xc0010230
#define MSR_F16H_L2I_PERF_CTR		0xc0010231
#define MSR_F16H_DR1_ADDR_MASK		0xc0011019
#define MSR_F16H_DR2_ADDR_MASK		0xc001101a
#define MSR_F16H_DR3_ADDR_MASK		0xc001101b
#define MSR_F16H_DR0_ADDR_MASK		0xc0011027

/* Fam 15h MSRs */
#define MSR_F15H_PERF_CTL		0xc0010200
#define MSR_F15H_PERF_CTR		0xc0010201
#define MSR_F15H_NB_PERF_CTL		0xc0010240
#define MSR_F15H_NB_PERF_CTR		0xc0010241
#define MSR_F15H_PTSC			0xc0010280
#define MSR_F15H_IC_CFG			0xc0011021

/* Fam 10h MSRs */
#define MSR_FAM10H_MMIO_CONF_BASE	0xc0010058
#define FAM10H_MMIO_CONF_ENABLE		(1<<0)
#define FAM10H_MMIO_CONF_BUSRANGE_MASK	0xf
#define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
#define FAM10H_MMIO_CONF_BASE_MASK	0xfffffffULL
#define FAM10H_MMIO_CONF_BASE_SHIFT	20
#define MSR_FAM10H_NODE_ID		0xc001100c
#define MSR_F10H_DECFG			0xc0011029
#define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT	1
#define MSR_F10H_DECFG_LFENCE_SERIALIZE		BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT)

/* K8 MSRs */
#define MSR_K8_TOP_MEM1			0xc001001a
#define MSR_K8_TOP_MEM2			0xc001001d
#define MSR_K8_SYSCFG			0xc0010010
#define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT	23
#define MSR_K8_SYSCFG_MEM_ENCRYPT	BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT)
#define MSR_K8_INT_PENDING_MSG		0xc0010055
/* C1E active bits in int pending message */
#define K8_INTP_C1E_ACTIVE_MASK		0x18000000
#define MSR_K8_TSEG_ADDR		0xc0010112
#define MSR_K8_TSEG_MASK		0xc0010113
#define K8_MTRRFIXRANGE_DRAM_ENABLE	0x00040000 /* MtrrFixDramEn bit    */
#define K8_MTRRFIXRANGE_DRAM_MODIFY	0x00080000 /* MtrrFixDramModEn bit */
#define K8_MTRR_RDMEM_WRMEM_MASK	0x18181818 /* Mask: RdMem|WrMem    */

/* K7 MSRs */
#define MSR_K7_EVNTSEL0			0xc0010000
#define MSR_K7_PERFCTR0			0xc0010004
#define MSR_K7_EVNTSEL1			0xc0010001
#define MSR_K7_PERFCTR1			0xc0010005
#define MSR_K7_EVNTSEL2			0xc0010002
#define MSR_K7_PERFCTR2			0xc0010006
#define MSR_K7_EVNTSEL3			0xc0010003
#define MSR_K7_PERFCTR3			0xc0010007
#define MSR_K7_CLK_CTL			0xc001001b
#define MSR_K7_HWCR			0xc0010015
#define MSR_K7_HWCR_SMMLOCK_BIT		0
#define MSR_K7_HWCR_SMMLOCK		BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT)
#define MSR_K7_FID_VID_CTL		0xc0010041
#define MSR_K7_FID_VID_STATUS		0xc0010042
727 
/* K6 MSRs */
#define MSR_K6_WHCR			0xc0000082
#define MSR_K6_UWCCR			0xc0000085
#define MSR_K6_EPMR			0xc0000086
#define MSR_K6_PSOR			0xc0000087
#define MSR_K6_PFIR			0xc0000088

/* Centaur-Hauls/IDT defined MSRs. */
#define MSR_IDT_FCR1			0x00000107
#define MSR_IDT_FCR2			0x00000108
#define MSR_IDT_FCR3			0x00000109
#define MSR_IDT_FCR4			0x0000010a

#define MSR_IDT_MCR0			0x00000110
#define MSR_IDT_MCR1			0x00000111
#define MSR_IDT_MCR2			0x00000112
#define MSR_IDT_MCR3			0x00000113
#define MSR_IDT_MCR4			0x00000114
#define MSR_IDT_MCR5			0x00000115
#define MSR_IDT_MCR6			0x00000116
#define MSR_IDT_MCR7			0x00000117
#define MSR_IDT_MCR_CTRL		0x00000120

/* VIA Cyrix defined MSRs*/
#define MSR_VIA_FCR			0x00001107
#define MSR_VIA_LONGHAUL		0x0000110a
#define MSR_VIA_RNG			0x0000110b
#define MSR_VIA_BCR2			0x00001147

/* Transmeta defined MSRs */
#define MSR_TMTA_LONGRUN_CTRL		0x80868010
#define MSR_TMTA_LONGRUN_FLAGS		0x80868011
#define MSR_TMTA_LRTI_READOUT		0x80868018
#define MSR_TMTA_LRTI_VOLT_MHZ		0x8086801a
762 
/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR		0x00000000
#define MSR_IA32_P5_MC_TYPE		0x00000001
#define MSR_IA32_TSC			0x00000010
#define MSR_IA32_PLATFORM_ID		0x00000017
#define MSR_IA32_EBL_CR_POWERON		0x0000002a
#define MSR_EBC_FREQUENCY_ID		0x0000002c
#define MSR_SMI_COUNT			0x00000034
#define MSR_IA32_FEATURE_CONTROL        0x0000003a
#define MSR_IA32_TSC_ADJUST             0x0000003b
#define MSR_IA32_BNDCFGS		0x00000d90

#define MSR_IA32_BNDCFGS_RSVD		0x00000ffc

#define MSR_IA32_XSS			0x00000da0

#define FEATURE_CONTROL_LOCKED				(1<<0)
#define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX	(1<<1)
#define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX	(1<<2)
#define FEATURE_CONTROL_LMCE				(1<<20)

#define MSR_IA32_APICBASE		0x0000001b
#define MSR_IA32_APICBASE_BSP		(1<<8)
#define MSR_IA32_APICBASE_ENABLE	(1<<11)
#define MSR_IA32_APICBASE_BASE		(0xfffff<<12)

#define APIC_BASE_MSR	0x800
#define X2APIC_ENABLE	(1UL << 10)
#define	APIC_ICR	0x300
#define		APIC_DEST_SELF		0x40000
#define		APIC_DEST_ALLINC	0x80000
#define		APIC_DEST_ALLBUT	0xC0000
#define		APIC_ICR_RR_MASK	0x30000
#define		APIC_ICR_RR_INVALID	0x00000
#define		APIC_ICR_RR_INPROG	0x10000
#define		APIC_ICR_RR_VALID	0x20000
#define		APIC_INT_LEVELTRIG	0x08000
#define		APIC_INT_ASSERT		0x04000
#define		APIC_ICR_BUSY		0x01000
#define		APIC_DEST_LOGICAL	0x00800
#define		APIC_DEST_PHYSICAL	0x00000
#define		APIC_DM_FIXED		0x00000
#define		APIC_DM_FIXED_MASK	0x00700
#define		APIC_DM_LOWEST		0x00100
#define		APIC_DM_SMI		0x00200
#define		APIC_DM_REMRD		0x00300
#define		APIC_DM_NMI		0x00400
#define		APIC_DM_INIT		0x00500
#define		APIC_DM_STARTUP		0x00600
#define		APIC_DM_EXTINT		0x00700
#define		APIC_VECTOR_MASK	0x000FF
#define	APIC_ICR2	0x310
815 
#define MSR_IA32_TSCDEADLINE		0x000006e0

#define MSR_IA32_UCODE_WRITE		0x00000079
#define MSR_IA32_UCODE_REV		0x0000008b

#define MSR_IA32_SMM_MONITOR_CTL	0x0000009b
#define MSR_IA32_SMBASE			0x0000009e

#define MSR_IA32_PERF_STATUS		0x00000198
#define MSR_IA32_PERF_CTL		0x00000199
#define INTEL_PERF_CTL_MASK		0xffff
#define MSR_AMD_PSTATE_DEF_BASE		0xc0010064
#define MSR_AMD_PERF_STATUS		0xc0010063
#define MSR_AMD_PERF_CTL		0xc0010062

#define MSR_IA32_MPERF			0x000000e7
#define MSR_IA32_APERF			0x000000e8

#define MSR_IA32_THERM_CONTROL		0x0000019a
#define MSR_IA32_THERM_INTERRUPT	0x0000019b

#define THERM_INT_HIGH_ENABLE		(1 << 0)
#define THERM_INT_LOW_ENABLE		(1 << 1)
#define THERM_INT_PLN_ENABLE		(1 << 24)

#define MSR_IA32_THERM_STATUS		0x0000019c

#define THERM_STATUS_PROCHOT		(1 << 0)
#define THERM_STATUS_POWER_LIMIT	(1 << 10)

#define MSR_THERM2_CTL			0x0000019d

#define MSR_THERM2_CTL_TM_SELECT	(1ULL << 16)

#define MSR_IA32_MISC_ENABLE		0x000001a0

#define MSR_IA32_TEMPERATURE_TARGET	0x000001a2

#define MSR_MISC_FEATURE_CONTROL	0x000001a4
#define MSR_MISC_PWR_MGMT		0x000001aa

#define MSR_IA32_ENERGY_PERF_BIAS	0x000001b0
#define ENERGY_PERF_BIAS_PERFORMANCE		0
#define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE	4
#define ENERGY_PERF_BIAS_NORMAL			6
#define ENERGY_PERF_BIAS_BALANCE_POWERSAVE	8
#define ENERGY_PERF_BIAS_POWERSAVE		15

#define MSR_IA32_PACKAGE_THERM_STATUS		0x000001b1

#define PACKAGE_THERM_STATUS_PROCHOT		(1 << 0)
#define PACKAGE_THERM_STATUS_POWER_LIMIT	(1 << 10)

#define MSR_IA32_PACKAGE_THERM_INTERRUPT	0x000001b2

#define PACKAGE_THERM_INT_HIGH_ENABLE		(1 << 0)
#define PACKAGE_THERM_INT_LOW_ENABLE		(1 << 1)
#define PACKAGE_THERM_INT_PLN_ENABLE		(1 << 24)

/* Thermal Thresholds Support */
#define THERM_INT_THRESHOLD0_ENABLE    (1 << 15)
#define THERM_SHIFT_THRESHOLD0        8
#define THERM_MASK_THRESHOLD0          (0x7f << THERM_SHIFT_THRESHOLD0)
#define THERM_INT_THRESHOLD1_ENABLE    (1 << 23)
#define THERM_SHIFT_THRESHOLD1        16
#define THERM_MASK_THRESHOLD1          (0x7f << THERM_SHIFT_THRESHOLD1)
#define THERM_STATUS_THRESHOLD0        (1 << 6)
#define THERM_LOG_THRESHOLD0           (1 << 7)
#define THERM_STATUS_THRESHOLD1        (1 << 8)
#define THERM_LOG_THRESHOLD1           (1 << 9)
886 
887 /* MISC_ENABLE bits: architectural */
888 #define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT		0
889 #define MSR_IA32_MISC_ENABLE_FAST_STRING		(1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
890 #define MSR_IA32_MISC_ENABLE_TCC_BIT			1
891 #define MSR_IA32_MISC_ENABLE_TCC			(1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
892 #define MSR_IA32_MISC_ENABLE_EMON_BIT			7
893 #define MSR_IA32_MISC_ENABLE_EMON			(1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
894 #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT		11
895 #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL		(1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
896 #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT		12
897 #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL		(1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
898 #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT	16
899 #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP		(1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
900 #define MSR_IA32_MISC_ENABLE_MWAIT_BIT			18
901 #define MSR_IA32_MISC_ENABLE_MWAIT			(1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
902 #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT		22
903 #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID		(1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
904 #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT		23
905 #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
906 #define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT		34
907 #define MSR_IA32_MISC_ENABLE_XD_DISABLE			(1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)
908 
/*
 * MISC_ENABLE bits: model-specific, meaning may vary from core to core
 *
 * Same *_BIT / mask pairing as the architectural group above, but these
 * bit assignments are only valid on specific CPU models.
 */
#define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT		2
#define MSR_IA32_MISC_ENABLE_X87_COMPAT			(1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
#define MSR_IA32_MISC_ENABLE_TM1_BIT			3
#define MSR_IA32_MISC_ENABLE_TM1			(1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT	4
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT	6
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT		8
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK		(1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT	9
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
/* FERR and FERR_MULTIPLEX deliberately share bit 10: two names for the
 * same model-specific bit on different CPU generations. */
#define MSR_IA32_MISC_ENABLE_FERR_BIT			10
#define MSR_IA32_MISC_ENABLE_FERR			(1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT		10
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX		(1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
#define MSR_IA32_MISC_ENABLE_TM2_BIT			13
#define MSR_IA32_MISC_ENABLE_TM2			(1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT	19
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT		20
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK		(1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT		24
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT		(1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
/* Bits 37-39 are in the upper 32 bits; 1ULL keeps the shifts defined. */
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT	37
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT		38
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT	39
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)

/* MISC_FEATURES_ENABLES non-architectural features */
#define MSR_MISC_FEATURES_ENABLES	0x00000140

#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT	0
/* NOTE(review): BIT_ULL() is not defined in this view — presumably provided
 * by an include pulled in elsewhere; confirm against the full header. */
#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT		BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
#define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT	1

/* IA32_TSC_DEADLINE MSR (TSC-deadline timer compare value). */
#define MSR_IA32_TSC_DEADLINE		0x000006E0

/*
 * P4/Xeon+ specific
 *
 * MCG MSRs named after the 32-bit GPRs — presumably the machine-check
 * register-state save area on those parts; see the Intel SDM for details.
 */
#define MSR_IA32_MCG_EAX		0x00000180
#define MSR_IA32_MCG_EBX		0x00000181
#define MSR_IA32_MCG_ECX		0x00000182
#define MSR_IA32_MCG_EDX		0x00000183
#define MSR_IA32_MCG_ESI		0x00000184
#define MSR_IA32_MCG_EDI		0x00000185
#define MSR_IA32_MCG_EBP		0x00000186
#define MSR_IA32_MCG_ESP		0x00000187
#define MSR_IA32_MCG_EFLAGS		0x00000188
#define MSR_IA32_MCG_EIP		0x00000189
#define MSR_IA32_MCG_RESERVED		0x0000018a

/*
 * Pentium IV performance counter MSRs
 *
 * Three register families: the counters themselves (PERFCTR, 0x300-0x311),
 * their counter configuration control registers (CCCR, 0x360-0x371, one per
 * counter), and the event selection control registers (ESCR, 0x3a0-0x3e1)
 * grouped by the P4 unit they monitor (BPU, MS, FLAME, IQ, ...).
 */
#define MSR_P4_BPU_PERFCTR0		0x00000300
#define MSR_P4_BPU_PERFCTR1		0x00000301
#define MSR_P4_BPU_PERFCTR2		0x00000302
#define MSR_P4_BPU_PERFCTR3		0x00000303
#define MSR_P4_MS_PERFCTR0		0x00000304
#define MSR_P4_MS_PERFCTR1		0x00000305
#define MSR_P4_MS_PERFCTR2		0x00000306
#define MSR_P4_MS_PERFCTR3		0x00000307
#define MSR_P4_FLAME_PERFCTR0		0x00000308
#define MSR_P4_FLAME_PERFCTR1		0x00000309
#define MSR_P4_FLAME_PERFCTR2		0x0000030a
#define MSR_P4_FLAME_PERFCTR3		0x0000030b
#define MSR_P4_IQ_PERFCTR0		0x0000030c
#define MSR_P4_IQ_PERFCTR1		0x0000030d
#define MSR_P4_IQ_PERFCTR2		0x0000030e
#define MSR_P4_IQ_PERFCTR3		0x0000030f
#define MSR_P4_IQ_PERFCTR4		0x00000310
#define MSR_P4_IQ_PERFCTR5		0x00000311
#define MSR_P4_BPU_CCCR0		0x00000360
#define MSR_P4_BPU_CCCR1		0x00000361
#define MSR_P4_BPU_CCCR2		0x00000362
#define MSR_P4_BPU_CCCR3		0x00000363
#define MSR_P4_MS_CCCR0			0x00000364
#define MSR_P4_MS_CCCR1			0x00000365
#define MSR_P4_MS_CCCR2			0x00000366
#define MSR_P4_MS_CCCR3			0x00000367
#define MSR_P4_FLAME_CCCR0		0x00000368
#define MSR_P4_FLAME_CCCR1		0x00000369
#define MSR_P4_FLAME_CCCR2		0x0000036a
#define MSR_P4_FLAME_CCCR3		0x0000036b
#define MSR_P4_IQ_CCCR0			0x0000036c
#define MSR_P4_IQ_CCCR1			0x0000036d
#define MSR_P4_IQ_CCCR2			0x0000036e
#define MSR_P4_IQ_CCCR3			0x0000036f
#define MSR_P4_IQ_CCCR4			0x00000370
#define MSR_P4_IQ_CCCR5			0x00000371
/* ESCRs are listed alphabetically by unit, not by address. */
#define MSR_P4_ALF_ESCR0		0x000003ca
#define MSR_P4_ALF_ESCR1		0x000003cb
#define MSR_P4_BPU_ESCR0		0x000003b2
#define MSR_P4_BPU_ESCR1		0x000003b3
#define MSR_P4_BSU_ESCR0		0x000003a0
#define MSR_P4_BSU_ESCR1		0x000003a1
#define MSR_P4_CRU_ESCR0		0x000003b8
#define MSR_P4_CRU_ESCR1		0x000003b9
#define MSR_P4_CRU_ESCR2		0x000003cc
#define MSR_P4_CRU_ESCR3		0x000003cd
#define MSR_P4_CRU_ESCR4		0x000003e0
#define MSR_P4_CRU_ESCR5		0x000003e1
#define MSR_P4_DAC_ESCR0		0x000003a8
#define MSR_P4_DAC_ESCR1		0x000003a9
#define MSR_P4_FIRM_ESCR0		0x000003a4
#define MSR_P4_FIRM_ESCR1		0x000003a5
#define MSR_P4_FLAME_ESCR0		0x000003a6
#define MSR_P4_FLAME_ESCR1		0x000003a7
#define MSR_P4_FSB_ESCR0		0x000003a2
#define MSR_P4_FSB_ESCR1		0x000003a3
#define MSR_P4_IQ_ESCR0			0x000003ba
#define MSR_P4_IQ_ESCR1			0x000003bb
#define MSR_P4_IS_ESCR0			0x000003b4
#define MSR_P4_IS_ESCR1			0x000003b5
#define MSR_P4_ITLB_ESCR0		0x000003b6
#define MSR_P4_ITLB_ESCR1		0x000003b7
#define MSR_P4_IX_ESCR0			0x000003c8
#define MSR_P4_IX_ESCR1			0x000003c9
#define MSR_P4_MOB_ESCR0		0x000003aa
#define MSR_P4_MOB_ESCR1		0x000003ab
#define MSR_P4_MS_ESCR0			0x000003c0
#define MSR_P4_MS_ESCR1			0x000003c1
#define MSR_P4_PMH_ESCR0		0x000003ac
#define MSR_P4_PMH_ESCR1		0x000003ad
#define MSR_P4_RAT_ESCR0		0x000003bc
#define MSR_P4_RAT_ESCR1		0x000003bd
#define MSR_P4_SAAT_ESCR0		0x000003ae
#define MSR_P4_SAAT_ESCR1		0x000003af
#define MSR_P4_SSU_ESCR0		0x000003be
#define MSR_P4_SSU_ESCR1		0x000003bf /* guess: not in manual */

#define MSR_P4_TBPU_ESCR0		0x000003c2
#define MSR_P4_TBPU_ESCR1		0x000003c3
#define MSR_P4_TC_ESCR0			0x000003c4
#define MSR_P4_TC_ESCR1			0x000003c5
#define MSR_P4_U2L_ESCR0		0x000003b0
#define MSR_P4_U2L_ESCR1		0x000003b1

#define MSR_P4_PEBS_MATRIX_VERT		0x000003f2

/*
 * Intel Core-based CPU performance counters
 *
 * Architectural perfmon fixed counters and global control/status MSRs.
 * Note: 0x309-0x30b numerically overlap MSR_P4_FLAME_PERFCTR1..3 above;
 * the two definitions target different CPU families.
 */
#define MSR_CORE_PERF_FIXED_CTR0	0x00000309
#define MSR_CORE_PERF_FIXED_CTR1	0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2	0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL	0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS	0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL	0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL	0x00000390

/* Geode defined MSRs (AMD/NatSemi Geode bus controller configuration). */
#define MSR_GEODE_BUSCONT_CONF0		0x00001900

/*
 * Intel VT MSRs
 *
 * Read-only VMX capability MSRs (0x480-0x491) that a hypervisor reads to
 * learn which VMCS controls and features the CPU supports.  The TRUE_*
 * variants report the allowed settings when the default1 controls may be
 * cleared (advertised via VMX_BASIC_TRUE_CTLS below).
 */
#define MSR_IA32_VMX_BASIC              0x00000480
#define MSR_IA32_VMX_PINBASED_CTLS      0x00000481
#define MSR_IA32_VMX_PROCBASED_CTLS     0x00000482
#define MSR_IA32_VMX_EXIT_CTLS          0x00000483
#define MSR_IA32_VMX_ENTRY_CTLS         0x00000484
#define MSR_IA32_VMX_MISC               0x00000485
#define MSR_IA32_VMX_CR0_FIXED0         0x00000486
#define MSR_IA32_VMX_CR0_FIXED1         0x00000487
#define MSR_IA32_VMX_CR4_FIXED0         0x00000488
#define MSR_IA32_VMX_CR4_FIXED1         0x00000489
#define MSR_IA32_VMX_VMCS_ENUM          0x0000048a
#define MSR_IA32_VMX_PROCBASED_CTLS2    0x0000048b
#define MSR_IA32_VMX_EPT_VPID_CAP       0x0000048c
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS  0x0000048d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS      0x0000048f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS     0x00000490
#define MSR_IA32_VMX_VMFUNC             0x00000491

/* VMX_BASIC bits and bitmasks (fields within MSR_IA32_VMX_BASIC) */
#define VMX_BASIC_VMCS_SIZE_SHIFT	32	/* shift down to the VMCS region size field */
#define VMX_BASIC_TRUE_CTLS		(1ULL << 55)	/* TRUE_* capability MSRs are available */
#define VMX_BASIC_64		0x0001000000000000LLU
#define VMX_BASIC_MEM_TYPE_SHIFT	50	/* VMCS memory type occupies bits 53:50 */
#define VMX_BASIC_MEM_TYPE_MASK	0x003c000000000000LLU
#define VMX_BASIC_MEM_TYPE_WB	6LLU	/* write-back memory type encoding */
#define VMX_BASIC_INOUT		0x0040000000000000LLU

/* VMX_EPT_VPID_CAP bits */
#define VMX_EPT_VPID_CAP_AD_BITS	(1ULL << 21)	/* EPT accessed/dirty flags supported */

/* MSR_IA32_VMX_MISC bits */
#define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
#define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE   0x1F	/* mask for the timer-rate field (low 5 bits) */
/* AMD-V (SVM) MSRs — note the AMD extended MSR range 0xc001xxxx */

#define MSR_VM_CR                       0xc0010114
#define MSR_VM_IGNNE                    0xc0010115
#define MSR_VM_HSAVE_PA                 0xc0010117	/* physical address of the host state-save area */

#endif /* SELFTEST_KVM_PROCESSOR_H */
1104