/*
 * tools/testing/selftests/kvm/include/x86.h
 *
 * Copyright (C) 2018, Google LLC.
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 *
 */

#ifndef SELFTEST_KVM_X86_H
#define SELFTEST_KVM_X86_H

#include <assert.h>
#include <stdint.h>

#define X86_EFLAGS_FIXED (1u << 1)

#define X86_CR4_VME (1ul << 0)
#define X86_CR4_PVI (1ul << 1)
#define X86_CR4_TSD (1ul << 2)
#define X86_CR4_DE (1ul << 3)
#define X86_CR4_PSE (1ul << 4)
#define X86_CR4_PAE (1ul << 5)
#define X86_CR4_MCE (1ul << 6)
#define X86_CR4_PGE (1ul << 7)
#define X86_CR4_PCE (1ul << 8)
#define X86_CR4_OSFXSR (1ul << 9)
#define X86_CR4_OSXMMEXCPT (1ul << 10)
#define X86_CR4_UMIP (1ul << 11)
#define X86_CR4_VMXE (1ul << 13)
#define X86_CR4_SMXE (1ul << 14)
#define X86_CR4_FSGSBASE (1ul << 16)
#define X86_CR4_PCIDE (1ul << 17)
#define X86_CR4_OSXSAVE (1ul << 18)
#define X86_CR4_SMEP (1ul << 20)
#define X86_CR4_SMAP (1ul << 21)
#define X86_CR4_PKE (1ul << 22)

/* The enum values match the instruction encoding of each register */
enum x86_register {
        RAX = 0,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
};

/*
 * Segment descriptor layout, including the upper 32 base bits that only
 * exist in the 16-byte system-descriptor format. In the second dword, the
 * type field occupies bits 8-11 and the S bit is bit 12.
 */
struct desc64 {
        uint16_t limit0;
        uint16_t base0;
        unsigned base1:8, type:4, s:1, dpl:2, p:1;
        unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8;
        uint32_t base3;
        uint32_t zero1;
} __attribute__((packed));

struct desc_ptr {
        uint16_t size;
        uint64_t address;
} __attribute__((packed));

static inline uint64_t get_desc64_base(const struct desc64 *desc)
{
        return ((uint64_t)desc->base3 << 32) |
               (desc->base0 | ((desc->base1) << 16) | ((desc->base2) << 24));
}

static inline uint64_t rdtsc(void)
{
        uint32_t eax, edx;

        /*
         * The lfence is to wait (on Intel CPUs) until all previous
         * instructions have been executed.
         */
        __asm__ __volatile__("lfence; rdtsc" : "=a"(eax), "=d"(edx));
        return ((uint64_t)edx) << 32 | eax;
}

static inline uint64_t rdtscp(uint32_t *aux)
{
        uint32_t eax, edx;

        __asm__ __volatile__("rdtscp" : "=a"(eax), "=d"(edx), "=c"(*aux));
        return ((uint64_t)edx) << 32 | eax;
}

static inline uint64_t rdmsr(uint32_t msr)
{
        uint32_t a, d;

        __asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");

        return a | ((uint64_t) d << 32);
}

static inline void wrmsr(uint32_t msr, uint64_t value)
{
        uint32_t a = value;
        uint32_t d = value >> 32;

        __asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
}
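
/*
 * Illustrative only, not part of the original selftest API: a minimal sketch
 * of how the rdmsr()/wrmsr() helpers above can be combined into a
 * read-modify-write of an MSR from guest code. The helper name is made up
 * for this example.
 */
static inline void example_msr_set_bits(uint32_t msr, uint64_t mask)
{
        wrmsr(msr, rdmsr(msr) | mask);
}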

static inline uint16_t inw(uint16_t port)
{
        uint16_t tmp;

        __asm__ __volatile__("in %%dx, %%ax"
                : /* output */ "=a" (tmp)
                : /* input */ "d" (port));

        return tmp;
}

static inline uint16_t get_es(void)
{
        uint16_t es;

        __asm__ __volatile__("mov %%es, %[es]"
                : /* output */ [es]"=rm"(es));
        return es;
}

static inline uint16_t get_cs(void)
{
        uint16_t cs;

        __asm__ __volatile__("mov %%cs, %[cs]"
                : /* output */ [cs]"=rm"(cs));
        return cs;
}

static inline uint16_t get_ss(void)
{
        uint16_t ss;

        __asm__ __volatile__("mov %%ss, %[ss]"
                : /* output */ [ss]"=rm"(ss));
        return ss;
}

static inline uint16_t get_ds(void)
{
        uint16_t ds;

        __asm__ __volatile__("mov %%ds, %[ds]"
                : /* output */ [ds]"=rm"(ds));
        return ds;
}

static inline uint16_t get_fs(void)
{
        uint16_t fs;

        __asm__ __volatile__("mov %%fs, %[fs]"
                : /* output */ [fs]"=rm"(fs));
        return fs;
}

static inline uint16_t get_gs(void)
{
        uint16_t gs;

        __asm__ __volatile__("mov %%gs, %[gs]"
                : /* output */ [gs]"=rm"(gs));
        return gs;
}

static inline uint16_t get_tr(void)
{
        uint16_t tr;

        __asm__ __volatile__("str %[tr]"
                : /* output */ [tr]"=rm"(tr));
        return tr;
}

static inline uint64_t get_cr0(void)
{
        uint64_t cr0;

        __asm__ __volatile__("mov %%cr0, %[cr0]"
                : /* output */ [cr0]"=r"(cr0));
        return cr0;
}

static inline uint64_t get_cr3(void)
{
        uint64_t cr3;

        __asm__ __volatile__("mov %%cr3, %[cr3]"
                : /* output */ [cr3]"=r"(cr3));
        return cr3;
}

static inline uint64_t get_cr4(void)
{
        uint64_t cr4;

        __asm__ __volatile__("mov %%cr4, %[cr4]"
                : /* output */ [cr4]"=r"(cr4));
        return cr4;
}

static inline void set_cr4(uint64_t val)
{
        __asm__ __volatile__("mov %0, %%cr4" : : "r" (val) : "memory");
}
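
/*
 * Illustrative only: a minimal sketch of enabling a CR4 feature bit with the
 * get_cr4()/set_cr4() helpers above, e.g. X86_CR4_OSXSAVE before executing
 * XSETBV. The helper name is made up for this example; real tests typically
 * do this inline.
 */
static inline void example_cr4_set_bits(uint64_t mask)
{
        set_cr4(get_cr4() | mask);
}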

static inline uint64_t get_gdt_base(void)
{
        struct desc_ptr gdt;
        __asm__ __volatile__("sgdt %[gdt]"
                : /* output */ [gdt]"=m"(gdt));
        return gdt.address;
}

static inline uint64_t get_idt_base(void)
{
        struct desc_ptr idt;
        __asm__ __volatile__("sidt %[idt]"
                : /* output */ [idt]"=m"(idt));
        return idt.address;
}
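
/*
 * Illustrative only: get_desc64_base() is typically applied to an entry of
 * the table located via get_gdt_base(). A minimal sketch, for a selector
 * whose descriptor uses the 16-byte system format (e.g. the TSS selector
 * returned by get_tr()); it assumes the GDT is addressable at its linear
 * base inside the guest, and the helper name is made up for this example.
 */
static inline uint64_t example_gdt_desc_base(uint16_t selector)
{
        struct desc64 *desc;

        /* Bits 15:3 of a selector index 8-byte GDT slots. */
        desc = (struct desc64 *)(get_gdt_base() + (selector & ~0x7));
        return get_desc64_base(desc);
}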

#define SET_XMM(__var, __xmm) \
        asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)

static inline void set_xmm(int n, unsigned long val)
{
        switch (n) {
        case 0:
                SET_XMM(val, xmm0);
                break;
        case 1:
                SET_XMM(val, xmm1);
                break;
        case 2:
                SET_XMM(val, xmm2);
                break;
        case 3:
                SET_XMM(val, xmm3);
                break;
        case 4:
                SET_XMM(val, xmm4);
                break;
        case 5:
                SET_XMM(val, xmm5);
                break;
        case 6:
                SET_XMM(val, xmm6);
                break;
        case 7:
                SET_XMM(val, xmm7);
                break;
        }
}

typedef unsigned long v1di __attribute__ ((vector_size (8)));
static inline unsigned long get_xmm(int n)
{
        assert(n >= 0 && n <= 7);

        register v1di xmm0 __asm__("%xmm0");
        register v1di xmm1 __asm__("%xmm1");
        register v1di xmm2 __asm__("%xmm2");
        register v1di xmm3 __asm__("%xmm3");
        register v1di xmm4 __asm__("%xmm4");
        register v1di xmm5 __asm__("%xmm5");
        register v1di xmm6 __asm__("%xmm6");
        register v1di xmm7 __asm__("%xmm7");
        switch (n) {
        case 0:
                return (unsigned long)xmm0;
        case 1:
                return (unsigned long)xmm1;
        case 2:
                return (unsigned long)xmm2;
        case 3:
                return (unsigned long)xmm3;
        case 4:
                return (unsigned long)xmm4;
        case 5:
                return (unsigned long)xmm5;
        case 6:
                return (unsigned long)xmm6;
        case 7:
                return (unsigned long)xmm7;
        }
        return 0;
}
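
/*
 * Illustrative usage sketch: set_xmm()/get_xmm() are handy for checking that
 * SSE state survives whatever event a test provokes, along the lines of
 *
 *      set_xmm(3, 0x12345678ul);
 *      ... trigger the exit/migration/event under test ...
 *      GUEST_ASSERT(get_xmm(3) == 0x12345678ul);
 *
 * where GUEST_ASSERT is provided by the selftest support headers, not by
 * this file.
 */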

struct kvm_vm;
struct kvm_x86_state;
struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);
void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_x86_state *state);

/*
 * Basic CPU control in CR0
 */
#define X86_CR0_PE (1UL<<0) /* Protection Enable */
#define X86_CR0_MP (1UL<<1) /* Monitor Coprocessor */
#define X86_CR0_EM (1UL<<2) /* Emulation */
#define X86_CR0_TS (1UL<<3) /* Task Switched */
#define X86_CR0_ET (1UL<<4) /* Extension Type */
#define X86_CR0_NE (1UL<<5) /* Numeric Error */
#define X86_CR0_WP (1UL<<16) /* Write Protect */
#define X86_CR0_AM (1UL<<18) /* Alignment Mask */
#define X86_CR0_NW (1UL<<29) /* Not Write-through */
#define X86_CR0_CD (1UL<<30) /* Cache Disable */
#define X86_CR0_PG (1UL<<31) /* Paging */

/*
 * CPU model specific register (MSR) numbers.
 */

/* x86-64 specific MSRs */
#define MSR_EFER 0xc0000080 /* extended feature register */
#define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */
#define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */
#define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */
#define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */
#define MSR_FS_BASE 0xc0000100 /* 64bit FS base */
#define MSR_GS_BASE 0xc0000101 /* 64bit GS base */
#define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */
#define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */

/* EFER bits: */
#define EFER_SCE (1<<0) /* SYSCALL/SYSRET */
#define EFER_LME (1<<8) /* Long mode enable */
#define EFER_LMA (1<<10) /* Long mode active (read-only) */
#define EFER_NX (1<<11) /* No execute enable */
#define EFER_SVME (1<<12) /* Enable virtualization */
#define EFER_LMSLE (1<<13) /* Long Mode Segment Limit Enable */
#define EFER_FFXSR (1<<14) /* Enable Fast FXSAVE/FXRSTOR */
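
/*
 * Example: a 64-bit guest runs with EFER_LME and EFER_LMA set (and usually
 * EFER_NX); EFER_LMA is set by the CPU when paging is enabled in long mode,
 * hence the read-only note above.
 */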

/* Intel MSRs. Some also available on other CPUs */

#define MSR_PPIN_CTL 0x0000004e
#define MSR_PPIN 0x0000004f

#define MSR_IA32_PERFCTR0 0x000000c1
#define MSR_IA32_PERFCTR1 0x000000c2
#define MSR_FSB_FREQ 0x000000cd
#define MSR_PLATFORM_INFO 0x000000ce
#define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31
#define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT)

#define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2
#define NHM_C3_AUTO_DEMOTE (1UL << 25)
#define NHM_C1_AUTO_DEMOTE (1UL << 26)
#define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25)
#define SNB_C1_AUTO_UNDEMOTE (1UL << 27)
#define SNB_C3_AUTO_UNDEMOTE (1UL << 28)

#define MSR_MTRRcap 0x000000fe
#define MSR_IA32_BBL_CR_CTL 0x00000119
#define MSR_IA32_BBL_CR_CTL3 0x0000011e

#define MSR_IA32_SYSENTER_CS 0x00000174
#define MSR_IA32_SYSENTER_ESP 0x00000175
#define MSR_IA32_SYSENTER_EIP 0x00000176

#define MSR_IA32_MCG_CAP 0x00000179
#define MSR_IA32_MCG_STATUS 0x0000017a
#define MSR_IA32_MCG_CTL 0x0000017b
#define MSR_IA32_MCG_EXT_CTL 0x000004d0

#define MSR_OFFCORE_RSP_0 0x000001a6
#define MSR_OFFCORE_RSP_1 0x000001a7
#define MSR_TURBO_RATIO_LIMIT 0x000001ad
#define MSR_TURBO_RATIO_LIMIT1 0x000001ae
#define MSR_TURBO_RATIO_LIMIT2 0x000001af

#define MSR_LBR_SELECT 0x000001c8
#define MSR_LBR_TOS 0x000001c9
#define MSR_LBR_NHM_FROM 0x00000680
#define MSR_LBR_NHM_TO 0x000006c0
#define MSR_LBR_CORE_FROM 0x00000040
#define MSR_LBR_CORE_TO 0x00000060

#define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */
#define LBR_INFO_MISPRED BIT_ULL(63)
#define LBR_INFO_IN_TX BIT_ULL(62)
#define LBR_INFO_ABORT BIT_ULL(61)
#define LBR_INFO_CYCLES 0xffff

#define MSR_IA32_PEBS_ENABLE 0x000003f1
#define MSR_IA32_DS_AREA 0x00000600
#define MSR_IA32_PERF_CAPABILITIES 0x00000345
#define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6

#define MSR_IA32_RTIT_CTL 0x00000570
#define MSR_IA32_RTIT_STATUS 0x00000571
#define MSR_IA32_RTIT_ADDR0_A 0x00000580
#define MSR_IA32_RTIT_ADDR0_B 0x00000581
#define MSR_IA32_RTIT_ADDR1_A 0x00000582
#define MSR_IA32_RTIT_ADDR1_B 0x00000583
#define MSR_IA32_RTIT_ADDR2_A 0x00000584
#define MSR_IA32_RTIT_ADDR2_B 0x00000585
#define MSR_IA32_RTIT_ADDR3_A 0x00000586
#define MSR_IA32_RTIT_ADDR3_B 0x00000587
#define MSR_IA32_RTIT_CR3_MATCH 0x00000572
#define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560
#define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561

#define MSR_MTRRfix64K_00000 0x00000250
#define MSR_MTRRfix16K_80000 0x00000258
#define MSR_MTRRfix16K_A0000 0x00000259
#define MSR_MTRRfix4K_C0000 0x00000268
#define MSR_MTRRfix4K_C8000 0x00000269
#define MSR_MTRRfix4K_D0000 0x0000026a
#define MSR_MTRRfix4K_D8000 0x0000026b
#define MSR_MTRRfix4K_E0000 0x0000026c
#define MSR_MTRRfix4K_E8000 0x0000026d
#define MSR_MTRRfix4K_F0000 0x0000026e
#define MSR_MTRRfix4K_F8000 0x0000026f
#define MSR_MTRRdefType 0x000002ff

#define MSR_IA32_CR_PAT 0x00000277

#define MSR_IA32_DEBUGCTLMSR 0x000001d9
#define MSR_IA32_LASTBRANCHFROMIP 0x000001db
#define MSR_IA32_LASTBRANCHTOIP 0x000001dc
#define MSR_IA32_LASTINTFROMIP 0x000001dd
#define MSR_IA32_LASTINTTOIP 0x000001de

/* DEBUGCTLMSR bits (others vary by model): */
#define DEBUGCTLMSR_LBR (1UL << 0) /* last branch recording */
#define DEBUGCTLMSR_BTF_SHIFT 1
#define DEBUGCTLMSR_BTF (1UL << 1) /* single-step on branches */
#define DEBUGCTLMSR_TR (1UL << 6)
#define DEBUGCTLMSR_BTS (1UL << 7)
#define DEBUGCTLMSR_BTINT (1UL << 8)
#define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9)
#define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10)
#define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11)
#define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14
#define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT)

#define MSR_PEBS_FRONTEND 0x000003f7

#define MSR_IA32_POWER_CTL 0x000001fc

#define MSR_IA32_MC0_CTL 0x00000400
#define MSR_IA32_MC0_STATUS 0x00000401
#define MSR_IA32_MC0_ADDR 0x00000402
#define MSR_IA32_MC0_MISC 0x00000403

/* C-state Residency Counters */
#define MSR_PKG_C3_RESIDENCY 0x000003f8
#define MSR_PKG_C6_RESIDENCY 0x000003f9
#define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa
#define MSR_PKG_C7_RESIDENCY 0x000003fa
#define MSR_CORE_C3_RESIDENCY 0x000003fc
#define MSR_CORE_C6_RESIDENCY 0x000003fd
#define MSR_CORE_C7_RESIDENCY 0x000003fe
#define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff
#define MSR_PKG_C2_RESIDENCY 0x0000060d
#define MSR_PKG_C8_RESIDENCY 0x00000630
#define MSR_PKG_C9_RESIDENCY 0x00000631
#define MSR_PKG_C10_RESIDENCY 0x00000632

/* Interrupt Response Limit */
#define MSR_PKGC3_IRTL 0x0000060a
#define MSR_PKGC6_IRTL 0x0000060b
#define MSR_PKGC7_IRTL 0x0000060c
#define MSR_PKGC8_IRTL 0x00000633
#define MSR_PKGC9_IRTL 0x00000634
#define MSR_PKGC10_IRTL 0x00000635

/* Run Time Average Power Limiting (RAPL) Interface */

#define MSR_RAPL_POWER_UNIT 0x00000606

#define MSR_PKG_POWER_LIMIT 0x00000610
#define MSR_PKG_ENERGY_STATUS 0x00000611
#define MSR_PKG_PERF_STATUS 0x00000613
#define MSR_PKG_POWER_INFO 0x00000614

#define MSR_DRAM_POWER_LIMIT 0x00000618
#define MSR_DRAM_ENERGY_STATUS 0x00000619
#define MSR_DRAM_PERF_STATUS 0x0000061b
#define MSR_DRAM_POWER_INFO 0x0000061c

#define MSR_PP0_POWER_LIMIT 0x00000638
#define MSR_PP0_ENERGY_STATUS 0x00000639
#define MSR_PP0_POLICY 0x0000063a
#define MSR_PP0_PERF_STATUS 0x0000063b

#define MSR_PP1_POWER_LIMIT 0x00000640
#define MSR_PP1_ENERGY_STATUS 0x00000641
#define MSR_PP1_POLICY 0x00000642

/* Config TDP MSRs */
#define MSR_CONFIG_TDP_NOMINAL 0x00000648
#define MSR_CONFIG_TDP_LEVEL_1 0x00000649
#define MSR_CONFIG_TDP_LEVEL_2 0x0000064A
#define MSR_CONFIG_TDP_CONTROL 0x0000064B
#define MSR_TURBO_ACTIVATION_RATIO 0x0000064C

#define MSR_PLATFORM_ENERGY_STATUS 0x0000064D

#define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658
#define MSR_PKG_ANY_CORE_C0_RES 0x00000659
#define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A
#define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B

#define MSR_CORE_C1_RES 0x00000660
#define MSR_MODULE_C6_RES_MS 0x00000664

#define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668
#define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669

#define MSR_ATOM_CORE_RATIOS 0x0000066a
#define MSR_ATOM_CORE_VIDS 0x0000066b
#define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c
#define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d

#define MSR_CORE_PERF_LIMIT_REASONS 0x00000690
#define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0
#define MSR_RING_PERF_LIMIT_REASONS 0x000006B1

/* Hardware P state interface */
#define MSR_PPERF 0x0000064e
#define MSR_PERF_LIMIT_REASONS 0x0000064f
#define MSR_PM_ENABLE 0x00000770
#define MSR_HWP_CAPABILITIES 0x00000771
#define MSR_HWP_REQUEST_PKG 0x00000772
#define MSR_HWP_INTERRUPT 0x00000773
#define MSR_HWP_REQUEST 0x00000774
#define MSR_HWP_STATUS 0x00000777

/* CPUID.6.EAX */
#define HWP_BASE_BIT (1<<7)
#define HWP_NOTIFICATIONS_BIT (1<<8)
#define HWP_ACTIVITY_WINDOW_BIT (1<<9)
#define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10)
#define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11)

/* IA32_HWP_CAPABILITIES */
#define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff)
#define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff)
#define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff)
#define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff)

/* IA32_HWP_REQUEST */
#define HWP_MIN_PERF(x) (x & 0xff)
#define HWP_MAX_PERF(x) ((x & 0xff) << 8)
#define HWP_DESIRED_PERF(x) ((x & 0xff) << 16)
#define HWP_ENERGY_PERF_PREFERENCE(x) (((unsigned long long) x & 0xff) << 24)
#define HWP_EPP_PERFORMANCE 0x00
#define HWP_EPP_BALANCE_PERFORMANCE 0x80
#define HWP_EPP_BALANCE_POWERSAVE 0xC0
#define HWP_EPP_POWERSAVE 0xFF
#define HWP_ACTIVITY_WINDOW(x) ((unsigned long long)(x & 0xff3) << 32)
#define HWP_PACKAGE_CONTROL(x) ((unsigned long long)(x & 0x1) << 42)

/* IA32_HWP_STATUS */
#define HWP_GUARANTEED_CHANGE(x) (x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4)

/* IA32_HWP_INTERRUPT */
#define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1)
#define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2)

#define MSR_AMD64_MC0_MASK 0xc0010044

#define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x))
#define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x))
#define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x))
#define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x))

#define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x))

/* These are consecutive, unlike the usual banks of four MCE registers */
#define MSR_IA32_MC0_CTL2 0x00000280
#define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x))
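
/*
 * Example of the indexing above: the per-bank MCE registers come in groups
 * of four, so MSR_IA32_MCx_STATUS(1) is 0x401 + 4 = 0x405, whereas the CTL2
 * registers are contiguous, so MSR_IA32_MCx_CTL2(1) is 0x281.
 */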

#define MSR_P6_PERFCTR0 0x000000c1
#define MSR_P6_PERFCTR1 0x000000c2
#define MSR_P6_EVNTSEL0 0x00000186
#define MSR_P6_EVNTSEL1 0x00000187

#define MSR_KNC_PERFCTR0 0x00000020
#define MSR_KNC_PERFCTR1 0x00000021
#define MSR_KNC_EVNTSEL0 0x00000028
#define MSR_KNC_EVNTSEL1 0x00000029

/* Alternative perfctr range with full access. */
#define MSR_IA32_PMC0 0x000004c1

/* AMD64 MSRs. Not complete. See the architecture manual for a more
   complete list. */

#define MSR_AMD64_PATCH_LEVEL 0x0000008b
#define MSR_AMD64_TSC_RATIO 0xc0000104
#define MSR_AMD64_NB_CFG 0xc001001f
#define MSR_AMD64_PATCH_LOADER 0xc0010020
#define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140
#define MSR_AMD64_OSVW_STATUS 0xc0010141
#define MSR_AMD64_LS_CFG 0xc0011020
#define MSR_AMD64_DC_CFG 0xc0011022
#define MSR_AMD64_BU_CFG2 0xc001102a
#define MSR_AMD64_IBSFETCHCTL 0xc0011030
#define MSR_AMD64_IBSFETCHLINAD 0xc0011031
#define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032
#define MSR_AMD64_IBSFETCH_REG_COUNT 3
#define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1)
#define MSR_AMD64_IBSOPCTL 0xc0011033
#define MSR_AMD64_IBSOPRIP 0xc0011034
#define MSR_AMD64_IBSOPDATA 0xc0011035
#define MSR_AMD64_IBSOPDATA2 0xc0011036
#define MSR_AMD64_IBSOPDATA3 0xc0011037
#define MSR_AMD64_IBSDCLINAD 0xc0011038
#define MSR_AMD64_IBSDCPHYSAD 0xc0011039
#define MSR_AMD64_IBSOP_REG_COUNT 7
#define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1)
#define MSR_AMD64_IBSCTL 0xc001103a
#define MSR_AMD64_IBSBRTARGET 0xc001103b
#define MSR_AMD64_IBSOPDATA4 0xc001103d
#define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */
#define MSR_AMD64_SEV 0xc0010131
#define MSR_AMD64_SEV_ENABLED_BIT 0
#define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT)

/* Fam 17h MSRs */
#define MSR_F17H_IRPERF 0xc00000e9

/* Fam 16h MSRs */
#define MSR_F16H_L2I_PERF_CTL 0xc0010230
#define MSR_F16H_L2I_PERF_CTR 0xc0010231
#define MSR_F16H_DR1_ADDR_MASK 0xc0011019
#define MSR_F16H_DR2_ADDR_MASK 0xc001101a
#define MSR_F16H_DR3_ADDR_MASK 0xc001101b
#define MSR_F16H_DR0_ADDR_MASK 0xc0011027

/* Fam 15h MSRs */
#define MSR_F15H_PERF_CTL 0xc0010200
#define MSR_F15H_PERF_CTR 0xc0010201
#define MSR_F15H_NB_PERF_CTL 0xc0010240
#define MSR_F15H_NB_PERF_CTR 0xc0010241
#define MSR_F15H_PTSC 0xc0010280
#define MSR_F15H_IC_CFG 0xc0011021

/* Fam 10h MSRs */
#define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058
#define FAM10H_MMIO_CONF_ENABLE (1<<0)
#define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf
#define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
#define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL
#define FAM10H_MMIO_CONF_BASE_SHIFT 20
#define MSR_FAM10H_NODE_ID 0xc001100c
#define MSR_F10H_DECFG 0xc0011029
#define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT 1
#define MSR_F10H_DECFG_LFENCE_SERIALIZE BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT)

/* K8 MSRs */
#define MSR_K8_TOP_MEM1 0xc001001a
#define MSR_K8_TOP_MEM2 0xc001001d
#define MSR_K8_SYSCFG 0xc0010010
#define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT 23
#define MSR_K8_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT)
#define MSR_K8_INT_PENDING_MSG 0xc0010055
/* C1E active bits in int pending message */
#define K8_INTP_C1E_ACTIVE_MASK 0x18000000
#define MSR_K8_TSEG_ADDR 0xc0010112
#define MSR_K8_TSEG_MASK 0xc0010113
#define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */
#define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */
#define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */

/* K7 MSRs */
#define MSR_K7_EVNTSEL0 0xc0010000
#define MSR_K7_PERFCTR0 0xc0010004
#define MSR_K7_EVNTSEL1 0xc0010001
#define MSR_K7_PERFCTR1 0xc0010005
#define MSR_K7_EVNTSEL2 0xc0010002
#define MSR_K7_PERFCTR2 0xc0010006
#define MSR_K7_EVNTSEL3 0xc0010003
#define MSR_K7_PERFCTR3 0xc0010007
#define MSR_K7_CLK_CTL 0xc001001b
#define MSR_K7_HWCR 0xc0010015
#define MSR_K7_HWCR_SMMLOCK_BIT 0
#define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT)
#define MSR_K7_FID_VID_CTL 0xc0010041
#define MSR_K7_FID_VID_STATUS 0xc0010042

/* K6 MSRs */
#define MSR_K6_WHCR 0xc0000082
#define MSR_K6_UWCCR 0xc0000085
#define MSR_K6_EPMR 0xc0000086
#define MSR_K6_PSOR 0xc0000087
#define MSR_K6_PFIR 0xc0000088

/* Centaur-Hauls/IDT defined MSRs. */
#define MSR_IDT_FCR1 0x00000107
#define MSR_IDT_FCR2 0x00000108
#define MSR_IDT_FCR3 0x00000109
#define MSR_IDT_FCR4 0x0000010a

#define MSR_IDT_MCR0 0x00000110
#define MSR_IDT_MCR1 0x00000111
#define MSR_IDT_MCR2 0x00000112
#define MSR_IDT_MCR3 0x00000113
#define MSR_IDT_MCR4 0x00000114
#define MSR_IDT_MCR5 0x00000115
#define MSR_IDT_MCR6 0x00000116
#define MSR_IDT_MCR7 0x00000117
#define MSR_IDT_MCR_CTRL 0x00000120

/* VIA Cyrix defined MSRs */
#define MSR_VIA_FCR 0x00001107
#define MSR_VIA_LONGHAUL 0x0000110a
#define MSR_VIA_RNG 0x0000110b
#define MSR_VIA_BCR2 0x00001147

/* Transmeta defined MSRs */
#define MSR_TMTA_LONGRUN_CTRL 0x80868010
#define MSR_TMTA_LONGRUN_FLAGS 0x80868011
#define MSR_TMTA_LRTI_READOUT 0x80868018
#define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a

/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR 0x00000000
#define MSR_IA32_P5_MC_TYPE 0x00000001
#define MSR_IA32_TSC 0x00000010
#define MSR_IA32_PLATFORM_ID 0x00000017
#define MSR_IA32_EBL_CR_POWERON 0x0000002a
#define MSR_EBC_FREQUENCY_ID 0x0000002c
#define MSR_SMI_COUNT 0x00000034
#define MSR_IA32_FEATURE_CONTROL 0x0000003a
#define MSR_IA32_TSC_ADJUST 0x0000003b
#define MSR_IA32_BNDCFGS 0x00000d90

#define MSR_IA32_BNDCFGS_RSVD 0x00000ffc

#define MSR_IA32_XSS 0x00000da0

#define FEATURE_CONTROL_LOCKED (1<<0)
#define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX (1<<1)
#define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX (1<<2)
#define FEATURE_CONTROL_LMCE (1<<20)

#define MSR_IA32_APICBASE 0x0000001b
#define MSR_IA32_APICBASE_BSP (1<<8)
#define MSR_IA32_APICBASE_ENABLE (1<<11)
#define MSR_IA32_APICBASE_BASE (0xfffff<<12)
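
/*
 * Example: after reset the local APIC is mapped at 0xfee00000;
 * MSR_IA32_APICBASE_BASE masks the base-address field,
 * MSR_IA32_APICBASE_ENABLE globally enables the APIC, and
 * MSR_IA32_APICBASE_BSP is set only on the bootstrap processor.
 */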

#define MSR_IA32_TSCDEADLINE 0x000006e0

#define MSR_IA32_UCODE_WRITE 0x00000079
#define MSR_IA32_UCODE_REV 0x0000008b

#define MSR_IA32_SMM_MONITOR_CTL 0x0000009b
#define MSR_IA32_SMBASE 0x0000009e

#define MSR_IA32_PERF_STATUS 0x00000198
#define MSR_IA32_PERF_CTL 0x00000199
#define INTEL_PERF_CTL_MASK 0xffff
#define MSR_AMD_PSTATE_DEF_BASE 0xc0010064
#define MSR_AMD_PERF_STATUS 0xc0010063
#define MSR_AMD_PERF_CTL 0xc0010062

#define MSR_IA32_MPERF 0x000000e7
#define MSR_IA32_APERF 0x000000e8

#define MSR_IA32_THERM_CONTROL 0x0000019a
#define MSR_IA32_THERM_INTERRUPT 0x0000019b

#define THERM_INT_HIGH_ENABLE (1 << 0)
#define THERM_INT_LOW_ENABLE (1 << 1)
#define THERM_INT_PLN_ENABLE (1 << 24)

#define MSR_IA32_THERM_STATUS 0x0000019c

#define THERM_STATUS_PROCHOT (1 << 0)
#define THERM_STATUS_POWER_LIMIT (1 << 10)

#define MSR_THERM2_CTL 0x0000019d

#define MSR_THERM2_CTL_TM_SELECT (1ULL << 16)

#define MSR_IA32_MISC_ENABLE 0x000001a0

#define MSR_IA32_TEMPERATURE_TARGET 0x000001a2

#define MSR_MISC_FEATURE_CONTROL 0x000001a4
#define MSR_MISC_PWR_MGMT 0x000001aa

#define MSR_IA32_ENERGY_PERF_BIAS 0x000001b0
#define ENERGY_PERF_BIAS_PERFORMANCE 0
#define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE 4
#define ENERGY_PERF_BIAS_NORMAL 6
#define ENERGY_PERF_BIAS_BALANCE_POWERSAVE 8
#define ENERGY_PERF_BIAS_POWERSAVE 15

#define MSR_IA32_PACKAGE_THERM_STATUS 0x000001b1

#define PACKAGE_THERM_STATUS_PROCHOT (1 << 0)
#define PACKAGE_THERM_STATUS_POWER_LIMIT (1 << 10)

#define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x000001b2

#define PACKAGE_THERM_INT_HIGH_ENABLE (1 << 0)
#define PACKAGE_THERM_INT_LOW_ENABLE (1 << 1)
#define PACKAGE_THERM_INT_PLN_ENABLE (1 << 24)

/* Thermal Thresholds Support */
#define THERM_INT_THRESHOLD0_ENABLE (1 << 15)
#define THERM_SHIFT_THRESHOLD0 8
#define THERM_MASK_THRESHOLD0 (0x7f << THERM_SHIFT_THRESHOLD0)
#define THERM_INT_THRESHOLD1_ENABLE (1 << 23)
#define THERM_SHIFT_THRESHOLD1 16
#define THERM_MASK_THRESHOLD1 (0x7f << THERM_SHIFT_THRESHOLD1)
#define THERM_STATUS_THRESHOLD0 (1 << 6)
#define THERM_LOG_THRESHOLD0 (1 << 7)
#define THERM_STATUS_THRESHOLD1 (1 << 8)
#define THERM_LOG_THRESHOLD1 (1 << 9)

/* MISC_ENABLE bits: architectural */
#define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT 0
#define MSR_IA32_MISC_ENABLE_FAST_STRING (1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
#define MSR_IA32_MISC_ENABLE_TCC_BIT 1
#define MSR_IA32_MISC_ENABLE_TCC (1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
#define MSR_IA32_MISC_ENABLE_EMON_BIT 7
#define MSR_IA32_MISC_ENABLE_EMON (1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT 11
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT 12
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT 16
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP (1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
#define MSR_IA32_MISC_ENABLE_MWAIT_BIT 18
#define MSR_IA32_MISC_ENABLE_MWAIT (1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT 22
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID (1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT 23
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT 34
#define MSR_IA32_MISC_ENABLE_XD_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)

/* MISC_ENABLE bits: model-specific, meaning may vary from core to core */
#define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT 2
#define MSR_IA32_MISC_ENABLE_X87_COMPAT (1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
#define MSR_IA32_MISC_ENABLE_TM1_BIT 3
#define MSR_IA32_MISC_ENABLE_TM1 (1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT 4
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT 6
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT 8
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT 9
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR (1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX (1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
#define MSR_IA32_MISC_ENABLE_TM2_BIT 13
#define MSR_IA32_MISC_ENABLE_TM2 (1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT 19
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT 20
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT 24
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT (1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT 37
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT 38
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT 39
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)

/* MISC_FEATURES_ENABLES non-architectural features */
#define MSR_MISC_FEATURES_ENABLES 0x00000140

#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT 0
#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
#define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT 1

#define MSR_IA32_TSC_DEADLINE 0x000006E0

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX 0x00000180
#define MSR_IA32_MCG_EBX 0x00000181
#define MSR_IA32_MCG_ECX 0x00000182
#define MSR_IA32_MCG_EDX 0x00000183
#define MSR_IA32_MCG_ESI 0x00000184
#define MSR_IA32_MCG_EDI 0x00000185
#define MSR_IA32_MCG_EBP 0x00000186
#define MSR_IA32_MCG_ESP 0x00000187
#define MSR_IA32_MCG_EFLAGS 0x00000188
#define MSR_IA32_MCG_EIP 0x00000189
#define MSR_IA32_MCG_RESERVED 0x0000018a

/* Pentium IV performance counter MSRs */
#define MSR_P4_BPU_PERFCTR0 0x00000300
#define MSR_P4_BPU_PERFCTR1 0x00000301
#define MSR_P4_BPU_PERFCTR2 0x00000302
#define MSR_P4_BPU_PERFCTR3 0x00000303
#define MSR_P4_MS_PERFCTR0 0x00000304
#define MSR_P4_MS_PERFCTR1 0x00000305
#define MSR_P4_MS_PERFCTR2 0x00000306
#define MSR_P4_MS_PERFCTR3 0x00000307
#define MSR_P4_FLAME_PERFCTR0 0x00000308
#define MSR_P4_FLAME_PERFCTR1 0x00000309
#define MSR_P4_FLAME_PERFCTR2 0x0000030a
#define MSR_P4_FLAME_PERFCTR3 0x0000030b
#define MSR_P4_IQ_PERFCTR0 0x0000030c
#define MSR_P4_IQ_PERFCTR1 0x0000030d
#define MSR_P4_IQ_PERFCTR2 0x0000030e
#define MSR_P4_IQ_PERFCTR3 0x0000030f
#define MSR_P4_IQ_PERFCTR4 0x00000310
#define MSR_P4_IQ_PERFCTR5 0x00000311
#define MSR_P4_BPU_CCCR0 0x00000360
#define MSR_P4_BPU_CCCR1 0x00000361
#define MSR_P4_BPU_CCCR2 0x00000362
#define MSR_P4_BPU_CCCR3 0x00000363
#define MSR_P4_MS_CCCR0 0x00000364
#define MSR_P4_MS_CCCR1 0x00000365
#define MSR_P4_MS_CCCR2 0x00000366
#define MSR_P4_MS_CCCR3 0x00000367
#define MSR_P4_FLAME_CCCR0 0x00000368
#define MSR_P4_FLAME_CCCR1 0x00000369
#define MSR_P4_FLAME_CCCR2 0x0000036a
#define MSR_P4_FLAME_CCCR3 0x0000036b
#define MSR_P4_IQ_CCCR0 0x0000036c
#define MSR_P4_IQ_CCCR1 0x0000036d
#define MSR_P4_IQ_CCCR2 0x0000036e
#define MSR_P4_IQ_CCCR3 0x0000036f
#define MSR_P4_IQ_CCCR4 0x00000370
#define MSR_P4_IQ_CCCR5 0x00000371
#define MSR_P4_ALF_ESCR0 0x000003ca
#define MSR_P4_ALF_ESCR1 0x000003cb
#define MSR_P4_BPU_ESCR0 0x000003b2
#define MSR_P4_BPU_ESCR1 0x000003b3
#define MSR_P4_BSU_ESCR0 0x000003a0
#define MSR_P4_BSU_ESCR1 0x000003a1
#define MSR_P4_CRU_ESCR0 0x000003b8
#define MSR_P4_CRU_ESCR1 0x000003b9
#define MSR_P4_CRU_ESCR2 0x000003cc
#define MSR_P4_CRU_ESCR3 0x000003cd
#define MSR_P4_CRU_ESCR4 0x000003e0
#define MSR_P4_CRU_ESCR5 0x000003e1
#define MSR_P4_DAC_ESCR0 0x000003a8
#define MSR_P4_DAC_ESCR1 0x000003a9
#define MSR_P4_FIRM_ESCR0 0x000003a4
#define MSR_P4_FIRM_ESCR1 0x000003a5
#define MSR_P4_FLAME_ESCR0 0x000003a6
#define MSR_P4_FLAME_ESCR1 0x000003a7
#define MSR_P4_FSB_ESCR0 0x000003a2
#define MSR_P4_FSB_ESCR1 0x000003a3
#define MSR_P4_IQ_ESCR0 0x000003ba
#define MSR_P4_IQ_ESCR1 0x000003bb
#define MSR_P4_IS_ESCR0 0x000003b4
#define MSR_P4_IS_ESCR1 0x000003b5
#define MSR_P4_ITLB_ESCR0 0x000003b6
#define MSR_P4_ITLB_ESCR1 0x000003b7
#define MSR_P4_IX_ESCR0 0x000003c8
#define MSR_P4_IX_ESCR1 0x000003c9
#define MSR_P4_MOB_ESCR0 0x000003aa
#define MSR_P4_MOB_ESCR1 0x000003ab
#define MSR_P4_MS_ESCR0 0x000003c0
#define MSR_P4_MS_ESCR1 0x000003c1
#define MSR_P4_PMH_ESCR0 0x000003ac
#define MSR_P4_PMH_ESCR1 0x000003ad
#define MSR_P4_RAT_ESCR0 0x000003bc
#define MSR_P4_RAT_ESCR1 0x000003bd
#define MSR_P4_SAAT_ESCR0 0x000003ae
#define MSR_P4_SAAT_ESCR1 0x000003af
#define MSR_P4_SSU_ESCR0 0x000003be
#define MSR_P4_SSU_ESCR1 0x000003bf /* guess: not in manual */

#define MSR_P4_TBPU_ESCR0 0x000003c2
#define MSR_P4_TBPU_ESCR1 0x000003c3
#define MSR_P4_TC_ESCR0 0x000003c4
#define MSR_P4_TC_ESCR1 0x000003c5
#define MSR_P4_U2L_ESCR0 0x000003b0
#define MSR_P4_U2L_ESCR1 0x000003b1

#define MSR_P4_PEBS_MATRIX_VERT 0x000003f2

/* Intel Core-based CPU performance counters */
#define MSR_CORE_PERF_FIXED_CTR0 0x00000309
#define MSR_CORE_PERF_FIXED_CTR1 0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2 0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x00000390

/* Geode defined MSRs */
#define MSR_GEODE_BUSCONT_CONF0 0x00001900

/* Intel VT MSRs */
#define MSR_IA32_VMX_BASIC 0x00000480
#define MSR_IA32_VMX_PINBASED_CTLS 0x00000481
#define MSR_IA32_VMX_PROCBASED_CTLS 0x00000482
#define MSR_IA32_VMX_EXIT_CTLS 0x00000483
#define MSR_IA32_VMX_ENTRY_CTLS 0x00000484
#define MSR_IA32_VMX_MISC 0x00000485
#define MSR_IA32_VMX_CR0_FIXED0 0x00000486
#define MSR_IA32_VMX_CR0_FIXED1 0x00000487
#define MSR_IA32_VMX_CR4_FIXED0 0x00000488
#define MSR_IA32_VMX_CR4_FIXED1 0x00000489
#define MSR_IA32_VMX_VMCS_ENUM 0x0000048a
#define MSR_IA32_VMX_PROCBASED_CTLS2 0x0000048b
#define MSR_IA32_VMX_EPT_VPID_CAP 0x0000048c
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS 0x0000048d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS 0x0000048f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS 0x00000490
#define MSR_IA32_VMX_VMFUNC 0x00000491

/* VMX_BASIC bits and bitmasks */
#define VMX_BASIC_VMCS_SIZE_SHIFT 32
#define VMX_BASIC_TRUE_CTLS (1ULL << 55)
#define VMX_BASIC_64 0x0001000000000000LLU
#define VMX_BASIC_MEM_TYPE_SHIFT 50
#define VMX_BASIC_MEM_TYPE_MASK 0x003c000000000000LLU
#define VMX_BASIC_MEM_TYPE_WB 6LLU
#define VMX_BASIC_INOUT 0x0040000000000000LLU
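
/*
 * Example: bits 30:0 of MSR_IA32_VMX_BASIC hold the VMCS revision identifier
 * and bits 44:32 the VMCS region size, so
 * (rdmsr(MSR_IA32_VMX_BASIC) >> VMX_BASIC_VMCS_SIZE_SHIFT) & 0x1fff
 * gives the number of bytes needed for a VMCS or VMXON region.
 */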

/* MSR_IA32_VMX_MISC bits */
#define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
#define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE 0x1F

/* AMD-V MSRs */

#define MSR_VM_CR 0xc0010114
#define MSR_VM_IGNNE 0xc0010115
#define MSR_VM_HSAVE_PA 0xc0010117

#endif /* !SELFTEST_KVM_X86_H */