x86.h 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047
  1. /*
  2. * tools/testing/selftests/kvm/include/x86.h
  3. *
  4. * Copyright (C) 2018, Google LLC.
  5. *
  6. * This work is licensed under the terms of the GNU GPL, version 2.
  7. *
  8. */
  9. #ifndef SELFTEST_KVM_X86_H
  10. #define SELFTEST_KVM_X86_H
  11. #include <assert.h>
  12. #include <stdint.h>
#define X86_EFLAGS_FIXED (1u << 1) /* RFLAGS bit 1 is reserved and always reads as 1 */

/* CR4 control bits (Intel SDM Vol. 3, "Control Registers") */
#define X86_CR4_VME (1ul << 0) /* Virtual-8086 Mode Extensions */
#define X86_CR4_PVI (1ul << 1) /* Protected-mode Virtual Interrupts */
#define X86_CR4_TSD (1ul << 2) /* Time Stamp Disable */
#define X86_CR4_DE (1ul << 3) /* Debugging Extensions */
#define X86_CR4_PSE (1ul << 4) /* Page Size Extensions */
#define X86_CR4_PAE (1ul << 5) /* Physical Address Extension */
#define X86_CR4_MCE (1ul << 6) /* Machine-Check Enable */
#define X86_CR4_PGE (1ul << 7) /* Page Global Enable */
#define X86_CR4_PCE (1ul << 8) /* Performance-monitoring Counter Enable */
#define X86_CR4_OSFXSR (1ul << 9) /* OS support for FXSAVE/FXRSTOR */
#define X86_CR4_OSXMMEXCPT (1ul << 10) /* OS support for unmasked SIMD FP exceptions */
#define X86_CR4_UMIP (1ul << 11) /* User-Mode Instruction Prevention */
#define X86_CR4_VMXE (1ul << 13) /* VMX Enable */
#define X86_CR4_SMXE (1ul << 14) /* SMX Enable */
#define X86_CR4_FSGSBASE (1ul << 16) /* Enable {RD,WR}{FS,GS}BASE instructions */
#define X86_CR4_PCIDE (1ul << 17) /* PCID Enable */
#define X86_CR4_OSXSAVE (1ul << 18) /* XSAVE and Processor Extended States Enable */
#define X86_CR4_SMEP (1ul << 20) /* Supervisor Mode Execution Prevention */
#define X86_CR4_SMAP (1ul << 21) /* Supervisor Mode Access Prevention */
#define X86_CR4_PKE (1ul << 22) /* Protection Keys Enable */
/* The enum values match the instruction encoding of each register */
enum x86_register {
	RAX = 0,
	RCX,
	RDX,
	RBX,
	RSP,
	RBP,
	RSI,
	RDI,
	R8,
	R9,
	R10,
	R11,
	R12,
	R13,
	R14,
	R15,
};
  53. struct desc64 {
  54. uint16_t limit0;
  55. uint16_t base0;
  56. unsigned base1:8, type:4, s:1, dpl:2, p:1;
  57. unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8;
  58. uint32_t base3;
  59. uint32_t zero1;
  60. } __attribute__((packed));
  61. struct desc_ptr {
  62. uint16_t size;
  63. uint64_t address;
  64. } __attribute__((packed));
  65. static inline uint64_t get_desc64_base(const struct desc64 *desc)
  66. {
  67. return ((uint64_t)desc->base3 << 32) |
  68. (desc->base0 | ((desc->base1) << 16) | ((desc->base2) << 24));
  69. }
  70. static inline uint64_t rdtsc(void)
  71. {
  72. uint32_t eax, edx;
  73. /*
  74. * The lfence is to wait (on Intel CPUs) until all previous
  75. * instructions have been executed.
  76. */
  77. __asm__ __volatile__("lfence; rdtsc" : "=a"(eax), "=d"(edx));
  78. return ((uint64_t)edx) << 32 | eax;
  79. }
  80. static inline uint64_t rdtscp(uint32_t *aux)
  81. {
  82. uint32_t eax, edx;
  83. __asm__ __volatile__("rdtscp" : "=a"(eax), "=d"(edx), "=c"(*aux));
  84. return ((uint64_t)edx) << 32 | eax;
  85. }
  86. static inline uint64_t rdmsr(uint32_t msr)
  87. {
  88. uint32_t a, d;
  89. __asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");
  90. return a | ((uint64_t) d << 32);
  91. }
  92. static inline void wrmsr(uint32_t msr, uint64_t value)
  93. {
  94. uint32_t a = value;
  95. uint32_t d = value >> 32;
  96. __asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
  97. }
  98. static inline uint16_t inw(uint16_t port)
  99. {
  100. uint16_t tmp;
  101. __asm__ __volatile__("in %%dx, %%ax"
  102. : /* output */ "=a" (tmp)
  103. : /* input */ "d" (port));
  104. return tmp;
  105. }
  106. static inline uint16_t get_es(void)
  107. {
  108. uint16_t es;
  109. __asm__ __volatile__("mov %%es, %[es]"
  110. : /* output */ [es]"=rm"(es));
  111. return es;
  112. }
  113. static inline uint16_t get_cs(void)
  114. {
  115. uint16_t cs;
  116. __asm__ __volatile__("mov %%cs, %[cs]"
  117. : /* output */ [cs]"=rm"(cs));
  118. return cs;
  119. }
  120. static inline uint16_t get_ss(void)
  121. {
  122. uint16_t ss;
  123. __asm__ __volatile__("mov %%ss, %[ss]"
  124. : /* output */ [ss]"=rm"(ss));
  125. return ss;
  126. }
  127. static inline uint16_t get_ds(void)
  128. {
  129. uint16_t ds;
  130. __asm__ __volatile__("mov %%ds, %[ds]"
  131. : /* output */ [ds]"=rm"(ds));
  132. return ds;
  133. }
  134. static inline uint16_t get_fs(void)
  135. {
  136. uint16_t fs;
  137. __asm__ __volatile__("mov %%fs, %[fs]"
  138. : /* output */ [fs]"=rm"(fs));
  139. return fs;
  140. }
  141. static inline uint16_t get_gs(void)
  142. {
  143. uint16_t gs;
  144. __asm__ __volatile__("mov %%gs, %[gs]"
  145. : /* output */ [gs]"=rm"(gs));
  146. return gs;
  147. }
  148. static inline uint16_t get_tr(void)
  149. {
  150. uint16_t tr;
  151. __asm__ __volatile__("str %[tr]"
  152. : /* output */ [tr]"=rm"(tr));
  153. return tr;
  154. }
  155. static inline uint64_t get_cr0(void)
  156. {
  157. uint64_t cr0;
  158. __asm__ __volatile__("mov %%cr0, %[cr0]"
  159. : /* output */ [cr0]"=r"(cr0));
  160. return cr0;
  161. }
  162. static inline uint64_t get_cr3(void)
  163. {
  164. uint64_t cr3;
  165. __asm__ __volatile__("mov %%cr3, %[cr3]"
  166. : /* output */ [cr3]"=r"(cr3));
  167. return cr3;
  168. }
  169. static inline uint64_t get_cr4(void)
  170. {
  171. uint64_t cr4;
  172. __asm__ __volatile__("mov %%cr4, %[cr4]"
  173. : /* output */ [cr4]"=r"(cr4));
  174. return cr4;
  175. }
/* Load @val into CR4; requires CPL0. */
static inline void set_cr4(uint64_t val)
{
	__asm__ __volatile__("mov %0, %%cr4" : : "r" (val) : "memory");
}
  180. static inline uint64_t get_gdt_base(void)
  181. {
  182. struct desc_ptr gdt;
  183. __asm__ __volatile__("sgdt %[gdt]"
  184. : /* output */ [gdt]"=m"(gdt));
  185. return gdt.address;
  186. }
  187. static inline uint64_t get_idt_base(void)
  188. {
  189. struct desc_ptr idt;
  190. __asm__ __volatile__("sidt %[idt]"
  191. : /* output */ [idt]"=m"(idt));
  192. return idt.address;
  193. }
/*
 * Load 64-bit @__var into xmm register @__xmm (a bare identifier such
 * as xmm0); the register is stringized into both the template and the
 * clobber list.
 */
#define SET_XMM(__var, __xmm) \
	asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)
  196. static inline void set_xmm(int n, unsigned long val)
  197. {
  198. switch (n) {
  199. case 0:
  200. SET_XMM(val, xmm0);
  201. break;
  202. case 1:
  203. SET_XMM(val, xmm1);
  204. break;
  205. case 2:
  206. SET_XMM(val, xmm2);
  207. break;
  208. case 3:
  209. SET_XMM(val, xmm3);
  210. break;
  211. case 4:
  212. SET_XMM(val, xmm4);
  213. break;
  214. case 5:
  215. SET_XMM(val, xmm5);
  216. break;
  217. case 6:
  218. SET_XMM(val, xmm6);
  219. break;
  220. case 7:
  221. SET_XMM(val, xmm7);
  222. break;
  223. }
  224. }
/* 8-byte GCC vector type, used to bind a C variable to an xmm register. */
typedef unsigned long v1di __attribute__ ((vector_size (8)));

/*
 * Return the low 64 bits of xmm register @n (0-7).
 *
 * NOTE(review): this relies on GCC explicit-register ("register ...
 * __asm__") variables that are never written from C; the compiler is
 * trusted to read each value straight from the named hardware register.
 * That is sensitive to code generation — confirm the register bindings
 * survive optimization before depending on this in new tests.
 */
static inline unsigned long get_xmm(int n)
{
	assert(n >= 0 && n <= 7);

	register v1di xmm0 __asm__("%xmm0");
	register v1di xmm1 __asm__("%xmm1");
	register v1di xmm2 __asm__("%xmm2");
	register v1di xmm3 __asm__("%xmm3");
	register v1di xmm4 __asm__("%xmm4");
	register v1di xmm5 __asm__("%xmm5");
	register v1di xmm6 __asm__("%xmm6");
	register v1di xmm7 __asm__("%xmm7");

	switch (n) {
	case 0:
		return (unsigned long)xmm0;
	case 1:
		return (unsigned long)xmm1;
	case 2:
		return (unsigned long)xmm2;
	case 3:
		return (unsigned long)xmm3;
	case 4:
		return (unsigned long)xmm4;
	case 5:
		return (unsigned long)xmm5;
	case 6:
		return (unsigned long)xmm6;
	case 7:
		return (unsigned long)xmm7;
	}
	/* Unreachable given the assert above. */
	return 0;
}
/* Opaque snapshot of a vCPU's x86 state; definition lives out of line. */
struct kvm_x86_state;

/*
 * Capture the architectural state of vCPU @vcpuid in @vm.
 * NOTE(review): the caller presumably owns (and must free) the returned
 * snapshot — confirm against the implementation.
 */
struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);

/* Restore a snapshot previously captured by vcpu_save_state(). */
void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_x86_state *state);
/*
 * Basic CPU control bits in CR0 (Intel SDM Vol. 3, "Control Registers").
 */
#define X86_CR0_PE (1UL<<0) /* Protection Enable */
#define X86_CR0_MP (1UL<<1) /* Monitor Coprocessor */
#define X86_CR0_EM (1UL<<2) /* Emulation */
#define X86_CR0_TS (1UL<<3) /* Task Switched */
#define X86_CR0_ET (1UL<<4) /* Extension Type */
#define X86_CR0_NE (1UL<<5) /* Numeric Error */
#define X86_CR0_WP (1UL<<16) /* Write Protect */
#define X86_CR0_AM (1UL<<18) /* Alignment Mask */
#define X86_CR0_NW (1UL<<29) /* Not Write-through */
#define X86_CR0_CD (1UL<<30) /* Cache Disable */
#define X86_CR0_PG (1UL<<31) /* Paging */
  274. /*
  275. * CPU model specific register (MSR) numbers.
  276. */
  277. /* x86-64 specific MSRs */
  278. #define MSR_EFER 0xc0000080 /* extended feature register */
  279. #define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */
  280. #define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */
  281. #define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */
  282. #define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */
  283. #define MSR_FS_BASE 0xc0000100 /* 64bit FS base */
  284. #define MSR_GS_BASE 0xc0000101 /* 64bit GS base */
  285. #define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */
  286. #define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */
  287. /* EFER bits: */
  288. #define EFER_SCE (1<<0) /* SYSCALL/SYSRET */
  289. #define EFER_LME (1<<8) /* Long mode enable */
  290. #define EFER_LMA (1<<10) /* Long mode active (read-only) */
  291. #define EFER_NX (1<<11) /* No execute enable */
  292. #define EFER_SVME (1<<12) /* Enable virtualization */
  293. #define EFER_LMSLE (1<<13) /* Long Mode Segment Limit Enable */
  294. #define EFER_FFXSR (1<<14) /* Enable Fast FXSAVE/FXRSTOR */
  295. /* Intel MSRs. Some also available on other CPUs */
  296. #define MSR_PPIN_CTL 0x0000004e
  297. #define MSR_PPIN 0x0000004f
  298. #define MSR_IA32_PERFCTR0 0x000000c1
  299. #define MSR_IA32_PERFCTR1 0x000000c2
  300. #define MSR_FSB_FREQ 0x000000cd
  301. #define MSR_PLATFORM_INFO 0x000000ce
  302. #define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31
  303. #define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT)
  304. #define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2
  305. #define NHM_C3_AUTO_DEMOTE (1UL << 25)
  306. #define NHM_C1_AUTO_DEMOTE (1UL << 26)
  307. #define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25)
  308. #define SNB_C1_AUTO_UNDEMOTE (1UL << 27)
  309. #define SNB_C3_AUTO_UNDEMOTE (1UL << 28)
  310. #define MSR_MTRRcap 0x000000fe
  311. #define MSR_IA32_BBL_CR_CTL 0x00000119
  312. #define MSR_IA32_BBL_CR_CTL3 0x0000011e
  313. #define MSR_IA32_SYSENTER_CS 0x00000174
  314. #define MSR_IA32_SYSENTER_ESP 0x00000175
  315. #define MSR_IA32_SYSENTER_EIP 0x00000176
  316. #define MSR_IA32_MCG_CAP 0x00000179
  317. #define MSR_IA32_MCG_STATUS 0x0000017a
  318. #define MSR_IA32_MCG_CTL 0x0000017b
  319. #define MSR_IA32_MCG_EXT_CTL 0x000004d0
  320. #define MSR_OFFCORE_RSP_0 0x000001a6
  321. #define MSR_OFFCORE_RSP_1 0x000001a7
  322. #define MSR_TURBO_RATIO_LIMIT 0x000001ad
  323. #define MSR_TURBO_RATIO_LIMIT1 0x000001ae
  324. #define MSR_TURBO_RATIO_LIMIT2 0x000001af
  325. #define MSR_LBR_SELECT 0x000001c8
  326. #define MSR_LBR_TOS 0x000001c9
  327. #define MSR_LBR_NHM_FROM 0x00000680
  328. #define MSR_LBR_NHM_TO 0x000006c0
  329. #define MSR_LBR_CORE_FROM 0x00000040
  330. #define MSR_LBR_CORE_TO 0x00000060
  331. #define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */
  332. #define LBR_INFO_MISPRED BIT_ULL(63)
  333. #define LBR_INFO_IN_TX BIT_ULL(62)
  334. #define LBR_INFO_ABORT BIT_ULL(61)
  335. #define LBR_INFO_CYCLES 0xffff
  336. #define MSR_IA32_PEBS_ENABLE 0x000003f1
  337. #define MSR_IA32_DS_AREA 0x00000600
  338. #define MSR_IA32_PERF_CAPABILITIES 0x00000345
  339. #define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6
  340. #define MSR_IA32_RTIT_CTL 0x00000570
  341. #define MSR_IA32_RTIT_STATUS 0x00000571
  342. #define MSR_IA32_RTIT_ADDR0_A 0x00000580
  343. #define MSR_IA32_RTIT_ADDR0_B 0x00000581
  344. #define MSR_IA32_RTIT_ADDR1_A 0x00000582
  345. #define MSR_IA32_RTIT_ADDR1_B 0x00000583
  346. #define MSR_IA32_RTIT_ADDR2_A 0x00000584
  347. #define MSR_IA32_RTIT_ADDR2_B 0x00000585
  348. #define MSR_IA32_RTIT_ADDR3_A 0x00000586
  349. #define MSR_IA32_RTIT_ADDR3_B 0x00000587
  350. #define MSR_IA32_RTIT_CR3_MATCH 0x00000572
  351. #define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560
  352. #define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561
  353. #define MSR_MTRRfix64K_00000 0x00000250
  354. #define MSR_MTRRfix16K_80000 0x00000258
  355. #define MSR_MTRRfix16K_A0000 0x00000259
  356. #define MSR_MTRRfix4K_C0000 0x00000268
  357. #define MSR_MTRRfix4K_C8000 0x00000269
  358. #define MSR_MTRRfix4K_D0000 0x0000026a
  359. #define MSR_MTRRfix4K_D8000 0x0000026b
  360. #define MSR_MTRRfix4K_E0000 0x0000026c
  361. #define MSR_MTRRfix4K_E8000 0x0000026d
  362. #define MSR_MTRRfix4K_F0000 0x0000026e
  363. #define MSR_MTRRfix4K_F8000 0x0000026f
  364. #define MSR_MTRRdefType 0x000002ff
  365. #define MSR_IA32_CR_PAT 0x00000277
  366. #define MSR_IA32_DEBUGCTLMSR 0x000001d9
  367. #define MSR_IA32_LASTBRANCHFROMIP 0x000001db
  368. #define MSR_IA32_LASTBRANCHTOIP 0x000001dc
  369. #define MSR_IA32_LASTINTFROMIP 0x000001dd
  370. #define MSR_IA32_LASTINTTOIP 0x000001de
  371. /* DEBUGCTLMSR bits (others vary by model): */
  372. #define DEBUGCTLMSR_LBR (1UL << 0) /* last branch recording */
  373. #define DEBUGCTLMSR_BTF_SHIFT 1
  374. #define DEBUGCTLMSR_BTF (1UL << 1) /* single-step on branches */
  375. #define DEBUGCTLMSR_TR (1UL << 6)
  376. #define DEBUGCTLMSR_BTS (1UL << 7)
  377. #define DEBUGCTLMSR_BTINT (1UL << 8)
  378. #define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9)
  379. #define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10)
  380. #define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11)
  381. #define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14
  382. #define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT)
  383. #define MSR_PEBS_FRONTEND 0x000003f7
  384. #define MSR_IA32_POWER_CTL 0x000001fc
  385. #define MSR_IA32_MC0_CTL 0x00000400
  386. #define MSR_IA32_MC0_STATUS 0x00000401
  387. #define MSR_IA32_MC0_ADDR 0x00000402
  388. #define MSR_IA32_MC0_MISC 0x00000403
  389. /* C-state Residency Counters */
  390. #define MSR_PKG_C3_RESIDENCY 0x000003f8
  391. #define MSR_PKG_C6_RESIDENCY 0x000003f9
  392. #define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa
  393. #define MSR_PKG_C7_RESIDENCY 0x000003fa
  394. #define MSR_CORE_C3_RESIDENCY 0x000003fc
  395. #define MSR_CORE_C6_RESIDENCY 0x000003fd
  396. #define MSR_CORE_C7_RESIDENCY 0x000003fe
  397. #define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff
  398. #define MSR_PKG_C2_RESIDENCY 0x0000060d
  399. #define MSR_PKG_C8_RESIDENCY 0x00000630
  400. #define MSR_PKG_C9_RESIDENCY 0x00000631
  401. #define MSR_PKG_C10_RESIDENCY 0x00000632
  402. /* Interrupt Response Limit */
  403. #define MSR_PKGC3_IRTL 0x0000060a
  404. #define MSR_PKGC6_IRTL 0x0000060b
  405. #define MSR_PKGC7_IRTL 0x0000060c
  406. #define MSR_PKGC8_IRTL 0x00000633
  407. #define MSR_PKGC9_IRTL 0x00000634
  408. #define MSR_PKGC10_IRTL 0x00000635
  409. /* Run Time Average Power Limiting (RAPL) Interface */
  410. #define MSR_RAPL_POWER_UNIT 0x00000606
  411. #define MSR_PKG_POWER_LIMIT 0x00000610
  412. #define MSR_PKG_ENERGY_STATUS 0x00000611
  413. #define MSR_PKG_PERF_STATUS 0x00000613
  414. #define MSR_PKG_POWER_INFO 0x00000614
  415. #define MSR_DRAM_POWER_LIMIT 0x00000618
  416. #define MSR_DRAM_ENERGY_STATUS 0x00000619
  417. #define MSR_DRAM_PERF_STATUS 0x0000061b
  418. #define MSR_DRAM_POWER_INFO 0x0000061c
  419. #define MSR_PP0_POWER_LIMIT 0x00000638
  420. #define MSR_PP0_ENERGY_STATUS 0x00000639
  421. #define MSR_PP0_POLICY 0x0000063a
  422. #define MSR_PP0_PERF_STATUS 0x0000063b
  423. #define MSR_PP1_POWER_LIMIT 0x00000640
  424. #define MSR_PP1_ENERGY_STATUS 0x00000641
  425. #define MSR_PP1_POLICY 0x00000642
  426. /* Config TDP MSRs */
  427. #define MSR_CONFIG_TDP_NOMINAL 0x00000648
  428. #define MSR_CONFIG_TDP_LEVEL_1 0x00000649
  429. #define MSR_CONFIG_TDP_LEVEL_2 0x0000064A
  430. #define MSR_CONFIG_TDP_CONTROL 0x0000064B
  431. #define MSR_TURBO_ACTIVATION_RATIO 0x0000064C
  432. #define MSR_PLATFORM_ENERGY_STATUS 0x0000064D
  433. #define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658
  434. #define MSR_PKG_ANY_CORE_C0_RES 0x00000659
  435. #define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A
  436. #define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B
  437. #define MSR_CORE_C1_RES 0x00000660
  438. #define MSR_MODULE_C6_RES_MS 0x00000664
  439. #define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668
  440. #define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669
  441. #define MSR_ATOM_CORE_RATIOS 0x0000066a
  442. #define MSR_ATOM_CORE_VIDS 0x0000066b
  443. #define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c
  444. #define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d
  445. #define MSR_CORE_PERF_LIMIT_REASONS 0x00000690
  446. #define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0
  447. #define MSR_RING_PERF_LIMIT_REASONS 0x000006B1
  448. /* Hardware P state interface */
  449. #define MSR_PPERF 0x0000064e
  450. #define MSR_PERF_LIMIT_REASONS 0x0000064f
  451. #define MSR_PM_ENABLE 0x00000770
  452. #define MSR_HWP_CAPABILITIES 0x00000771
  453. #define MSR_HWP_REQUEST_PKG 0x00000772
  454. #define MSR_HWP_INTERRUPT 0x00000773
  455. #define MSR_HWP_REQUEST 0x00000774
  456. #define MSR_HWP_STATUS 0x00000777
  457. /* CPUID.6.EAX */
  458. #define HWP_BASE_BIT (1<<7)
  459. #define HWP_NOTIFICATIONS_BIT (1<<8)
  460. #define HWP_ACTIVITY_WINDOW_BIT (1<<9)
  461. #define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10)
  462. #define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11)
  463. /* IA32_HWP_CAPABILITIES */
  464. #define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff)
  465. #define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff)
  466. #define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff)
  467. #define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff)
  468. /* IA32_HWP_REQUEST */
  469. #define HWP_MIN_PERF(x) (x & 0xff)
  470. #define HWP_MAX_PERF(x) ((x & 0xff) << 8)
  471. #define HWP_DESIRED_PERF(x) ((x & 0xff) << 16)
  472. #define HWP_ENERGY_PERF_PREFERENCE(x) (((unsigned long long) x & 0xff) << 24)
  473. #define HWP_EPP_PERFORMANCE 0x00
  474. #define HWP_EPP_BALANCE_PERFORMANCE 0x80
  475. #define HWP_EPP_BALANCE_POWERSAVE 0xC0
  476. #define HWP_EPP_POWERSAVE 0xFF
  477. #define HWP_ACTIVITY_WINDOW(x) ((unsigned long long)(x & 0xff3) << 32)
  478. #define HWP_PACKAGE_CONTROL(x) ((unsigned long long)(x & 0x1) << 42)
  479. /* IA32_HWP_STATUS */
  480. #define HWP_GUARANTEED_CHANGE(x) (x & 0x1)
  481. #define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4)
  482. /* IA32_HWP_INTERRUPT */
  483. #define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1)
  484. #define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2)
  485. #define MSR_AMD64_MC0_MASK 0xc0010044
  486. #define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x))
  487. #define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x))
  488. #define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x))
  489. #define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x))
  490. #define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x))
  491. /* These are consecutive and not in the normal 4er MCE bank block */
  492. #define MSR_IA32_MC0_CTL2 0x00000280
  493. #define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x))
  494. #define MSR_P6_PERFCTR0 0x000000c1
  495. #define MSR_P6_PERFCTR1 0x000000c2
  496. #define MSR_P6_EVNTSEL0 0x00000186
  497. #define MSR_P6_EVNTSEL1 0x00000187
  498. #define MSR_KNC_PERFCTR0 0x00000020
  499. #define MSR_KNC_PERFCTR1 0x00000021
  500. #define MSR_KNC_EVNTSEL0 0x00000028
  501. #define MSR_KNC_EVNTSEL1 0x00000029
  502. /* Alternative perfctr range with full access. */
  503. #define MSR_IA32_PMC0 0x000004c1
  504. /* AMD64 MSRs. Not complete. See the architecture manual for a more
  505. complete list. */
  506. #define MSR_AMD64_PATCH_LEVEL 0x0000008b
  507. #define MSR_AMD64_TSC_RATIO 0xc0000104
  508. #define MSR_AMD64_NB_CFG 0xc001001f
  509. #define MSR_AMD64_PATCH_LOADER 0xc0010020
  510. #define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140
  511. #define MSR_AMD64_OSVW_STATUS 0xc0010141
  512. #define MSR_AMD64_LS_CFG 0xc0011020
  513. #define MSR_AMD64_DC_CFG 0xc0011022
  514. #define MSR_AMD64_BU_CFG2 0xc001102a
  515. #define MSR_AMD64_IBSFETCHCTL 0xc0011030
  516. #define MSR_AMD64_IBSFETCHLINAD 0xc0011031
  517. #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032
  518. #define MSR_AMD64_IBSFETCH_REG_COUNT 3
  519. #define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1)
  520. #define MSR_AMD64_IBSOPCTL 0xc0011033
  521. #define MSR_AMD64_IBSOPRIP 0xc0011034
  522. #define MSR_AMD64_IBSOPDATA 0xc0011035
  523. #define MSR_AMD64_IBSOPDATA2 0xc0011036
  524. #define MSR_AMD64_IBSOPDATA3 0xc0011037
  525. #define MSR_AMD64_IBSDCLINAD 0xc0011038
  526. #define MSR_AMD64_IBSDCPHYSAD 0xc0011039
  527. #define MSR_AMD64_IBSOP_REG_COUNT 7
  528. #define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1)
  529. #define MSR_AMD64_IBSCTL 0xc001103a
  530. #define MSR_AMD64_IBSBRTARGET 0xc001103b
  531. #define MSR_AMD64_IBSOPDATA4 0xc001103d
  532. #define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */
  533. #define MSR_AMD64_SEV 0xc0010131
  534. #define MSR_AMD64_SEV_ENABLED_BIT 0
  535. #define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT)
  536. /* Fam 17h MSRs */
  537. #define MSR_F17H_IRPERF 0xc00000e9
  538. /* Fam 16h MSRs */
  539. #define MSR_F16H_L2I_PERF_CTL 0xc0010230
  540. #define MSR_F16H_L2I_PERF_CTR 0xc0010231
  541. #define MSR_F16H_DR1_ADDR_MASK 0xc0011019
  542. #define MSR_F16H_DR2_ADDR_MASK 0xc001101a
  543. #define MSR_F16H_DR3_ADDR_MASK 0xc001101b
  544. #define MSR_F16H_DR0_ADDR_MASK 0xc0011027
  545. /* Fam 15h MSRs */
  546. #define MSR_F15H_PERF_CTL 0xc0010200
  547. #define MSR_F15H_PERF_CTR 0xc0010201
  548. #define MSR_F15H_NB_PERF_CTL 0xc0010240
  549. #define MSR_F15H_NB_PERF_CTR 0xc0010241
  550. #define MSR_F15H_PTSC 0xc0010280
  551. #define MSR_F15H_IC_CFG 0xc0011021
  552. /* Fam 10h MSRs */
  553. #define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058
  554. #define FAM10H_MMIO_CONF_ENABLE (1<<0)
  555. #define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf
  556. #define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
  557. #define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL
  558. #define FAM10H_MMIO_CONF_BASE_SHIFT 20
  559. #define MSR_FAM10H_NODE_ID 0xc001100c
  560. #define MSR_F10H_DECFG 0xc0011029
  561. #define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT 1
  562. #define MSR_F10H_DECFG_LFENCE_SERIALIZE BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT)
  563. /* K8 MSRs */
  564. #define MSR_K8_TOP_MEM1 0xc001001a
  565. #define MSR_K8_TOP_MEM2 0xc001001d
  566. #define MSR_K8_SYSCFG 0xc0010010
  567. #define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT 23
  568. #define MSR_K8_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT)
  569. #define MSR_K8_INT_PENDING_MSG 0xc0010055
  570. /* C1E active bits in int pending message */
  571. #define K8_INTP_C1E_ACTIVE_MASK 0x18000000
  572. #define MSR_K8_TSEG_ADDR 0xc0010112
  573. #define MSR_K8_TSEG_MASK 0xc0010113
  574. #define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */
  575. #define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */
  576. #define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */
  577. /* K7 MSRs */
  578. #define MSR_K7_EVNTSEL0 0xc0010000
  579. #define MSR_K7_PERFCTR0 0xc0010004
  580. #define MSR_K7_EVNTSEL1 0xc0010001
  581. #define MSR_K7_PERFCTR1 0xc0010005
  582. #define MSR_K7_EVNTSEL2 0xc0010002
  583. #define MSR_K7_PERFCTR2 0xc0010006
  584. #define MSR_K7_EVNTSEL3 0xc0010003
  585. #define MSR_K7_PERFCTR3 0xc0010007
  586. #define MSR_K7_CLK_CTL 0xc001001b
  587. #define MSR_K7_HWCR 0xc0010015
  588. #define MSR_K7_HWCR_SMMLOCK_BIT 0
  589. #define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT)
  590. #define MSR_K7_FID_VID_CTL 0xc0010041
  591. #define MSR_K7_FID_VID_STATUS 0xc0010042
  592. /* K6 MSRs */
  593. #define MSR_K6_WHCR 0xc0000082
  594. #define MSR_K6_UWCCR 0xc0000085
  595. #define MSR_K6_EPMR 0xc0000086
  596. #define MSR_K6_PSOR 0xc0000087
  597. #define MSR_K6_PFIR 0xc0000088
  598. /* Centaur-Hauls/IDT defined MSRs. */
  599. #define MSR_IDT_FCR1 0x00000107
  600. #define MSR_IDT_FCR2 0x00000108
  601. #define MSR_IDT_FCR3 0x00000109
  602. #define MSR_IDT_FCR4 0x0000010a
  603. #define MSR_IDT_MCR0 0x00000110
  604. #define MSR_IDT_MCR1 0x00000111
  605. #define MSR_IDT_MCR2 0x00000112
  606. #define MSR_IDT_MCR3 0x00000113
  607. #define MSR_IDT_MCR4 0x00000114
  608. #define MSR_IDT_MCR5 0x00000115
  609. #define MSR_IDT_MCR6 0x00000116
  610. #define MSR_IDT_MCR7 0x00000117
  611. #define MSR_IDT_MCR_CTRL 0x00000120
  612. /* VIA Cyrix defined MSRs*/
  613. #define MSR_VIA_FCR 0x00001107
  614. #define MSR_VIA_LONGHAUL 0x0000110a
  615. #define MSR_VIA_RNG 0x0000110b
  616. #define MSR_VIA_BCR2 0x00001147
  617. /* Transmeta defined MSRs */
  618. #define MSR_TMTA_LONGRUN_CTRL 0x80868010
  619. #define MSR_TMTA_LONGRUN_FLAGS 0x80868011
  620. #define MSR_TMTA_LRTI_READOUT 0x80868018
  621. #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a
  622. /* Intel defined MSRs. */
  623. #define MSR_IA32_P5_MC_ADDR 0x00000000
  624. #define MSR_IA32_P5_MC_TYPE 0x00000001
  625. #define MSR_IA32_TSC 0x00000010
  626. #define MSR_IA32_PLATFORM_ID 0x00000017
  627. #define MSR_IA32_EBL_CR_POWERON 0x0000002a
  628. #define MSR_EBC_FREQUENCY_ID 0x0000002c
  629. #define MSR_SMI_COUNT 0x00000034
  630. #define MSR_IA32_FEATURE_CONTROL 0x0000003a
  631. #define MSR_IA32_TSC_ADJUST 0x0000003b
  632. #define MSR_IA32_BNDCFGS 0x00000d90
  633. #define MSR_IA32_BNDCFGS_RSVD 0x00000ffc
  634. #define MSR_IA32_XSS 0x00000da0
  635. #define FEATURE_CONTROL_LOCKED (1<<0)
  636. #define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX (1<<1)
  637. #define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX (1<<2)
  638. #define FEATURE_CONTROL_LMCE (1<<20)
  639. #define MSR_IA32_APICBASE 0x0000001b
  640. #define MSR_IA32_APICBASE_BSP (1<<8)
  641. #define MSR_IA32_APICBASE_ENABLE (1<<11)
  642. #define MSR_IA32_APICBASE_BASE (0xfffff<<12)
  643. #define MSR_IA32_TSCDEADLINE 0x000006e0
  644. #define MSR_IA32_UCODE_WRITE 0x00000079
  645. #define MSR_IA32_UCODE_REV 0x0000008b
  646. #define MSR_IA32_SMM_MONITOR_CTL 0x0000009b
  647. #define MSR_IA32_SMBASE 0x0000009e
  648. #define MSR_IA32_PERF_STATUS 0x00000198
  649. #define MSR_IA32_PERF_CTL 0x00000199
  650. #define INTEL_PERF_CTL_MASK 0xffff
  651. #define MSR_AMD_PSTATE_DEF_BASE 0xc0010064
  652. #define MSR_AMD_PERF_STATUS 0xc0010063
  653. #define MSR_AMD_PERF_CTL 0xc0010062
  654. #define MSR_IA32_MPERF 0x000000e7
  655. #define MSR_IA32_APERF 0x000000e8
  656. #define MSR_IA32_THERM_CONTROL 0x0000019a
  657. #define MSR_IA32_THERM_INTERRUPT 0x0000019b
  658. #define THERM_INT_HIGH_ENABLE (1 << 0)
  659. #define THERM_INT_LOW_ENABLE (1 << 1)
  660. #define THERM_INT_PLN_ENABLE (1 << 24)
  661. #define MSR_IA32_THERM_STATUS 0x0000019c
  662. #define THERM_STATUS_PROCHOT (1 << 0)
  663. #define THERM_STATUS_POWER_LIMIT (1 << 10)
  664. #define MSR_THERM2_CTL 0x0000019d
  665. #define MSR_THERM2_CTL_TM_SELECT (1ULL << 16)
  666. #define MSR_IA32_MISC_ENABLE 0x000001a0
  667. #define MSR_IA32_TEMPERATURE_TARGET 0x000001a2
  668. #define MSR_MISC_FEATURE_CONTROL 0x000001a4
  669. #define MSR_MISC_PWR_MGMT 0x000001aa
  670. #define MSR_IA32_ENERGY_PERF_BIAS 0x000001b0
  671. #define ENERGY_PERF_BIAS_PERFORMANCE 0
  672. #define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE 4
  673. #define ENERGY_PERF_BIAS_NORMAL 6
  674. #define ENERGY_PERF_BIAS_BALANCE_POWERSAVE 8
  675. #define ENERGY_PERF_BIAS_POWERSAVE 15
  676. #define MSR_IA32_PACKAGE_THERM_STATUS 0x000001b1
  677. #define PACKAGE_THERM_STATUS_PROCHOT (1 << 0)
  678. #define PACKAGE_THERM_STATUS_POWER_LIMIT (1 << 10)
  679. #define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x000001b2
  680. #define PACKAGE_THERM_INT_HIGH_ENABLE (1 << 0)
  681. #define PACKAGE_THERM_INT_LOW_ENABLE (1 << 1)
  682. #define PACKAGE_THERM_INT_PLN_ENABLE (1 << 24)
  683. /* Thermal Thresholds Support */
  684. #define THERM_INT_THRESHOLD0_ENABLE (1 << 15)
  685. #define THERM_SHIFT_THRESHOLD0 8
  686. #define THERM_MASK_THRESHOLD0 (0x7f << THERM_SHIFT_THRESHOLD0)
  687. #define THERM_INT_THRESHOLD1_ENABLE (1 << 23)
  688. #define THERM_SHIFT_THRESHOLD1 16
  689. #define THERM_MASK_THRESHOLD1 (0x7f << THERM_SHIFT_THRESHOLD1)
  690. #define THERM_STATUS_THRESHOLD0 (1 << 6)
  691. #define THERM_LOG_THRESHOLD0 (1 << 7)
  692. #define THERM_STATUS_THRESHOLD1 (1 << 8)
  693. #define THERM_LOG_THRESHOLD1 (1 << 9)
  694. /* MISC_ENABLE bits: architectural */
  695. #define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT 0
  696. #define MSR_IA32_MISC_ENABLE_FAST_STRING (1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
  697. #define MSR_IA32_MISC_ENABLE_TCC_BIT 1
  698. #define MSR_IA32_MISC_ENABLE_TCC (1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
  699. #define MSR_IA32_MISC_ENABLE_EMON_BIT 7
  700. #define MSR_IA32_MISC_ENABLE_EMON (1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
  701. #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT 11
  702. #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
  703. #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT 12
  704. #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
  705. #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT 16
  706. #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP (1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
  707. #define MSR_IA32_MISC_ENABLE_MWAIT_BIT 18
  708. #define MSR_IA32_MISC_ENABLE_MWAIT (1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
  709. #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT 22
  710. #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID (1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
  711. #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT 23
  712. #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
  713. #define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT 34
  714. #define MSR_IA32_MISC_ENABLE_XD_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)
  715. /* MISC_ENABLE bits: model-specific, meaning may vary from core to core */
  716. #define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT 2
  717. #define MSR_IA32_MISC_ENABLE_X87_COMPAT (1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
  718. #define MSR_IA32_MISC_ENABLE_TM1_BIT 3
  719. #define MSR_IA32_MISC_ENABLE_TM1 (1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
  720. #define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT 4
  721. #define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
  722. #define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT 6
  723. #define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
  724. #define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT 8
  725. #define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
  726. #define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT 9
  727. #define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
  728. #define MSR_IA32_MISC_ENABLE_FERR_BIT 10
  729. #define MSR_IA32_MISC_ENABLE_FERR (1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
  730. #define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT 10
  731. #define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX (1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
  732. #define MSR_IA32_MISC_ENABLE_TM2_BIT 13
  733. #define MSR_IA32_MISC_ENABLE_TM2 (1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
  734. #define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT 19
  735. #define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
  736. #define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT 20
  737. #define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
  738. #define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT 24
  739. #define MSR_IA32_MISC_ENABLE_L1D_CONTEXT (1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
  740. #define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT 37
  741. #define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
  742. #define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT 38
  743. #define MSR_IA32_MISC_ENABLE_TURBO_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
  744. #define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT 39
  745. #define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)
  746. /* MISC_FEATURES_ENABLES non-architectural features */
  747. #define MSR_MISC_FEATURES_ENABLES 0x00000140
  748. #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT 0
  749. #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
  750. #define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT 1
  751. #define MSR_IA32_TSC_DEADLINE 0x000006E0
  752. /* P4/Xeon+ specific */
  753. #define MSR_IA32_MCG_EAX 0x00000180
  754. #define MSR_IA32_MCG_EBX 0x00000181
  755. #define MSR_IA32_MCG_ECX 0x00000182
  756. #define MSR_IA32_MCG_EDX 0x00000183
  757. #define MSR_IA32_MCG_ESI 0x00000184
  758. #define MSR_IA32_MCG_EDI 0x00000185
  759. #define MSR_IA32_MCG_EBP 0x00000186
  760. #define MSR_IA32_MCG_ESP 0x00000187
  761. #define MSR_IA32_MCG_EFLAGS 0x00000188
  762. #define MSR_IA32_MCG_EIP 0x00000189
  763. #define MSR_IA32_MCG_RESERVED 0x0000018a
  764. /* Pentium IV performance counter MSRs */
  765. #define MSR_P4_BPU_PERFCTR0 0x00000300
  766. #define MSR_P4_BPU_PERFCTR1 0x00000301
  767. #define MSR_P4_BPU_PERFCTR2 0x00000302
  768. #define MSR_P4_BPU_PERFCTR3 0x00000303
  769. #define MSR_P4_MS_PERFCTR0 0x00000304
  770. #define MSR_P4_MS_PERFCTR1 0x00000305
  771. #define MSR_P4_MS_PERFCTR2 0x00000306
  772. #define MSR_P4_MS_PERFCTR3 0x00000307
  773. #define MSR_P4_FLAME_PERFCTR0 0x00000308
  774. #define MSR_P4_FLAME_PERFCTR1 0x00000309
  775. #define MSR_P4_FLAME_PERFCTR2 0x0000030a
  776. #define MSR_P4_FLAME_PERFCTR3 0x0000030b
  777. #define MSR_P4_IQ_PERFCTR0 0x0000030c
  778. #define MSR_P4_IQ_PERFCTR1 0x0000030d
  779. #define MSR_P4_IQ_PERFCTR2 0x0000030e
  780. #define MSR_P4_IQ_PERFCTR3 0x0000030f
  781. #define MSR_P4_IQ_PERFCTR4 0x00000310
  782. #define MSR_P4_IQ_PERFCTR5 0x00000311
  783. #define MSR_P4_BPU_CCCR0 0x00000360
  784. #define MSR_P4_BPU_CCCR1 0x00000361
  785. #define MSR_P4_BPU_CCCR2 0x00000362
  786. #define MSR_P4_BPU_CCCR3 0x00000363
  787. #define MSR_P4_MS_CCCR0 0x00000364
  788. #define MSR_P4_MS_CCCR1 0x00000365
  789. #define MSR_P4_MS_CCCR2 0x00000366
  790. #define MSR_P4_MS_CCCR3 0x00000367
  791. #define MSR_P4_FLAME_CCCR0 0x00000368
  792. #define MSR_P4_FLAME_CCCR1 0x00000369
  793. #define MSR_P4_FLAME_CCCR2 0x0000036a
  794. #define MSR_P4_FLAME_CCCR3 0x0000036b
  795. #define MSR_P4_IQ_CCCR0 0x0000036c
  796. #define MSR_P4_IQ_CCCR1 0x0000036d
  797. #define MSR_P4_IQ_CCCR2 0x0000036e
  798. #define MSR_P4_IQ_CCCR3 0x0000036f
  799. #define MSR_P4_IQ_CCCR4 0x00000370
  800. #define MSR_P4_IQ_CCCR5 0x00000371
  801. #define MSR_P4_ALF_ESCR0 0x000003ca
  802. #define MSR_P4_ALF_ESCR1 0x000003cb
  803. #define MSR_P4_BPU_ESCR0 0x000003b2
  804. #define MSR_P4_BPU_ESCR1 0x000003b3
  805. #define MSR_P4_BSU_ESCR0 0x000003a0
  806. #define MSR_P4_BSU_ESCR1 0x000003a1
  807. #define MSR_P4_CRU_ESCR0 0x000003b8
  808. #define MSR_P4_CRU_ESCR1 0x000003b9
  809. #define MSR_P4_CRU_ESCR2 0x000003cc
  810. #define MSR_P4_CRU_ESCR3 0x000003cd
  811. #define MSR_P4_CRU_ESCR4 0x000003e0
  812. #define MSR_P4_CRU_ESCR5 0x000003e1
  813. #define MSR_P4_DAC_ESCR0 0x000003a8
  814. #define MSR_P4_DAC_ESCR1 0x000003a9
  815. #define MSR_P4_FIRM_ESCR0 0x000003a4
  816. #define MSR_P4_FIRM_ESCR1 0x000003a5
  817. #define MSR_P4_FLAME_ESCR0 0x000003a6
  818. #define MSR_P4_FLAME_ESCR1 0x000003a7
  819. #define MSR_P4_FSB_ESCR0 0x000003a2
  820. #define MSR_P4_FSB_ESCR1 0x000003a3
  821. #define MSR_P4_IQ_ESCR0 0x000003ba
  822. #define MSR_P4_IQ_ESCR1 0x000003bb
  823. #define MSR_P4_IS_ESCR0 0x000003b4
  824. #define MSR_P4_IS_ESCR1 0x000003b5
  825. #define MSR_P4_ITLB_ESCR0 0x000003b6
  826. #define MSR_P4_ITLB_ESCR1 0x000003b7
  827. #define MSR_P4_IX_ESCR0 0x000003c8
  828. #define MSR_P4_IX_ESCR1 0x000003c9
  829. #define MSR_P4_MOB_ESCR0 0x000003aa
  830. #define MSR_P4_MOB_ESCR1 0x000003ab
  831. #define MSR_P4_MS_ESCR0 0x000003c0
  832. #define MSR_P4_MS_ESCR1 0x000003c1
  833. #define MSR_P4_PMH_ESCR0 0x000003ac
  834. #define MSR_P4_PMH_ESCR1 0x000003ad
  835. #define MSR_P4_RAT_ESCR0 0x000003bc
  836. #define MSR_P4_RAT_ESCR1 0x000003bd
  837. #define MSR_P4_SAAT_ESCR0 0x000003ae
  838. #define MSR_P4_SAAT_ESCR1 0x000003af
  839. #define MSR_P4_SSU_ESCR0 0x000003be
  840. #define MSR_P4_SSU_ESCR1 0x000003bf /* guess: not in manual */
  841. #define MSR_P4_TBPU_ESCR0 0x000003c2
  842. #define MSR_P4_TBPU_ESCR1 0x000003c3
  843. #define MSR_P4_TC_ESCR0 0x000003c4
  844. #define MSR_P4_TC_ESCR1 0x000003c5
  845. #define MSR_P4_U2L_ESCR0 0x000003b0
  846. #define MSR_P4_U2L_ESCR1 0x000003b1
  847. #define MSR_P4_PEBS_MATRIX_VERT 0x000003f2
  848. /* Intel Core-based CPU performance counters */
  849. #define MSR_CORE_PERF_FIXED_CTR0 0x00000309
  850. #define MSR_CORE_PERF_FIXED_CTR1 0x0000030a
  851. #define MSR_CORE_PERF_FIXED_CTR2 0x0000030b
  852. #define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d
  853. #define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e
  854. #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f
  855. #define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x00000390
  856. /* Geode defined MSRs */
  857. #define MSR_GEODE_BUSCONT_CONF0 0x00001900
  858. /* Intel VT MSRs */
  859. #define MSR_IA32_VMX_BASIC 0x00000480
  860. #define MSR_IA32_VMX_PINBASED_CTLS 0x00000481
  861. #define MSR_IA32_VMX_PROCBASED_CTLS 0x00000482
  862. #define MSR_IA32_VMX_EXIT_CTLS 0x00000483
  863. #define MSR_IA32_VMX_ENTRY_CTLS 0x00000484
  864. #define MSR_IA32_VMX_MISC 0x00000485
  865. #define MSR_IA32_VMX_CR0_FIXED0 0x00000486
  866. #define MSR_IA32_VMX_CR0_FIXED1 0x00000487
  867. #define MSR_IA32_VMX_CR4_FIXED0 0x00000488
  868. #define MSR_IA32_VMX_CR4_FIXED1 0x00000489
  869. #define MSR_IA32_VMX_VMCS_ENUM 0x0000048a
  870. #define MSR_IA32_VMX_PROCBASED_CTLS2 0x0000048b
  871. #define MSR_IA32_VMX_EPT_VPID_CAP 0x0000048c
  872. #define MSR_IA32_VMX_TRUE_PINBASED_CTLS 0x0000048d
  873. #define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
  874. #define MSR_IA32_VMX_TRUE_EXIT_CTLS 0x0000048f
  875. #define MSR_IA32_VMX_TRUE_ENTRY_CTLS 0x00000490
  876. #define MSR_IA32_VMX_VMFUNC 0x00000491
  877. /* VMX_BASIC bits and bitmasks */
  878. #define VMX_BASIC_VMCS_SIZE_SHIFT 32
  879. #define VMX_BASIC_TRUE_CTLS (1ULL << 55)
  880. #define VMX_BASIC_64 0x0001000000000000LLU
  881. #define VMX_BASIC_MEM_TYPE_SHIFT 50
  882. #define VMX_BASIC_MEM_TYPE_MASK 0x003c000000000000LLU
  883. #define VMX_BASIC_MEM_TYPE_WB 6LLU
  884. #define VMX_BASIC_INOUT 0x0040000000000000LLU
  885. /* MSR_IA32_VMX_MISC bits */
  886. #define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
  887. #define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE 0x1F
  888. /* AMD-V MSRs */
  889. #define MSR_VM_CR 0xc0010114
  890. #define MSR_VM_IGNNE 0xc0010115
  891. #define MSR_VM_HSAVE_PA 0xc0010117
  892. #endif /* !SELFTEST_KVM_X86_H */