#ifndef __ASM_MSR_INDEX_H
#define __ASM_MSR_INDEX_H

/*
 * CPU model specific register (MSR) numbers
 *
 * Definitions for an MSR should follow this style:
 *
 * #define MSR_$NAME                        0x$INDEX
 * #define $NAME_$FIELD1                    (_AC($X, ULL) << $POS1)
 * #define $NAME_$FIELD2                    (_AC($Y, ULL) << $POS2)
 *
 * Blocks of related constants should be sorted by MSR index.  The constant
 * names should be as concise as possible, and the bit names may be
 * abbreviated.  Exceptions will be considered on a case-by-case basis.
 */

#define MSR_APIC_BASE                       0x0000001b
#define APIC_BASE_BSP                       (_AC(1, ULL) << 8)
#define APIC_BASE_EXTD                      (_AC(1, ULL) << 10)
#define APIC_BASE_ENABLE                    (_AC(1, ULL) << 11)
#define APIC_BASE_ADDR_MASK                 0x000ffffffffff000ULL

#define MSR_TEST_CTRL                       0x00000033
#define TEST_CTRL_SPLITLOCK_DETECT          (_AC(1, ULL) << 29)
#define TEST_CTRL_SPLITLOCK_DISABLE         (_AC(1, ULL) << 31)

#define MSR_INTEL_CORE_THREAD_COUNT         0x00000035
#define MSR_CTC_THREAD_MASK                 0x0000ffff
#define MSR_CTC_CORE_MASK                   0xffff0000

#define MSR_SPEC_CTRL                       0x00000048
#define SPEC_CTRL_IBRS                      (_AC(1, ULL) << 0)
#define SPEC_CTRL_STIBP                     (_AC(1, ULL) << 1)
#define SPEC_CTRL_SSBD                      (_AC(1, ULL) << 2)

#define MSR_PRED_CMD                        0x00000049
#define PRED_CMD_IBPB                       (_AC(1, ULL) << 0)

#define MSR_PPIN_CTL                        0x0000004e
#define PPIN_LOCKOUT                        (_AC(1, ULL) << 0)
#define PPIN_ENABLE                         (_AC(1, ULL) << 1)
#define MSR_PPIN                            0x0000004f

#define MSR_CORE_CAPABILITIES               0x000000cf
#define CORE_CAPS_SPLITLOCK_DETECT          (_AC(1, ULL) << 5)

#define MSR_ARCH_CAPABILITIES               0x0000010a
#define ARCH_CAPS_RDCL_NO                   (_AC(1, ULL) << 0)
#define ARCH_CAPS_IBRS_ALL                  (_AC(1, ULL) << 1)
#define ARCH_CAPS_RSBA                      (_AC(1, ULL) << 2)
#define ARCH_CAPS_SKIP_L1DFL                (_AC(1, ULL) << 3)
#define ARCH_CAPS_SSB_NO                    (_AC(1, ULL) << 4)
#define ARCH_CAPS_MDS_NO                    (_AC(1, ULL) << 5)
#define ARCH_CAPS_IF_PSCHANGE_MC_NO         (_AC(1, ULL) << 6)
#define ARCH_CAPS_TSX_CTRL                  (_AC(1, ULL) << 7)
#define ARCH_CAPS_TAA_NO                    (_AC(1, ULL) << 8)

#define MSR_FLUSH_CMD                       0x0000010b
#define FLUSH_CMD_L1D                       (_AC(1, ULL) << 0)

#define MSR_TSX_FORCE_ABORT                 0x0000010f
#define TSX_FORCE_ABORT_RTM                 (_AC(1, ULL) << 0)

#define MSR_TSX_CTRL                        0x00000122
#define TSX_CTRL_RTM_DISABLE                (_AC(1, ULL) << 0)
#define TSX_CTRL_CPUID_CLEAR                (_AC(1, ULL) << 1)

#define MSR_MCU_OPT_CTRL                    0x00000123
#define MCU_OPT_CTRL_RNGDS_MITG_DIS         (_AC(1, ULL) << 0)

#define MSR_RTIT_OUTPUT_BASE                0x00000560
#define MSR_RTIT_OUTPUT_MASK                0x00000561
#define MSR_RTIT_CTL                        0x00000570
#define MSR_RTIT_STATUS                     0x00000571
#define MSR_RTIT_CR3_MATCH                  0x00000572
#define MSR_RTIT_ADDR_A(n)                  (0x00000580 + (n) * 2)
#define MSR_RTIT_ADDR_B(n)                  (0x00000581 + (n) * 2)

/*
 * Intel Running Average Power Limit (RAPL) interface.  Power plane base
 * addresses (MSR_*_POWER_LIMIT) are model specific, but have so far been
 * consistent since their introduction in Sandy Bridge.
 *
 * Offsets of functionality from the power plane base are architectural,
 * but not all power planes support all functionality.
 */
#define MSR_RAPL_POWER_UNIT                 0x00000606

#define MSR_PKG_POWER_LIMIT                 0x00000610
#define MSR_PKG_ENERGY_STATUS               0x00000611
#define MSR_PKG_PERF_STATUS                 0x00000613
#define MSR_PKG_POWER_INFO                  0x00000614

#define MSR_DRAM_POWER_LIMIT                0x00000618
#define MSR_DRAM_ENERGY_STATUS              0x00000619
#define MSR_DRAM_PERF_STATUS                0x0000061b
#define MSR_DRAM_POWER_INFO                 0x0000061c

#define MSR_PP0_POWER_LIMIT                 0x00000638
#define MSR_PP0_ENERGY_STATUS               0x00000639
#define MSR_PP0_POLICY                      0x0000063a

#define MSR_PP1_POWER_LIMIT                 0x00000640
#define MSR_PP1_ENERGY_STATUS               0x00000641
#define MSR_PP1_POLICY                      0x00000642

/* Intel Platform-wide power interface. */
#define MSR_PLATFORM_ENERGY_COUNTER         0x0000064d
#define MSR_PLATFORM_POWER_LIMIT            0x0000065c
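
/*
 * Illustrative sketch only, not part of the interface definitions above:
 * the energy status unit field of MSR_RAPL_POWER_UNIT (assumed, per the
 * SDM, to live in bits 12:8 and to express counts of 1/2^ESU joules) is
 * used to scale a raw MSR_PKG_ENERGY_STATUS reading into microjoules.
 * The helper name is hypothetical, and it operates on raw MSR values
 * supplied by the caller, so no MSR access primitive is needed here.
 */
static inline unsigned long long rapl_energy_to_uj(
    unsigned long long unit_msr, unsigned long long energy_msr)
{
    unsigned int esu = (unit_msr >> 8) & 0x1f;      /* Energy status unit. */
    unsigned long long counts = energy_msr & 0xffffffffULL; /* 32-bit counter. */

    /* Each count is 1/2^esu joules; convert to microjoules. */
    return (counts * 1000000ULL) >> esu;
}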

#define MSR_U_CET                           0x000006a0
#define MSR_S_CET                           0x000006a2
#define CET_SHSTK_EN                        (_AC(1, ULL) << 0)
#define CET_WRSS_EN                         (_AC(1, ULL) << 1)

#define MSR_PL0_SSP                         0x000006a4
#define MSR_PL1_SSP                         0x000006a5
#define MSR_PL2_SSP                         0x000006a6
#define MSR_PL3_SSP                         0x000006a7
#define MSR_INTERRUPT_SSP_TABLE             0x000006a8

#define MSR_PASID                           0x00000d93
#define PASID_PASID_MASK                    0x000fffff
#define PASID_VALID                         (_AC(1, ULL) << 31)

#define MSR_F15H_CU_POWER                   0xc001007a
#define MSR_F15H_CU_MAX_POWER               0xc001007b

#define MSR_AMD_RAPL_POWER_UNIT             0xc0010299
#define MSR_AMD_CORE_ENERGY_STATUS          0xc001029a
#define MSR_AMD_PKG_ENERGY_STATUS           0xc001029b

/*
 * Legacy MSR constants in need of cleanup.  No new MSRs below this comment.
 */

/* x86-64 specific MSRs */
#define MSR_EFER                0xc0000080 /* extended feature register */
#define MSR_STAR                0xc0000081 /* legacy mode SYSCALL target */
#define MSR_LSTAR               0xc0000082 /* long mode SYSCALL target */
#define MSR_CSTAR               0xc0000083 /* compat mode SYSCALL target */
#define MSR_SYSCALL_MASK        0xc0000084 /* EFLAGS mask for syscall */
#define MSR_FS_BASE             0xc0000100 /* 64bit FS base */
#define MSR_GS_BASE             0xc0000101 /* 64bit GS base */
#define MSR_SHADOW_GS_BASE      0xc0000102 /* SwapGS GS shadow */
#define MSR_TSC_AUX             0xc0000103 /* Auxiliary TSC */

/* EFER bits: */
#define _EFER_SCE               0  /* SYSCALL/SYSRET */
#define _EFER_LME               8  /* Long mode enable */
#define _EFER_LMA               10 /* Long mode active (read-only) */
#define _EFER_NX                11 /* No execute enable */
#define _EFER_SVME              12 /* AMD: SVM enable */
#define _EFER_FFXSE             14 /* AMD: Fast FXSAVE/FXRSTOR enable */

#define EFER_SCE                (1<<_EFER_SCE)
#define EFER_LME                (1<<_EFER_LME)
#define EFER_LMA                (1<<_EFER_LMA)
#define EFER_NX                 (1<<_EFER_NX)
#define EFER_SVME               (1<<_EFER_SVME)
#define EFER_FFXSE              (1<<_EFER_FFXSE)

#define EFER_KNOWN_MASK         (EFER_SCE | EFER_LME | EFER_LMA | EFER_NX | \
                                 EFER_SVME | EFER_FFXSE)

/* Intel MSRs. Some also available on other CPUs */
#define MSR_IA32_PERFCTR0               0x000000c1
#define MSR_IA32_A_PERFCTR0             0x000004c1
#define MSR_FSB_FREQ                    0x000000cd

#define MSR_NHM_SNB_PKG_CST_CFG_CTL     0x000000e2
#define NHM_C3_AUTO_DEMOTE              (1UL << 25)
#define NHM_C1_AUTO_DEMOTE              (1UL << 26)
#define ATM_LNC_C6_AUTO_DEMOTE          (1UL << 25)

#define MSR_MTRRcap                     0x000000fe
#define MTRRcap_VCNT                    0x000000ff

#define MSR_IA32_BBL_CR_CTL             0x00000119

#define MSR_IA32_SYSENTER_CS            0x00000174
#define MSR_IA32_SYSENTER_ESP           0x00000175
#define MSR_IA32_SYSENTER_EIP           0x00000176

#define MSR_IA32_MCG_CAP                0x00000179
#define MSR_IA32_MCG_STATUS             0x0000017a
#define MSR_IA32_MCG_CTL                0x0000017b
#define MSR_IA32_MCG_EXT_CTL            0x000004d0

#define MSR_IA32_PEBS_ENABLE            0x000003f1
#define MSR_IA32_DS_AREA                0x00000600
#define MSR_IA32_PERF_CAPABILITIES      0x00000345
/* Lower 6 bits define the format of the address in the LBR stack */
#define MSR_IA32_PERF_CAP_LBR_FORMAT    0x3f

#define MSR_IA32_BNDCFGS                0x00000d90
#define IA32_BNDCFGS_ENABLE             0x00000001
#define IA32_BNDCFGS_PRESERVE           0x00000002
#define IA32_BNDCFGS_RESERVED           0x00000ffc

#define MSR_IA32_XSS                    0x00000da0

#define MSR_MTRRfix64K_00000            0x00000250
#define MSR_MTRRfix16K_80000            0x00000258
#define MSR_MTRRfix16K_A0000            0x00000259
#define MSR_MTRRfix4K_C0000             0x00000268
#define MSR_MTRRfix4K_C8000             0x00000269
#define MSR_MTRRfix4K_D0000             0x0000026a
#define MSR_MTRRfix4K_D8000             0x0000026b
#define MSR_MTRRfix4K_E0000             0x0000026c
#define MSR_MTRRfix4K_E8000             0x0000026d
#define MSR_MTRRfix4K_F0000             0x0000026e
#define MSR_MTRRfix4K_F8000             0x0000026f
#define MSR_MTRRdefType                 0x000002ff
#define MTRRdefType_FE                  (1u << 10)
#define MTRRdefType_E                   (1u << 11)

#define MSR_IA32_DEBUGCTLMSR            0x000001d9
#define IA32_DEBUGCTLMSR_LBR            (1<<0)  /* Last Branch Record */
#define IA32_DEBUGCTLMSR_BTF            (1<<1)  /* Single Step on Branches */
#define IA32_DEBUGCTLMSR_TR             (1<<6)  /* Trace Message Enable */
#define IA32_DEBUGCTLMSR_BTS            (1<<7)  /* Branch Trace Store */
#define IA32_DEBUGCTLMSR_BTINT          (1<<8)  /* Branch Trace Interrupt */
#define IA32_DEBUGCTLMSR_BTS_OFF_OS     (1<<9)  /* BTS off if CPL 0 */
#define IA32_DEBUGCTLMSR_BTS_OFF_USR    (1<<10) /* BTS off if CPL > 0 */
#define IA32_DEBUGCTLMSR_RTM            (1<<15) /* RTM debugging enable */

#define MSR_IA32_LASTBRANCHFROMIP       0x000001db
#define MSR_IA32_LASTBRANCHTOIP         0x000001dc
#define MSR_IA32_LASTINTFROMIP          0x000001dd
#define MSR_IA32_LASTINTTOIP            0x000001de

#define MSR_IA32_POWER_CTL              0x000001fc

#define MSR_IA32_MTRR_PHYSBASE(n)       (0x00000200 + 2 * (n))
#define MSR_IA32_MTRR_PHYSMASK(n)       (0x00000201 + 2 * (n))

#define MSR_IA32_CR_PAT                 0x00000277
#define MSR_IA32_CR_PAT_RESET           0x0007040600070406ULL

#define MSR_IA32_MC0_CTL                0x00000400
#define MSR_IA32_MC0_STATUS             0x00000401
#define MSR_IA32_MC0_ADDR               0x00000402
#define MSR_IA32_MC0_MISC               0x00000403
#define MSR_IA32_MC0_CTL2               0x00000280
#define CMCI_EN                         (1UL<<30)
#define CMCI_THRESHOLD_MASK             0x7FFF

#define MSR_AMD64_MC0_MASK              0xc0010044

#define MSR_IA32_MCx_CTL(x)             (MSR_IA32_MC0_CTL + 4*(x))
#define MSR_IA32_MCx_STATUS(x)          (MSR_IA32_MC0_STATUS + 4*(x))
#define MSR_IA32_MCx_ADDR(x)            (MSR_IA32_MC0_ADDR + 4*(x))
#define MSR_IA32_MCx_MISC(x)            (MSR_IA32_MC0_MISC + 4*(x))
#define MSR_IA32_MCx_CTL2(x)            (MSR_IA32_MC0_CTL2 + (x))

#define MSR_AMD64_MCx_MASK(x)           (MSR_AMD64_MC0_MASK + (x))

/* MSRs & bits used for VMX enabling */
#define MSR_IA32_VMX_BASIC                      0x480
#define MSR_IA32_VMX_PINBASED_CTLS              0x481
#define MSR_IA32_VMX_PROCBASED_CTLS             0x482
#define MSR_IA32_VMX_EXIT_CTLS                  0x483
#define MSR_IA32_VMX_ENTRY_CTLS                 0x484
#define MSR_IA32_VMX_MISC                       0x485
#define MSR_IA32_VMX_CR0_FIXED0                 0x486
#define MSR_IA32_VMX_CR0_FIXED1                 0x487
#define MSR_IA32_VMX_CR4_FIXED0                 0x488
#define MSR_IA32_VMX_CR4_FIXED1                 0x489
#define MSR_IA32_VMX_VMCS_ENUM                  0x48a
#define MSR_IA32_VMX_PROCBASED_CTLS2            0x48b
#define MSR_IA32_VMX_EPT_VPID_CAP               0x48c
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS         0x48d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS        0x48e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS             0x48f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS            0x490
#define MSR_IA32_VMX_VMFUNC                     0x491

/*
 * K7/K8 MSRs.  Not complete.  See the architecture manual for a more
 * complete list.
 */
#define MSR_K7_EVNTSEL0                 0xc0010000
#define MSR_K7_PERFCTR0                 0xc0010004
#define MSR_K7_EVNTSEL1                 0xc0010001
#define MSR_K7_PERFCTR1                 0xc0010005
#define MSR_K7_EVNTSEL2                 0xc0010002
#define MSR_K7_PERFCTR2                 0xc0010006
#define MSR_K7_EVNTSEL3                 0xc0010003
#define MSR_K7_PERFCTR3                 0xc0010007
#define MSR_K8_TOP_MEM1                 0xc001001a
#define MSR_K7_CLK_CTL                  0xc001001b
#define MSR_K8_TOP_MEM2                 0xc001001d
#define MSR_K8_SYSCFG                   0xc0010010

#define K8_MTRRFIXRANGE_DRAM_ENABLE     0x00040000 /* MtrrFixDramEn bit */
#define K8_MTRRFIXRANGE_DRAM_MODIFY     0x00080000 /* MtrrFixDramModEn bit */
#define K8_MTRR_RDMEM_WRMEM_MASK        0x18181818 /* Mask: RdMem|WrMem */

#define MSR_K7_HWCR                     0xc0010015
#define MSR_K8_HWCR                     0xc0010015
#define MSR_K7_FID_VID_CTL              0xc0010041
#define MSR_K7_FID_VID_STATUS           0xc0010042
#define MSR_K8_PSTATE_LIMIT             0xc0010061
#define MSR_K8_PSTATE_CTRL              0xc0010062
#define MSR_K8_PSTATE_STATUS            0xc0010063
#define MSR_K8_PSTATE0                  0xc0010064
#define MSR_K8_PSTATE1                  0xc0010065
#define MSR_K8_PSTATE2                  0xc0010066
#define MSR_K8_PSTATE3                  0xc0010067
#define MSR_K8_PSTATE4                  0xc0010068
#define MSR_K8_PSTATE5                  0xc0010069
#define MSR_K8_PSTATE6                  0xc001006A
#define MSR_K8_PSTATE7                  0xc001006B
#define MSR_K8_ENABLE_C1E               0xc0010055
#define MSR_K8_VM_CR                    0xc0010114
#define MSR_K8_VM_HSAVE_PA              0xc0010117

#define MSR_AMD_FAM15H_EVNTSEL0         0xc0010200
#define MSR_AMD_FAM15H_PERFCTR0         0xc0010201
#define MSR_AMD_FAM15H_EVNTSEL1         0xc0010202
#define MSR_AMD_FAM15H_PERFCTR1         0xc0010203
#define MSR_AMD_FAM15H_EVNTSEL2         0xc0010204
#define MSR_AMD_FAM15H_PERFCTR2         0xc0010205
#define MSR_AMD_FAM15H_EVNTSEL3         0xc0010206
#define MSR_AMD_FAM15H_PERFCTR3         0xc0010207
#define MSR_AMD_FAM15H_EVNTSEL4         0xc0010208
#define MSR_AMD_FAM15H_PERFCTR4         0xc0010209
#define MSR_AMD_FAM15H_EVNTSEL5         0xc001020a
#define MSR_AMD_FAM15H_PERFCTR5         0xc001020b

#define MSR_AMD_L7S0_FEATURE_MASK       0xc0011002
#define MSR_AMD_THRM_FEATURE_MASK       0xc0011003
#define MSR_K8_FEATURE_MASK             0xc0011004
#define MSR_K8_EXT_FEATURE_MASK         0xc0011005

/* MSR_K8_VM_CR bits: */
#define _K8_VMCR_SVME_DISABLE           4
#define K8_VMCR_SVME_DISABLE            (1 << _K8_VMCR_SVME_DISABLE)

/* AMD64 MSRs */
#define MSR_AMD64_NB_CFG                0xc001001f
#define AMD64_NB_CFG_CF8_EXT_ENABLE_BIT 46
#define MSR_AMD64_LS_CFG                0xc0011020
#define MSR_AMD64_IC_CFG                0xc0011021
#define MSR_AMD64_DC_CFG                0xc0011022
#define MSR_AMD64_DE_CFG                0xc0011029
#define AMD64_DE_CFG_LFENCE_SERIALISE   (_AC(1, ULL) << 1)

#define MSR_AMD64_DR0_ADDRESS_MASK      0xc0011027
#define MSR_AMD64_DR1_ADDRESS_MASK      0xc0011019
#define MSR_AMD64_DR2_ADDRESS_MASK      0xc001101a
#define MSR_AMD64_DR3_ADDRESS_MASK      0xc001101b

/* AMD Family10h machine check MSRs */
#define MSR_F10_MC4_MISC1               0xc0000408
#define MSR_F10_MC4_MISC2               0xc0000409
#define MSR_F10_MC4_MISC3               0xc000040A

/* AMD Family10h Bus Unit MSRs */
#define MSR_F10_BU_CFG                  0xc0011023
#define MSR_F10_BU_CFG2                 0xc001102a

/* Other AMD Fam10h MSRs */
#define MSR_FAM10H_MMIO_CONF_BASE       0xc0010058
#define FAM10H_MMIO_CONF_ENABLE         (1<<0)
#define FAM10H_MMIO_CONF_BUSRANGE_MASK  0xf
#define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
#define FAM10H_MMIO_CONF_BASE_MASK      0xfffffffULL
#define FAM10H_MMIO_CONF_BASE_SHIFT     20

/* AMD Microcode MSRs */
#define MSR_AMD_PATCHLEVEL              0x0000008b
#define MSR_AMD_PATCHLOADER             0xc0010020

/* AMD TSC RATE MSR */
#define MSR_AMD64_TSC_RATIO             0xc0000104

/* AMD Lightweight Profiling MSRs */
#define MSR_AMD64_LWP_CFG               0xc0000105
#define MSR_AMD64_LWP_CBADDR            0xc0000106

/* AMD OS Visible Workaround MSRs */
#define MSR_AMD_OSVW_ID_LENGTH          0xc0010140
#define MSR_AMD_OSVW_STATUS             0xc0010141

/* AMD Protected Processor Inventory Number */
#define MSR_AMD_PPIN_CTL                0xc00102f0
#define MSR_AMD_PPIN                    0xc00102f1

/* K6 MSRs */
#define MSR_K6_EFER                     0xc0000080
#define MSR_K6_STAR                     0xc0000081
#define MSR_K6_WHCR                     0xc0000082
#define MSR_K6_UWCCR                    0xc0000085
#define MSR_K6_EPMR                     0xc0000086
#define MSR_K6_PSOR                     0xc0000087
#define MSR_K6_PFIR                     0xc0000088

/* Centaur-Hauls/IDT defined MSRs. */
#define MSR_IDT_FCR1                    0x00000107
#define MSR_IDT_FCR2                    0x00000108
#define MSR_IDT_FCR3                    0x00000109
#define MSR_IDT_FCR4                    0x0000010a

#define MSR_IDT_MCR0                    0x00000110
#define MSR_IDT_MCR1                    0x00000111
#define MSR_IDT_MCR2                    0x00000112
#define MSR_IDT_MCR3                    0x00000113
#define MSR_IDT_MCR4                    0x00000114
#define MSR_IDT_MCR5                    0x00000115
#define MSR_IDT_MCR6                    0x00000116
#define MSR_IDT_MCR7                    0x00000117
#define MSR_IDT_MCR_CTRL                0x00000120

/* VIA Cyrix defined MSRs */
#define MSR_VIA_FCR                     0x00001107
#define MSR_VIA_LONGHAUL                0x0000110a
#define MSR_VIA_RNG                     0x0000110b
#define MSR_VIA_BCR2                    0x00001147

/* Transmeta defined MSRs */
#define MSR_TMTA_LONGRUN_CTRL           0x80868010
#define MSR_TMTA_LONGRUN_FLAGS          0x80868011
#define MSR_TMTA_LRTI_READOUT           0x80868018
#define MSR_TMTA_LRTI_VOLT_MHZ          0x8086801a

/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR             0x00000000
#define MSR_IA32_P5_MC_TYPE             0x00000001
#define MSR_IA32_TSC                    0x00000010
#define MSR_IA32_PLATFORM_ID            0x00000017
#define MSR_IA32_EBL_CR_POWERON         0x0000002a
#define MSR_IA32_EBC_FREQUENCY_ID       0x0000002c

#define MSR_IA32_FEATURE_CONTROL        0x0000003a
#define IA32_FEATURE_CONTROL_LOCK                     0x0001
#define IA32_FEATURE_CONTROL_ENABLE_VMXON_INSIDE_SMX  0x0002
#define IA32_FEATURE_CONTROL_ENABLE_VMXON_OUTSIDE_SMX 0x0004
#define IA32_FEATURE_CONTROL_SENTER_PARAM_CTL         0x7f00
#define IA32_FEATURE_CONTROL_ENABLE_SENTER            0x8000
#define IA32_FEATURE_CONTROL_SGX_ENABLE               0x40000
#define IA32_FEATURE_CONTROL_LMCE_ON                  0x100000

#define MSR_IA32_TSC_ADJUST             0x0000003b

#define MSR_X2APIC_FIRST                0x00000800
#define MSR_X2APIC_LAST                 0x00000bff

#define MSR_X2APIC_TPR                  0x00000808
#define MSR_X2APIC_PPR                  0x0000080a
#define MSR_X2APIC_EOI                  0x0000080b
#define MSR_X2APIC_TMICT                0x00000838
#define MSR_X2APIC_TMCCT                0x00000839
#define MSR_X2APIC_SELF                 0x0000083f
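
/*
 * Illustrative note: architecturally, each x2APIC register lives at
 * MSR 0x800 + (xAPIC MMIO offset >> 4), which is how the values above are
 * derived (e.g. EOI at MMIO offset 0xb0 becomes MSR 0x80b).  The macro
 * below is a hypothetical helper expressing that relationship; it is not
 * used elsewhere in this header.
 */
#define MSR_X2APIC_REG(mmio_offset)     (MSR_X2APIC_FIRST + ((mmio_offset) >> 4))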

#define MSR_IA32_UCODE_WRITE            0x00000079
#define MSR_IA32_UCODE_REV              0x0000008b

#define MSR_IA32_PERF_STATUS            0x00000198
#define MSR_IA32_PERF_CTL               0x00000199

#define MSR_IA32_MPERF                  0x000000e7
#define MSR_IA32_APERF                  0x000000e8

#define MSR_IA32_THERM_CONTROL          0x0000019a
#define MSR_IA32_THERM_INTERRUPT        0x0000019b
#define MSR_IA32_THERM_STATUS           0x0000019c
#define MSR_IA32_MISC_ENABLE            0x000001a0
#define MSR_IA32_MISC_ENABLE_PERF_AVAIL     (1<<7)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL    (1<<11)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL   (1<<12)
#define MSR_IA32_MISC_ENABLE_MONITOR_ENABLE (1<<18)
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID    (1<<22)
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE   (1<<23)
#define MSR_IA32_MISC_ENABLE_XD_DISABLE     (1ULL << 34)

#define MSR_IA32_TSC_DEADLINE           0x000006E0
#define MSR_IA32_ENERGY_PERF_BIAS       0x000001b0

/* Platform Shared Resource MSRs */
#define MSR_IA32_CMT_EVTSEL             0x00000c8d
#define MSR_IA32_CMT_EVTSEL_UE_MASK     0x0000ffff
#define MSR_IA32_CMT_CTR                0x00000c8e
#define MSR_IA32_PSR_ASSOC              0x00000c8f
#define MSR_IA32_PSR_L3_QOS_CFG         0x00000c81
#define MSR_IA32_PSR_L3_MASK(n)         (0x00000c90 + (n))
#define MSR_IA32_PSR_L3_MASK_CODE(n)    (0x00000c90 + (n) * 2 + 1)
#define MSR_IA32_PSR_L3_MASK_DATA(n)    (0x00000c90 + (n) * 2)
#define MSR_IA32_PSR_L2_MASK(n)         (0x00000d10 + (n))
#define MSR_IA32_PSR_MBA_MASK(n)        (0x00000d50 + (n))

/* Intel Model 6 */
#define MSR_P6_PERFCTR(n)               (0x000000c1 + (n))
#define MSR_P6_EVNTSEL(n)               (0x00000186 + (n))

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX                0x00000180
#define MSR_IA32_MCG_EBX                0x00000181
#define MSR_IA32_MCG_ECX                0x00000182
#define MSR_IA32_MCG_EDX                0x00000183
#define MSR_IA32_MCG_ESI                0x00000184
#define MSR_IA32_MCG_EDI                0x00000185
#define MSR_IA32_MCG_EBP                0x00000186
#define MSR_IA32_MCG_ESP                0x00000187
#define MSR_IA32_MCG_EFLAGS             0x00000188
#define MSR_IA32_MCG_EIP                0x00000189
#define MSR_IA32_MCG_MISC               0x0000018a
#define MSR_IA32_MCG_R8                 0x00000190
#define MSR_IA32_MCG_R9                 0x00000191
#define MSR_IA32_MCG_R10                0x00000192
#define MSR_IA32_MCG_R11                0x00000193
#define MSR_IA32_MCG_R12                0x00000194
#define MSR_IA32_MCG_R13                0x00000195
#define MSR_IA32_MCG_R14                0x00000196
#define MSR_IA32_MCG_R15                0x00000197

/* Pentium IV performance counter MSRs */
#define MSR_P4_BPU_PERFCTR0             0x00000300
#define MSR_P4_BPU_PERFCTR1             0x00000301
#define MSR_P4_BPU_PERFCTR2             0x00000302
#define MSR_P4_BPU_PERFCTR3             0x00000303
#define MSR_P4_MS_PERFCTR0              0x00000304
#define MSR_P4_MS_PERFCTR1              0x00000305
#define MSR_P4_MS_PERFCTR2              0x00000306
#define MSR_P4_MS_PERFCTR3              0x00000307
#define MSR_P4_FLAME_PERFCTR0           0x00000308
#define MSR_P4_FLAME_PERFCTR1           0x00000309
#define MSR_P4_FLAME_PERFCTR2           0x0000030a
#define MSR_P4_FLAME_PERFCTR3           0x0000030b
#define MSR_P4_IQ_PERFCTR0              0x0000030c
#define MSR_P4_IQ_PERFCTR1              0x0000030d
#define MSR_P4_IQ_PERFCTR2              0x0000030e
#define MSR_P4_IQ_PERFCTR3              0x0000030f
#define MSR_P4_IQ_PERFCTR4              0x00000310
#define MSR_P4_IQ_PERFCTR5              0x00000311
#define MSR_P4_BPU_CCCR0                0x00000360
#define MSR_P4_BPU_CCCR1                0x00000361
#define MSR_P4_BPU_CCCR2                0x00000362
#define MSR_P4_BPU_CCCR3                0x00000363
#define MSR_P4_MS_CCCR0                 0x00000364
#define MSR_P4_MS_CCCR1                 0x00000365
#define MSR_P4_MS_CCCR2                 0x00000366
#define MSR_P4_MS_CCCR3                 0x00000367
#define MSR_P4_FLAME_CCCR0              0x00000368
#define MSR_P4_FLAME_CCCR1              0x00000369
#define MSR_P4_FLAME_CCCR2              0x0000036a
#define MSR_P4_FLAME_CCCR3              0x0000036b
#define MSR_P4_IQ_CCCR0                 0x0000036c
#define MSR_P4_IQ_CCCR1                 0x0000036d
#define MSR_P4_IQ_CCCR2                 0x0000036e
#define MSR_P4_IQ_CCCR3                 0x0000036f
#define MSR_P4_IQ_CCCR4                 0x00000370
#define MSR_P4_IQ_CCCR5                 0x00000371
#define MSR_P4_ALF_ESCR0                0x000003ca
#define MSR_P4_ALF_ESCR1                0x000003cb
#define MSR_P4_BPU_ESCR0                0x000003b2
#define MSR_P4_BPU_ESCR1                0x000003b3
#define MSR_P4_BSU_ESCR0                0x000003a0
#define MSR_P4_BSU_ESCR1                0x000003a1
#define MSR_P4_CRU_ESCR0                0x000003b8
#define MSR_P4_CRU_ESCR1                0x000003b9
#define MSR_P4_CRU_ESCR2                0x000003cc
#define MSR_P4_CRU_ESCR3                0x000003cd
#define MSR_P4_CRU_ESCR4                0x000003e0
#define MSR_P4_CRU_ESCR5                0x000003e1
#define MSR_P4_DAC_ESCR0                0x000003a8
#define MSR_P4_DAC_ESCR1                0x000003a9
#define MSR_P4_FIRM_ESCR0               0x000003a4
#define MSR_P4_FIRM_ESCR1               0x000003a5
#define MSR_P4_FLAME_ESCR0              0x000003a6
#define MSR_P4_FLAME_ESCR1              0x000003a7
#define MSR_P4_FSB_ESCR0                0x000003a2
#define MSR_P4_FSB_ESCR1                0x000003a3
#define MSR_P4_IQ_ESCR0                 0x000003ba
#define MSR_P4_IQ_ESCR1                 0x000003bb
#define MSR_P4_IS_ESCR0                 0x000003b4
#define MSR_P4_IS_ESCR1                 0x000003b5
#define MSR_P4_ITLB_ESCR0               0x000003b6
#define MSR_P4_ITLB_ESCR1               0x000003b7
#define MSR_P4_IX_ESCR0                 0x000003c8
#define MSR_P4_IX_ESCR1                 0x000003c9
#define MSR_P4_MOB_ESCR0                0x000003aa
#define MSR_P4_MOB_ESCR1                0x000003ab
#define MSR_P4_MS_ESCR0                 0x000003c0
#define MSR_P4_MS_ESCR1                 0x000003c1
#define MSR_P4_PMH_ESCR0                0x000003ac
#define MSR_P4_PMH_ESCR1                0x000003ad
#define MSR_P4_RAT_ESCR0                0x000003bc
#define MSR_P4_RAT_ESCR1                0x000003bd
#define MSR_P4_SAAT_ESCR0               0x000003ae
#define MSR_P4_SAAT_ESCR1               0x000003af
#define MSR_P4_SSU_ESCR0                0x000003be
#define MSR_P4_SSU_ESCR1                0x000003bf /* guess: not in manual */

#define MSR_P4_TBPU_ESCR0               0x000003c2
#define MSR_P4_TBPU_ESCR1               0x000003c3
#define MSR_P4_TC_ESCR0                 0x000003c4
#define MSR_P4_TC_ESCR1                 0x000003c5
#define MSR_P4_U2L_ESCR0                0x000003b0
#define MSR_P4_U2L_ESCR1                0x000003b1

/* Netburst (P4) last-branch recording */
#define MSR_P4_LER_FROM_LIP             0x000001d7
#define MSR_P4_LER_TO_LIP               0x000001d8
#define MSR_P4_LASTBRANCH_TOS           0x000001da
#define MSR_P4_LASTBRANCH_0             0x000001db
#define NUM_MSR_P4_LASTBRANCH           4
#define MSR_P4_LASTBRANCH_0_FROM_LIP    0x00000680
#define MSR_P4_LASTBRANCH_0_TO_LIP      0x000006c0
#define NUM_MSR_P4_LASTBRANCH_FROM_TO   16

/* Core 2 and Atom last-branch recording */
#define MSR_C2_LASTBRANCH_TOS           0x000001c9
#define MSR_C2_LASTBRANCH_0_FROM_IP     0x00000040
#define MSR_C2_LASTBRANCH_0_TO_IP       0x00000060
#define NUM_MSR_C2_LASTBRANCH_FROM_TO   4
#define NUM_MSR_ATOM_LASTBRANCH_FROM_TO 8

/* Skylake (and newer) last-branch recording */
#define MSR_SKL_LASTBRANCH_TOS          0x000001c9
#define MSR_SKL_LASTBRANCH_0_FROM_IP    0x00000680
#define MSR_SKL_LASTBRANCH_0_TO_IP      0x000006c0
#define MSR_SKL_LASTBRANCH_0_INFO       0x00000dc0
#define NUM_MSR_SKL_LASTBRANCH          32

/* Goldmont last-branch recording */
#define MSR_GM_LASTBRANCH_TOS           0x000001c9
#define MSR_GM_LASTBRANCH_0_FROM_IP     0x00000680
#define MSR_GM_LASTBRANCH_0_TO_IP       0x000006c0
#define NUM_MSR_GM_LASTBRANCH_FROM_TO   32

/* Intel Core-based CPU performance counters */
#define MSR_CORE_PERF_FIXED_CTR0        0x00000309
#define MSR_CORE_PERF_FIXED_CTR1        0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2        0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL    0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS     0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL       0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL   0x00000390

/* Intel cpuid spoofing MSRs */
#define MSR_INTEL_MASK_V1_CPUID1        0x00000478

#define MSR_INTEL_MASK_V2_CPUID1        0x00000130
#define MSR_INTEL_MASK_V2_CPUID80000001 0x00000131

#define MSR_INTEL_MASK_V3_CPUID1        0x00000132
#define MSR_INTEL_MASK_V3_CPUID80000001 0x00000133
#define MSR_INTEL_MASK_V3_CPUIDD_01     0x00000134

/* Intel cpuid faulting MSRs */
#define MSR_INTEL_PLATFORM_INFO         0x000000ce
#define _MSR_PLATFORM_INFO_CPUID_FAULTING   31
#define MSR_PLATFORM_INFO_CPUID_FAULTING    (1ULL << _MSR_PLATFORM_INFO_CPUID_FAULTING)

#define MSR_INTEL_MISC_FEATURES_ENABLES 0x00000140
#define _MSR_MISC_FEATURES_CPUID_FAULTING   0
#define MSR_MISC_FEATURES_CPUID_FAULTING    (1ULL << _MSR_MISC_FEATURES_CPUID_FAULTING)
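
/*
 * Illustrative sketch with a hypothetical helper name: CPUID faulting is
 * advertised by MSR_INTEL_PLATFORM_INFO and enabled by setting the
 * corresponding bit in MSR_INTEL_MISC_FEATURES_ENABLES.  The helper below
 * takes raw MSR values from the caller and returns the updated
 * feature-enables value, leaving the actual MSR accesses to the caller.
 */
static inline unsigned long long set_cpuid_faulting(
    unsigned long long platform_info, unsigned long long misc_features)
{
    /* Only set the enable bit if the platform advertises the feature. */
    if ( platform_info & MSR_PLATFORM_INFO_CPUID_FAULTING )
        misc_features |= MSR_MISC_FEATURES_CPUID_FAULTING;

    return misc_features;
}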

#define MSR_CC6_DEMOTION_POLICY_CONFIG  0x00000668
#define MSR_MC6_DEMOTION_POLICY_CONFIG  0x00000669

/* Interrupt Response Limit */
#define MSR_PKGC3_IRTL                  0x0000060a
#define MSR_PKGC6_IRTL                  0x0000060b
#define MSR_PKGC7_IRTL                  0x0000060c
#define MSR_PKGC8_IRTL                  0x00000633
#define MSR_PKGC9_IRTL                  0x00000634
#define MSR_PKGC10_IRTL                 0x00000635

#endif /* __ASM_MSR_INDEX_H */