/xen/xen/arch/x86/oprofile/

op_model_p4.c
    561  wrmsrl(p4_unused_cccr[i], msr_content);    in p4_setup_ctrs()
    567  wrmsrl(addr, 0x0ULL);                      in p4_setup_ctrs()
    572  wrmsrl(MSR_P4_IQ_ESCR0, 0x0ULL);           in p4_setup_ctrs()
    573  wrmsrl(MSR_P4_IQ_ESCR1, 0x0ULL);           in p4_setup_ctrs()
    578  wrmsrl(addr, 0x0ULL);                      in p4_setup_ctrs()
    583  wrmsrl(addr, 0x0ULL);                      in p4_setup_ctrs()
    588  wrmsrl(addr, 0x0ULL);                      in p4_setup_ctrs()
    592  wrmsrl(MSR_P4_CRU_ESCR4, 0x0ULL);          in p4_setup_ctrs()
    593  wrmsrl(MSR_P4_CRU_ESCR5, 0x0ULL);          in p4_setup_ctrs()
    595  wrmsrl(MSR_P4_CRU_ESCR4, 0x0ULL);          in p4_setup_ctrs()
    [all …]

op_model_ppro.c
     52  #define CTRL_WRITE(msr_content,msrs,c) do {wrmsrl((msrs->controls[(c)].addr), (msr_content));} whil…
    108  wrmsrl(msrs->counters[i].addr, ~0x0ULL);                      in ppro_setup_ctrs()
    115  wrmsrl(msrs->counters[i].addr, -reset_value[i]);              in ppro_setup_ctrs()
    148  wrmsrl(msrs->counters[i].addr, -reset_value[i]);              in ppro_check_ctrs()
    189  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, (1ULL<<num_counters) - 1);  in ppro_start()
    206  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0x0ULL);                    in ppro_stop()

op_model_athlon.c
     41  #define CTRL_WRITE(msr_content,msrs,c) do {wrmsrl(msrs->controls[(c)].addr, (msr_content));} while …
    277  wrmsrl(MSR_AMD64_IBSFETCHCTL, ctl);   in handle_ibs()
    304  wrmsrl(MSR_AMD64_IBSOPCTL, ctl);      in handle_ibs()
    359  wrmsrl(MSR_AMD64_IBSFETCHCTL, val);   in start_ibs()
    385  wrmsrl(MSR_AMD64_IBSOPCTL, val);      in start_ibs()
    411  wrmsrl(MSR_AMD64_IBSFETCHCTL, 0);     in stop_ibs()
    415  wrmsrl(MSR_AMD64_IBSOPCTL, 0);        in stop_ibs()

nmi_int.c
    224  wrmsrl(controls[i].addr, controls[i].value);   in nmi_restore_registers()
    228  wrmsrl(counters[i].addr, counters[i].value);   in nmi_restore_registers()

/xen/xen/include/asm-x86/

msr.h
     33  static inline void wrmsrl(unsigned int msr, __u64 val)   in wrmsrl() function
    112  #define __write_tsc(val) wrmsrl(MSR_IA32_TSC, val)
    208  wrmsrl(MSR_FS_BASE, base);          in wrfsbase()
    220  wrmsrl(MSR_GS_BASE, base);          in wrgsbase()
    239  wrmsrl(MSR_SHADOW_GS_BASE, base);   in wrgsshadow()
    251  wrmsrl(MSR_EFER, val);              in write_efer()

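For reference, a minimal sketch of the helper whose definition the msr.h hit at line 33 points at: only the signature comes from the listing above, the body here is the conventional split of the 64-bit value into the EDX:EAX pair that the underlying wrmsr operation takes.

    static inline void wrmsrl(unsigned int msr, __u64 val)
    {
        __u32 lo = (__u32)val;
        __u32 hi = (__u32)(val >> 32);

        wrmsr(msr, lo, hi);    /* low 32 bits in EAX, high 32 bits in EDX */
    }
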
/xen/xen/arch/x86/cpu/

vpmu_intel.c
    132  wrmsrl(MSR_P6_PERFCTR(i), 1);                              in handle_pmc_quirk()
    144  wrmsrl(MSR_CORE_PERF_FIXED_CTR0 + i, 1);                   in handle_pmc_quirk()
    296  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);                      in core2_vpmu_save()
    327  wrmsrl(MSR_CORE_PERF_FIXED_CTR0 + i, fixed_counters[i]);   in __core2_vpmu_load()
    335  wrmsrl(pmc_start + i, xen_pmu_cntr_pair[i].counter);       in __core2_vpmu_load()
    336  wrmsrl(MSR_P6_EVNTSEL(i), xen_pmu_cntr_pair[i].control);   in __core2_vpmu_load()
    341  wrmsrl(MSR_IA32_DS_AREA, core2_vpmu_cxt->ds_area);         in __core2_vpmu_load()
    552  wrmsrl(MSR_CORE_PERF_GLOBAL_OVF_CTRL, msr_content);        in core2_vpmu_do_wrmsr()
    678  wrmsrl(msr, msr_content);                                  in core2_vpmu_do_wrmsr()
    684  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, msr_content);            in core2_vpmu_do_wrmsr()
    [all …]

amd.c
    479  wrmsrl(MSR_K8_SYSCFG, syscfg);                  in check_syscfg_dram_mod_en()
    569  wrmsrl(MSR_AMD64_NB_CFG,                        in amd_log_freq()
    612  wrmsrl(MSR_AMD64_NB_CFG, nbcfg);                in amd_log_freq()
    660  wrmsrl(MSR_K7_HWCR, value);                     in init_amd()
    861  wrmsrl(MSR_AMD64_LS_CFG, value | (1 << 15));    in init_amd()
    873  wrmsrl(MSR_AMD64_DE_CFG, value | (1U << 31));   in init_amd()
    896  wrmsrl(MSR_F10_BU_CFG2, value);                 in init_amd()
    911  wrmsrl(MSR_K7_PERFCTR0, 0);                     in init_amd()
    912  wrmsrl(MSR_K7_PERFCTR1, 0);                     in init_amd()
    913  wrmsrl(MSR_K7_PERFCTR2, 0);                     in init_amd()
    [all …]

centaur.c
     29  wrmsrl(MSR_VIA_FCR, msr_content | ACE_FCR);      in init_c3()
     37  wrmsrl(MSR_VIA_RNG, msr_content | RNG_ENABLE);   in init_c3()

intel.c
    168  wrmsrl(msr_basic, val);                                 in intel_ctxt_switch_masking()
    178  wrmsrl((msr), masks->field); \                          in intel_ctxt_switch_masking()
    271  wrmsrl(MSR_IA32_MISC_ENABLE, misc_enable & ~disable);   in early_init_intel()
    360  wrmsrl(MSR_TSX_FORCE_ABORT, TSX_FORCE_ABORT_RTM);       in Intel_errata_workarounds()

vpmu_amd.c
    210  wrmsrl(counters[i], counter_regs[i]);   in context_load()
    211  wrmsrl(ctrls[i], ctrl_regs[i]);         in context_load()
    230  wrmsrl(ctrls[i], ctrl_regs[i]);         in amd_vpmu_load()
    295  wrmsrl(ctrls[i], 0);                    in amd_vpmu_save()
    410  wrmsrl(msr, msr_content);               in amd_vpmu_do_wrmsr()

hygon.c
    100  wrmsrl(MSR_K7_HWCR, value);   in init_hygon()

mwait-idle.c
    848  wrmsrl(MSR_NHM_SNB_PKG_CST_CFG_CTL, msr_bits);   in auto_demotion_disable()
    853  wrmsrl(MSR_CC6_DEMOTION_POLICY_CONFIG, 0);       in byt_auto_demotion_disable()
    854  wrmsrl(MSR_MC6_DEMOTION_POLICY_CONFIG, 0);       in byt_auto_demotion_disable()
    863  wrmsrl(MSR_IA32_POWER_CTL, msr_bits);            in c1e_promotion_disable()

/xen/xen/arch/x86/cpu/mcheck/

mce_amd.c
    205  wrmsrl(MSR_IA32_MCx_CTL(4), ~(1ULL << 10));           in mcequirk_amd_apply()
    206  wrmsrl(MSR_IA32_MCx_STATUS(4), 0ULL);                 in mcequirk_amd_apply()
    307  wrmsrl(MSR_IA32_MCx_CTL(i), 0xffffffffffffffffULL);   in amd_mcheck_init()
    308  wrmsrl(MSR_IA32_MCx_STATUS(i), 0x0ULL);               in amd_mcheck_init()

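The amd_mcheck_init() hits at lines 307/308 follow the usual bank-initialisation pattern: enable every error type in each bank's CTL register, then clear any stale status. A hedged sketch of that pattern; the helper name and the nr_banks parameter are illustrative, not taken from the listing.

    static void mce_init_banks(unsigned int nr_banks)
    {
        unsigned int i;

        for ( i = 0; i < nr_banks; i++ )
        {
            wrmsrl(MSR_IA32_MCx_CTL(i), ~0ULL);   /* report every error type        */
            wrmsrl(MSR_IA32_MCx_STATUS(i), 0);    /* discard any stale logged status */
        }
    }
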
mce_intel.c
    168  wrmsrl(MSR_IA32_THERM_INTERRUPT, msr_content | 0x03);              in intel_init_thermal()
    171  wrmsrl(MSR_IA32_MISC_ENABLE, msr_content | (1ULL<<3));             in intel_init_thermal()
    507  wrmsrl(msr, val | CMCI_EN | CMCI_THRESHOLD_MASK);                  in do_cmci_discover()
    515  wrmsrl(msr, val & ~CMCI_THRESHOLD_MASK);                           in do_cmci_discover()
    527  wrmsrl(msr, (val & ~CMCI_THRESHOLD_MASK) | CMCI_EN | threshold);   in do_cmci_discover()
    631  wrmsrl(msr, val & ~(CMCI_EN|CMCI_THRESHOLD_MASK));                 in clear_cmci()
    737  wrmsrl(MSR_IA32_MCG_EXT_CTL, MCG_EXT_CTL_LMCE_EN);                 in intel_enable_lmce()
    839  wrmsrl(MSR_IA32_MCx_CTL(i), 0xffffffffffffffffULL);                in intel_init_mce()
    840  wrmsrl(MSR_IA32_MCx_STATUS(i), 0x0ULL);                            in intel_init_mce()
    844  wrmsrl(MSR_IA32_MC0_STATUS, 0x0ULL);                               in intel_init_mce()

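The do_cmci_discover() hits at lines 507-527 probe each bank for CMCI support by checking whether CMCI_EN sticks in the bank's CTL2 register. A sketch of that probe, assuming msr names the bank's CTL2 MSR and reusing the CMCI_EN/CMCI_THRESHOLD_MASK constants from the file; the helper itself is illustrative.

    static bool cmci_try_enable(unsigned int msr, uint64_t threshold)
    {
        uint64_t val;

        rdmsrl(msr, val);
        wrmsrl(msr, val | CMCI_EN | CMCI_THRESHOLD_MASK);
        rdmsrl(msr, val);

        if ( !(val & CMCI_EN) )
            return false;    /* CMCI_EN did not stick: no CMCI on this bank */

        wrmsrl(msr, (val & ~CMCI_THRESHOLD_MASK) | CMCI_EN | threshold);
        return true;
    }
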
amd_nonfatal.c
    239  wrmsrl(MSR_IA32_MCx_MISC(4), value);   in amd_nonfatal_mcheck_init()

/xen/xen/arch/x86/x86_64/

traps.c
    326  wrmsrl(MSR_LSTAR, stub_va);                                     in subarch_percpu_traps_init()
    335  wrmsrl(MSR_IA32_SYSENTER_ESP, stack_bottom);                    in subarch_percpu_traps_init()
    336  wrmsrl(MSR_IA32_SYSENTER_EIP, (unsigned long)sysenter_entry);   in subarch_percpu_traps_init()
    341  wrmsrl(MSR_CSTAR, stub_va);                                     in subarch_percpu_traps_init()
    352  wrmsrl(MSR_STAR, XEN_MSR_STAR);                                 in subarch_percpu_traps_init()
    353  wrmsrl(MSR_SYSCALL_MASK, XEN_SYSCALL_MASK);                     in subarch_percpu_traps_init()

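A commented restatement (a sketch, not the exact Xen code) of the per-CPU entry-point MSRs that the subarch_percpu_traps_init() hits above program; the parameter names stand in for the values shown in the listing.

    static void init_syscall_msrs(unsigned long stub_va, unsigned long stack_bottom,
                                  unsigned long sysenter_entry)
    {
        wrmsrl(MSR_LSTAR, stub_va);                    /* 64-bit SYSCALL entry point          */
        wrmsrl(MSR_CSTAR, stub_va);                    /* compat-mode SYSCALL entry point     */
        wrmsrl(MSR_STAR, XEN_MSR_STAR);                /* CS/SS selectors for SYSCALL/SYSRET  */
        wrmsrl(MSR_SYSCALL_MASK, XEN_SYSCALL_MASK);    /* RFLAGS bits cleared on entry        */

        wrmsrl(MSR_IA32_SYSENTER_ESP, stack_bottom);   /* SYSENTER kernel stack pointer       */
        wrmsrl(MSR_IA32_SYSENTER_EIP, sysenter_entry); /* SYSENTER entry point                */
    }
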
/xen/xen/arch/x86/

nmi.c
    300  wrmsrl(nmi_perfctr_msr, 0 - count);                           in write_watchdog_counter()
    384  wrmsrl(MSR_P4_CRU_ESCR0, P4_NMI_CRU_ESCR0);                   in setup_p4_watchdog()
    385  wrmsrl(MSR_P4_IQ_CCCR0, P4_NMI_IQ_CCCR0 & ~P4_CCCR_ENABLE);   in setup_p4_watchdog()
    388  wrmsrl(MSR_P4_IQ_CCCR0, nmi_p4_cccr_val);                     in setup_p4_watchdog()
    538  wrmsrl(MSR_P4_IQ_CCCR0, nmi_p4_cccr_val);                     in nmi_watchdog_tick()

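The write_watchdog_counter() hit at line 300 relies on performance counters counting upward and raising the NMI when they overflow, so the period is written as a negated count. A minimal sketch of that idea; the helper name is illustrative.

    static void arm_watchdog_counter(unsigned int perfctr_msr, uint64_t count)
    {
        wrmsrl(perfctr_msr, 0 - count);    /* counter overflows after 'count' events */
    }
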
apic.c
    311  wrmsrl(MSR_APIC_BASE, msr_content &                                 in disable_local_APIC()
    320  wrmsrl(MSR_APIC_BASE, msr_content);                                 in disable_local_APIC()
    328  wrmsrl(MSR_APIC_BASE, msr_content);                                 in disable_local_APIC()
    332  wrmsrl(MSR_APIC_BASE, msr_content);                                 in disable_local_APIC()
    334  wrmsrl(MSR_APIC_BASE, msr_content);                                 in disable_local_APIC()
    491  wrmsrl(MSR_APIC_BASE, msr_content);                                 in __enable_x2apic()
    729  wrmsrl(MSR_APIC_BASE,                                               in lapic_resume()
    828  wrmsrl(MSR_APIC_BASE, msr_content);                                 in detect_init_APIC()
   1344  wrmsrl(MSR_IA32_TSC_DEADLINE, timeout ? stime2tsc(timeout) : 0);    in reprogram_timer()

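The __enable_x2apic() hit at line 491 switches the local APIC into x2APIC mode through MSR_APIC_BASE. A sketch under the SDM-documented bit layout (bit 11 = APIC global enable, bit 10 = x2APIC mode); no assumption is made here about the macro names Xen uses for those bits.

    static void enable_x2apic_mode(void)
    {
        uint64_t msr_content;

        rdmsrl(MSR_APIC_BASE, msr_content);
        msr_content |= (1ULL << 11) | (1ULL << 10);    /* EN | EXTD */
        wrmsrl(MSR_APIC_BASE, msr_content);
    }
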
psr.c
    338  wrmsrl((type == FEAT_TYPE_L3_CAT ?                                      in cat_init_feature()
    359  wrmsrl(MSR_IA32_PSR_L3_MASK(0), cat_default_val(feat->cat.cbm_len));    in cat_init_feature()
    360  wrmsrl(MSR_IA32_PSR_L3_MASK(1), cat_default_val(feat->cat.cbm_len));    in cat_init_feature()
    362  wrmsrl(MSR_IA32_PSR_L3_QOS_CFG,                                         in cat_init_feature()
    406  wrmsrl(MSR_IA32_PSR_MBA_MASK(0), 0);                                    in mba_init_feature()
    437  wrmsrl(MSR_IA32_PSR_L3_MASK(cos), val);                                 in l3_cat_write_msr()
    464  wrmsrl(((type == PSR_TYPE_L3_DATA) ?                                    in l3_cdp_write_msr()
    484  wrmsrl(MSR_IA32_PSR_L2_MASK(cos), val);                                 in l2_cat_write_msr()
    514  wrmsrl(MSR_IA32_PSR_MBA_MASK(cos), val);                                in mba_write_msr()
    790  wrmsrl(MSR_IA32_PSR_ASSOC, reg);                                        in psr_ctxt_switch_to()

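The psr_ctxt_switch_to() hit at line 790 reprograms the association register on context switch. A sketch assuming the SDM layout of IA32_PQR_ASSOC (monitoring RMID in the low bits, allocation class-of-service in bits 63:32); the helper name is illustrative.

    static void set_psr_assoc(unsigned int rmid, unsigned int cos)
    {
        uint64_t reg = ((uint64_t)cos << 32) | rmid;

        wrmsrl(MSR_IA32_PSR_ASSOC, reg);
    }
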
tsx.c
     61  wrmsrl(MSR_TSX_CTRL, val);   in tsx_init()

msr.c
    462  wrmsrl(MSR_PRED_CMD, val);    in guest_wrmsr()
    473  wrmsrl(MSR_FLUSH_CMD, val);   in guest_wrmsr()
    568  wrmsrl(msr, val);             in guest_wrmsr()

crash.c
    206  wrmsrl(MSR_S_CET, 0);   in machine_crash_shutdown()

/xen/xen/arch/x86/guest/hyperv/

hyperv.c
    107  wrmsrl(HV_X64_MSR_GUEST_OS_ID, guest_id.raw);            in setup_hypercall_page()
    116  wrmsrl(HV_X64_MSR_HYPERCALL, hypercall_msr.as_uint64);   in setup_hypercall_page()
    173  wrmsrl(HV_X64_MSR_VP_ASSIST_PAGE, msr.raw);              in setup_vp_assist()

/xen/xen/arch/x86/cpu/microcode/

intel.c
    129  wrmsrl(MSR_IA32_UCODE_REV, 0x0ULL);                          in collect_cpu_info()
    274  wrmsrl(MSR_IA32_UCODE_WRITE, (unsigned long)patch->data);    in apply_microcode()
    275  wrmsrl(MSR_IA32_UCODE_REV, 0x0ULL);                          in apply_microcode()

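The collect_cpu_info() and apply_microcode() hits above follow Intel's documented sequence for reading the loaded microcode revision: clear the signature MSR, execute CPUID leaf 1 to latch the revision, then read it back from the MSR's upper half. A sketch of that read-back; the helper name is illustrative.

    static uint32_t read_ucode_revision(void)
    {
        uint64_t rev;

        wrmsrl(MSR_IA32_UCODE_REV, 0);    /* clear the signature MSR          */
        cpuid_eax(1);                     /* CPUID(1) latches the loaded rev. */
        rdmsrl(MSR_IA32_UCODE_REV, rev);
        return rev >> 32;                 /* revision lives in bits 63:32     */
    }
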
/xen/xen/arch/x86/acpi/cpufreq/

powernow.c
     56  wrmsrl(MSR_PSTATE_CTRL, *(unsigned int *)pstate);   in transition_pstate()
     73  wrmsrl(MSR_K8_HWCR, msr_content);                   in update_cpb()