
Searched refs:msr (Results 1 – 25 of 88) sorted by relevance


/xen/xen/arch/x86/
monitor.c
47 ASSERT(d->arch.monitor.msr_bitmap && msr); in monitor_bitmap_for_msr()
49 switch ( *msr ) in monitor_bitmap_for_msr()
58 *msr &= 0x1fff; in monitor_bitmap_for_msr()
63 *msr &= 0x1fff; in monitor_bitmap_for_msr()
74 u32 index = msr; in monitor_enable_msr()
86 hvm_enable_msr_interception(d, msr); in monitor_enable_msr()
103 bitmap = monitor_bitmap_for_msr(d, &msr); in monitor_disable_msr()
108 __clear_bit(msr, bitmap); in monitor_disable_msr()
125 return test_bit(msr, bitmap); in monitored_msr()
210 u32 msr = mop->u.mov_to_msr.msr; in arch_monitor_domctl_event() local
[all …]
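
The monitor.c hits all orbit one indexing trick: x86 MSRs live in two architectural blocks, a low one (0x0-0x1fff) and a high one (0xc0000000-0xc0001fff), and monitor_bitmap_for_msr() folds both onto a 13-bit bitmap index via the *msr &= 0x1fff on lines 58 and 63. A minimal standalone sketch of that normalization (struct and function names are illustrative, not Xen's):

    #include <stdint.h>

    /* Hypothetical layout: one 8192-bit map per architectural MSR range. */
    struct msr_bitmaps {
        unsigned long low [0x2000 / (8 * sizeof(unsigned long))]; /* 0x0-0x1fff */
        unsigned long high[0x2000 / (8 * sizeof(unsigned long))]; /* 0xc0000000+ */
    };

    /* Pick the map covering *msr and reduce *msr to an index into it. */
    static unsigned long *bitmap_for_msr(struct msr_bitmaps *b, uint32_t *msr)
    {
        if ( *msr <= 0x1fff )
            return b->low;                /* already a valid 13-bit index */
        if ( *msr >= 0xc0000000 && *msr <= 0xc0001fff )
        {
            *msr &= 0x1fff;               /* fold the high block down */
            return b->high;
        }
        return NULL;                      /* MSR not representable */
    }
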
msr.c
149 d->arch.msr = mp; in init_domain_msr_policy()
171 const struct msr_policy *mp = d->arch.msr; in guest_rdmsr()
175 switch ( msr ) in guest_rdmsr()
280 ret = guest_rdmsr_x2apic(v, msr, val); in guest_rdmsr()
305 ret = guest_rdmsr_xen(v, msr, val); in guest_rdmsr()
327 rdmsrl(msr, *val); in guest_rdmsr()
361 const struct msr_policy *mp = d->arch.msr; in guest_wrmsr()
365 switch ( msr ) in guest_wrmsr()
514 ret = guest_wrmsr_x2apic(v, msr, val); in guest_wrmsr()
543 ret = guest_wrmsr_xen(v, msr, val); in guest_wrmsr()
[all …]
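
msr.c is the central dispatcher for guest MSR accesses: guest_rdmsr()/guest_wrmsr() switch on the index, hand whole ranges such as the x2APIC block to dedicated helpers (lines 280, 305, 514, 543), and reach real hardware through rdmsrl() only for specific cases (line 327). A compile-ready sketch of that dispatch shape, with stub handlers and example range bounds standing in for Xen's:

    #include <stdint.h>

    enum msr_status { MSR_OK, MSR_UNHANDLED };

    /* Stubs standing in for guest_rdmsr_x2apic() / guest_rdmsr_xen(). */
    static enum msr_status rdmsr_x2apic_stub(uint32_t m, uint64_t *v)
    { (void)m; *v = 0; return MSR_OK; }
    static enum msr_status rdmsr_xen_stub(uint32_t m, uint64_t *v)
    { (void)m; *v = 0; return MSR_OK; }

    static enum msr_status guest_rdmsr_sketch(uint32_t msr, uint64_t *val)
    {
        /* x2APIC register block: MSRs 0x800-0x8ff. */
        if ( msr >= 0x800 && msr <= 0x8ff )
            return rdmsr_x2apic_stub(msr, val);

        /* Paravirtual (hypervisor-defined) range; bounds are examples. */
        if ( msr >= 0x40000000 && msr <= 0x400001ff )
            return rdmsr_xen_stub(msr, val);

        switch ( msr )
        {
        case 0xce:        /* MSR_PLATFORM_INFO: served from the policy copy */
            *val = 0;     /* placeholder for the msr_policy value */
            return MSR_OK;
        default:
            /* Unknown index: let the caller decide whether to inject #GP. */
            return MSR_UNHANDLED;
        }
    }
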
/xen/xen/arch/x86/cpu/mcheck/
mce.h
84 static inline uint64_t mca_rdmsr(unsigned int msr) in mca_rdmsr() argument
87 if (intpose_lookup(smp_processor_id(), msr, &val) == NULL) in mca_rdmsr()
88 rdmsrl(msr, val); in mca_rdmsr()
93 #define mca_wrmsr(msr, val) do { \ argument
94 if ( !intpose_inval(smp_processor_id(), msr) ) \
95 wrmsrl(msr, val); \
169 if (msr >= MSR_IA32_MC0_CTL2 && in mce_vendor_bank_msr()
175 switch (msr) { in mce_vendor_bank_msr()
186 static inline int mce_bank_msr(const struct vcpu *v, uint32_t msr) in mce_bank_msr() argument
188 if ( (msr >= MSR_IA32_MC0_CTL && in mce_bank_msr()
[all …]
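
mce.h's mca_rdmsr()/mca_wrmsr() wrap the raw accessors with an "interpose" overlay used by MCE error-injection testing: a read consults a per-CPU table of faked values before falling back to rdmsrl() (lines 87-88), and a write is suppressed while an interposed value is pending (lines 94-95). The table fields below match the intpose_arr visible in the mce.c results further down; the rest of this sketch is invented for illustration:

    #include <stddef.h>
    #include <stdint.h>

    struct intpose_ent {
        unsigned int cpu_nr;  /* CPU the faked value applies to */
        uint64_t     msr;     /* MSR index being interposed */
        uint64_t     val;     /* value to return instead of hardware */
    };

    #define NINTPOSE 8
    static struct intpose_ent intpose_arr[NINTPOSE];

    static struct intpose_ent *intpose_lookup(unsigned int cpu, uint64_t msr,
                                              uint64_t *valp)
    {
        for ( size_t i = 0; i < NINTPOSE; i++ )
            if ( intpose_arr[i].cpu_nr == cpu && intpose_arr[i].msr == msr )
            {
                if ( valp )
                    *valp = intpose_arr[i].val;
                return &intpose_arr[i];
            }
        return NULL;
    }

    /* Read an MCA MSR, preferring a test-injected value over hardware. */
    static uint64_t mca_rdmsr_sketch(unsigned int cpu, uint64_t msr,
                                     uint64_t (*hw_rdmsr)(uint64_t))
    {
        uint64_t val;

        if ( intpose_lookup(cpu, msr, &val) == NULL )
            val = hw_rdmsr(msr);  /* no injection active: real rdmsrl() */
        return val;
    }
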
vmce.c
111 switch ( msr & (-MSR_IA32_MC0_CTL | 3) ) in bank_mce_rdmsr()
154 ret = vmce_intel_rdmsr(v, msr, val); in bank_mce_rdmsr()
159 ret = vmce_amd_rdmsr(v, msr, val); in bank_mce_rdmsr()
177 int vmce_rdmsr(uint32_t msr, uint64_t *val) in vmce_rdmsr() argument
186 switch ( msr ) in vmce_rdmsr()
227 ret = mce_bank_msr(cur, msr) ? bank_mce_rdmsr(cur, msr, val) : 0; in vmce_rdmsr()
245 switch ( msr & (-MSR_IA32_MC0_CTL | 3) ) in bank_mce_wrmsr()
290 ret = vmce_amd_wrmsr(v, msr, val); in bank_mce_wrmsr()
308 int vmce_wrmsr(uint32_t msr, uint64_t val) in vmce_wrmsr() argument
315 switch ( msr ) in vmce_wrmsr()
[all …]
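
The switch condition on lines 111 and 245, msr & (-MSR_IA32_MC0_CTL | 3), is a compact bank decode. MSR_IA32_MC0_CTL is 0x400 and each bank's registers sit at 0x400 + 4*bank + offset, with offset 0-3 selecting CTL/STATUS/ADDR/MISC, so masking with -0x400 | 3 = 0xfffffc03 erases the bank index and maps every bank's register onto the matching MC0 case label. A self-checking sketch:

    #include <assert.h>
    #include <stdint.h>

    #define MSR_IA32_MC0_CTL       0x400u
    #define MSR_IA32_MC0_STATUS    0x401u
    #define MSR_IA32_MCx_STATUS(b) (MSR_IA32_MC0_STATUS + 4 * (b))

    int main(void)
    {
        /* Every bank's STATUS register decodes to the MC0_STATUS label. */
        for ( uint32_t bank = 0; bank < 32; bank++ )
        {
            uint32_t msr = MSR_IA32_MCx_STATUS(bank);

            assert( (msr & (-MSR_IA32_MC0_CTL | 3)) == MSR_IA32_MC0_STATUS );
        }
        return 0;
    }
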
vmce.h
14 int vmce_intel_rdmsr(const struct vcpu *, uint32_t msr, uint64_t *val);
15 int vmce_intel_wrmsr(struct vcpu *, uint32_t msr, uint64_t val);
16 int vmce_amd_rdmsr(const struct vcpu *, uint32_t msr, uint64_t *val);
17 int vmce_amd_wrmsr(struct vcpu *, uint32_t msr, uint64_t val);
mce_intel.c
183 && msr < MSR_IA32_MCG_EAX + nr_intel_ext_msrs ) in intel_get_extended_msr()
185 ext->mc_msr[ext->mc_msrs].reg = msr; in intel_get_extended_msr()
186 rdmsrl(msr, ext->mc_msr[ext->mc_msrs].value); in intel_get_extended_msr()
490 unsigned msr = MSR_IA32_MCx_CTL2(i); in do_cmci_discover() local
497 rdmsrl(msr, val); in do_cmci_discover()
508 rdmsrl(msr, val); in do_cmci_discover()
515 wrmsrl(msr, val & ~CMCI_THRESHOLD_MASK); in do_cmci_discover()
624 unsigned msr = MSR_IA32_MCx_CTL2(i); in clear_cmci() local
629 rdmsrl(msr, val); in clear_cmci()
991 unsigned int bank = msr - MSR_IA32_MC0_CTL2; in vmce_intel_wrmsr()
[all …]
mce.c
1070 uint64_t msr; member
1094 if ( intpose_arr[i].cpu_nr == cpu_nr && intpose_arr[i].msr == msr ) in intpose_lookup()
1121 ent->msr = msr; in intpose_add()
1248 struct mcinfo_msr *msr; in x86_mc_msrinject() local
1258 for ( i = 0, msr = &mci->mcinj_msr[0]; i < mci->mcinj_count; i++, msr++ ) in x86_mc_msrinject()
1263 (unsigned long long)msr->reg, in x86_mc_msrinject()
1267 intpose_add(mci->mcinj_cpunr, msr->reg, msr->value); in x86_mc_msrinject()
1269 wrmsrl(msr->reg, msr->value); in x86_mc_msrinject()
1500 struct mcinfo_msr *msr; in do_mca() local
1520 i++, msr++ ) in do_mca()
[all …]
/xen/xen/include/asm-x86/
msr.h
16 #define rdmsr(msr,val1,val2) \ argument
19 : "c" (msr))
21 #define rdmsrl(msr,val) do { unsigned long a__,b__; \ argument
24 : "c" (msr)); \
28 #define wrmsr(msr,val1,val2) \ argument
31 : "c" (msr), "a" (val1), "d" (val2))
33 static inline void wrmsrl(unsigned int msr, __u64 val) in wrmsrl() argument
38 wrmsr(msr, lo, hi); in wrmsrl()
42 #define rdmsr_safe(msr,val) ({\ argument
53 : "c" (msr), "2" (0), "i" (-EFAULT)); \
[all …]
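
msr.h holds the raw accessors: rdmsr executes the RDMSR instruction with the index in %ecx and receives the value split across %edx:%eax, wrmsr is the mirror image, and rdmsrl/wrmsrl are 64-bit convenience forms that reassemble or split the halves, exactly what the "c", "a", and "d" constraints in the excerpt encode. A freestanding GCC-style equivalent (ring 0 only; RDMSR/WRMSR fault outside the kernel):

    #include <stdint.h>

    /* Read a 64-bit MSR: index in %ecx, result arrives in %edx:%eax. */
    static inline uint64_t rdmsrl_sketch(uint32_t msr)
    {
        uint32_t lo, hi;

        __asm__ __volatile__ ( "rdmsr"
                               : "=a" (lo), "=d" (hi)
                               : "c" (msr) );
        return ((uint64_t)hi << 32) | lo;
    }

    /* Write a 64-bit MSR: value split back into %edx:%eax. */
    static inline void wrmsrl_sketch(uint32_t msr, uint64_t val)
    {
        __asm__ __volatile__ ( "wrmsr"
                               :: "c" (msr),
                                  "a" ((uint32_t)val),
                                  "d" ((uint32_t)(val >> 32)) );
    }
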
vpmu.h
42 int (*do_wrmsr)(unsigned int msr, uint64_t msr_content,
44 int (*do_rdmsr)(unsigned int msr, uint64_t *msr_content);
108 int vpmu_do_msr(unsigned int msr, uint64_t *msr_content,
117 static inline int vpmu_do_wrmsr(unsigned int msr, uint64_t msr_content, in vpmu_do_wrmsr() argument
120 return vpmu_do_msr(msr, &msr_content, supported, 1); in vpmu_do_wrmsr()
122 static inline int vpmu_do_rdmsr(unsigned int msr, uint64_t *msr_content) in vpmu_do_rdmsr() argument
124 return vpmu_do_msr(msr, msr_content, 0, 0); in vpmu_do_rdmsr()
xenoprof.h
68 int passive_domain_do_rdmsr(unsigned int msr, uint64_t *msr_content);
69 int passive_domain_do_wrmsr(unsigned int msr, uint64_t msr_content);
74 static inline int passive_domain_do_rdmsr(unsigned int msr, in passive_domain_do_rdmsr() argument
80 static inline int passive_domain_do_wrmsr(unsigned int msr, in passive_domain_do_wrmsr() argument
mce.h
42 extern int vmce_wrmsr(uint32_t msr, uint64_t val);
43 extern int vmce_rdmsr(uint32_t msr, uint64_t *val);
/xen/xen/arch/x86/cpu/
vpmu_amd.c
37 #define is_guest_mode(msr) ((msr) & (1ULL << MSR_F10H_EVNTSEL_GO_SHIFT)) argument
38 #define is_pmu_enabled(msr) ((msr) & (1ULL << MSR_F10H_EVNTSEL_EN_SHIFT)) argument
39 #define set_guest_mode(msr) (msr |= (1ULL << MSR_F10H_EVNTSEL_GO_SHIFT)) argument
40 #define is_overflowed(msr) (!((msr) & (1ULL << (MSR_F10H_COUNTER_LENGTH-1)))) argument
335 ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3)) ) in context_update()
337 msr = get_fam15h_addr(msr); in context_update()
342 if ( msr == ctrls[i] ) in context_update()
347 else if (msr == counters[i] ) in context_update()
407 context_update(msr, msr_content); in amd_vpmu_do_wrmsr()
410 wrmsrl(msr, msr_content); in amd_vpmu_do_wrmsr()
[all …]
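
Line 40's is_overflowed() treats a clear top bit as overflow: AMD Fam10h counters are MSR_F10H_COUNTER_LENGTH = 48 bits wide, the counter is conventionally armed with bit 47 set (i.e. loaded with the negated event count), and counting up through zero clears that bit. A tiny self-checking illustration of the convention (constants assumed here, not pulled from Xen headers):

    #include <assert.h>
    #include <stdint.h>

    #define COUNTER_LENGTH 48
    #define OVERFLOW_BIT   (1ULL << (COUNTER_LENGTH - 1))
    #define COUNTER_MASK   ((1ULL << COUNTER_LENGTH) - 1)

    int main(void)
    {
        /* Arm: request an overflow after 1000 events. */
        uint64_t ctr = (0 - 1000ULL) & COUNTER_MASK;  /* bit 47 now set */
        assert( ctr & OVERFLOW_BIT );

        ctr = (ctr + 1000) & COUNTER_MASK;            /* 1000 events later */
        assert( !(ctr & OVERFLOW_BIT) );              /* is_overflowed() */
        return 0;
    }
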
mwait-idle.c
1054 unsigned long long msr; in bxt_idle_state_table_update() local
1057 rdmsrl(MSR_PKGC6_IRTL, msr); in bxt_idle_state_table_update()
1058 usec = irtl_2_usec(msr); in bxt_idle_state_table_update()
1064 rdmsrl(MSR_PKGC7_IRTL, msr); in bxt_idle_state_table_update()
1065 usec = irtl_2_usec(msr); in bxt_idle_state_table_update()
1071 rdmsrl(MSR_PKGC8_IRTL, msr); in bxt_idle_state_table_update()
1072 usec = irtl_2_usec(msr); in bxt_idle_state_table_update()
1079 usec = irtl_2_usec(msr); in bxt_idle_state_table_update()
1086 usec = irtl_2_usec(msr); in bxt_idle_state_table_update()
1101 u64 msr; in sklh_idle_state_table_update() local
[all …]
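
The bxt_idle_state_table_update() hits read the package C-state interrupt-response-time-limit (IRTL) MSRs and convert them with irtl_2_usec(). Assuming the same layout as Linux's intel_idle, from which Xen's mwait-idle.c is ported, the MSR packs a 10-bit time value in bits 9:0 and a 3-bit unit selector in bits 12:10; a sketch of the conversion under that assumption:

    #include <stdint.h>

    /* Per-unit granularity in nanoseconds, indexed by IRTL bits 12:10
     * (layout as in Linux's intel_idle; treated as an assumption here). */
    static const uint64_t irtl_ns_units[] = {
        1, 32, 1024, 32768, 1048576, 33554432, 0, 0,
    };

    static uint64_t irtl_2_usec_sketch(uint64_t irtl)
    {
        uint64_t ns = irtl_ns_units[(irtl >> 10) & 0x7] * (irtl & 0x3ff);

        return ns / 1000;  /* the C-state tables want microseconds */
    }
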
intel.c
52 static uint64_t __init _probe_mask_msr(unsigned int *msr, uint64_t caps) in _probe_mask_msr() argument
58 if (rdmsr_safe(*msr, val) || wrmsr_safe(*msr, val)) in _probe_mask_msr()
59 *msr = 0; in _probe_mask_msr()
173 #define LAZY(msr, field) \ in intel_ctxt_switch_masking() argument
176 (msr)) \ in intel_ctxt_switch_masking()
178 wrmsrl((msr), masks->field); \ in intel_ctxt_switch_masking()
amd.c
52 static inline int rdmsr_amd_safe(unsigned int msr, unsigned int *lo, in rdmsr_amd_safe() argument
64 : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT)); in rdmsr_amd_safe()
69 static inline int wrmsr_amd_safe(unsigned int msr, unsigned int lo, in wrmsr_amd_safe() argument
81 : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a), in wrmsr_amd_safe()
87 static void wrmsr_amd(unsigned int msr, uint64_t val) in wrmsr_amd() argument
90 "c" (msr), "a" ((uint32_t)val), in wrmsr_amd()
144 static uint64_t __init _probe_mask_msr(unsigned int msr, uint64_t caps) in _probe_mask_msr() argument
150 if ((rdmsr_amd_safe(msr, &lo, &hi) == 0) && in _probe_mask_msr()
151 (wrmsr_amd_safe(msr, lo, hi) == 0)) in _probe_mask_msr()
237 #define LAZY(cap, msr, field) \ in amd_ctxt_switch_masking() argument
[all …]
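
amd.c's rdmsr_amd_safe()/wrmsr_amd_safe() differ from the generic accessors in one visible detail (lines 64 and 81): they load the constant 0x9c5a203a into %edi, the pass code certain AMD model-specific registers demand before RDMSR/WRMSR will succeed. The read side, sketched without the exception-fixup plumbing that makes the real version fault-safe:

    #include <stdint.h>

    #define AMD_MSR_PASSCODE 0x9c5a203aU

    /* RDMSR with the AMD pass code in %edi; a rejecting MSR will fault,
     * which the real Xen helper catches via an exception fixup table. */
    static inline uint64_t rdmsr_amd_sketch(uint32_t msr)
    {
        uint32_t lo, hi;

        __asm__ __volatile__ ( "rdmsr"
                               : "=a" (lo), "=d" (hi)
                               : "c" (msr), "D" (AMD_MSR_PASSCODE) );
        return ((uint64_t)hi << 32) | lo;
    }
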
/xen/xen/include/asm-x86/hvm/vmx/
vmcs.h
568 int vmx_add_msr(struct vcpu *v, uint32_t msr, uint64_t val,
579 return vmx_add_msr(v, msr, val, VMX_MSR_GUEST); in vmx_add_guest_msr()
581 static inline int vmx_add_host_load_msr(struct vcpu *v, uint32_t msr, in vmx_add_host_load_msr() argument
584 return vmx_add_msr(v, msr, val, VMX_MSR_HOST); in vmx_add_host_load_msr()
587 struct vmx_msr_entry *vmx_find_msr(const struct vcpu *v, uint32_t msr,
607 const struct vcpu *v, uint32_t msr, uint64_t *val) in vmx_read_guest_loadonly_msr() argument
610 vmx_find_msr(v, msr, VMX_MSR_GUEST_LOADONLY); in vmx_read_guest_loadonly_msr()
623 static inline int vmx_write_guest_msr(struct vcpu *v, uint32_t msr, in vmx_write_guest_msr() argument
626 struct vmx_msr_entry *ent = vmx_find_msr(v, msr, VMX_MSR_GUEST); in vmx_write_guest_msr()
644 void vmx_clear_msr_intercept(struct vcpu *v, unsigned int msr,
[all …]
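
vmcs.h fronts a small sorted-array abstraction: the VMX MSR load/save lists are arrays of {index, reserved, data} entries kept sorted by index, which lets vmx_find_msr() binary-search them; the locate_msr_entry() comparisons in the vmcs.c results further down are exactly that search. A sketch of the lookup over the architectural entry layout:

    #include <stddef.h>
    #include <stdint.h>

    /* VMX MSR-area entry layout per the SDM: 32-bit index, 32 reserved
     * bits (must be zero), then the 64-bit data value. */
    struct vmx_msr_entry_sketch {
        uint32_t index;
        uint32_t mbz;
        uint64_t data;
    };

    /* Binary search over a list kept sorted by ascending MSR index. */
    static struct vmx_msr_entry_sketch *
    find_msr_sketch(struct vmx_msr_entry_sketch *list, size_t nr, uint32_t msr)
    {
        size_t lo = 0, hi = nr;

        while ( lo < hi )
        {
            size_t mid = lo + (hi - lo) / 2;

            if ( msr < list[mid].index )
                hi = mid;
            else if ( msr > list[mid].index )
                lo = mid + 1;
            else
                return &list[mid];
        }
        return NULL;  /* not in the list */
    }
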
/xen/xen/arch/arm/arm64/
entry.S
79 msr SPSR_el1, x23
85 msr SP_el0, x22
89 msr SP_el1, x22
90 msr ELR_el1, x23
96 msr SPSR_fiq, x22
97 msr SPSR_irq, x23
101 msr SPSR_und, x22
102 msr SPSR_abt, x23
288 msr daif, x0
315 msr daif, x0
[all …]
head.S
288 msr DAIFSet, 0xf /* Disable all interrupts */
342 msr DAIFSet, 0xf /* Disable all interrupts */
389 msr TTBR0_EL2, x4
465 msr mair_el2, x0
480 msr tcr_el2, x0
483 msr SCTLR_EL2, x0
490 msr spsel, #1
679 msr TTBR0_EL2, x0
686 msr SCTLR_EL2, x0 /* now paging is enabled */
825 msr TTBR0_EL2, x0
[all …]
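
A note on the two arm64 results above: these matches are not the C identifier at all. On AArch64, msr is the instruction mnemonic for writing a system register (SPSR_el1, TTBR0_EL2, SCTLR_EL2, daif, ...), so entry.S and head.S match the literal opcode text in guest entry/exit and early boot paths rather than any MSR variable.
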
/xen/xen/arch/x86/hvm/viridian/
synic.c
102 vv->vp_assist.msr.raw = val; in viridian_synic_wrmsr()
104 if ( vv->vp_assist.msr.enabled ) in viridian_synic_wrmsr()
130 vv->simp.msr.raw = val; in viridian_synic_wrmsr()
132 if ( vv->simp.msr.enabled ) in viridian_synic_wrmsr()
211 *val = vv->vp_assist.msr.raw; in viridian_synic_rdmsr()
246 *val = vv->simp.msr.raw; in viridian_synic_rdmsr()
406 ctxt->simp_msr = vv->simp.msr.raw; in viridian_synic_save_vcpu_ctxt()
409 ctxt->vp_assist_msr = vv->vp_assist.msr.raw; in viridian_synic_save_vcpu_ctxt()
420 if ( vv->vp_assist.msr.enabled ) in viridian_synic_load_vcpu_ctxt()
425 vv->simp.msr.raw = ctxt->simp_msr; in viridian_synic_load_vcpu_ctxt()
[all …]
/xen/xen/arch/x86/hvm/vmx/
vmcs.c
898 if ( msr <= 0x1fff ) in vmx_clear_msr_intercept()
905 else if ( (msr >= 0xc0000000) && (msr <= 0xc0001fff) ) in vmx_clear_msr_intercept()
907 msr &= 0x1fff; in vmx_clear_msr_intercept()
926 if ( msr <= 0x1fff ) in vmx_set_msr_intercept()
933 else if ( (msr >= 0xc0000000) && (msr <= 0xc0001fff) ) in vmx_set_msr_intercept()
935 msr &= 0x1fff; in vmx_set_msr_intercept()
948 if ( msr <= 0x1fff ) in vmx_msr_is_intercepted()
951 else if ( (msr >= 0xc0000000) && (msr <= 0xc0001fff) ) in vmx_msr_is_intercepted()
1355 if ( msr < mid->index ) in locate_msr_entry()
1357 else if ( msr > mid->index ) in locate_msr_entry()
[all …]
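
The range checks in vmx_clear_msr_intercept(), vmx_set_msr_intercept(), and vmx_msr_is_intercepted() trace the hardware layout of the VMX MSR bitmap: a single 4 KiB page holding four consecutive 1 KiB (8192-bit) maps (read-low, read-high, write-low, write-high), where "low" covers MSRs 0x0-0x1fff and "high" covers 0xc0000000-0xc0001fff after the & 0x1fff fold. A sketch of flagging a read intercept in that page (names illustrative):

    #include <stdbool.h>
    #include <stdint.h>

    /* VMX MSR bitmap: one 4 KiB page, four consecutive 8192-bit maps. */
    struct vmx_msr_bitmap_sketch {
        unsigned long read_low  [1024 / sizeof(unsigned long)];
        unsigned long read_high [1024 / sizeof(unsigned long)];
        unsigned long write_low [1024 / sizeof(unsigned long)];
        unsigned long write_high[1024 / sizeof(unsigned long)];
    };

    static void set_bit_sketch(unsigned int nr, unsigned long *map)
    {
        map[nr / (8 * sizeof(unsigned long))] |=
            1UL << (nr % (8 * sizeof(unsigned long)));
    }

    /* Force VM exits on guest reads of the given MSR. */
    static bool intercept_msr_read(struct vmx_msr_bitmap_sketch *bm,
                                   uint32_t msr)
    {
        if ( msr <= 0x1fff )
            set_bit_sketch(msr, bm->read_low);
        else if ( msr >= 0xc0000000 && msr <= 0xc0001fff )
            set_bit_sketch(msr & 0x1fff, bm->read_high);
        else
            return false;  /* out of range: such MSRs always intercept */
        return true;
    }
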
/xen/xen/lib/x86/
policy.c
16 do { e.msr = (m); goto out; } while ( 0 ) in x86_cpu_policies_are_compatible()
26 if ( ~host->msr->platform_info.raw & guest->msr->platform_info.raw ) in x86_cpu_policies_are_compatible()
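
Line 26 is the entire MSR_PLATFORM_INFO compatibility test: ~host & guest is nonzero exactly when the guest policy claims a bit the host policy lacks, i.e. when the guest is not a bitwise subset of the host. In miniature:

    #include <assert.h>
    #include <stdint.h>

    /* A guest policy is compatible if it is a bit-for-bit subset. */
    static int policy_subset(uint64_t host, uint64_t guest)
    {
        return (~host & guest) == 0;
    }

    int main(void)
    {
        assert(  policy_subset(0xff, 0x0f) );  /* subset: compatible */
        assert( !policy_subset(0x0f, 0x10) );  /* guest-only bit: rejected */
        return 0;
    }
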
/xen/xen/arch/x86/guest/hyperv/
hyperv.c
155 union hv_vp_assist_page_msr msr; in setup_vp_assist() local
170 rdmsrl(HV_X64_MSR_VP_ASSIST_PAGE, msr.raw); in setup_vp_assist()
171 msr.pfn = virt_to_mfn(this_cpu(hv_vp_assist)); in setup_vp_assist()
172 msr.enabled = 1; in setup_vp_assist()
173 wrmsrl(HV_X64_MSR_VP_ASSIST_PAGE, msr.raw); in setup_vp_assist()
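
setup_vp_assist() performs the standard Hyper-V overlay-page enable sequence visible in the snippet: read HV_X64_MSR_VP_ASSIST_PAGE, plug in the PFN of the freshly allocated assist page, set the enable bit, and write the MSR back. A self-contained sketch with the MSR access stubbed out (field layout per the Hyper-V TLFS):

    #include <stdint.h>

    /* HV_X64_MSR_VP_ASSIST_PAGE: bit 0 enables the page, bits 63:12 hold
     * the guest physical page number it lives at. */
    union vp_assist_msr_sketch {
        uint64_t raw;
        struct {
            uint64_t enabled  : 1;
            uint64_t reserved : 11;
            uint64_t pfn      : 52;
        };
    };

    #define HV_X64_MSR_VP_ASSIST_PAGE 0x40000073

    /* Stubs standing in for the real rdmsrl()/wrmsrl(). */
    static uint64_t msr_shadow;
    static uint64_t rdmsr_stub(uint32_t msr) { (void)msr; return msr_shadow; }
    static void wrmsr_stub(uint32_t msr, uint64_t v) { (void)msr; msr_shadow = v; }

    static void setup_vp_assist_sketch(uint64_t assist_pfn)
    {
        union vp_assist_msr_sketch msr =
            { .raw = rdmsr_stub(HV_X64_MSR_VP_ASSIST_PAGE) };

        msr.pfn = assist_pfn;
        msr.enabled = 1;
        wrmsr_stub(HV_X64_MSR_VP_ASSIST_PAGE, msr.raw);
    }
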
/xen/tools/tests/cpu-policy/
test-cpu-policy.c
359 xen_msr_entry_t msr; in test_msr_deserialise_failure() member
364 .msr = { .idx = 0xdeadc0de }, in test_msr_deserialise_failure()
369 .msr = { .idx = 0xce, .flags = 1 }, in test_msr_deserialise_failure()
374 .msr = { .idx = 0xce, .val = ~0ull }, in test_msr_deserialise_failure()
379 .msr = { .idx = 0x10a, .val = ~0ull }, in test_msr_deserialise_failure()
393 rc = x86_msr_copy_from_buffer(NULL, &t->msr, 1, &err_msr); in test_msr_deserialise_failure()
402 if ( err_msr != t->msr.idx ) in test_msr_deserialise_failure()
405 t->name, t->msr.idx, err_msr); in test_msr_deserialise_failure()
602 t->name, res, e.leaf, e.subleaf, e.msr); in test_is_compatible_success()
656 t->e.leaf, t->e.subleaf, t->e.msr, in test_is_compatible_failure()
[all …]
/xen/xen/include/xen/lib/x86/
cpu-policy.h
11 struct msr_policy *msr; member
17 uint32_t msr; member
/xen/xen/arch/x86/oprofile/
nmi_int.c
41 static int passive_domain_msr_op_checks(unsigned int msr, int *typep, int *indexp) in passive_domain_msr_op_checks() argument
48 if ( !model->is_arch_pmu_msr(msr, typep, indexp) ) in passive_domain_msr_op_checks()
57 int passive_domain_do_rdmsr(unsigned int msr, uint64_t *msr_content) in passive_domain_do_rdmsr() argument
61 if ( !passive_domain_msr_op_checks(msr, &type, &index)) in passive_domain_do_rdmsr()
68 int passive_domain_do_wrmsr(unsigned int msr, uint64_t msr_content) in passive_domain_do_wrmsr() argument
72 if ( !passive_domain_msr_op_checks(msr, &type, &index)) in passive_domain_do_wrmsr()
