
Searched refs:msrs (Results 1 – 14 of 14) sorted by relevance

/linux/arch/x86/kvm/vmx/
nested.h
100 return vmx_misc_cr3_count(to_vmx(vcpu)->nested.msrs.misc_low); in nested_cpu_vmx_misc_cr3_count()
110 return to_vmx(vcpu)->nested.msrs.misc_low & in nested_cpu_has_vmwrite_any_field()
121 return to_vmx(vcpu)->nested.msrs.procbased_ctls_high & in nested_cpu_supports_monitor_trap_flag()
127 return to_vmx(vcpu)->nested.msrs.secondary_ctls_high & in nested_cpu_has_vmx_shadow_vmcs()
258 u64 fixed0 = to_vmx(vcpu)->nested.msrs.cr0_fixed0; in nested_guest_cr0_valid()
259 u64 fixed1 = to_vmx(vcpu)->nested.msrs.cr0_fixed1; in nested_guest_cr0_valid()
262 if (to_vmx(vcpu)->nested.msrs.secondary_ctls_high & in nested_guest_cr0_valid()
272 u64 fixed0 = to_vmx(vcpu)->nested.msrs.cr0_fixed0; in nested_host_cr0_valid()
273 u64 fixed1 = to_vmx(vcpu)->nested.msrs.cr0_fixed1; in nested_host_cr0_valid()
280 u64 fixed0 = to_vmx(vcpu)->nested.msrs.cr4_fixed0; in nested_cr4_valid()
[all …]
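
The nested.h hits above all feed the same FIXED0/FIXED1 validity test for guest and host control registers; the cr0_fixed0/cr0_fixed1 fields mirror the IA32_VMX_CR0_FIXED0/FIXED1 capability MSRs. A minimal sketch of the basic check (ignoring the unrestricted-guest special case visible at line 262); the helper name is illustrative, not the kernel's:

#include <linux/types.h>

/* A CR0/CR4 value is acceptable iff every bit that FIXED0 reports as
 * "must be 1" is set and no bit that FIXED1 reports as "must be 0" is set. */
static inline bool cr_fixed_bits_valid(u64 val, u64 fixed0, u64 fixed1)
{
	return (val & fixed0) == fixed0 &&	/* all must-be-1 bits present */
	       (val & ~fixed1) == 0;		/* no must-be-0 bits set */
}
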
nested.c
1424 *pdata = msrs->basic; in vmx_get_vmx_msr()
1445 msrs->exit_ctls_low, in vmx_get_vmx_msr()
1460 msrs->misc_low, in vmx_get_vmx_msr()
1461 msrs->misc_high); in vmx_get_vmx_msr()
6493 msrs->exit_ctls_low, in nested_vmx_setup_ctls_msrs()
6599 msrs->ept_caps = in nested_vmx_setup_ctls_msrs()
6655 msrs->misc_low, in nested_vmx_setup_ctls_msrs()
6656 msrs->misc_high); in nested_vmx_setup_ctls_msrs()
6658 msrs->misc_low |= in nested_vmx_setup_ctls_msrs()
6663 msrs->misc_high = 0; in nested_vmx_setup_ctls_msrs()
[all …]
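
In the vmx_get_vmx_msr() and nested_vmx_setup_ctls_msrs() hits, each *_low/*_high pair is the split representation of one 64-bit VMX capability MSR (for the control MSRs, the low half reports the allowed-0 settings and the high half the allowed-1 settings). A sketch of the recombination step those call sites rely on; the helper name is illustrative:

#include <linux/types.h>

/* Combine the low and high halves back into the 64-bit value reported
 * to the nested guest for a VMX capability MSR. */
static inline u64 pack_vmx_control_msr(u32 low, u32 high)
{
	return low | ((u64)high << 32);
}
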
vmx.c
4393 memcpy(&vmx->nested.msrs, &vmcs_config.nested, sizeof(vmx->nested.msrs)); in __vmx_vcpu_reset()
6532 struct perf_guest_switch_msr *msrs; in atomic_switch_perf_msrs() local
6535 msrs = perf_guest_get_msrs(&nr_msrs); in atomic_switch_perf_msrs()
6536 if (!msrs) in atomic_switch_perf_msrs()
6540 if (msrs[i].host == msrs[i].guest) in atomic_switch_perf_msrs()
6541 clear_atomic_switch_msr(vmx, msrs[i].msr); in atomic_switch_perf_msrs()
6543 add_atomic_switch_msr(vmx, msrs[i].msr, msrs[i].guest, in atomic_switch_perf_msrs()
6544 msrs[i].host, false); in atomic_switch_perf_msrs()
7046 vmx->nested.msrs.cr0_fixed1 = 0xffffffff; in nested_vmx_cr_fixed1_bits_update()
7047 vmx->nested.msrs.cr4_fixed1 = X86_CR4_PCE; in nested_vmx_cr_fixed1_bits_update()
[all …]
vmx.h
222 struct nested_vmx_msrs msrs; member
/linux/arch/x86/lib/
msr-smp.c
14 if (rv->msrs) in __rdmsr_on_cpu()
15 reg = per_cpu_ptr(rv->msrs, this_cpu); in __rdmsr_on_cpu()
28 if (rv->msrs) in __wrmsr_on_cpu()
29 reg = per_cpu_ptr(rv->msrs, this_cpu); in __wrmsr_on_cpu()
100 struct msr *msrs, in __rwmsr_on_cpus() argument
108 rv.msrs = msrs; in __rwmsr_on_cpus()
127 void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs) in rdmsr_on_cpus() argument
129 __rwmsr_on_cpus(mask, msr_no, msrs, __rdmsr_on_cpu); in rdmsr_on_cpus()
141 void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs) in wrmsr_on_cpus() argument
143 __rwmsr_on_cpus(mask, msr_no, msrs, __wrmsr_on_cpu); in wrmsr_on_cpus()
msr.c
11 struct msr *msrs = NULL; in msrs_alloc() local
13 msrs = alloc_percpu(struct msr); in msrs_alloc()
14 if (!msrs) { in msrs_alloc()
19 return msrs; in msrs_alloc()
23 void msrs_free(struct msr *msrs) in msrs_free() argument
25 free_percpu(msrs); in msrs_free()
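
Taken together, msr-smp.c and msr.c give the usual lifecycle for bulk MSR reads: allocate a per-CPU struct msr buffer, fan the read out with rdmsr_on_cpus(), then walk the per-CPU copies. A minimal sketch, assuming MSR_IA32_MCG_CTL as in the amd64_edac hits further down; the function name is illustrative:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/printk.h>
#include <asm/msr.h>

static int dump_mcg_ctl(void)
{
	struct msr __percpu *msrs;
	int cpu;

	msrs = msrs_alloc();		/* per-CPU struct msr buffer */
	if (!msrs)
		return -ENOMEM;

	rdmsr_on_cpus(cpu_online_mask, MSR_IA32_MCG_CTL, msrs);

	for_each_online_cpu(cpu) {
		struct msr *reg = per_cpu_ptr(msrs, cpu);

		pr_info("CPU%d: MCG_CTL=%#llx\n", cpu,
			(unsigned long long)reg->q);
	}

	msrs_free(msrs);
	return 0;
}
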
/linux/Documentation/trace/postprocess/
decode_msr.py
7 msrs = dict() variable
13 msrs[int(m.group(2), 16)] = m.group(1)
26 if num in msrs:
27 r = msrs[num]
/linux/arch/x86/include/asm/
msr.h
27 struct msr *msrs; member
328 void msrs_free(struct msr *msrs);
337 void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
338 void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
367 struct msr *msrs) in rdmsr_on_cpus() argument
369 rdmsr_on_cpu(0, msr_no, &(msrs[0].l), &(msrs[0].h)); in rdmsr_on_cpus()
372 struct msr *msrs) in wrmsr_on_cpus() argument
374 wrmsr_on_cpu(0, msr_no, msrs[0].l, msrs[0].h); in wrmsr_on_cpus()
/linux/tools/testing/selftests/kvm/lib/x86_64/
processor.c
1032 struct kvm_msrs msrs; member
1102 state = malloc(sizeof(*state) + nmsrs * sizeof(state->msrs.entries[0])); in vcpu_save_state()
1140 state->msrs.nmsrs = nmsrs; in vcpu_save_state()
1142 state->msrs.entries[i].index = list->indices[i]; in vcpu_save_state()
1143 r = ioctl(vcpu->fd, KVM_GET_MSRS, &state->msrs); in vcpu_save_state()
1174 r = ioctl(vcpu->fd, KVM_SET_MSRS, &state->msrs); in vcpu_load_state()
1175 … TEST_ASSERT(r == state->msrs.nmsrs, "Unexpected result from KVM_SET_MSRS, r: %i (failed at %x)", in vcpu_load_state()
1176 r, r == state->msrs.nmsrs ? -1 : state->msrs.entries[r].index); in vcpu_load_state()
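
The selftest above drives the raw vcpu ioctls directly. A hedged userspace sketch of the same save pattern, assuming vcpu_fd and an index list obtained elsewhere (e.g. via KVM_GET_MSR_INDEX_LIST); KVM_GET_MSRS returns the number of MSRs it actually read:

#include <linux/kvm.h>
#include <stdlib.h>
#include <sys/ioctl.h>

static struct kvm_msrs *save_vcpu_msrs(int vcpu_fd, const __u32 *indices,
					__u32 nmsrs)
{
	struct kvm_msrs *msrs;
	__u32 i;

	/* struct kvm_msrs ends in a flexible array of kvm_msr_entry. */
	msrs = calloc(1, sizeof(*msrs) + nmsrs * sizeof(msrs->entries[0]));
	if (!msrs)
		return NULL;

	msrs->nmsrs = nmsrs;
	for (i = 0; i < nmsrs; i++)
		msrs->entries[i].index = indices[i];

	if (ioctl(vcpu_fd, KVM_GET_MSRS, msrs) != (int)nmsrs) {
		free(msrs);	/* partial read: keep the example simple */
		return NULL;
	}
	return msrs;
}
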
/linux/Documentation/virt/kvm/
cpuid.rst
44 KVM_FEATURE_CLOCKSOURCE 0 kvmclock available at msrs
52 KVM_FEATURE_CLOCKSOURCE2 3 kvmclock available at msrs
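
Both feature bits live in the KVM features CPUID leaf (conventionally 0x40000001, KVM_CPUID_FEATURES), EAX, at the bit positions shown in the table. A hedged guest-side sketch; the hypervisor-signature check at leaf 0x40000000 is omitted for brevity:

#include <cpuid.h>
#include <stdbool.h>

static bool kvm_has_clocksource2(void)
{
	unsigned int eax, ebx, ecx, edx;

	/* KVM feature bits are reported in EAX of this leaf. */
	__cpuid(0x40000001, eax, ebx, ecx, edx);
	return eax & (1u << 3);		/* KVM_FEATURE_CLOCKSOURCE2, bit 3 */
}
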
api.rst
214 __u32 nmsrs; /* number of msrs in entries */
219 kvm adjusts nmsrs to reflect the actual number of msrs and fills in the
222 KVM_GET_MSR_INDEX_LIST returns the guest msrs that are supported. The list
641 :Returns: number of msrs successfully returned;
657 __u32 nmsrs; /* number of msrs in entries */
681 :Returns: number of msrs successfully set (see below), -1 on error
3916 __u32 nmsrs; /* number of msrs in bitmap */
5010 __u32 nmsrs; /* number of msrs in bitmap */
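
The nmsrs handshake described above is the usual two-step: probe with a too-small list so KVM writes back the real count (and fails with E2BIG), then retry with a properly sized buffer. A hedged userspace sketch against the /dev/kvm fd:

#include <errno.h>
#include <linux/kvm.h>
#include <stdlib.h>
#include <sys/ioctl.h>

static struct kvm_msr_list *get_msr_index_list(int kvm_fd)
{
	struct kvm_msr_list probe = { .nmsrs = 0 };
	struct kvm_msr_list *list;

	/* KVM updates probe.nmsrs to the real count and returns E2BIG. */
	if (ioctl(kvm_fd, KVM_GET_MSR_INDEX_LIST, &probe) < 0 &&
	    errno != E2BIG)
		return NULL;

	list = calloc(1, sizeof(*list) +
			 probe.nmsrs * sizeof(list->indices[0]));
	if (!list)
		return NULL;

	list->nmsrs = probe.nmsrs;
	if (ioctl(kvm_fd, KVM_GET_MSR_INDEX_LIST, list) < 0) {
		free(list);
		return NULL;
	}
	return list;
}
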
/linux/arch/x86/kernel/cpu/mce/
amd.c
621 u32 msrs[NR_BLOCKS]; in disable_err_thresholding() local
624 msrs[0] = 0x00000413; /* MC4_MISC0 */ in disable_err_thresholding()
625 msrs[1] = 0xc0000408; /* MC4_MISC1 */ in disable_err_thresholding()
633 msrs[0] = MSR_AMD64_SMCA_MCx_MISC(bank); in disable_err_thresholding()
648 msr_clear_bit(msrs[i], 62); in disable_err_thresholding()
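
msr_clear_bit() in the loop above is a read-modify-write helper; a simplified sketch of what it does (the real helper uses the _safe MSR accessors and reports whether the value changed):

#include <linux/bits.h>
#include <linux/types.h>
#include <asm/msr.h>

/* Simplified: clear @bit in @msr, writing back only if it was set. */
static void clear_msr_bit(u32 msr, u8 bit)
{
	u64 val;

	rdmsrl(msr, val);
	if (val & BIT_ULL(bit))
		wrmsrl(msr, val & ~BIT_ULL(bit));
}
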
/linux/drivers/edac/
amd64_edac.c
14 static struct msr __percpu *msrs; variable
3367 rdmsr_on_cpus(mask, MSR_IA32_MCG_CTL, msrs); in nb_mce_bank_enabled_on_node()
3370 struct msr *reg = per_cpu_ptr(msrs, cpu); in nb_mce_bank_enabled_on_node()
3399 rdmsr_on_cpus(cmask, MSR_IA32_MCG_CTL, msrs); in toggle_ecc_err_reporting()
3403 struct msr *reg = per_cpu_ptr(msrs, cpu); in toggle_ecc_err_reporting()
3973 msrs = msrs_alloc(); in amd64_edac_init()
3974 if (!msrs) in amd64_edac_init()
4012 msrs_free(msrs); in amd64_edac_init()
4013 msrs = NULL; in amd64_edac_init()
4043 msrs_free(msrs); in amd64_edac_exit()
[all …]
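
toggle_ecc_err_reporting() above is the read-modify-write counterpart of the read-only scan: pull the MSR into the per-CPU buffer, flip a bit in each copy, and push the result back with wrmsr_on_cpus(). A hedged sketch with an illustrative bit position:

#include <linux/bits.h>
#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <asm/msr.h>

static void toggle_mcg_ctl_bit(const struct cpumask *cmask,
			       struct msr __percpu *msrs, bool on)
{
	int cpu;

	rdmsr_on_cpus(cmask, MSR_IA32_MCG_CTL, msrs);

	for_each_cpu(cpu, cmask) {
		struct msr *reg = per_cpu_ptr(msrs, cpu);

		if (on)
			reg->l |= BIT(4);	/* illustrative bit */
		else
			reg->l &= ~BIT(4);
	}

	wrmsr_on_cpus(cmask, MSR_IA32_MCG_CTL, msrs);
}
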
/linux/arch/x86/kvm/
x86.c
354 if (msrs->registered) { in kvm_on_user_return()
355 msrs->registered = false; in kvm_on_user_return()
360 values = &msrs->values[slot]; in kvm_on_user_return()
416 msrs->values[i].host = value; in kvm_user_return_msr_cpu_online()
417 msrs->values[i].curr = value; in kvm_user_return_msr_cpu_online()
434 msrs->values[slot].curr = value; in kvm_set_user_return_msr()
435 if (!msrs->registered) { in kvm_set_user_return_msr()
438 msrs->registered = true; in kvm_set_user_return_msr()
449 if (msrs->registered) in drop_user_return_notifiers()
4030 struct kvm_msrs msrs; in msr_io() local
[all …]
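
The x86.c hits implement KVM's lazy "user return MSR" scheme: a guest value is written only when it differs from the current one, and a user-return notifier restores the host value the next time the CPU heads back to userspace. A minimal single-MSR sketch of that idea, assuming the generic user_return_notifier API; all names are illustrative, and KVM itself tracks a per-CPU array of such MSRs:

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/user-return-notifier.h>
#include <asm/msr.h>

struct lazy_msr {
	struct user_return_notifier urn;
	u32 msr;
	u64 host, curr;
	bool registered;
};

static void lazy_msr_on_user_return(struct user_return_notifier *urn)
{
	struct lazy_msr *m = container_of(urn, struct lazy_msr, urn);

	m->registered = false;
	user_return_notifier_unregister(urn);
	if (m->curr != m->host) {
		wrmsrl(m->msr, m->host);	/* restore the host value */
		m->curr = m->host;
	}
}

static void lazy_msr_set_guest(struct lazy_msr *m, u64 value)
{
	if (value != m->curr) {
		wrmsrl(m->msr, value);
		m->curr = value;
	}
	if (!m->registered) {
		m->urn.on_user_return = lazy_msr_on_user_return;
		user_return_notifier_register(&m->urn);
		m->registered = true;
	}
}
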
