Searched refs:giveup_ext (Results 1 – 4 of 4) sorted by relevance
244 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_emulate_loadstore()
245     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
268 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_emulate_loadstore()
269     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
313 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_emulate_loadstore()
314     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, in kvmppc_emulate_loadstore()
1171 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_complete_mmio_load()
1172     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP); in kvmppc_complete_mmio_load()
1187 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_complete_mmio_load()
1188     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VSX); in kvmppc_complete_mmio_load()
1204 if (vcpu->kvm->arch.kvm_ops->giveup_ext) in kvmppc_complete_mmio_load()
1205     vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VEC); in kvmppc_complete_mmio_load()
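Both groups of hits follow the same shape: the per-VM giveup_ext hook is optional, so each call site tests the function pointer before invoking it with the MSR bit of the facility to flush (MSR_FP, MSR_VSX, or MSR_VEC). A minimal standalone sketch of that optional-callback pattern is below; the vm_ops/vcpu/demo_giveup names and the MSR bit values are illustrative stand-ins, not the real kernel definitions.

    #include <stdio.h>

    typedef unsigned long ulong;

    struct vcpu;

    struct vm_ops {
    	/* Optional hook: may be NULL when a backend has nothing to flush. */
    	void (*giveup_ext)(struct vcpu *vcpu, ulong msr);
    };

    struct vcpu {
    	struct vm_ops *ops;
    };

    /* Placeholder facility bits for the demo, not the real PowerPC MSR layout. */
    #define MSR_FP  (1UL << 13)
    #define MSR_VEC (1UL << 25)

    static void demo_giveup(struct vcpu *vcpu, ulong msr)
    {
    	(void)vcpu;
    	printf("giving up facility 0x%lx\n", msr);
    }

    int main(void)
    {
    	struct vm_ops ops = { .giveup_ext = demo_giveup };
    	struct vcpu vcpu = { .ops = &ops };

    	/* Same guard-then-call shape as the kvmppc_emulate_loadstore() and
    	 * kvmppc_complete_mmio_load() hits above. */
    	if (vcpu.ops->giveup_ext)
    		vcpu.ops->giveup_ext(&vcpu, MSR_FP);

    	return 0;
    }

The NULL check presumably exists because not every KVM backend supplies the hook, so callers cannot invoke it unconditionally.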
2079 .giveup_ext = kvmppc_giveup_ext,
309 void (*giveup_ext)(struct kvm_vcpu *vcpu, ulong msr); member
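The last two hits are the definition side of the same hook: the ops structure declares the giveup_ext member (the line-309 hit) and a backend registers its implementation with a designated initializer (the line-2079 hit). A compile-checkable sketch of that declaration/registration pairing follows; kvmppc_ops_sketch, kvmppc_giveup_ext_sketch, and the empty body are hypothetical, standing in for the real kvmppc_ops and kvmppc_giveup_ext, which actually flush the guest's FP/VEC/VSX register state for the requested facility.

    typedef unsigned long ulong;
    struct kvm_vcpu;   /* opaque here; defined elsewhere in the kernel */

    /* Member declaration, matching the line-309 hit. */
    struct kvmppc_ops_sketch {
    	void (*giveup_ext)(struct kvm_vcpu *vcpu, ulong msr);
    	/* ... other callbacks elided ... */
    };

    /* Hypothetical backend implementation; body intentionally empty. */
    static void kvmppc_giveup_ext_sketch(struct kvm_vcpu *vcpu, ulong msr)
    {
    	(void)vcpu;
    	(void)msr;
    }

    /* Registration by designated initializer, matching the line-2079 hit. */
    static struct kvmppc_ops_sketch demo_ops = {
    	.giveup_ext = kvmppc_giveup_ext_sketch,
    };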
Completed in 16 milliseconds