Searched refs:kvm_cpu_context (Results 1 – 22 of 22) sorted by relevance

/linux/arch/arm64/kvm/hyp/nvhe/
hyp-main.c
22 void __kvm_hyp_host_forward_smc(struct kvm_cpu_context *host_ctxt);
24 static void handle___kvm_vcpu_run(struct kvm_cpu_context *host_ctxt) in handle___kvm_vcpu_run()
31 static void handle___kvm_adjust_pc(struct kvm_cpu_context *host_ctxt) in handle___kvm_adjust_pc()
71 static void handle___kvm_enable_ssbs(struct kvm_cpu_context *host_ctxt) in handle___kvm_enable_ssbs()
95 static void handle___vgic_v3_init_lrs(struct kvm_cpu_context *host_ctxt) in handle___vgic_v3_init_lrs()
100 static void handle___kvm_get_mdcr_el2(struct kvm_cpu_context *host_ctxt) in handle___kvm_get_mdcr_el2()
119 static void handle___pkvm_init(struct kvm_cpu_context *host_ctxt) in handle___pkvm_init()
171 typedef void (*hcall_t)(struct kvm_cpu_context *);
201 static void handle_host_hcall(struct kvm_cpu_context *host_ctxt) in handle_host_hcall()
241 static void handle_host_smc(struct kvm_cpu_context *host_ctxt) in handle_host_smc()
[all …]
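
Taken together, the hyp-main.c hits trace the nVHE host-call dispatch path: the host's general-purpose registers are saved into a kvm_cpu_context, the hypercall ID is read out of x0, and the matching hcall_t table entry is invoked with the whole context so the handler can unpack its arguments from, and write its result back into, the saved registers. Below is a minimal self-contained sketch of that pattern; the struct layout, handler body and table contents are illustrative stand-ins, not the kernel's definitions.

        #include <stdio.h>

        /* Illustrative stand-ins for the kernel types, not the real layouts. */
        struct user_pt_regs { unsigned long regs[31]; };
        struct kvm_cpu_context { struct user_pt_regs regs; };

        /* Hypercall arguments and return values travel in the saved host GPRs. */
        #define cpu_reg(ctxt, r)        ((ctxt)->regs.regs[r])

        typedef void (*hcall_t)(struct kvm_cpu_context *);

        /* Toy handler; the real one pokes SCTLR_EL2. */
        static void handle___kvm_enable_ssbs(struct kvm_cpu_context *host_ctxt)
        {
                cpu_reg(host_ctxt, 1) = 0;      /* report success back in x1 */
        }

        static const hcall_t host_hcall[] = {
                handle___kvm_enable_ssbs,
                /* ... one entry per __kvm_* / __pkvm_* hypercall ID ... */
        };

        static void handle_host_hcall(struct kvm_cpu_context *host_ctxt)
        {
                unsigned long id = cpu_reg(host_ctxt, 0);       /* function ID from x0 */

                if (id < sizeof(host_hcall) / sizeof(host_hcall[0]))
                        host_hcall[id](host_ctxt);
                /* else: the kernel reports SMCCC_RET_NOT_SUPPORTED to the host */
        }

        int main(void)
        {
                struct kvm_cpu_context host_ctxt = { { { 0 } } };

                handle_host_hcall(&host_ctxt);  /* dispatch hypercall ID 0 */
                printf("x1 after hcall: %lu\n", cpu_reg(&host_ctxt, 1));
                return 0;
        }

Passing the whole context instead of a plain argument list keeps the EL2 entry path uniform: hypercalls, SMCs and memory aborts all funnel through the same saved-register block.
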
psci-relay.c
20 void __noreturn __host_enter(struct kvm_cpu_context *host_ctxt);
72 static unsigned long psci_forward(struct kvm_cpu_context *host_ctxt) in psci_forward()
107 static int psci_cpu_on(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_cpu_on()
151 static int psci_cpu_suspend(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_cpu_suspend()
179 static int psci_system_suspend(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_system_suspend()
206 struct kvm_cpu_context *host_ctxt; in kvm_host_psci_cpu_entry()
224 static unsigned long psci_0_1_handler(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_0_1_handler()
236 static unsigned long psci_0_2_handler(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_0_2_handler()
262 static unsigned long psci_1_0_handler(u64 func_id, struct kvm_cpu_context *host_ctxt) in psci_1_0_handler()
276 bool kvm_host_psci_handler(struct kvm_cpu_context *host_ctxt) in kvm_host_psci_handler()
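
psci-relay.c applies the same register-passing convention to PSCI: the handlers unpack the SMCCC function ID and arguments from the host's saved x0-x3 and, for calls that need no interception, relay them straight to EL3 firmware. A hedged sketch of the forwarding case, with smc_call() as a hypothetical stand-in for the real SMC primitive:

        /* Stand-ins; the real context also carries SPSRs, FP and system registers. */
        struct user_pt_regs { unsigned long regs[31]; };
        struct kvm_cpu_context { struct user_pt_regs regs; };
        #define cpu_reg(ctxt, r)        ((ctxt)->regs.regs[r])

        /* Hypothetical stand-in for the actual SMC into EL3 firmware. */
        static unsigned long smc_call(unsigned long a0, unsigned long a1,
                                      unsigned long a2, unsigned long a3)
        {
                (void)a0; (void)a1; (void)a2; (void)a3;
                return 0;       /* PSCI_RET_SUCCESS */
        }

        /* Functions needing no interception are relayed verbatim from x0-x3. */
        static unsigned long psci_forward(struct kvm_cpu_context *host_ctxt)
        {
                return smc_call(cpu_reg(host_ctxt, 0), cpu_reg(host_ctxt, 1),
                                cpu_reg(host_ctxt, 2), cpu_reg(host_ctxt, 3));
        }

        int main(void)
        {
                struct kvm_cpu_context ctxt = { { { 0 } } };
                return (int)psci_forward(&ctxt);
        }

Calls such as CPU_ON do need interception, since the hypervisor must substitute its own entry point before handing the request down to firmware; that is why psci_cpu_on() and the suspend paths above get dedicated handlers.
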
switch.c
35 DEFINE_PER_CPU(struct kvm_cpu_context, kvm_hyp_ctxt);
56 struct kvm_cpu_context *ctxt = &vcpu->arch.ctxt; in __activate_traps()
127 static bool __pmu_switch_to_guest(struct kvm_cpu_context *host_ctxt) in __pmu_switch_to_guest()
147 static void __pmu_switch_to_host(struct kvm_cpu_context *host_ctxt) in __pmu_switch_to_host()
257 struct kvm_cpu_context *host_ctxt; in __kvm_vcpu_run()
258 struct kvm_cpu_context *guest_ctxt; in __kvm_vcpu_run()
356 struct kvm_cpu_context *host_ctxt; in hyp_panic()
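
switch.c is where two contexts meet: __kvm_vcpu_run() pairs the per-CPU host context against the guest's vcpu->arch.ctxt, saving one side and restoring the other around guest entry, then unwinding in the opposite order on exit. A compressed skeleton of that ordering, with every helper stubbed; the real function also configures traps and switches FP, PMU and debug state.

        /*
         * All helpers are no-op stubs; the per-CPU context lookup, trap
         * configuration and exit decoding are elided.
         */
        struct kvm_cpu_context { int dummy; };  /* abridged */
        struct kvm_vcpu_arch { struct kvm_cpu_context ctxt; };
        struct kvm_vcpu { struct kvm_vcpu_arch arch; };

        static struct kvm_cpu_context host_ctxt_storage;       /* per-CPU in reality */

        static void __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt)    { (void)ctxt; }
        static void __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt) { (void)ctxt; }
        static int __guest_enter(struct kvm_vcpu *vcpu) { (void)vcpu; return 0; }
        static int fixup_guest_exit(struct kvm_vcpu *vcpu, int *exit_code)
        {
                (void)vcpu; (void)exit_code;
                return 0;       /* 0 = hand the exit back to the host */
        }

        int __kvm_vcpu_run(struct kvm_vcpu *vcpu)
        {
                struct kvm_cpu_context *host_ctxt = &host_ctxt_storage;
                struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt;
                int exit_code;

                __sysreg_save_state_nvhe(host_ctxt);     /* host EL1 state -> memory */
                __sysreg_restore_state_nvhe(guest_ctxt); /* guest EL1 state -> CPU */

                do {
                        exit_code = __guest_enter(vcpu);        /* ERET into the guest */
                } while (fixup_guest_exit(vcpu, &exit_code));   /* handle, maybe re-enter */

                __sysreg_save_state_nvhe(guest_ctxt);    /* guest state back to memory */
                __sysreg_restore_state_nvhe(host_ctxt);  /* host state back onto the CPU */

                return exit_code;
        }
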
sysreg-sr.c
21 void __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt) in __sysreg_save_state_nvhe()
29 void __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt) in __sysreg_restore_state_nvhe()
setup.c
227 struct kvm_cpu_context *host_ctxt = &host_data->host_ctxt; in __pkvm_init_finalise()
mem_protect.c
441 void handle_host_mem_abort(struct kvm_cpu_context *host_ctxt) in handle_host_mem_abort()
/linux/arch/riscv/include/asm/
kvm_vcpu_fp.h
15 struct kvm_cpu_context;
18 void __kvm_riscv_fp_f_save(struct kvm_cpu_context *context);
19 void __kvm_riscv_fp_f_restore(struct kvm_cpu_context *context);
20 void __kvm_riscv_fp_d_save(struct kvm_cpu_context *context);
21 void __kvm_riscv_fp_d_restore(struct kvm_cpu_context *context);
24 void kvm_riscv_vcpu_guest_fp_save(struct kvm_cpu_context *cntx,
26 void kvm_riscv_vcpu_guest_fp_restore(struct kvm_cpu_context *cntx,
28 void kvm_riscv_vcpu_host_fp_save(struct kvm_cpu_context *cntx);
29 void kvm_riscv_vcpu_host_fp_restore(struct kvm_cpu_context *cntx);
39 struct kvm_cpu_context *cntx, in kvm_riscv_vcpu_guest_fp_restore()
[all …]
kvm_host.h
95 struct kvm_cpu_context {
160 struct kvm_cpu_context host_context;
163 struct kvm_cpu_context guest_context;
169 struct kvm_cpu_context guest_reset_context;
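
On RISC-V, kvm_host.h gives kvm_cpu_context a plainer shape: the full GPR file plus the supervisor CSRs and FP state that must survive a world switch, with three copies embedded per vCPU (host, guest, and a reset template). An abridged sketch of the layout; member names follow the RISC-V ABI, but treat the details as approximate.

        /* Stand-in for the uapi union; the real one lives in asm/ptrace.h. */
        union __riscv_fp_state {
                struct { unsigned int f[32]; unsigned int fcsr; } f;        /* F ext */
                struct { unsigned long long f[32]; unsigned int fcsr; } d;  /* D ext */
        };

        /* Abridged; the real definition names every GPR from ra through t6. */
        struct kvm_cpu_context {
                unsigned long zero;     /* x0 slot, kept for layout symmetry */
                unsigned long ra;       /* x1 */
                unsigned long sp;       /* x2 */
                /* ... gp, tp, t0-t6, s0-s11, a0-a7 ... */
                unsigned long sepc;     /* guest PC at trap time */
                unsigned long sstatus;  /* supervisor status, incl. the FS field */
                unsigned long hstatus;  /* hypervisor status */
                union __riscv_fp_state fp;      /* the fp.f.f[n] seen in asm-offsets.c */
        };
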
/linux/arch/riscv/kernel/
asm-offsets.c
199 OFFSET(KVM_ARCH_FP_F_F0, kvm_cpu_context, fp.f.f[0]); in asm_offsets()
200 OFFSET(KVM_ARCH_FP_F_F1, kvm_cpu_context, fp.f.f[1]); in asm_offsets()
201 OFFSET(KVM_ARCH_FP_F_F2, kvm_cpu_context, fp.f.f[2]); in asm_offsets()
202 OFFSET(KVM_ARCH_FP_F_F3, kvm_cpu_context, fp.f.f[3]); in asm_offsets()
203 OFFSET(KVM_ARCH_FP_F_F4, kvm_cpu_context, fp.f.f[4]); in asm_offsets()
204 OFFSET(KVM_ARCH_FP_F_F5, kvm_cpu_context, fp.f.f[5]); in asm_offsets()
205 OFFSET(KVM_ARCH_FP_F_F6, kvm_cpu_context, fp.f.f[6]); in asm_offsets()
206 OFFSET(KVM_ARCH_FP_F_F7, kvm_cpu_context, fp.f.f[7]); in asm_offsets()
207 OFFSET(KVM_ARCH_FP_F_F8, kvm_cpu_context, fp.f.f[8]); in asm_offsets()
208 OFFSET(KVM_ARCH_FP_F_F9, kvm_cpu_context, fp.f.f[9]); in asm_offsets()
[all …]
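
These asm-offsets.c hits exist because the assembly FP save/restore routines need the byte offset of each fp.f.f[n] member as a plain constant. The kernel's standard kbuild trick is to have the compiler print each offsetof() into its assembly output, which a build-time script scrapes into a generated header. A self-contained sketch of the mechanism; the toy struct stands in for kvm_cpu_context.

        #include <stddef.h>

        /* kbuild's trick (include/linux/kbuild.h): emit "->SYM value" markers
         * into the compiler's assembly output; a build-time sed pass turns them
         * into #defines in the generated asm-offsets.h. Build with `gcc -S`;
         * the file is compiled to assembly and scraped, never linked or run. */
        #define DEFINE(sym, val) \
                asm volatile("\n.ascii \"->" #sym " %0 " #val "\"" : : "i" (val))
        #define OFFSET(sym, str, mem)   DEFINE(sym, offsetof(struct str, mem))

        /* Toy stand-in for the real kvm_cpu_context. */
        struct kvm_cpu_context { unsigned long gprs[32]; double f[32]; };

        int main(void)
        {
                /* Assembly can then address the member as ctx + KVM_ARCH_FP_F_F0. */
                OFFSET(KVM_ARCH_FP_F_F0, kvm_cpu_context, f[0]);
                return 0;
        }
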
/linux/arch/arm64/kvm/hyp/vhe/
sysreg-sr.c
27 void sysreg_save_host_state_vhe(struct kvm_cpu_context *ctxt) in sysreg_save_host_state_vhe()
33 void sysreg_save_guest_state_vhe(struct kvm_cpu_context *ctxt) in sysreg_save_guest_state_vhe()
40 void sysreg_restore_host_state_vhe(struct kvm_cpu_context *ctxt) in sysreg_restore_host_state_vhe()
46 void sysreg_restore_guest_state_vhe(struct kvm_cpu_context *ctxt) in sysreg_restore_guest_state_vhe()
66 struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt; in kvm_vcpu_load_sysregs_vhe()
67 struct kvm_cpu_context *host_ctxt; in kvm_vcpu_load_sysregs_vhe()
100 struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt; in kvm_vcpu_put_sysregs_vhe()
101 struct kvm_cpu_context *host_ctxt; in kvm_vcpu_put_sysregs_vhe()
switch.c
31 DEFINE_PER_CPU(struct kvm_cpu_context, kvm_hyp_ctxt);
122 struct kvm_cpu_context *host_ctxt; in __kvm_vcpu_run_vhe()
123 struct kvm_cpu_context *guest_ctxt; in __kvm_vcpu_run_vhe()
210 struct kvm_cpu_context *host_ctxt; in __hyp_call_panic()
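
The VHE helpers come in host/guest pairs because, with VHE, most EL1 system registers can be switched once at vcpu_load()/vcpu_put() time instead of on every guest entry; kvm_vcpu_load_sysregs_vhe() and its _put_ counterpart bracket the vCPU's whole residency on the CPU. A hedged sketch of the load side, with the per-CPU lookup and the helpers stubbed:

        struct kvm_cpu_context { unsigned long long sys_regs[8]; };    /* abridged */
        struct kvm_vcpu_arch {
                struct kvm_cpu_context ctxt;
                int sysregs_loaded_on_cpu;      /* accessors must read the CPU, not memory */
        };
        struct kvm_vcpu { struct kvm_vcpu_arch arch; };

        static struct kvm_cpu_context host_ctxt_storage;       /* per-CPU in reality */

        /* Stubs for the helpers named in the hits above. */
        static void sysreg_save_host_state_vhe(struct kvm_cpu_context *c)  { (void)c; }
        static void __sysreg_restore_user_state(struct kvm_cpu_context *c) { (void)c; }
        static void __sysreg_restore_el1_state(struct kvm_cpu_context *c)  { (void)c; }

        void kvm_vcpu_load_sysregs_vhe(struct kvm_vcpu *vcpu)
        {
                struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt;
                struct kvm_cpu_context *host_ctxt = &host_ctxt_storage;

                sysreg_save_host_state_vhe(host_ctxt);  /* host EL1 regs -> memory, once */

                /* Guest EL1 state then stays live on the CPU until vcpu_put(). */
                __sysreg_restore_user_state(guest_ctxt);
                __sysreg_restore_el1_state(guest_ctxt);
                vcpu->arch.sysregs_loaded_on_cpu = 1;
        }
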
/linux/arch/arm64/include/asm/
kvm_hyp.h
15 DECLARE_PER_CPU(struct kvm_cpu_context, kvm_hyp_ctxt);
74 void __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt);
75 void __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt);
77 void sysreg_save_host_state_vhe(struct kvm_cpu_context *ctxt);
78 void sysreg_restore_host_state_vhe(struct kvm_cpu_context *ctxt);
79 void sysreg_save_guest_state_vhe(struct kvm_cpu_context *ctxt);
80 void sysreg_restore_guest_state_vhe(struct kvm_cpu_context *ctxt);
103 bool kvm_host_psci_handler(struct kvm_cpu_context *host_ctxt);
106 void __noreturn __hyp_do_panic(struct kvm_cpu_context *host_ctxt, u64 spsr,
115 void __noreturn __host_enter(struct kvm_cpu_context *host_ctxt);
kvm_host.h
228 struct kvm_cpu_context {
249 struct kvm_cpu_context host_ctxt;
283 struct kvm_cpu_context ctxt;
709 static inline void kvm_init_host_cpu_context(struct kvm_cpu_context *cpu_ctxt) in kvm_init_host_cpu_context()
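
These kvm_host.h hits are the arm64 definition itself: a kvm_cpu_context bundles the GPRs, the banked AArch32 SPSRs, the FP/SIMD state and an enum-indexed array of system registers, plus a back-pointer the host's copy uses to record which vCPU is running (consumed by the hyp panic path via HOST_CONTEXT_VCPU below). An abridged sketch of the layout, with stand-in types; counts and exact members are approximate.

        typedef unsigned long long u64;         /* kernel-type stand-in */
        #define NR_SYS_REGS 128                 /* illustrative; really from an enum */

        struct user_pt_regs { u64 regs[31]; u64 sp; u64 pc; u64 pstate; };
        struct user_fpsimd_state {
                u64 vregs[32][2];       /* stand-in for __uint128_t vregs[32] */
                unsigned int fpsr, fpcr;
        };
        struct kvm_vcpu;

        struct kvm_cpu_context {
                struct user_pt_regs regs;       /* x0-x30, sp (= SP_EL0), pc, pstate */

                u64 spsr_abt;   /* banked SPSRs for AArch32 guests */
                u64 spsr_und;
                u64 spsr_irq;
                u64 spsr_fiq;

                struct user_fpsimd_state fp_regs;       /* V0-V31, FPSR, FPCR */

                u64 sys_regs[NR_SYS_REGS];      /* indexed by the sysreg enum (SCTLR_EL1, ...) */

                struct kvm_vcpu *__hyp_running_vcpu;    /* set only in the host's copy */
        };
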
/linux/arch/riscv/kvm/
vcpu_fp.c
19 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_vcpu_fp_reset()
29 void kvm_riscv_vcpu_fp_clean(struct kvm_cpu_context *cntx) in kvm_riscv_vcpu_fp_clean()
35 void kvm_riscv_vcpu_guest_fp_save(struct kvm_cpu_context *cntx, in kvm_riscv_vcpu_guest_fp_save()
47 void kvm_riscv_vcpu_guest_fp_restore(struct kvm_cpu_context *cntx, in kvm_riscv_vcpu_guest_fp_restore()
59 void kvm_riscv_vcpu_host_fp_save(struct kvm_cpu_context *cntx) in kvm_riscv_vcpu_host_fp_save()
68 void kvm_riscv_vcpu_host_fp_restore(struct kvm_cpu_context *cntx) in kvm_riscv_vcpu_host_fp_restore()
81 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_vcpu_get_reg_fp()
127 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_vcpu_set_reg_fp()
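
vcpu_fp.c is the C half of the FP routines declared in kvm_vcpu_fp.h: whether the single-precision (F) or double-precision (D) assembly path runs is chosen from the guest's ISA, and the context's sstatus.FS field tracks whether the FP file needs touching at all. A hedged sketch of the restore-side selection; the ISA test is a simplified stand-in, and the __kvm_riscv_* routines are really assembly.

        /* sstatus.FS encoding, as in asm/csr.h. */
        #define SR_FS           0x6000UL
        #define SR_FS_OFF       0x0000UL
        #define SR_FS_CLEAN     0x4000UL

        struct kvm_cpu_context { unsigned long sstatus; /* + GPRs, fp union, ... */ };

        /* Really assembly routines; no-op stubs so the sketch stands alone. */
        static void __kvm_riscv_fp_f_restore(struct kvm_cpu_context *cntx) { (void)cntx; }
        static void __kvm_riscv_fp_d_restore(struct kvm_cpu_context *cntx) { (void)cntx; }

        /* Hypothetical helper; the kernel uses riscv_isa_extension_available(). */
        static int isa_has(unsigned long isa, char ext)
        {
                return !!(isa & (1UL << (ext - 'a')));
        }

        void kvm_riscv_vcpu_guest_fp_restore(struct kvm_cpu_context *cntx,
                                             unsigned long isa)
        {
                if ((cntx->sstatus & SR_FS) != SR_FS_OFF) {
                        if (isa_has(isa, 'd'))          /* D implies the 64-bit file */
                                __kvm_riscv_fp_d_restore(cntx);
                        else if (isa_has(isa, 'f'))
                                __kvm_riscv_fp_f_restore(cntx);
                        cntx->sstatus &= ~SR_FS;        /* mark the file clean: */
                        cntx->sstatus |= SR_FS_CLEAN;   /* loaded but not yet dirtied */
                }
        }
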
vcpu_sbi.c
22 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_riscv_vcpu_sbi_forward()
41 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_riscv_vcpu_sbi_return()
85 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_riscv_vcpu_sbi_ecall()
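
vcpu_sbi.c reads SBI calls straight out of the guest context: by the SBI calling convention the extension ID arrives in a7, the function ID in a6, arguments in a0-a5, and the result goes back in a0 before sepc is advanced past the ecall. A sketch of that unpacking; the dispatch body is illustrative.

        struct kvm_cpu_context {
                /* GPRs by ABI name; only those the SBI convention touches shown. */
                unsigned long a0, a1, a2, a3, a4, a5, a6, a7;
                unsigned long sepc;
        };

        #define SBI_ERR_NOT_SUPPORTED   (-2L)   /* per the SBI spec */

        int kvm_riscv_vcpu_sbi_ecall_sketch(struct kvm_cpu_context *cp)
        {
                unsigned long ext_id  = cp->a7; /* which SBI extension */
                unsigned long func_id = cp->a6; /* which function within it */

                (void)func_id;
                switch (ext_id) {
                /* ... dispatch to timer / IPI / remote-fence handlers ... */
                default:
                        cp->a0 = (unsigned long)SBI_ERR_NOT_SUPPORTED;
                        break;
                }
                cp->sepc += 4;  /* step the guest past the ecall instruction */
                return 0;       /* 0: handled in-kernel, no exit to userspace */
        }
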
vcpu.c
54 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_reset_vcpu()
55 struct kvm_cpu_context *reset_cntx = &vcpu->arch.guest_reset_context; in kvm_riscv_reset_vcpu()
76 struct kvm_cpu_context *cntx; in kvm_arch_vcpu_create()
212 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_vcpu_get_reg_core()
245 struct kvm_cpu_context *cntx = &vcpu->arch.guest_context; in kvm_riscv_vcpu_set_reg_core()
vcpu_exit.c
165 struct kvm_cpu_context *ct; in virtual_inst_fault()
198 struct kvm_cpu_context *ct = &vcpu->arch.guest_context; in emulate_load()
311 struct kvm_cpu_context *ct = &vcpu->arch.guest_context; in emulate_store()
/linux/arch/arm64/kvm/hyp/include/hyp/
sysreg-sr.h
19 static inline void __sysreg_save_common_state(struct kvm_cpu_context *ctxt) in __sysreg_save_common_state()
24 static inline void __sysreg_save_user_state(struct kvm_cpu_context *ctxt) in __sysreg_save_user_state()
30 static inline bool ctxt_has_mte(struct kvm_cpu_context *ctxt) in ctxt_has_mte()
40 static inline void __sysreg_save_el1_state(struct kvm_cpu_context *ctxt) in __sysreg_save_el1_state()
70 static inline void __sysreg_save_el2_return_state(struct kvm_cpu_context *ctxt) in __sysreg_save_el2_return_state()
84 static inline void __sysreg_restore_common_state(struct kvm_cpu_context *ctxt) in __sysreg_restore_common_state()
89 static inline void __sysreg_restore_user_state(struct kvm_cpu_context *ctxt) in __sysreg_restore_user_state()
95 static inline void __sysreg_restore_el1_state(struct kvm_cpu_context *ctxt) in __sysreg_restore_el1_state()
159 static inline void __sysreg_restore_el2_return_state(struct kvm_cpu_context *ctxt) in __sysreg_restore_el2_return_state()
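
The shared sysreg-sr.h helpers all follow one pattern: each register slot in ctxt->sys_regs[] is paired with a hardware access, so saving EL1 state is a block of slot-equals-read lines and restoring is its mirror image. A hedged sketch with stand-ins for the real sysreg enum and the MSR/MRS wrappers:

        /* Illustrative register indices; the real enum is in asm/kvm_host.h. */
        enum { SCTLR_EL1, TTBR0_EL1, TTBR1_EL1, TCR_EL1, NR_SYS_REGS };

        struct kvm_cpu_context { unsigned long long sys_regs[NR_SYS_REGS]; };

        #define ctxt_sys_reg(c, r)      ((c)->sys_regs[(r)])

        /* Stand-ins for the MSR/MRS wrappers (inline asm in the real kernel). */
        static unsigned long long read_sysreg_el1(int reg) { (void)reg; return 0; }
        static void write_sysreg_el1(unsigned long long val, int reg)
        { (void)val; (void)reg; }

        static inline void __sysreg_save_el1_state(struct kvm_cpu_context *ctxt)
        {
                ctxt_sys_reg(ctxt, SCTLR_EL1) = read_sysreg_el1(SCTLR_EL1);
                ctxt_sys_reg(ctxt, TCR_EL1)   = read_sysreg_el1(TCR_EL1);
                /* ... one line per EL1 register ... */
        }

        static inline void __sysreg_restore_el1_state(struct kvm_cpu_context *ctxt)
        {
                write_sysreg_el1(ctxt_sys_reg(ctxt, SCTLR_EL1), SCTLR_EL1);
                write_sysreg_el1(ctxt_sys_reg(ctxt, TCR_EL1),   TCR_EL1);
                /* ... the mirror of the save side ... */
        }
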
debug-sr.h
92 struct kvm_cpu_context *ctxt) in __debug_save_state()
110 struct kvm_cpu_context *ctxt) in __debug_restore_state()
130 struct kvm_cpu_context *host_ctxt; in __debug_switch_to_guest_common()
131 struct kvm_cpu_context *guest_ctxt; in __debug_switch_to_guest_common()
149 struct kvm_cpu_context *host_ctxt; in __debug_switch_to_host_common()
150 struct kvm_cpu_context *guest_ctxt; in __debug_switch_to_host_common()
switch.h
315 DECLARE_PER_CPU(struct kvm_cpu_context, kvm_hyp_ctxt);
319 struct kvm_cpu_context *ctxt; in kvm_hyp_handle_ptrauth()
/linux/arch/arm64/kernel/
asm-offsets.c
116 DEFINE(CPU_USER_PT_REGS, offsetof(struct kvm_cpu_context, regs)); in main()
117 DEFINE(CPU_RGSR_EL1, offsetof(struct kvm_cpu_context, sys_regs[RGSR_EL1])); in main()
118 DEFINE(CPU_GCR_EL1, offsetof(struct kvm_cpu_context, sys_regs[GCR_EL1])); in main()
119 DEFINE(CPU_APIAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APIAKEYLO_EL1])); in main()
120 DEFINE(CPU_APIBKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APIBKEYLO_EL1])); in main()
121 DEFINE(CPU_APDAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APDAKEYLO_EL1])); in main()
122 DEFINE(CPU_APDBKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APDBKEYLO_EL1])); in main()
123 DEFINE(CPU_APGAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APGAKEYLO_EL1])); in main()
124 DEFINE(HOST_CONTEXT_VCPU, offsetof(struct kvm_cpu_context, __hyp_running_vcpu)); in main()
/linux/arch/arm64/kvm/hyp/include/nvhe/
mem_protect.h
58 void handle_host_mem_abort(struct kvm_cpu_context *host_ctxt);
