Lines matching references to `csr` (cross-reference search results; each entry shows the original source line number, the matching line, and the enclosing function):

52 	struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr;  in kvm_riscv_reset_vcpu()  local
57 memcpy(csr, reset_csr, sizeof(*csr)); in kvm_riscv_reset_vcpu()
280 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_get_reg_csr() local
295 reg_val = (csr->hvip >> VSIP_TO_HVIP_SHIFT) & VSIP_VALID_MASK; in kvm_riscv_vcpu_get_reg_csr()
297 reg_val = ((unsigned long *)csr)[reg_num]; in kvm_riscv_vcpu_get_reg_csr()
308 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_set_reg_csr() local
329 ((unsigned long *)csr)[reg_num] = reg_val; in kvm_riscv_vcpu_set_reg_csr()
469 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_flush_interrupts() local
476 csr->hvip &= ~mask; in kvm_riscv_vcpu_flush_interrupts()
477 csr->hvip |= val; in kvm_riscv_vcpu_flush_interrupts()
485 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_vcpu_sync_interrupts() local
488 csr->vsie = csr_read(CSR_VSIE); in kvm_riscv_vcpu_sync_interrupts()
492 if ((csr->hvip ^ hvip) & (1UL << IRQ_VS_SOFT)) { in kvm_riscv_vcpu_sync_interrupts()
595 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_arch_vcpu_load() local
597 csr_write(CSR_VSSTATUS, csr->vsstatus); in kvm_arch_vcpu_load()
598 csr_write(CSR_VSIE, csr->vsie); in kvm_arch_vcpu_load()
599 csr_write(CSR_VSTVEC, csr->vstvec); in kvm_arch_vcpu_load()
600 csr_write(CSR_VSSCRATCH, csr->vsscratch); in kvm_arch_vcpu_load()
601 csr_write(CSR_VSEPC, csr->vsepc); in kvm_arch_vcpu_load()
602 csr_write(CSR_VSCAUSE, csr->vscause); in kvm_arch_vcpu_load()
603 csr_write(CSR_VSTVAL, csr->vstval); in kvm_arch_vcpu_load()
604 csr_write(CSR_HVIP, csr->hvip); in kvm_arch_vcpu_load()
605 csr_write(CSR_VSATP, csr->vsatp); in kvm_arch_vcpu_load()
620 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_arch_vcpu_put() local
630 csr->vsstatus = csr_read(CSR_VSSTATUS); in kvm_arch_vcpu_put()
631 csr->vsie = csr_read(CSR_VSIE); in kvm_arch_vcpu_put()
632 csr->vstvec = csr_read(CSR_VSTVEC); in kvm_arch_vcpu_put()
633 csr->vsscratch = csr_read(CSR_VSSCRATCH); in kvm_arch_vcpu_put()
634 csr->vsepc = csr_read(CSR_VSEPC); in kvm_arch_vcpu_put()
635 csr->vscause = csr_read(CSR_VSCAUSE); in kvm_arch_vcpu_put()
636 csr->vstval = csr_read(CSR_VSTVAL); in kvm_arch_vcpu_put()
637 csr->hvip = csr_read(CSR_HVIP); in kvm_arch_vcpu_put()
638 csr->vsatp = csr_read(CSR_VSATP); in kvm_arch_vcpu_put()
673 struct kvm_vcpu_csr *csr = &vcpu->arch.guest_csr; in kvm_riscv_update_hvip() local
675 csr_write(CSR_HVIP, csr->hvip); in kvm_riscv_update_hvip()