Lines matching refs:vmcs12 — references to struct vmcs12; the entries below come from KVM's nested VMX code (arch/x86/kvm/vmx/nested.c). Each entry shows the source line number, the matching line, and the enclosing function; "local" and "argument" mark lines where vmcs12 is declared as a local variable or a parameter.
54 #define SHADOW_FIELD_RO(x, y) { x, offsetof(struct vmcs12, y) },
61 #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12, y) },
362 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_ept_inject_page_fault() local
384 nested_ept_invalidate_addr(vcpu, vmcs12->ept_pointer, in nested_ept_inject_page_fault()
389 vmcs12->guest_physical_address = fault->address; in nested_ept_inject_page_fault()
420 static bool nested_vmx_is_page_fault_vmexit(struct vmcs12 *vmcs12, in nested_vmx_is_page_fault_vmexit() argument
425 bit = (vmcs12->exception_bitmap & (1u << PF_VECTOR)) != 0; in nested_vmx_is_page_fault_vmexit()
427 (error_code & vmcs12->page_fault_error_code_mask) != in nested_vmx_is_page_fault_vmexit()
428 vmcs12->page_fault_error_code_match; in nested_vmx_is_page_fault_vmexit()
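
The fragment above is the page-fault filter L0 applies on behalf of L1: whether a #PF in L2 reflects to L1 depends on the PF bit in the exception bitmap and on the error-code mask/match comparison. A standalone sketch of that rule follows, with plain stand-in fields instead of the real struct vmcs12; the combining XOR is not shown in the listing and is assumed from the SDM's mask/match semantics.

    #include <stdbool.h>
    #include <stdint.h>

    #define PF_VECTOR 14

    /* Stand-ins for the three vmcs12 fields consulted above. */
    struct pf_filter {
            uint32_t exception_bitmap;
            uint32_t page_fault_error_code_mask;
            uint32_t page_fault_error_code_match;
    };

    /*
     * A #PF in L2 is reflected to L1 when the exception-bitmap bit and the
     * error-code comparison disagree: bit set and error code matches, or
     * bit clear and error code does not match.
     */
    static bool pf_causes_vmexit(const struct pf_filter *f, uint32_t error_code)
    {
            bool bit = (f->exception_bitmap & (1u << PF_VECTOR)) != 0;
            bool mismatch = (error_code & f->page_fault_error_code_mask) !=
                            f->page_fault_error_code_match;

            return bit ^ mismatch;
    }
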
439 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_exception() local
449 if (nested_vmx_is_page_fault_vmexit(vmcs12, in nested_vmx_check_exception()
454 } else if (vmcs12->exception_bitmap & (1u << nr)) { in nested_vmx_check_exception()
474 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in vmx_inject_page_fault_nested() local
478 if (nested_vmx_is_page_fault_vmexit(vmcs12, fault->error_code) && in vmx_inject_page_fault_nested()
480 vmcs12->vm_exit_intr_error_code = fault->error_code; in vmx_inject_page_fault_nested()
491 struct vmcs12 *vmcs12) in nested_vmx_check_io_bitmap_controls() argument
493 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_check_io_bitmap_controls()
496 if (CC(!page_address_valid(vcpu, vmcs12->io_bitmap_a)) || in nested_vmx_check_io_bitmap_controls()
497 CC(!page_address_valid(vcpu, vmcs12->io_bitmap_b))) in nested_vmx_check_io_bitmap_controls()
504 struct vmcs12 *vmcs12) in nested_vmx_check_msr_bitmap_controls() argument
506 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_check_msr_bitmap_controls()
509 if (CC(!page_address_valid(vcpu, vmcs12->msr_bitmap))) in nested_vmx_check_msr_bitmap_controls()
516 struct vmcs12 *vmcs12) in nested_vmx_check_tpr_shadow_controls() argument
518 if (!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) in nested_vmx_check_tpr_shadow_controls()
521 if (CC(!page_address_valid(vcpu, vmcs12->virtual_apic_page_addr))) in nested_vmx_check_tpr_shadow_controls()
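
The three helpers above share one shape: if the corresponding execution control is not enabled for L2 the address field is ignored, otherwise it must be a page-aligned guest-physical address within the guest's physical-address width (roughly what page_address_valid() checks). A standalone sketch of that pattern; the return statements are not in the listing, so the success/failure values here are stand-ins.

    #include <stdbool.h>
    #include <stdint.h>

    /* Stand-in for page_address_valid(): page aligned and within MAXPHYADDR. */
    static bool gpa_valid(uint64_t gpa, unsigned int maxphyaddr)
    {
            return (gpa & 0xfffULL) == 0 && (gpa >> maxphyaddr) == 0;
    }

    /*
     * Shape shared by the I/O-bitmap, MSR-bitmap and TPR-shadow checks: a
     * disabled control makes the field "don't care"; an enabled control
     * makes a malformed address a VM-entry consistency failure.
     */
    static int check_bitmap_control(bool control_enabled, uint64_t gpa,
                                    unsigned int maxphyaddr)
    {
            if (!control_enabled)
                    return 0;

            if (!gpa_valid(gpa, maxphyaddr))
                    return -1;      /* stand-in for -EINVAL */

            return 0;
    }
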
588 struct vmcs12 *vmcs12) in nested_vmx_prepare_msr_bitmap() argument
598 !nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_prepare_msr_bitmap()
601 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) in nested_vmx_prepare_msr_bitmap()
613 if (nested_cpu_has_virt_x2apic_mode(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
614 if (nested_cpu_has_apic_reg_virt(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
633 if (nested_cpu_has_vid(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
671 struct vmcs12 *vmcs12) in nested_cache_shadow_vmcs12() argument
676 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_cache_shadow_vmcs12()
677 vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_cache_shadow_vmcs12()
680 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_cache_shadow_vmcs12()
682 vmcs12->vmcs_link_pointer, VMCS12_SIZE)) in nested_cache_shadow_vmcs12()
690 struct vmcs12 *vmcs12) in nested_flush_cached_shadow_vmcs12() argument
695 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_flush_cached_shadow_vmcs12()
696 vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_flush_cached_shadow_vmcs12()
699 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_flush_cached_shadow_vmcs12()
701 vmcs12->vmcs_link_pointer, VMCS12_SIZE)) in nested_flush_cached_shadow_vmcs12()
719 struct vmcs12 *vmcs12) in nested_vmx_check_apic_access_controls() argument
721 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES) && in nested_vmx_check_apic_access_controls()
722 CC(!page_address_valid(vcpu, vmcs12->apic_access_addr))) in nested_vmx_check_apic_access_controls()
729 struct vmcs12 *vmcs12) in nested_vmx_check_apicv_controls() argument
731 if (!nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
732 !nested_cpu_has_apic_reg_virt(vmcs12) && in nested_vmx_check_apicv_controls()
733 !nested_cpu_has_vid(vmcs12) && in nested_vmx_check_apicv_controls()
734 !nested_cpu_has_posted_intr(vmcs12)) in nested_vmx_check_apicv_controls()
741 if (CC(nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
742 nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES))) in nested_vmx_check_apicv_controls()
749 if (CC(nested_cpu_has_vid(vmcs12) && !nested_exit_on_intr(vcpu))) in nested_vmx_check_apicv_controls()
759 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_check_apicv_controls()
760 (CC(!nested_cpu_has_vid(vmcs12)) || in nested_vmx_check_apicv_controls()
762 CC((vmcs12->posted_intr_nv & 0xff00)) || in nested_vmx_check_apicv_controls()
763 CC(!kvm_vcpu_is_legal_aligned_gpa(vcpu, vmcs12->posted_intr_desc_addr, 64)))) in nested_vmx_check_apicv_controls()
767 if (CC(!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))) in nested_vmx_check_apicv_controls()
787 struct vmcs12 *vmcs12) in nested_vmx_check_exit_msr_switch_controls() argument
790 vmcs12->vm_exit_msr_load_count, in nested_vmx_check_exit_msr_switch_controls()
791 vmcs12->vm_exit_msr_load_addr)) || in nested_vmx_check_exit_msr_switch_controls()
793 vmcs12->vm_exit_msr_store_count, in nested_vmx_check_exit_msr_switch_controls()
794 vmcs12->vm_exit_msr_store_addr))) in nested_vmx_check_exit_msr_switch_controls()
801 struct vmcs12 *vmcs12) in nested_vmx_check_entry_msr_switch_controls() argument
804 vmcs12->vm_entry_msr_load_count, in nested_vmx_check_entry_msr_switch_controls()
805 vmcs12->vm_entry_msr_load_addr))) in nested_vmx_check_entry_msr_switch_controls()
812 struct vmcs12 *vmcs12) in nested_vmx_check_pml_controls() argument
814 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_check_pml_controls()
817 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_vmx_check_pml_controls()
818 CC(!page_address_valid(vcpu, vmcs12->pml_address))) in nested_vmx_check_pml_controls()
825 struct vmcs12 *vmcs12) in nested_vmx_check_unrestricted_guest_controls() argument
827 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST) && in nested_vmx_check_unrestricted_guest_controls()
828 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_unrestricted_guest_controls()
834 struct vmcs12 *vmcs12) in nested_vmx_check_mode_based_ept_exec_controls() argument
836 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_MODE_BASED_EPT_EXEC) && in nested_vmx_check_mode_based_ept_exec_controls()
837 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_mode_based_ept_exec_controls()
843 struct vmcs12 *vmcs12) in nested_vmx_check_shadow_vmcs_controls() argument
845 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_check_shadow_vmcs_controls()
848 if (CC(!page_address_valid(vcpu, vmcs12->vmread_bitmap)) || in nested_vmx_check_shadow_vmcs_controls()
849 CC(!page_address_valid(vcpu, vmcs12->vmwrite_bitmap))) in nested_vmx_check_shadow_vmcs_controls()
1026 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_msr_store_list_has_msr() local
1027 u32 count = vmcs12->vm_exit_msr_store_count; in nested_msr_store_list_has_msr()
1028 u64 gpa = vmcs12->vm_exit_msr_store_addr; in nested_msr_store_list_has_msr()
1130 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_has_guest_tlb_tag() local
1133 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1137 struct vmcs12 *vmcs12, in nested_vmx_transition_tlb_flush() argument
1156 if (!nested_cpu_has_vpid(vmcs12)) { in nested_vmx_transition_tlb_flush()
1171 if (is_vmenter && vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in nested_vmx_transition_tlb_flush()
1172 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in nested_vmx_transition_tlb_flush()
1508 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_shadow_to_vmcs12() local
1523 vmcs12_write_any(vmcs12, field.encoding, field.offset, val); in copy_shadow_to_vmcs12()
1543 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_vmcs12_to_shadow() local
1556 val = vmcs12_read_any(vmcs12, field.encoding, in copy_vmcs12_to_shadow()
1568 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12() local
1572 vmcs12->tpr_threshold = evmcs->tpr_threshold; in copy_enlightened_to_vmcs12()
1573 vmcs12->guest_rip = evmcs->guest_rip; in copy_enlightened_to_vmcs12()
1577 vmcs12->guest_rsp = evmcs->guest_rsp; in copy_enlightened_to_vmcs12()
1578 vmcs12->guest_rflags = evmcs->guest_rflags; in copy_enlightened_to_vmcs12()
1579 vmcs12->guest_interruptibility_info = in copy_enlightened_to_vmcs12()
1585 vmcs12->cpu_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1591 vmcs12->exception_bitmap = evmcs->exception_bitmap; in copy_enlightened_to_vmcs12()
1596 vmcs12->vm_entry_controls = evmcs->vm_entry_controls; in copy_enlightened_to_vmcs12()
1601 vmcs12->vm_entry_intr_info_field = in copy_enlightened_to_vmcs12()
1603 vmcs12->vm_entry_exception_error_code = in copy_enlightened_to_vmcs12()
1605 vmcs12->vm_entry_instruction_len = in copy_enlightened_to_vmcs12()
1611 vmcs12->host_ia32_pat = evmcs->host_ia32_pat; in copy_enlightened_to_vmcs12()
1612 vmcs12->host_ia32_efer = evmcs->host_ia32_efer; in copy_enlightened_to_vmcs12()
1613 vmcs12->host_cr0 = evmcs->host_cr0; in copy_enlightened_to_vmcs12()
1614 vmcs12->host_cr3 = evmcs->host_cr3; in copy_enlightened_to_vmcs12()
1615 vmcs12->host_cr4 = evmcs->host_cr4; in copy_enlightened_to_vmcs12()
1616 vmcs12->host_ia32_sysenter_esp = evmcs->host_ia32_sysenter_esp; in copy_enlightened_to_vmcs12()
1617 vmcs12->host_ia32_sysenter_eip = evmcs->host_ia32_sysenter_eip; in copy_enlightened_to_vmcs12()
1618 vmcs12->host_rip = evmcs->host_rip; in copy_enlightened_to_vmcs12()
1619 vmcs12->host_ia32_sysenter_cs = evmcs->host_ia32_sysenter_cs; in copy_enlightened_to_vmcs12()
1620 vmcs12->host_es_selector = evmcs->host_es_selector; in copy_enlightened_to_vmcs12()
1621 vmcs12->host_cs_selector = evmcs->host_cs_selector; in copy_enlightened_to_vmcs12()
1622 vmcs12->host_ss_selector = evmcs->host_ss_selector; in copy_enlightened_to_vmcs12()
1623 vmcs12->host_ds_selector = evmcs->host_ds_selector; in copy_enlightened_to_vmcs12()
1624 vmcs12->host_fs_selector = evmcs->host_fs_selector; in copy_enlightened_to_vmcs12()
1625 vmcs12->host_gs_selector = evmcs->host_gs_selector; in copy_enlightened_to_vmcs12()
1626 vmcs12->host_tr_selector = evmcs->host_tr_selector; in copy_enlightened_to_vmcs12()
1631 vmcs12->pin_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1633 vmcs12->vm_exit_controls = evmcs->vm_exit_controls; in copy_enlightened_to_vmcs12()
1634 vmcs12->secondary_vm_exec_control = in copy_enlightened_to_vmcs12()
1640 vmcs12->io_bitmap_a = evmcs->io_bitmap_a; in copy_enlightened_to_vmcs12()
1641 vmcs12->io_bitmap_b = evmcs->io_bitmap_b; in copy_enlightened_to_vmcs12()
1646 vmcs12->msr_bitmap = evmcs->msr_bitmap; in copy_enlightened_to_vmcs12()
1651 vmcs12->guest_es_base = evmcs->guest_es_base; in copy_enlightened_to_vmcs12()
1652 vmcs12->guest_cs_base = evmcs->guest_cs_base; in copy_enlightened_to_vmcs12()
1653 vmcs12->guest_ss_base = evmcs->guest_ss_base; in copy_enlightened_to_vmcs12()
1654 vmcs12->guest_ds_base = evmcs->guest_ds_base; in copy_enlightened_to_vmcs12()
1655 vmcs12->guest_fs_base = evmcs->guest_fs_base; in copy_enlightened_to_vmcs12()
1656 vmcs12->guest_gs_base = evmcs->guest_gs_base; in copy_enlightened_to_vmcs12()
1657 vmcs12->guest_ldtr_base = evmcs->guest_ldtr_base; in copy_enlightened_to_vmcs12()
1658 vmcs12->guest_tr_base = evmcs->guest_tr_base; in copy_enlightened_to_vmcs12()
1659 vmcs12->guest_gdtr_base = evmcs->guest_gdtr_base; in copy_enlightened_to_vmcs12()
1660 vmcs12->guest_idtr_base = evmcs->guest_idtr_base; in copy_enlightened_to_vmcs12()
1661 vmcs12->guest_es_limit = evmcs->guest_es_limit; in copy_enlightened_to_vmcs12()
1662 vmcs12->guest_cs_limit = evmcs->guest_cs_limit; in copy_enlightened_to_vmcs12()
1663 vmcs12->guest_ss_limit = evmcs->guest_ss_limit; in copy_enlightened_to_vmcs12()
1664 vmcs12->guest_ds_limit = evmcs->guest_ds_limit; in copy_enlightened_to_vmcs12()
1665 vmcs12->guest_fs_limit = evmcs->guest_fs_limit; in copy_enlightened_to_vmcs12()
1666 vmcs12->guest_gs_limit = evmcs->guest_gs_limit; in copy_enlightened_to_vmcs12()
1667 vmcs12->guest_ldtr_limit = evmcs->guest_ldtr_limit; in copy_enlightened_to_vmcs12()
1668 vmcs12->guest_tr_limit = evmcs->guest_tr_limit; in copy_enlightened_to_vmcs12()
1669 vmcs12->guest_gdtr_limit = evmcs->guest_gdtr_limit; in copy_enlightened_to_vmcs12()
1670 vmcs12->guest_idtr_limit = evmcs->guest_idtr_limit; in copy_enlightened_to_vmcs12()
1671 vmcs12->guest_es_ar_bytes = evmcs->guest_es_ar_bytes; in copy_enlightened_to_vmcs12()
1672 vmcs12->guest_cs_ar_bytes = evmcs->guest_cs_ar_bytes; in copy_enlightened_to_vmcs12()
1673 vmcs12->guest_ss_ar_bytes = evmcs->guest_ss_ar_bytes; in copy_enlightened_to_vmcs12()
1674 vmcs12->guest_ds_ar_bytes = evmcs->guest_ds_ar_bytes; in copy_enlightened_to_vmcs12()
1675 vmcs12->guest_fs_ar_bytes = evmcs->guest_fs_ar_bytes; in copy_enlightened_to_vmcs12()
1676 vmcs12->guest_gs_ar_bytes = evmcs->guest_gs_ar_bytes; in copy_enlightened_to_vmcs12()
1677 vmcs12->guest_ldtr_ar_bytes = evmcs->guest_ldtr_ar_bytes; in copy_enlightened_to_vmcs12()
1678 vmcs12->guest_tr_ar_bytes = evmcs->guest_tr_ar_bytes; in copy_enlightened_to_vmcs12()
1679 vmcs12->guest_es_selector = evmcs->guest_es_selector; in copy_enlightened_to_vmcs12()
1680 vmcs12->guest_cs_selector = evmcs->guest_cs_selector; in copy_enlightened_to_vmcs12()
1681 vmcs12->guest_ss_selector = evmcs->guest_ss_selector; in copy_enlightened_to_vmcs12()
1682 vmcs12->guest_ds_selector = evmcs->guest_ds_selector; in copy_enlightened_to_vmcs12()
1683 vmcs12->guest_fs_selector = evmcs->guest_fs_selector; in copy_enlightened_to_vmcs12()
1684 vmcs12->guest_gs_selector = evmcs->guest_gs_selector; in copy_enlightened_to_vmcs12()
1685 vmcs12->guest_ldtr_selector = evmcs->guest_ldtr_selector; in copy_enlightened_to_vmcs12()
1686 vmcs12->guest_tr_selector = evmcs->guest_tr_selector; in copy_enlightened_to_vmcs12()
1691 vmcs12->tsc_offset = evmcs->tsc_offset; in copy_enlightened_to_vmcs12()
1692 vmcs12->virtual_apic_page_addr = evmcs->virtual_apic_page_addr; in copy_enlightened_to_vmcs12()
1693 vmcs12->xss_exit_bitmap = evmcs->xss_exit_bitmap; in copy_enlightened_to_vmcs12()
1698 vmcs12->cr0_guest_host_mask = evmcs->cr0_guest_host_mask; in copy_enlightened_to_vmcs12()
1699 vmcs12->cr4_guest_host_mask = evmcs->cr4_guest_host_mask; in copy_enlightened_to_vmcs12()
1700 vmcs12->cr0_read_shadow = evmcs->cr0_read_shadow; in copy_enlightened_to_vmcs12()
1701 vmcs12->cr4_read_shadow = evmcs->cr4_read_shadow; in copy_enlightened_to_vmcs12()
1702 vmcs12->guest_cr0 = evmcs->guest_cr0; in copy_enlightened_to_vmcs12()
1703 vmcs12->guest_cr3 = evmcs->guest_cr3; in copy_enlightened_to_vmcs12()
1704 vmcs12->guest_cr4 = evmcs->guest_cr4; in copy_enlightened_to_vmcs12()
1705 vmcs12->guest_dr7 = evmcs->guest_dr7; in copy_enlightened_to_vmcs12()
1710 vmcs12->host_fs_base = evmcs->host_fs_base; in copy_enlightened_to_vmcs12()
1711 vmcs12->host_gs_base = evmcs->host_gs_base; in copy_enlightened_to_vmcs12()
1712 vmcs12->host_tr_base = evmcs->host_tr_base; in copy_enlightened_to_vmcs12()
1713 vmcs12->host_gdtr_base = evmcs->host_gdtr_base; in copy_enlightened_to_vmcs12()
1714 vmcs12->host_idtr_base = evmcs->host_idtr_base; in copy_enlightened_to_vmcs12()
1715 vmcs12->host_rsp = evmcs->host_rsp; in copy_enlightened_to_vmcs12()
1720 vmcs12->ept_pointer = evmcs->ept_pointer; in copy_enlightened_to_vmcs12()
1721 vmcs12->virtual_processor_id = evmcs->virtual_processor_id; in copy_enlightened_to_vmcs12()
1726 vmcs12->vmcs_link_pointer = evmcs->vmcs_link_pointer; in copy_enlightened_to_vmcs12()
1727 vmcs12->guest_ia32_debugctl = evmcs->guest_ia32_debugctl; in copy_enlightened_to_vmcs12()
1728 vmcs12->guest_ia32_pat = evmcs->guest_ia32_pat; in copy_enlightened_to_vmcs12()
1729 vmcs12->guest_ia32_efer = evmcs->guest_ia32_efer; in copy_enlightened_to_vmcs12()
1730 vmcs12->guest_pdptr0 = evmcs->guest_pdptr0; in copy_enlightened_to_vmcs12()
1731 vmcs12->guest_pdptr1 = evmcs->guest_pdptr1; in copy_enlightened_to_vmcs12()
1732 vmcs12->guest_pdptr2 = evmcs->guest_pdptr2; in copy_enlightened_to_vmcs12()
1733 vmcs12->guest_pdptr3 = evmcs->guest_pdptr3; in copy_enlightened_to_vmcs12()
1734 vmcs12->guest_pending_dbg_exceptions = in copy_enlightened_to_vmcs12()
1736 vmcs12->guest_sysenter_esp = evmcs->guest_sysenter_esp; in copy_enlightened_to_vmcs12()
1737 vmcs12->guest_sysenter_eip = evmcs->guest_sysenter_eip; in copy_enlightened_to_vmcs12()
1738 vmcs12->guest_bndcfgs = evmcs->guest_bndcfgs; in copy_enlightened_to_vmcs12()
1739 vmcs12->guest_activity_state = evmcs->guest_activity_state; in copy_enlightened_to_vmcs12()
1740 vmcs12->guest_sysenter_cs = evmcs->guest_sysenter_cs; in copy_enlightened_to_vmcs12()
1784 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened() local
1851 evmcs->guest_es_selector = vmcs12->guest_es_selector; in copy_vmcs12_to_enlightened()
1852 evmcs->guest_cs_selector = vmcs12->guest_cs_selector; in copy_vmcs12_to_enlightened()
1853 evmcs->guest_ss_selector = vmcs12->guest_ss_selector; in copy_vmcs12_to_enlightened()
1854 evmcs->guest_ds_selector = vmcs12->guest_ds_selector; in copy_vmcs12_to_enlightened()
1855 evmcs->guest_fs_selector = vmcs12->guest_fs_selector; in copy_vmcs12_to_enlightened()
1856 evmcs->guest_gs_selector = vmcs12->guest_gs_selector; in copy_vmcs12_to_enlightened()
1857 evmcs->guest_ldtr_selector = vmcs12->guest_ldtr_selector; in copy_vmcs12_to_enlightened()
1858 evmcs->guest_tr_selector = vmcs12->guest_tr_selector; in copy_vmcs12_to_enlightened()
1860 evmcs->guest_es_limit = vmcs12->guest_es_limit; in copy_vmcs12_to_enlightened()
1861 evmcs->guest_cs_limit = vmcs12->guest_cs_limit; in copy_vmcs12_to_enlightened()
1862 evmcs->guest_ss_limit = vmcs12->guest_ss_limit; in copy_vmcs12_to_enlightened()
1863 evmcs->guest_ds_limit = vmcs12->guest_ds_limit; in copy_vmcs12_to_enlightened()
1864 evmcs->guest_fs_limit = vmcs12->guest_fs_limit; in copy_vmcs12_to_enlightened()
1865 evmcs->guest_gs_limit = vmcs12->guest_gs_limit; in copy_vmcs12_to_enlightened()
1866 evmcs->guest_ldtr_limit = vmcs12->guest_ldtr_limit; in copy_vmcs12_to_enlightened()
1867 evmcs->guest_tr_limit = vmcs12->guest_tr_limit; in copy_vmcs12_to_enlightened()
1868 evmcs->guest_gdtr_limit = vmcs12->guest_gdtr_limit; in copy_vmcs12_to_enlightened()
1869 evmcs->guest_idtr_limit = vmcs12->guest_idtr_limit; in copy_vmcs12_to_enlightened()
1871 evmcs->guest_es_ar_bytes = vmcs12->guest_es_ar_bytes; in copy_vmcs12_to_enlightened()
1872 evmcs->guest_cs_ar_bytes = vmcs12->guest_cs_ar_bytes; in copy_vmcs12_to_enlightened()
1873 evmcs->guest_ss_ar_bytes = vmcs12->guest_ss_ar_bytes; in copy_vmcs12_to_enlightened()
1874 evmcs->guest_ds_ar_bytes = vmcs12->guest_ds_ar_bytes; in copy_vmcs12_to_enlightened()
1875 evmcs->guest_fs_ar_bytes = vmcs12->guest_fs_ar_bytes; in copy_vmcs12_to_enlightened()
1876 evmcs->guest_gs_ar_bytes = vmcs12->guest_gs_ar_bytes; in copy_vmcs12_to_enlightened()
1877 evmcs->guest_ldtr_ar_bytes = vmcs12->guest_ldtr_ar_bytes; in copy_vmcs12_to_enlightened()
1878 evmcs->guest_tr_ar_bytes = vmcs12->guest_tr_ar_bytes; in copy_vmcs12_to_enlightened()
1880 evmcs->guest_es_base = vmcs12->guest_es_base; in copy_vmcs12_to_enlightened()
1881 evmcs->guest_cs_base = vmcs12->guest_cs_base; in copy_vmcs12_to_enlightened()
1882 evmcs->guest_ss_base = vmcs12->guest_ss_base; in copy_vmcs12_to_enlightened()
1883 evmcs->guest_ds_base = vmcs12->guest_ds_base; in copy_vmcs12_to_enlightened()
1884 evmcs->guest_fs_base = vmcs12->guest_fs_base; in copy_vmcs12_to_enlightened()
1885 evmcs->guest_gs_base = vmcs12->guest_gs_base; in copy_vmcs12_to_enlightened()
1886 evmcs->guest_ldtr_base = vmcs12->guest_ldtr_base; in copy_vmcs12_to_enlightened()
1887 evmcs->guest_tr_base = vmcs12->guest_tr_base; in copy_vmcs12_to_enlightened()
1888 evmcs->guest_gdtr_base = vmcs12->guest_gdtr_base; in copy_vmcs12_to_enlightened()
1889 evmcs->guest_idtr_base = vmcs12->guest_idtr_base; in copy_vmcs12_to_enlightened()
1891 evmcs->guest_ia32_pat = vmcs12->guest_ia32_pat; in copy_vmcs12_to_enlightened()
1892 evmcs->guest_ia32_efer = vmcs12->guest_ia32_efer; in copy_vmcs12_to_enlightened()
1894 evmcs->guest_pdptr0 = vmcs12->guest_pdptr0; in copy_vmcs12_to_enlightened()
1895 evmcs->guest_pdptr1 = vmcs12->guest_pdptr1; in copy_vmcs12_to_enlightened()
1896 evmcs->guest_pdptr2 = vmcs12->guest_pdptr2; in copy_vmcs12_to_enlightened()
1897 evmcs->guest_pdptr3 = vmcs12->guest_pdptr3; in copy_vmcs12_to_enlightened()
1900 vmcs12->guest_pending_dbg_exceptions; in copy_vmcs12_to_enlightened()
1901 evmcs->guest_sysenter_esp = vmcs12->guest_sysenter_esp; in copy_vmcs12_to_enlightened()
1902 evmcs->guest_sysenter_eip = vmcs12->guest_sysenter_eip; in copy_vmcs12_to_enlightened()
1904 evmcs->guest_activity_state = vmcs12->guest_activity_state; in copy_vmcs12_to_enlightened()
1905 evmcs->guest_sysenter_cs = vmcs12->guest_sysenter_cs; in copy_vmcs12_to_enlightened()
1907 evmcs->guest_cr0 = vmcs12->guest_cr0; in copy_vmcs12_to_enlightened()
1908 evmcs->guest_cr3 = vmcs12->guest_cr3; in copy_vmcs12_to_enlightened()
1909 evmcs->guest_cr4 = vmcs12->guest_cr4; in copy_vmcs12_to_enlightened()
1910 evmcs->guest_dr7 = vmcs12->guest_dr7; in copy_vmcs12_to_enlightened()
1912 evmcs->guest_physical_address = vmcs12->guest_physical_address; in copy_vmcs12_to_enlightened()
1914 evmcs->vm_instruction_error = vmcs12->vm_instruction_error; in copy_vmcs12_to_enlightened()
1915 evmcs->vm_exit_reason = vmcs12->vm_exit_reason; in copy_vmcs12_to_enlightened()
1916 evmcs->vm_exit_intr_info = vmcs12->vm_exit_intr_info; in copy_vmcs12_to_enlightened()
1917 evmcs->vm_exit_intr_error_code = vmcs12->vm_exit_intr_error_code; in copy_vmcs12_to_enlightened()
1918 evmcs->idt_vectoring_info_field = vmcs12->idt_vectoring_info_field; in copy_vmcs12_to_enlightened()
1919 evmcs->idt_vectoring_error_code = vmcs12->idt_vectoring_error_code; in copy_vmcs12_to_enlightened()
1920 evmcs->vm_exit_instruction_len = vmcs12->vm_exit_instruction_len; in copy_vmcs12_to_enlightened()
1921 evmcs->vmx_instruction_info = vmcs12->vmx_instruction_info; in copy_vmcs12_to_enlightened()
1923 evmcs->exit_qualification = vmcs12->exit_qualification; in copy_vmcs12_to_enlightened()
1925 evmcs->guest_linear_address = vmcs12->guest_linear_address; in copy_vmcs12_to_enlightened()
1926 evmcs->guest_rsp = vmcs12->guest_rsp; in copy_vmcs12_to_enlightened()
1927 evmcs->guest_rflags = vmcs12->guest_rflags; in copy_vmcs12_to_enlightened()
1930 vmcs12->guest_interruptibility_info; in copy_vmcs12_to_enlightened()
1931 evmcs->cpu_based_vm_exec_control = vmcs12->cpu_based_vm_exec_control; in copy_vmcs12_to_enlightened()
1932 evmcs->vm_entry_controls = vmcs12->vm_entry_controls; in copy_vmcs12_to_enlightened()
1933 evmcs->vm_entry_intr_info_field = vmcs12->vm_entry_intr_info_field; in copy_vmcs12_to_enlightened()
1935 vmcs12->vm_entry_exception_error_code; in copy_vmcs12_to_enlightened()
1936 evmcs->vm_entry_instruction_len = vmcs12->vm_entry_instruction_len; in copy_vmcs12_to_enlightened()
1938 evmcs->guest_rip = vmcs12->guest_rip; in copy_vmcs12_to_enlightened()
1940 evmcs->guest_bndcfgs = vmcs12->guest_bndcfgs; in copy_vmcs12_to_enlightened()
2013 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_handle_enlightened_vmptrld() local
2014 memset(vmcs12, 0, sizeof(*vmcs12)); in nested_vmx_handle_enlightened_vmptrld()
2015 vmcs12->hdr.revision_id = VMCS12_REVISION; in nested_vmx_handle_enlightened_vmptrld()
2058 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in vmx_calc_preemption_timer_value() local
2065 vmcs12->vmx_preemption_timer_value + l1_scaled_tsc; in vmx_calc_preemption_timer_value()
2096 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer() argument
2099 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) in nested_vmx_calc_efer()
2100 return vmcs12->guest_ia32_efer; in nested_vmx_calc_efer()
2101 else if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) in nested_vmx_calc_efer()
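
nested_vmx_calc_efer() above picks the EFER value for L2: take vmcs12's guest_ia32_efer when the pending entry loads EFER, otherwise keep the current EFER but force LMA/LME to follow the "IA-32e mode guest" entry control. A standalone sketch under that reading; the nested_run_pending gate and the two fallback returns do not reference vmcs12 and so are not in the listing — they are assumptions here.

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)    /* long mode enable */
    #define EFER_LMA (1ULL << 10)   /* long mode active */

    /*
     * run_pending:      a VM entry for L2 is in flight (nested_run_pending)
     * load_ia32_efer:   VM_ENTRY_LOAD_IA32_EFER set in vm_entry_controls
     * ia32e_mode_guest: VM_ENTRY_IA32E_MODE set in vm_entry_controls
     */
    static uint64_t calc_l2_efer(bool run_pending, bool load_ia32_efer,
                                 bool ia32e_mode_guest,
                                 uint64_t vmcs12_guest_ia32_efer,
                                 uint64_t current_efer)
    {
            if (run_pending && load_ia32_efer)
                    return vmcs12_guest_ia32_efer;
            if (ia32e_mode_guest)
                    return current_efer | (EFER_LMA | EFER_LME);
            return current_efer & ~(EFER_LMA | EFER_LME);
    }
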
2164 struct vmcs12 *vmcs12) in prepare_vmcs02_early_rare() argument
2171 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2179 struct vmcs12 *vmcs12) in prepare_vmcs02_early() argument
2182 u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02_early()
2185 prepare_vmcs02_early_rare(vmx, vmcs12); in prepare_vmcs02_early()
2191 exec_control |= (vmcs12->pin_based_vm_exec_control & in prepare_vmcs02_early()
2196 if (nested_cpu_has_posted_intr(vmcs12)) in prepare_vmcs02_early()
2197 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2209 exec_control |= vmcs12->cpu_based_vm_exec_control; in prepare_vmcs02_early()
2213 vmcs_write32(TPR_THRESHOLD, vmcs12->tpr_threshold); in prepare_vmcs02_early()
2257 if (nested_cpu_has(vmcs12, in prepare_vmcs02_early()
2259 exec_control |= vmcs12->secondary_vm_exec_control; in prepare_vmcs02_early()
2272 (vmcs12->guest_cr4 & X86_CR4_UMIP)) in prepare_vmcs02_early()
2277 vmcs12->guest_intr_status); in prepare_vmcs02_early()
2279 if (!nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST)) in prepare_vmcs02_early()
2283 vmx_write_encls_bitmap(&vmx->vcpu, vmcs12); in prepare_vmcs02_early()
2297 exec_control |= vmcs12->vm_entry_controls; in prepare_vmcs02_early()
2326 vmcs12->vm_entry_intr_info_field); in prepare_vmcs02_early()
2328 vmcs12->vm_entry_exception_error_code); in prepare_vmcs02_early()
2330 vmcs12->vm_entry_instruction_len); in prepare_vmcs02_early()
2332 vmcs12->guest_interruptibility_info); in prepare_vmcs02_early()
2334 !(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI); in prepare_vmcs02_early()
2340 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare() argument
2346 vmcs_write16(GUEST_ES_SELECTOR, vmcs12->guest_es_selector); in prepare_vmcs02_rare()
2347 vmcs_write16(GUEST_CS_SELECTOR, vmcs12->guest_cs_selector); in prepare_vmcs02_rare()
2348 vmcs_write16(GUEST_SS_SELECTOR, vmcs12->guest_ss_selector); in prepare_vmcs02_rare()
2349 vmcs_write16(GUEST_DS_SELECTOR, vmcs12->guest_ds_selector); in prepare_vmcs02_rare()
2350 vmcs_write16(GUEST_FS_SELECTOR, vmcs12->guest_fs_selector); in prepare_vmcs02_rare()
2351 vmcs_write16(GUEST_GS_SELECTOR, vmcs12->guest_gs_selector); in prepare_vmcs02_rare()
2352 vmcs_write16(GUEST_LDTR_SELECTOR, vmcs12->guest_ldtr_selector); in prepare_vmcs02_rare()
2353 vmcs_write16(GUEST_TR_SELECTOR, vmcs12->guest_tr_selector); in prepare_vmcs02_rare()
2354 vmcs_write32(GUEST_ES_LIMIT, vmcs12->guest_es_limit); in prepare_vmcs02_rare()
2355 vmcs_write32(GUEST_CS_LIMIT, vmcs12->guest_cs_limit); in prepare_vmcs02_rare()
2356 vmcs_write32(GUEST_SS_LIMIT, vmcs12->guest_ss_limit); in prepare_vmcs02_rare()
2357 vmcs_write32(GUEST_DS_LIMIT, vmcs12->guest_ds_limit); in prepare_vmcs02_rare()
2358 vmcs_write32(GUEST_FS_LIMIT, vmcs12->guest_fs_limit); in prepare_vmcs02_rare()
2359 vmcs_write32(GUEST_GS_LIMIT, vmcs12->guest_gs_limit); in prepare_vmcs02_rare()
2360 vmcs_write32(GUEST_LDTR_LIMIT, vmcs12->guest_ldtr_limit); in prepare_vmcs02_rare()
2361 vmcs_write32(GUEST_TR_LIMIT, vmcs12->guest_tr_limit); in prepare_vmcs02_rare()
2362 vmcs_write32(GUEST_GDTR_LIMIT, vmcs12->guest_gdtr_limit); in prepare_vmcs02_rare()
2363 vmcs_write32(GUEST_IDTR_LIMIT, vmcs12->guest_idtr_limit); in prepare_vmcs02_rare()
2364 vmcs_write32(GUEST_CS_AR_BYTES, vmcs12->guest_cs_ar_bytes); in prepare_vmcs02_rare()
2365 vmcs_write32(GUEST_SS_AR_BYTES, vmcs12->guest_ss_ar_bytes); in prepare_vmcs02_rare()
2366 vmcs_write32(GUEST_ES_AR_BYTES, vmcs12->guest_es_ar_bytes); in prepare_vmcs02_rare()
2367 vmcs_write32(GUEST_DS_AR_BYTES, vmcs12->guest_ds_ar_bytes); in prepare_vmcs02_rare()
2368 vmcs_write32(GUEST_FS_AR_BYTES, vmcs12->guest_fs_ar_bytes); in prepare_vmcs02_rare()
2369 vmcs_write32(GUEST_GS_AR_BYTES, vmcs12->guest_gs_ar_bytes); in prepare_vmcs02_rare()
2370 vmcs_write32(GUEST_LDTR_AR_BYTES, vmcs12->guest_ldtr_ar_bytes); in prepare_vmcs02_rare()
2371 vmcs_write32(GUEST_TR_AR_BYTES, vmcs12->guest_tr_ar_bytes); in prepare_vmcs02_rare()
2372 vmcs_writel(GUEST_ES_BASE, vmcs12->guest_es_base); in prepare_vmcs02_rare()
2373 vmcs_writel(GUEST_CS_BASE, vmcs12->guest_cs_base); in prepare_vmcs02_rare()
2374 vmcs_writel(GUEST_SS_BASE, vmcs12->guest_ss_base); in prepare_vmcs02_rare()
2375 vmcs_writel(GUEST_DS_BASE, vmcs12->guest_ds_base); in prepare_vmcs02_rare()
2376 vmcs_writel(GUEST_FS_BASE, vmcs12->guest_fs_base); in prepare_vmcs02_rare()
2377 vmcs_writel(GUEST_GS_BASE, vmcs12->guest_gs_base); in prepare_vmcs02_rare()
2378 vmcs_writel(GUEST_LDTR_BASE, vmcs12->guest_ldtr_base); in prepare_vmcs02_rare()
2379 vmcs_writel(GUEST_TR_BASE, vmcs12->guest_tr_base); in prepare_vmcs02_rare()
2380 vmcs_writel(GUEST_GDTR_BASE, vmcs12->guest_gdtr_base); in prepare_vmcs02_rare()
2381 vmcs_writel(GUEST_IDTR_BASE, vmcs12->guest_idtr_base); in prepare_vmcs02_rare()
2388 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->guest_sysenter_cs); in prepare_vmcs02_rare()
2390 vmcs12->guest_pending_dbg_exceptions); in prepare_vmcs02_rare()
2391 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->guest_sysenter_esp); in prepare_vmcs02_rare()
2392 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->guest_sysenter_eip); in prepare_vmcs02_rare()
2399 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02_rare()
2400 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02_rare()
2401 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02_rare()
2402 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02_rare()
2406 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in prepare_vmcs02_rare()
2407 vmcs_write64(GUEST_BNDCFGS, vmcs12->guest_bndcfgs); in prepare_vmcs02_rare()
2410 if (nested_cpu_has_xsaves(vmcs12)) in prepare_vmcs02_rare()
2411 vmcs_write64(XSS_EXIT_BITMAP, vmcs12->xss_exit_bitmap); in prepare_vmcs02_rare()
2434 vmcs_write32(PAGE_FAULT_ERROR_CODE_MASK, vmcs12->page_fault_error_code_mask); in prepare_vmcs02_rare()
2435 vmcs_write32(PAGE_FAULT_ERROR_CODE_MATCH, vmcs12->page_fault_error_code_match); in prepare_vmcs02_rare()
2439 vmcs_write64(EOI_EXIT_BITMAP0, vmcs12->eoi_exit_bitmap0); in prepare_vmcs02_rare()
2440 vmcs_write64(EOI_EXIT_BITMAP1, vmcs12->eoi_exit_bitmap1); in prepare_vmcs02_rare()
2441 vmcs_write64(EOI_EXIT_BITMAP2, vmcs12->eoi_exit_bitmap2); in prepare_vmcs02_rare()
2442 vmcs_write64(EOI_EXIT_BITMAP3, vmcs12->eoi_exit_bitmap3); in prepare_vmcs02_rare()
2469 static int prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs02() argument
2477 prepare_vmcs02_rare(vmx, vmcs12); in prepare_vmcs02()
2486 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) { in prepare_vmcs02()
2487 kvm_set_dr(vcpu, 7, vmcs12->guest_dr7); in prepare_vmcs02()
2488 vmcs_write64(GUEST_IA32_DEBUGCTL, vmcs12->guest_ia32_debugctl); in prepare_vmcs02()
2494 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in prepare_vmcs02()
2496 vmx_set_rflags(vcpu, vmcs12->guest_rflags); in prepare_vmcs02()
2503 vcpu->arch.cr0_guest_owned_bits &= ~vmcs12->cr0_guest_host_mask; in prepare_vmcs02()
2507 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT)) { in prepare_vmcs02()
2508 vmcs_write64(GUEST_IA32_PAT, vmcs12->guest_ia32_pat); in prepare_vmcs02()
2509 vcpu->arch.pat = vmcs12->guest_ia32_pat; in prepare_vmcs02()
2527 nested_vmx_transition_tlb_flush(vcpu, vmcs12, true); in prepare_vmcs02()
2529 if (nested_cpu_has_ept(vmcs12)) in prepare_vmcs02()
2540 vmx_set_cr0(vcpu, vmcs12->guest_cr0); in prepare_vmcs02()
2541 vmcs_writel(CR0_READ_SHADOW, nested_read_cr0(vmcs12)); in prepare_vmcs02()
2543 vmx_set_cr4(vcpu, vmcs12->guest_cr4); in prepare_vmcs02()
2544 vmcs_writel(CR4_READ_SHADOW, nested_read_cr4(vmcs12)); in prepare_vmcs02()
2546 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02()
2566 if (nested_vmx_load_cr3(vcpu, vmcs12->guest_cr3, nested_cpu_has_ept(vmcs12), in prepare_vmcs02()
2578 vmcs_writel(GUEST_CR3, vmcs12->guest_cr3); in prepare_vmcs02()
2581 if (load_guest_pdptrs_vmcs12 && nested_cpu_has_ept(vmcs12) && in prepare_vmcs02()
2583 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02()
2584 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02()
2585 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02()
2586 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02()
2592 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PERF_GLOBAL_CTRL) && in prepare_vmcs02()
2594 vmcs12->guest_ia32_perf_global_ctrl))) { in prepare_vmcs02()
2599 kvm_rsp_write(vcpu, vmcs12->guest_rsp); in prepare_vmcs02()
2600 kvm_rip_write(vcpu, vmcs12->guest_rip); in prepare_vmcs02()
2615 static int nested_vmx_check_nmi_controls(struct vmcs12 *vmcs12) in nested_vmx_check_nmi_controls() argument
2617 if (CC(!nested_cpu_has_nmi_exiting(vmcs12) && in nested_vmx_check_nmi_controls()
2618 nested_cpu_has_virtual_nmis(vmcs12))) in nested_vmx_check_nmi_controls()
2621 if (CC(!nested_cpu_has_virtual_nmis(vmcs12) && in nested_vmx_check_nmi_controls()
2622 nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING))) in nested_vmx_check_nmi_controls()
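
The two conditions above encode the SDM consistency rules for the NMI controls: "virtual NMIs" is only legal together with "NMI exiting", and "NMI-window exiting" is only legal together with "virtual NMIs". A standalone restatement of the same rules:

    #include <stdbool.h>

    /* Returns true when the nested NMI control combination is allowed. */
    static bool nmi_controls_consistent(bool nmi_exiting, bool virtual_nmis,
                                        bool nmi_window_exiting)
    {
            if (virtual_nmis && !nmi_exiting)
                    return false;   /* virtual NMIs require NMI exiting */
            if (nmi_window_exiting && !virtual_nmis)
                    return false;   /* NMI-window exiting requires virtual NMIs */
            return true;
    }
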
2677 struct vmcs12 *vmcs12) in nested_check_vm_execution_controls() argument
2681 if (CC(!vmx_control_verify(vmcs12->pin_based_vm_exec_control, in nested_check_vm_execution_controls()
2684 CC(!vmx_control_verify(vmcs12->cpu_based_vm_exec_control, in nested_check_vm_execution_controls()
2689 if (nested_cpu_has(vmcs12, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) && in nested_check_vm_execution_controls()
2690 CC(!vmx_control_verify(vmcs12->secondary_vm_exec_control, in nested_check_vm_execution_controls()
2695 if (CC(vmcs12->cr3_target_count > nested_cpu_vmx_misc_cr3_count(vcpu)) || in nested_check_vm_execution_controls()
2696 nested_vmx_check_io_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2697 nested_vmx_check_msr_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2698 nested_vmx_check_tpr_shadow_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2699 nested_vmx_check_apic_access_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2700 nested_vmx_check_apicv_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2701 nested_vmx_check_nmi_controls(vmcs12) || in nested_check_vm_execution_controls()
2702 nested_vmx_check_pml_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2703 nested_vmx_check_unrestricted_guest_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2704 nested_vmx_check_mode_based_ept_exec_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2705 nested_vmx_check_shadow_vmcs_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2706 CC(nested_cpu_has_vpid(vmcs12) && !vmcs12->virtual_processor_id)) in nested_check_vm_execution_controls()
2709 if (!nested_cpu_has_preemption_timer(vmcs12) && in nested_check_vm_execution_controls()
2710 nested_cpu_has_save_preemption_timer(vmcs12)) in nested_check_vm_execution_controls()
2713 if (nested_cpu_has_ept(vmcs12) && in nested_check_vm_execution_controls()
2714 CC(!nested_vmx_check_eptp(vcpu, vmcs12->ept_pointer))) in nested_check_vm_execution_controls()
2717 if (nested_cpu_has_vmfunc(vmcs12)) { in nested_check_vm_execution_controls()
2718 if (CC(vmcs12->vm_function_control & in nested_check_vm_execution_controls()
2722 if (nested_cpu_has_eptp_switching(vmcs12)) { in nested_check_vm_execution_controls()
2723 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_check_vm_execution_controls()
2724 CC(!page_address_valid(vcpu, vmcs12->eptp_list_address))) in nested_check_vm_execution_controls()
2736 struct vmcs12 *vmcs12) in nested_check_vm_exit_controls() argument
2740 if (CC(!vmx_control_verify(vmcs12->vm_exit_controls, in nested_check_vm_exit_controls()
2743 CC(nested_vmx_check_exit_msr_switch_controls(vcpu, vmcs12))) in nested_check_vm_exit_controls()
2753 struct vmcs12 *vmcs12) in nested_check_vm_entry_controls() argument
2757 if (CC(!vmx_control_verify(vmcs12->vm_entry_controls, in nested_check_vm_entry_controls()
2768 if (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) { in nested_check_vm_entry_controls()
2769 u32 intr_info = vmcs12->vm_entry_intr_info_field; in nested_check_vm_entry_controls()
2774 bool urg = nested_cpu_has2(vmcs12, in nested_check_vm_entry_controls()
2776 bool prot_mode = !urg || vmcs12->guest_cr0 & X86_CR0_PE; in nested_check_vm_entry_controls()
2799 vmcs12->vm_entry_exception_error_code & GENMASK(31, 16))) in nested_check_vm_entry_controls()
2811 if (CC(vmcs12->vm_entry_instruction_len > 15) || in nested_check_vm_entry_controls()
2812 CC(vmcs12->vm_entry_instruction_len == 0 && in nested_check_vm_entry_controls()
2818 if (nested_vmx_check_entry_msr_switch_controls(vcpu, vmcs12)) in nested_check_vm_entry_controls()
2825 struct vmcs12 *vmcs12) in nested_vmx_check_controls() argument
2827 if (nested_check_vm_execution_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2828 nested_check_vm_exit_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2829 nested_check_vm_entry_controls(vcpu, vmcs12)) in nested_vmx_check_controls()
2833 return nested_evmcs_check_controls(vmcs12); in nested_vmx_check_controls()
2839 struct vmcs12 *vmcs12) in nested_vmx_check_address_space_size() argument
2842 if (CC(!!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) != in nested_vmx_check_address_space_size()
2850 struct vmcs12 *vmcs12) in nested_vmx_check_host_state() argument
2854 if (CC(!nested_host_cr0_valid(vcpu, vmcs12->host_cr0)) || in nested_vmx_check_host_state()
2855 CC(!nested_host_cr4_valid(vcpu, vmcs12->host_cr4)) || in nested_vmx_check_host_state()
2856 CC(kvm_vcpu_is_illegal_gpa(vcpu, vmcs12->host_cr3))) in nested_vmx_check_host_state()
2859 if (CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_esp, vcpu)) || in nested_vmx_check_host_state()
2860 CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_eip, vcpu))) in nested_vmx_check_host_state()
2863 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) && in nested_vmx_check_host_state()
2864 CC(!kvm_pat_valid(vmcs12->host_ia32_pat))) in nested_vmx_check_host_state()
2867 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) && in nested_vmx_check_host_state()
2869 vmcs12->host_ia32_perf_global_ctrl))) in nested_vmx_check_host_state()
2873 ia32e = !!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE); in nested_vmx_check_host_state()
2879 if (CC(!(vmcs12->host_cr4 & X86_CR4_PAE))) in nested_vmx_check_host_state()
2882 if (CC(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) || in nested_vmx_check_host_state()
2883 CC(vmcs12->host_cr4 & X86_CR4_PCIDE) || in nested_vmx_check_host_state()
2884 CC((vmcs12->host_rip) >> 32)) in nested_vmx_check_host_state()
2888 if (CC(vmcs12->host_cs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2889 CC(vmcs12->host_ss_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2890 CC(vmcs12->host_ds_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2891 CC(vmcs12->host_es_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2892 CC(vmcs12->host_fs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2893 CC(vmcs12->host_gs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2894 CC(vmcs12->host_tr_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2895 CC(vmcs12->host_cs_selector == 0) || in nested_vmx_check_host_state()
2896 CC(vmcs12->host_tr_selector == 0) || in nested_vmx_check_host_state()
2897 CC(vmcs12->host_ss_selector == 0 && !ia32e)) in nested_vmx_check_host_state()
2900 if (CC(is_noncanonical_address(vmcs12->host_fs_base, vcpu)) || in nested_vmx_check_host_state()
2901 CC(is_noncanonical_address(vmcs12->host_gs_base, vcpu)) || in nested_vmx_check_host_state()
2902 CC(is_noncanonical_address(vmcs12->host_gdtr_base, vcpu)) || in nested_vmx_check_host_state()
2903 CC(is_noncanonical_address(vmcs12->host_idtr_base, vcpu)) || in nested_vmx_check_host_state()
2904 CC(is_noncanonical_address(vmcs12->host_tr_base, vcpu)) || in nested_vmx_check_host_state()
2905 CC(is_noncanonical_address(vmcs12->host_rip, vcpu))) in nested_vmx_check_host_state()
2914 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) { in nested_vmx_check_host_state()
2915 if (CC(!kvm_valid_efer(vcpu, vmcs12->host_ia32_efer)) || in nested_vmx_check_host_state()
2916 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
2917 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
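
The last check above ties the host EFER that the exit will load (alongside the kvm_valid_efer() check) to the "host address-space size" exit control: when the exit loads EFER, both EFER.LMA and EFER.LME must equal that control. A standalone restatement:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    /* ia32e = VM_EXIT_HOST_ADDR_SPACE_SIZE set in vm_exit_controls. */
    static bool host_efer_consistent(bool ia32e, uint64_t host_ia32_efer)
    {
            return ia32e == !!(host_ia32_efer & EFER_LMA) &&
                   ia32e == !!(host_ia32_efer & EFER_LME);
    }
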
2925 struct vmcs12 *vmcs12) in nested_vmx_check_vmcs_link_ptr() argument
2931 if (vmcs12->vmcs_link_pointer == INVALID_GPA) in nested_vmx_check_vmcs_link_ptr()
2934 if (CC(!page_address_valid(vcpu, vmcs12->vmcs_link_pointer))) in nested_vmx_check_vmcs_link_ptr()
2937 if (ghc->gpa != vmcs12->vmcs_link_pointer && in nested_vmx_check_vmcs_link_ptr()
2939 vmcs12->vmcs_link_pointer, VMCS12_SIZE))) in nested_vmx_check_vmcs_link_ptr()
2943 offsetof(struct vmcs12, hdr), in nested_vmx_check_vmcs_link_ptr()
2948 CC(hdr.shadow_vmcs != nested_cpu_has_shadow_vmcs(vmcs12))) in nested_vmx_check_vmcs_link_ptr()
2957 static int nested_check_guest_non_reg_state(struct vmcs12 *vmcs12) in nested_check_guest_non_reg_state() argument
2959 if (CC(vmcs12->guest_activity_state != GUEST_ACTIVITY_ACTIVE && in nested_check_guest_non_reg_state()
2960 vmcs12->guest_activity_state != GUEST_ACTIVITY_HLT && in nested_check_guest_non_reg_state()
2961 vmcs12->guest_activity_state != GUEST_ACTIVITY_WAIT_SIPI)) in nested_check_guest_non_reg_state()
2968 struct vmcs12 *vmcs12, in nested_vmx_check_guest_state() argument
2975 if (CC(!nested_guest_cr0_valid(vcpu, vmcs12->guest_cr0)) || in nested_vmx_check_guest_state()
2976 CC(!nested_guest_cr4_valid(vcpu, vmcs12->guest_cr4))) in nested_vmx_check_guest_state()
2979 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) && in nested_vmx_check_guest_state()
2980 CC(!kvm_dr7_valid(vmcs12->guest_dr7))) in nested_vmx_check_guest_state()
2983 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT) && in nested_vmx_check_guest_state()
2984 CC(!kvm_pat_valid(vmcs12->guest_ia32_pat))) in nested_vmx_check_guest_state()
2987 if (nested_vmx_check_vmcs_link_ptr(vcpu, vmcs12)) { in nested_vmx_check_guest_state()
2992 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PERF_GLOBAL_CTRL) && in nested_vmx_check_guest_state()
2994 vmcs12->guest_ia32_perf_global_ctrl))) in nested_vmx_check_guest_state()
3007 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) { in nested_vmx_check_guest_state()
3008 ia32e = (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) != 0; in nested_vmx_check_guest_state()
3009 if (CC(!kvm_valid_efer(vcpu, vmcs12->guest_ia32_efer)) || in nested_vmx_check_guest_state()
3010 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
3011 CC(((vmcs12->guest_cr0 & X86_CR0_PG) && in nested_vmx_check_guest_state()
3012 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
3016 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS) && in nested_vmx_check_guest_state()
3017 (CC(is_noncanonical_address(vmcs12->guest_bndcfgs & PAGE_MASK, vcpu)) || in nested_vmx_check_guest_state()
3018 CC((vmcs12->guest_bndcfgs & MSR_IA32_BNDCFGS_RSVD)))) in nested_vmx_check_guest_state()
3021 if (nested_check_guest_non_reg_state(vmcs12)) in nested_vmx_check_guest_state()
3135 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_get_vmcs12_pages() local
3142 !nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in nested_get_vmcs12_pages()
3153 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
3164 page = kvm_vcpu_gpa_to_page(vcpu, vmcs12->apic_access_addr); in nested_get_vmcs12_pages()
3180 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_get_vmcs12_pages()
3183 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { in nested_get_vmcs12_pages()
3185 } else if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING) && in nested_get_vmcs12_pages()
3186 nested_cpu_has(vmcs12, CPU_BASED_CR8_STORE_EXITING) && in nested_get_vmcs12_pages()
3187 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
3206 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_get_vmcs12_pages()
3209 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { in nested_get_vmcs12_pages()
3212 offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
3214 pfn_to_hpa(map->pfn) + offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
3226 if (nested_vmx_prepare_msr_bitmap(vcpu, vmcs12)) in nested_get_vmcs12_pages()
3255 struct vmcs12 *vmcs12; in nested_vmx_write_pml_buffer() local
3269 vmcs12 = get_vmcs12(vcpu); in nested_vmx_write_pml_buffer()
3270 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_write_pml_buffer()
3273 if (vmcs12->guest_pml_index >= PML_ENTITY_NUM) { in nested_vmx_write_pml_buffer()
3279 dst = vmcs12->pml_address + sizeof(u64) * vmcs12->guest_pml_index; in nested_vmx_write_pml_buffer()
3285 vmcs12->guest_pml_index--; in nested_vmx_write_pml_buffer()
3321 struct vmcs12 *vmcs12);
3337 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_enter_non_root_mode() local
3353 if (!(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) in nested_vmx_enter_non_root_mode()
3356 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in nested_vmx_enter_non_root_mode()
3380 prepare_vmcs02_early(vmx, &vmx->vmcs01, vmcs12); in nested_vmx_enter_non_root_mode()
3393 if (nested_vmx_check_guest_state(vcpu, vmcs12, in nested_vmx_enter_non_root_mode()
3396 vmcs12->exit_qualification = entry_failure_code; in nested_vmx_enter_non_root_mode()
3403 if (prepare_vmcs02(vcpu, vmcs12, from_vmentry, &entry_failure_code)) { in nested_vmx_enter_non_root_mode()
3405 vmcs12->exit_qualification = entry_failure_code; in nested_vmx_enter_non_root_mode()
3411 vmcs12->vm_entry_msr_load_addr, in nested_vmx_enter_non_root_mode()
3412 vmcs12->vm_entry_msr_load_count); in nested_vmx_enter_non_root_mode()
3415 vmcs12->exit_qualification = failed_index; in nested_vmx_enter_non_root_mode()
3452 if (nested_cpu_has_preemption_timer(vmcs12)) { in nested_vmx_enter_non_root_mode()
3471 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETTING) in nested_vmx_enter_non_root_mode()
3472 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3481 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_enter_non_root_mode()
3482 vmcs12->vm_exit_reason = exit_reason.full; in nested_vmx_enter_non_root_mode()
3494 struct vmcs12 *vmcs12; in nested_vmx_run() local
3515 vmcs12 = get_vmcs12(vcpu); in nested_vmx_run()
3523 if (CC(vmcs12->hdr.shadow_vmcs)) in nested_vmx_run()
3529 vmcs12->launch_state = !launch; in nested_vmx_run()
3547 if (CC(vmcs12->launch_state == launch)) in nested_vmx_run()
3552 if (nested_vmx_check_controls(vcpu, vmcs12)) in nested_vmx_run()
3555 if (nested_vmx_check_address_space_size(vcpu, vmcs12)) in nested_vmx_run()
3558 if (nested_vmx_check_host_state(vcpu, vmcs12)) in nested_vmx_run()
3572 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_run()
3592 nested_cache_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_run()
3594 switch (vmcs12->guest_activity_state) { in nested_vmx_run()
3601 if (!(vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) && in nested_vmx_run()
3602 !nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING) && in nested_vmx_run()
3603 !(nested_cpu_has(vmcs12, CPU_BASED_INTR_WINDOW_EXITING) && in nested_vmx_run()
3604 (vmcs12->guest_rflags & X86_EFLAGS_IF))) { in nested_vmx_run()
3647 vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr0() argument
3651 /*2*/ (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask) | in vmcs12_guest_cr0()
3652 /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | in vmcs12_guest_cr0()
3657 vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr4() argument
3661 /*2*/ (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask) | in vmcs12_guest_cr4()
3662 /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | in vmcs12_guest_cr4()
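
vmcs12_guest_cr0()/cr4() above rebuild the CR values L1 should observe after a VM exit from three sources; the first term (guest-owned bits read back from the hardware GUEST_CR0/CR4) does not reference vmcs12 and so does not appear in this listing. A standalone sketch of the CR0 merge, with the VMCS reads replaced by parameters:

    #include <stdint.h>

    /*
     * hw_guest_cr0:       value read from the hardware GUEST_CR0 field
     * hw_cr0_read_shadow: value read from the hardware CR0_READ_SHADOW field
     * guest_owned_bits:   bits neither L0 nor L1 intercepts
     * cr0_guest_host_mask, vmcs12_guest_cr0: the L1-provided vmcs12 fields
     */
    static uint64_t merge_guest_cr0(uint64_t hw_guest_cr0,
                                    uint64_t hw_cr0_read_shadow,
                                    uint64_t guest_owned_bits,
                                    uint64_t vmcs12_guest_cr0,
                                    uint64_t cr0_guest_host_mask)
    {
            return (hw_guest_cr0 & guest_owned_bits) |          /* 1: live bits L2 set itself */
                   (vmcs12_guest_cr0 & cr0_guest_host_mask) |   /* 2: bits L1 intercepts, as L1 set them */
                   (hw_cr0_read_shadow &
                    ~(cr0_guest_host_mask | guest_owned_bits)); /* 3: bits L0 (but not L1) intercepts */
    }
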
3667 struct vmcs12 *vmcs12) in vmcs12_save_pending_event() argument
3677 vmcs12->vm_exit_instruction_len = in vmcs12_save_pending_event()
3685 vmcs12->idt_vectoring_error_code = in vmcs12_save_pending_event()
3689 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3691 vmcs12->idt_vectoring_info_field = in vmcs12_save_pending_event()
3699 vmcs12->vm_entry_instruction_len = in vmcs12_save_pending_event()
3704 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3711 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_mark_vmcs12_pages_dirty() local
3719 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_mark_vmcs12_pages_dirty()
3720 gfn = vmcs12->virtual_apic_page_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3724 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_mark_vmcs12_pages_dirty()
3725 gfn = vmcs12->posted_intr_desc_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3775 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_inject_exception_vmexit() local
3780 vmcs12->vm_exit_intr_error_code = vcpu->arch.exception.error_code; in nested_vmx_inject_exception_vmexit()
3789 if (!(vmcs12->idt_vectoring_info_field & VECTORING_INFO_VALID_MASK) && in nested_vmx_inject_exception_vmexit()
4009 struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12_rare() argument
4013 vmcs12->guest_es_selector = vmcs_read16(GUEST_ES_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4014 vmcs12->guest_cs_selector = vmcs_read16(GUEST_CS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4015 vmcs12->guest_ss_selector = vmcs_read16(GUEST_SS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4016 vmcs12->guest_ds_selector = vmcs_read16(GUEST_DS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4017 vmcs12->guest_fs_selector = vmcs_read16(GUEST_FS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4018 vmcs12->guest_gs_selector = vmcs_read16(GUEST_GS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4019 vmcs12->guest_ldtr_selector = vmcs_read16(GUEST_LDTR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4020 vmcs12->guest_tr_selector = vmcs_read16(GUEST_TR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
4021 vmcs12->guest_es_limit = vmcs_read32(GUEST_ES_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4022 vmcs12->guest_cs_limit = vmcs_read32(GUEST_CS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4023 vmcs12->guest_ss_limit = vmcs_read32(GUEST_SS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4024 vmcs12->guest_ds_limit = vmcs_read32(GUEST_DS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4025 vmcs12->guest_fs_limit = vmcs_read32(GUEST_FS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4026 vmcs12->guest_gs_limit = vmcs_read32(GUEST_GS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4027 vmcs12->guest_ldtr_limit = vmcs_read32(GUEST_LDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4028 vmcs12->guest_tr_limit = vmcs_read32(GUEST_TR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4029 vmcs12->guest_gdtr_limit = vmcs_read32(GUEST_GDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4030 vmcs12->guest_idtr_limit = vmcs_read32(GUEST_IDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
4031 vmcs12->guest_es_ar_bytes = vmcs_read32(GUEST_ES_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4032 vmcs12->guest_ds_ar_bytes = vmcs_read32(GUEST_DS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4033 vmcs12->guest_fs_ar_bytes = vmcs_read32(GUEST_FS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4034 vmcs12->guest_gs_ar_bytes = vmcs_read32(GUEST_GS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4035 vmcs12->guest_ldtr_ar_bytes = vmcs_read32(GUEST_LDTR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4036 vmcs12->guest_tr_ar_bytes = vmcs_read32(GUEST_TR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
4037 vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); in sync_vmcs02_to_vmcs12_rare()
4038 vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); in sync_vmcs02_to_vmcs12_rare()
4039 vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); in sync_vmcs02_to_vmcs12_rare()
4040 vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); in sync_vmcs02_to_vmcs12_rare()
4041 vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); in sync_vmcs02_to_vmcs12_rare()
4042 vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); in sync_vmcs02_to_vmcs12_rare()
4043 vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4044 vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); in sync_vmcs02_to_vmcs12_rare()
4045 vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4046 vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
4047 vmcs12->guest_pending_dbg_exceptions = in sync_vmcs02_to_vmcs12_rare()
4050 vmcs12->guest_bndcfgs = vmcs_read64(GUEST_BNDCFGS); in sync_vmcs02_to_vmcs12_rare()
4056 struct vmcs12 *vmcs12) in copy_vmcs02_to_vmcs12_rare() argument
4071 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in copy_vmcs02_to_vmcs12_rare()
4084 static void sync_vmcs02_to_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12() argument
4089 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4094 vmcs12->guest_cr0 = vmcs12_guest_cr0(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4095 vmcs12->guest_cr4 = vmcs12_guest_cr4(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
4097 vmcs12->guest_rsp = kvm_rsp_read(vcpu); in sync_vmcs02_to_vmcs12()
4098 vmcs12->guest_rip = kvm_rip_read(vcpu); in sync_vmcs02_to_vmcs12()
4099 vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); in sync_vmcs02_to_vmcs12()
4101 vmcs12->guest_cs_ar_bytes = vmcs_read32(GUEST_CS_AR_BYTES); in sync_vmcs02_to_vmcs12()
4102 vmcs12->guest_ss_ar_bytes = vmcs_read32(GUEST_SS_AR_BYTES); in sync_vmcs02_to_vmcs12()
4104 vmcs12->guest_interruptibility_info = in sync_vmcs02_to_vmcs12()
4108 vmcs12->guest_activity_state = GUEST_ACTIVITY_HLT; in sync_vmcs02_to_vmcs12()
4110 vmcs12->guest_activity_state = GUEST_ACTIVITY_WAIT_SIPI; in sync_vmcs02_to_vmcs12()
4112 vmcs12->guest_activity_state = GUEST_ACTIVITY_ACTIVE; in sync_vmcs02_to_vmcs12()
4114 if (nested_cpu_has_preemption_timer(vmcs12) && in sync_vmcs02_to_vmcs12()
4115 vmcs12->vm_exit_controls & VM_EXIT_SAVE_VMX_PREEMPTION_TIMER && in sync_vmcs02_to_vmcs12()
4117 vmcs12->vmx_preemption_timer_value = in sync_vmcs02_to_vmcs12()
4129 vmcs12->guest_cr3 = vmcs_readl(GUEST_CR3); in sync_vmcs02_to_vmcs12()
4130 if (nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in sync_vmcs02_to_vmcs12()
4131 vmcs12->guest_pdptr0 = vmcs_read64(GUEST_PDPTR0); in sync_vmcs02_to_vmcs12()
4132 vmcs12->guest_pdptr1 = vmcs_read64(GUEST_PDPTR1); in sync_vmcs02_to_vmcs12()
4133 vmcs12->guest_pdptr2 = vmcs_read64(GUEST_PDPTR2); in sync_vmcs02_to_vmcs12()
4134 vmcs12->guest_pdptr3 = vmcs_read64(GUEST_PDPTR3); in sync_vmcs02_to_vmcs12()
4138 vmcs12->guest_linear_address = vmcs_readl(GUEST_LINEAR_ADDRESS); in sync_vmcs02_to_vmcs12()
4140 if (nested_cpu_has_vid(vmcs12)) in sync_vmcs02_to_vmcs12()
4141 vmcs12->guest_intr_status = vmcs_read16(GUEST_INTR_STATUS); in sync_vmcs02_to_vmcs12()
4143 vmcs12->vm_entry_controls = in sync_vmcs02_to_vmcs12()
4144 (vmcs12->vm_entry_controls & ~VM_ENTRY_IA32E_MODE) | in sync_vmcs02_to_vmcs12()
4147 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_DEBUG_CONTROLS) in sync_vmcs02_to_vmcs12()
4148 kvm_get_dr(vcpu, 7, (unsigned long *)&vmcs12->guest_dr7); in sync_vmcs02_to_vmcs12()
4150 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_IA32_EFER) in sync_vmcs02_to_vmcs12()
4151 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
4165 static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs12() argument
4170 vmcs12->vm_exit_reason = vm_exit_reason; in prepare_vmcs12()
4172 vmcs12->vm_exit_reason |= VMX_EXIT_REASONS_SGX_ENCLAVE_MODE; in prepare_vmcs12()
4173 vmcs12->exit_qualification = exit_qualification; in prepare_vmcs12()
4174 vmcs12->vm_exit_intr_info = exit_intr_info; in prepare_vmcs12()
4176 vmcs12->idt_vectoring_info_field = 0; in prepare_vmcs12()
4177 vmcs12->vm_exit_instruction_len = vmcs_read32(VM_EXIT_INSTRUCTION_LEN); in prepare_vmcs12()
4178 vmcs12->vmx_instruction_info = vmcs_read32(VMX_INSTRUCTION_INFO); in prepare_vmcs12()
4180 if (!(vmcs12->vm_exit_reason & VMX_EXIT_REASONS_FAILED_VMENTRY)) { in prepare_vmcs12()
4181 vmcs12->launch_state = 1; in prepare_vmcs12()
4185 vmcs12->vm_entry_intr_info_field &= ~INTR_INFO_VALID_MASK; in prepare_vmcs12()
4191 vmcs12_save_pending_event(vcpu, vmcs12); in prepare_vmcs12()
4200 vmcs12->vm_exit_msr_store_addr, in prepare_vmcs12()
4201 vmcs12->vm_exit_msr_store_count)) in prepare_vmcs12()
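The prepare_vmcs12() lines above record the exit reason, exit qualification and interruption information into vmcs12, clear the IDT-vectoring field, and mark the VMCS as launched only when the VM-entry did not fail. A small standalone sketch of that flow, using an invented struct and a stand-in for the failed-VM-entry bit:

#include <inttypes.h>
#include <stdio.h>

#define FAILED_VMENTRY (1u << 31)   /* stand-in for VMX_EXIT_REASONS_FAILED_VMENTRY */

struct mini_vmcs12 {
	uint32_t vm_exit_reason, vm_exit_intr_info, idt_vectoring_info_field, launch_state;
	uint64_t exit_qualification;
};

static void record_exit(struct mini_vmcs12 *v, uint32_t reason, uint64_t qual, uint32_t intr)
{
	v->vm_exit_reason = reason;
	v->exit_qualification = qual;
	v->vm_exit_intr_info = intr;
	v->idt_vectoring_info_field = 0;    /* re-populated only for pending events */

	/* A failed nested VM-entry never marks the VMCS as launched. */
	if (!(reason & FAILED_VMENTRY))
		v->launch_state = 1;
}

int main(void)
{
	struct mini_vmcs12 v = { 0 };

	record_exit(&v, 12 /* e.g. a HLT exit */, 0, 0);
	printf("launched=%" PRIu32 "\n", v.launch_state);
	return 0;
}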
4225 struct vmcs12 *vmcs12) in load_vmcs12_host_state() argument
4230 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) in load_vmcs12_host_state()
4231 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
4232 else if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
4238 kvm_rsp_write(vcpu, vmcs12->host_rsp); in load_vmcs12_host_state()
4239 kvm_rip_write(vcpu, vmcs12->host_rip); in load_vmcs12_host_state()
4251 vmx_set_cr0(vcpu, vmcs12->host_cr0); in load_vmcs12_host_state()
4255 vmx_set_cr4(vcpu, vmcs12->host_cr4); in load_vmcs12_host_state()
4263 if (nested_vmx_load_cr3(vcpu, vmcs12->host_cr3, false, true, &ignored)) in load_vmcs12_host_state()
4266 nested_vmx_transition_tlb_flush(vcpu, vmcs12, false); in load_vmcs12_host_state()
4268 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->host_ia32_sysenter_cs); in load_vmcs12_host_state()
4269 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->host_ia32_sysenter_esp); in load_vmcs12_host_state()
4270 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->host_ia32_sysenter_eip); in load_vmcs12_host_state()
4271 vmcs_writel(GUEST_IDTR_BASE, vmcs12->host_idtr_base); in load_vmcs12_host_state()
4272 vmcs_writel(GUEST_GDTR_BASE, vmcs12->host_gdtr_base); in load_vmcs12_host_state()
4277 if (vmcs12->vm_exit_controls & VM_EXIT_CLEAR_BNDCFGS) in load_vmcs12_host_state()
4280 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) { in load_vmcs12_host_state()
4281 vmcs_write64(GUEST_IA32_PAT, vmcs12->host_ia32_pat); in load_vmcs12_host_state()
4282 vcpu->arch.pat = vmcs12->host_ia32_pat; in load_vmcs12_host_state()
4284 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) in load_vmcs12_host_state()
4286 vmcs12->host_ia32_perf_global_ctrl)); in load_vmcs12_host_state()
4293 .selector = vmcs12->host_cs_selector, in load_vmcs12_host_state()
4299 if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
4313 seg.selector = vmcs12->host_ds_selector; in load_vmcs12_host_state()
4315 seg.selector = vmcs12->host_es_selector; in load_vmcs12_host_state()
4317 seg.selector = vmcs12->host_ss_selector; in load_vmcs12_host_state()
4319 seg.selector = vmcs12->host_fs_selector; in load_vmcs12_host_state()
4320 seg.base = vmcs12->host_fs_base; in load_vmcs12_host_state()
4322 seg.selector = vmcs12->host_gs_selector; in load_vmcs12_host_state()
4323 seg.base = vmcs12->host_gs_base; in load_vmcs12_host_state()
4326 .base = vmcs12->host_tr_base, in load_vmcs12_host_state()
4328 .selector = vmcs12->host_tr_selector, in load_vmcs12_host_state()
4341 if (nested_vmx_load_msr(vcpu, vmcs12->vm_exit_msr_load_addr, in load_vmcs12_host_state()
4342 vmcs12->vm_exit_msr_load_count)) in load_vmcs12_host_state()
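The load_vmcs12_host_state() lines above switch the vCPU to L1's view: RSP/RIP, control registers and the SYSENTER/descriptor-table state always come from the host_* fields, while PAT, EFER and the perf global control are loaded only when the matching VM_EXIT_LOAD_* control is set. A minimal sketch of that split; the struct, constants and example values are stand-ins:

#include <inttypes.h>
#include <stdio.h>

#define LOAD_IA32_PAT  (1u << 19)   /* stand-in for VM_EXIT_LOAD_IA32_PAT */
#define LOAD_IA32_EFER (1u << 21)   /* stand-in for VM_EXIT_LOAD_IA32_EFER */

struct mini_vmcs12 {
	uint32_t vm_exit_controls;
	uint64_t host_rsp, host_rip, host_ia32_pat, host_ia32_efer;
};

struct mini_vcpu { uint64_t rsp, rip, pat, efer; };

static void load_host_state(struct mini_vcpu *vcpu, const struct mini_vmcs12 *v)
{
	/* Always taken from the host_* fields on an emulated VM-exit. */
	vcpu->rsp = v->host_rsp;
	vcpu->rip = v->host_rip;

	/* Optional loads, gated on L1's VM-exit controls. */
	if (v->vm_exit_controls & LOAD_IA32_PAT)
		vcpu->pat = v->host_ia32_pat;
	if (v->vm_exit_controls & LOAD_IA32_EFER)
		vcpu->efer = v->host_ia32_efer;
}

int main(void)
{
	struct mini_vmcs12 v = { .vm_exit_controls = LOAD_IA32_PAT,
				 .host_rip = 0xffffffff81000000ull,
				 .host_ia32_pat = 0x0007040600070406ull };
	struct mini_vcpu vcpu = { 0 };

	load_host_state(&vcpu, &v);
	printf("rip=%#" PRIx64 " pat=%#" PRIx64 "\n", vcpu.rip, vcpu.pat);
	return 0;
}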
4373 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_restore_host_state() local
4381 if (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) { in nested_vmx_restore_host_state()
4432 for (i = 0; i < vmcs12->vm_entry_msr_load_count; i++) { in nested_vmx_restore_host_state()
4433 gpa = vmcs12->vm_entry_msr_load_addr + (i * sizeof(g)); in nested_vmx_restore_host_state()
4441 for (j = 0; j < vmcs12->vm_exit_msr_load_count; j++) { in nested_vmx_restore_host_state()
4442 gpa = vmcs12->vm_exit_msr_load_addr + (j * sizeof(h)); in nested_vmx_restore_host_state()
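The nested_vmx_restore_host_state() lines above walk the VM-entry and VM-exit MSR-load lists by computing each entry's guest-physical address as list base + i * sizeof(entry). The sketch below models that iteration over the 16-byte MSR entry layout (index, reserved, value); the flat-buffer guest-memory reader is a stub standing in for kvm_vcpu_read_guest().

#include <inttypes.h>
#include <stdio.h>
#include <string.h>

/* 16-byte VMX MSR-load/store entry layout: index, reserved, value. */
struct msr_entry { uint32_t index, reserved; uint64_t value; };

/* Stub guest-memory reader: here "guest memory" is a flat local buffer. */
static int read_guest(const uint8_t *guest_mem, uint64_t gpa, void *dst, size_t len)
{
	memcpy(dst, guest_mem + gpa, len);
	return 0;
}

static void walk_msr_list(const uint8_t *guest_mem, uint64_t list_gpa, uint32_t count)
{
	struct msr_entry e;

	for (uint32_t i = 0; i < count; i++) {
		/* Same addressing scheme as the listing: base + i * entry size. */
		uint64_t gpa = list_gpa + i * sizeof(e);

		if (read_guest(guest_mem, gpa, &e, sizeof(e)))
			break;
		printf("msr %#" PRIx32 " -> %#" PRIx64 "\n", e.index, e.value);
	}
}

int main(void)
{
	uint8_t guest_mem[64] = { 0 };
	struct msr_entry e = { .index = 0xc0000080 /* EFER */, .value = 0xd01 };

	memcpy(guest_mem + 16, &e, sizeof(e));   /* pretend the list lives at GPA 16 */
	walk_msr_list(guest_mem, 16, 1);
	return 0;
}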
4485 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_vmexit() local
4516 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_vmexit()
4519 if (nested_cpu_has(vmcs12, CPU_BASED_USE_TSC_OFFSETTING)) { in nested_vmx_vmexit()
4521 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_TSC_SCALING)) in nested_vmx_vmexit()
4526 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4529 prepare_vmcs12(vcpu, vmcs12, vm_exit_reason, in nested_vmx_vmexit()
4541 nested_flush_cached_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4602 vmcs12->vm_exit_intr_info = irq | in nested_vmx_vmexit()
4607 trace_kvm_nested_vmexit_inject(vmcs12->vm_exit_reason, in nested_vmx_vmexit()
4608 vmcs12->exit_qualification, in nested_vmx_vmexit()
4609 vmcs12->idt_vectoring_info_field, in nested_vmx_vmexit()
4610 vmcs12->vm_exit_intr_info, in nested_vmx_vmexit()
4611 vmcs12->vm_exit_intr_error_code, in nested_vmx_vmexit()
4614 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_vmexit()
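The nested_vmx_vmexit() lines above show the overall ordering of an emulated L2-to-L1 VM-exit: sync live guest state into vmcs12, record the exit, flush the cached shadow vmcs12 back to guest memory, then load L1's host state. The stub sequence below only illustrates that ordering; every function body is a placeholder for the corresponding kernel helper.

/* Order of operations on an emulated L2->L1 VM-exit, per the listing above. */
static void sync_vmcs02_to_vmcs12_model(void)      { /* copy live guest state   */ }
static void prepare_vmcs12_model(void)             { /* record exit reason/info */ }
static void flush_cached_shadow_vmcs12_model(void) { /* write shadow VMCS back  */ }
static void load_vmcs12_host_state_model(void)     { /* switch vCPU to L1 state */ }

static void emulated_vmexit(void)
{
	sync_vmcs02_to_vmcs12_model();
	prepare_vmcs12_model();
	flush_cached_shadow_vmcs12_model();
	load_vmcs12_host_state_model();
}

int main(void) { emulated_vmexit(); return 0; }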
5037 vmptr + offsetof(struct vmcs12, in handle_vmclear()
5062 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread() local
5094 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmread()
5097 value = vmcs12_read_any(vmcs12, field, offset); in handle_vmread()
5146 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite() local
5209 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmwrite()
5222 vmcs12_write_any(vmcs12, field, offset, value); in handle_vmwrite()
5302 offsetof(struct vmcs12, hdr), in handle_vmptrld()
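The handle_vmread()/handle_vmwrite() lines above resolve a VMCS field encoding to a byte offset inside struct vmcs12 and then access that many bytes through vmcs12_read_any()/vmcs12_write_any(). Below is a simplified, endian-safe model of that offset-based access over a toy struct; the struct and field widths are invented for illustration only.

#include <inttypes.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

struct toy_vmcs12 {
	uint64_t guest_rip;
	uint32_t exception_bitmap;
	uint16_t guest_intr_status;
};

/* Read a field of the given width at a byte offset, the way the real
 * vmcs12_read_any() helper does for the full vmcs12 structure. */
static uint64_t read_any(const struct toy_vmcs12 *v, size_t offset, size_t width)
{
	const uint8_t *p = (const uint8_t *)v + offset;

	switch (width) {
	case 2: { uint16_t x; memcpy(&x, p, 2); return x; }
	case 4: { uint32_t x; memcpy(&x, p, 4); return x; }
	default: { uint64_t x; memcpy(&x, p, 8); return x; }
	}
}

int main(void)
{
	struct toy_vmcs12 v = { .guest_rip = 0xfff0, .exception_bitmap = 1u << 14 };

	printf("rip=%#" PRIx64 "\n",
	       read_any(&v, offsetof(struct toy_vmcs12, guest_rip), 8));
	printf("excp=%#" PRIx64 "\n",
	       read_any(&v, offsetof(struct toy_vmcs12, exception_bitmap), 4));
	return 0;
}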
5532 struct vmcs12 *vmcs12) in nested_vmx_eptp_switching() argument
5537 if (WARN_ON_ONCE(!nested_cpu_has_ept(vmcs12))) in nested_vmx_eptp_switching()
5542 if (kvm_vcpu_read_guest_page(vcpu, vmcs12->eptp_list_address >> PAGE_SHIFT, in nested_vmx_eptp_switching()
5550 if (vmcs12->ept_pointer != new_eptp) { in nested_vmx_eptp_switching()
5554 vmcs12->ept_pointer = new_eptp; in nested_vmx_eptp_switching()
5557 if (!nested_cpu_has_vpid(vmcs12)) in nested_vmx_eptp_switching()
5567 struct vmcs12 *vmcs12; in handle_vmfunc() local
5580 vmcs12 = get_vmcs12(vcpu); in handle_vmfunc()
5586 if (WARN_ON_ONCE((function > 63) || !nested_cpu_has_vmfunc(vmcs12))) { in handle_vmfunc()
5591 if (!(vmcs12->vm_function_control & BIT_ULL(function))) in handle_vmfunc()
5596 if (nested_vmx_eptp_switching(vcpu, vmcs12)) in handle_vmfunc()
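The handle_vmfunc()/nested_vmx_eptp_switching() lines above only honour a VM function whose bit is set in vm_function_control, and for EPTP switching (function 0) read the new EPTP from the EPTP-list page at the requested index, switching only when it differs from the current pointer. A sketch with a flat array standing in for the guest EPTP-list page; the return-value convention and names are illustrative.

#include <inttypes.h>
#include <stdio.h>

struct toy_vmcs12 { uint64_t vm_function_control, ept_pointer; };

/* Returns 0 on success, non-zero when the VM function should fail
 * (the kernel would then synthesize a VM-exit to L1 instead). */
static int eptp_switching(struct toy_vmcs12 *v, const uint64_t *eptp_list, uint32_t index)
{
	uint64_t new_eptp;

	if (index >= 512)                    /* the 4 KiB EPTP list holds 512 entries */
		return 1;
	new_eptp = eptp_list[index];         /* the kernel reads this from guest memory */
	if (v->ept_pointer != new_eptp)      /* only switch (and flush) on change */
		v->ept_pointer = new_eptp;
	return 0;
}

static int vmfunc(struct toy_vmcs12 *v, const uint64_t *eptp_list,
		  uint32_t function, uint32_t index)
{
	if (function > 63 || !(v->vm_function_control & (1ull << function)))
		return 1;                    /* function not enabled by L1 */
	if (function == 0)                   /* function 0 == EPTP switching */
		return eptp_switching(v, eptp_list, index);
	return 1;
}

int main(void)
{
	uint64_t list[512] = { [3] = 0x12345000 };
	struct toy_vmcs12 v = { .vm_function_control = 1ull /* only function 0 */ };

	printf("ret=%d eptp=%#" PRIx64 "\n", vmfunc(&v, list, 0, 3), v.ept_pointer);
	return 0;
}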
5623 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_io_bitmaps() local
5632 bitmap = vmcs12->io_bitmap_a; in nested_vmx_check_io_bitmaps()
5634 bitmap = vmcs12->io_bitmap_b; in nested_vmx_check_io_bitmaps()
5654 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_io() argument
5660 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_exit_handled_io()
5661 return nested_cpu_has(vmcs12, CPU_BASED_UNCOND_IO_EXITING); in nested_vmx_exit_handled_io()
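The nested_vmx_check_io_bitmaps()/nested_vmx_exit_handled_io() lines above decide whether an I/O access is L1's business: without CPU_BASED_USE_IO_BITMAPS the unconditional-exit control decides, otherwise the port is looked up in bitmap A (ports 0x0000-0x7fff) or bitmap B (0x8000-0xffff), one bit per port. A userspace model of that bitmap lookup; multi-byte accesses would check every byte of the port range the same way.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* One bit per port; bitmap A covers 0x0000-0x7fff, B covers 0x8000-0xffff. */
static bool io_port_intercepted(const uint8_t *bitmap_a, const uint8_t *bitmap_b, uint16_t port)
{
	const uint8_t *bitmap = bitmap_a;

	if (port >= 0x8000) {
		bitmap = bitmap_b;
		port -= 0x8000;
	}
	return bitmap[port / 8] & (1u << (port & 7));
}

int main(void)
{
	static uint8_t a[0x1000], b[0x1000];

	a[0x3f8 / 8] |= 1u << (0x3f8 & 7);   /* intercept port 0x3f8 (COM1) only */
	printf("0x3f8 -> %d, 0x80 -> %d\n",
	       io_port_intercepted(a, b, 0x3f8), io_port_intercepted(a, b, 0x80));
	return 0;
}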
5678 struct vmcs12 *vmcs12, in nested_vmx_exit_handled_msr() argument
5684 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_exit_handled_msr()
5692 bitmap = vmcs12->msr_bitmap; in nested_vmx_exit_handled_msr()
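The nested_vmx_exit_handled_msr() lines above consult L1's 4 KiB MSR bitmap: the first two 1 KiB chunks are the read bitmaps for low (0x0-0x1fff) and high (0xc0000000-0xc0001fff) MSRs, the last two are the corresponding write bitmaps, and MSRs outside those ranges always exit. A self-contained model of that lookup, with the layout as described in the Intel SDM:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* 4 KiB MSR bitmap: [0x000) read-low, [0x400) read-high,
 * [0x800) write-low, [0xc00) write-high; one bit per MSR. */
static bool msr_intercepted(const uint8_t bitmap[4096], uint32_t msr, bool write)
{
	uint32_t offset = write ? 0x800 : 0;

	if (msr >= 0xc0000000) {
		msr -= 0xc0000000;
		offset += 0x400;
	}
	if (msr > 0x1fff)               /* out-of-range MSRs always exit to L1 */
		return true;
	return bitmap[offset + msr / 8] & (1u << (msr & 7));
}

int main(void)
{
	static uint8_t bitmap[4096];

	/* Intercept writes to MSR 0xc0000080 (EFER) only. */
	bitmap[0xc00 + 0x80 / 8] |= 1u << (0x80 & 7);
	printf("EFER read=%d write=%d\n",
	       msr_intercepted(bitmap, 0xc0000080, false),
	       msr_intercepted(bitmap, 0xc0000080, true));
	return 0;
}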
5716 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_cr() argument
5729 if (vmcs12->cr0_guest_host_mask & in nested_vmx_exit_handled_cr()
5730 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5734 if (nested_cpu_has(vmcs12, CPU_BASED_CR3_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5738 if (vmcs12->cr4_guest_host_mask & in nested_vmx_exit_handled_cr()
5739 (vmcs12->cr4_read_shadow ^ val)) in nested_vmx_exit_handled_cr()
5743 if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5749 if ((vmcs12->cr0_guest_host_mask & X86_CR0_TS) && in nested_vmx_exit_handled_cr()
5750 (vmcs12->cr0_read_shadow & X86_CR0_TS)) in nested_vmx_exit_handled_cr()
5756 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5761 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5773 if (vmcs12->cr0_guest_host_mask & 0xe & in nested_vmx_exit_handled_cr()
5774 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5776 if ((vmcs12->cr0_guest_host_mask & 0x1) && in nested_vmx_exit_handled_cr()
5777 !(vmcs12->cr0_read_shadow & 0x1) && in nested_vmx_exit_handled_cr()
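The nested_vmx_exit_handled_cr() lines above implement the CR intercept rule: a MOV to CR0/CR4 is reflected to L1 only if the new value disagrees with L1's read shadow in a bit that L1 owns through the guest/host mask, with dedicated checks for CR3/CR8 load exiting and for CLTS/LMSW. The core mask test, modeled standalone:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* MOV to CR0/CR4 exits to L1 iff the guest writes a value that disagrees
 * with L1's read shadow in a bit covered by the guest/host mask. */
static bool cr_write_intercepted(uint64_t guest_host_mask, uint64_t read_shadow, uint64_t new_val)
{
	return (guest_host_mask & (new_val ^ read_shadow)) != 0;
}

int main(void)
{
	const uint64_t X86_CR0_TS = 1ull << 3;

	/* L1 owns CR0.TS and its shadow has TS clear: setting TS must exit. */
	printf("set TS   -> %d\n", cr_write_intercepted(X86_CR0_TS, 0, X86_CR0_TS));
	/* Writing the shadowed value back is transparent to L1. */
	printf("clear TS -> %d\n", cr_write_intercepted(X86_CR0_TS, 0, 0));
	return 0;
}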
5786 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_encls() argument
5791 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENCLS_EXITING)) in nested_vmx_exit_handled_encls()
5797 return vmcs12->encls_exiting_bitmap & BIT_ULL(encls_leaf); in nested_vmx_exit_handled_encls()
5801 struct vmcs12 *vmcs12, gpa_t bitmap) in nested_vmx_exit_handled_vmcs_access() argument
5807 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_exit_handled_vmcs_access()
5824 static bool nested_vmx_exit_handled_mtf(struct vmcs12 *vmcs12) in nested_vmx_exit_handled_mtf() argument
5826 u32 entry_intr_info = vmcs12->vm_entry_intr_info_field; in nested_vmx_exit_handled_mtf()
5828 if (nested_cpu_has_mtf(vmcs12)) in nested_vmx_exit_handled_mtf()
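The nested_vmx_exit_handled_encls()/_vmcs_access()/_mtf() helpers above are all per-feature gates; the ENCLS one requires SECONDARY_EXEC_ENCLS_EXITING and then tests a per-leaf bit in encls_exiting_bitmap. A one-function model of the per-leaf test; the conservative handling of out-of-range leaves is an assumption of this sketch.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* ENCLS[leaf] is reflected to L1 when its bit is set in L1's exiting bitmap. */
static bool encls_leaf_intercepted(uint64_t encls_exiting_bitmap, uint32_t leaf)
{
	if (leaf > 63)                    /* conservative: unknown leaves exit */
		return true;
	return encls_exiting_bitmap & (1ull << leaf);
}

int main(void)
{
	uint64_t bitmap = 1ull << 2;      /* intercept only leaf 2 as an example */

	printf("leaf 2 -> %d, leaf 0 -> %d\n",
	       encls_leaf_intercepted(bitmap, 2), encls_leaf_intercepted(bitmap, 0));
	return 0;
}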
5919 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_l1_wants_exit() local
5929 return vmcs12->exception_bitmap & in nested_vmx_l1_wants_exit()
5936 return nested_cpu_has(vmcs12, CPU_BASED_INTR_WINDOW_EXITING); in nested_vmx_l1_wants_exit()
5938 return nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING); in nested_vmx_l1_wants_exit()
5944 return nested_cpu_has(vmcs12, CPU_BASED_HLT_EXITING); in nested_vmx_l1_wants_exit()
5948 return nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_l1_wants_exit()
5950 return nested_cpu_has(vmcs12, CPU_BASED_RDPMC_EXITING); in nested_vmx_l1_wants_exit()
5952 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDRAND_EXITING); in nested_vmx_l1_wants_exit()
5954 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDSEED_EXITING); in nested_vmx_l1_wants_exit()
5956 return nested_cpu_has(vmcs12, CPU_BASED_RDTSC_EXITING); in nested_vmx_l1_wants_exit()
5958 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_l1_wants_exit()
5959 vmcs12->vmread_bitmap); in nested_vmx_l1_wants_exit()
5961 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_l1_wants_exit()
5962 vmcs12->vmwrite_bitmap); in nested_vmx_l1_wants_exit()
5974 return nested_vmx_exit_handled_cr(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
5976 return nested_cpu_has(vmcs12, CPU_BASED_MOV_DR_EXITING); in nested_vmx_l1_wants_exit()
5978 return nested_vmx_exit_handled_io(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
5980 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC); in nested_vmx_l1_wants_exit()
5983 return nested_vmx_exit_handled_msr(vcpu, vmcs12, exit_reason); in nested_vmx_l1_wants_exit()
5987 return nested_cpu_has(vmcs12, CPU_BASED_MWAIT_EXITING); in nested_vmx_l1_wants_exit()
5989 return nested_vmx_exit_handled_mtf(vmcs12); in nested_vmx_l1_wants_exit()
5991 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_EXITING); in nested_vmx_l1_wants_exit()
5993 return nested_cpu_has(vmcs12, CPU_BASED_PAUSE_EXITING) || in nested_vmx_l1_wants_exit()
5994 nested_cpu_has2(vmcs12, in nested_vmx_l1_wants_exit()
5999 return nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW); in nested_vmx_l1_wants_exit()
6011 nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_INVPCID) && in nested_vmx_l1_wants_exit()
6012 nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_l1_wants_exit()
6014 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_WBINVD_EXITING); in nested_vmx_l1_wants_exit()
6024 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES); in nested_vmx_l1_wants_exit()
6027 return nested_cpu_has2(vmcs12, in nested_vmx_l1_wants_exit()
6030 return nested_vmx_exit_handled_encls(vcpu, vmcs12); in nested_vmx_l1_wants_exit()
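The nested_vmx_l1_wants_exit() lines above are essentially one large switch on the basic exit reason, answering "did L1 enable the control that makes this exit its business?". A compressed model with a few representative cases; the control-bit values and exit-reason numbers here are stand-ins for the real VMX encodings.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in control bits and exit-reason numbers. */
#define CTL_HLT_EXITING     (1u << 7)
#define CTL_INVLPG_EXITING  (1u << 9)
#define CTL_MOV_DR_EXITING  (1u << 23)

enum exit_reason { EXIT_HLT = 12, EXIT_INVLPG = 14, EXIT_DR_ACCESS = 29 };

static bool l1_wants_exit(uint32_t cpu_based_ctls, enum exit_reason reason)
{
	switch (reason) {
	case EXIT_HLT:       return cpu_based_ctls & CTL_HLT_EXITING;
	case EXIT_INVLPG:    return cpu_based_ctls & CTL_INVLPG_EXITING;
	case EXIT_DR_ACCESS: return cpu_based_ctls & CTL_MOV_DR_EXITING;
	default:             return true;   /* unhandled exits go to L1 conservatively */
	}
}

int main(void)
{
	uint32_t ctls = CTL_HLT_EXITING;

	printf("HLT -> %d, INVLPG -> %d\n",
	       l1_wants_exit(ctls, EXIT_HLT), l1_wants_exit(ctls, EXIT_INVLPG));
	return 0;
}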
6080 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_reflect_vmexit() local
6082 vmcs12->vm_exit_intr_error_code = in nested_vmx_reflect_vmexit()
6097 struct vmcs12 *vmcs12; in vmx_get_nested_state() local
6114 vmcs12 = get_vmcs12(vcpu); in vmx_get_nested_state()
6122 kvm_state.size += sizeof(user_vmx_nested_state->vmcs12); in vmx_get_nested_state()
6129 nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
6130 vmcs12->vmcs_link_pointer != INVALID_GPA) in vmx_get_nested_state()
6149 if (nested_cpu_has_preemption_timer(vmcs12) && in vmx_get_nested_state()
6176 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in vmx_get_nested_state()
6177 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in vmx_get_nested_state()
6195 BUILD_BUG_ON(sizeof(user_vmx_nested_state->vmcs12) < VMCS12_SIZE); in vmx_get_nested_state()
6202 if (copy_to_user(user_vmx_nested_state->vmcs12, vmcs12, VMCS12_SIZE)) in vmx_get_nested_state()
6205 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
6206 vmcs12->vmcs_link_pointer != INVALID_GPA) { in vmx_get_nested_state()
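The vmx_get_nested_state() lines above size the blob handed to userspace: the base header plus the vmcs12 image, plus a shadow vmcs12 only when L1 uses shadow VMCS and the link pointer is valid, with vmcs02 state synced into vmcs12 before the copy-out. A sketch of that sizing logic; the struct layout, header size and the in-guest-mode gate are simplifying assumptions, not the UAPI definition.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define VMCS12_SIZE 0x1000           /* stand-in for the 4 KiB vmcs12 image */
#define INVALID_GPA (~0ull)

struct toy_nested { uint64_t vmcs_link_pointer; int has_shadow_vmcs; };

static size_t nested_state_size(const struct toy_nested *v, int in_guest_mode)
{
	size_t size = 128;                             /* stand-in header size */

	if (!in_guest_mode)
		return size;                           /* no vmcs12 to transfer */
	size += VMCS12_SIZE;                           /* the vmcs12 image itself */
	if (v->has_shadow_vmcs && v->vmcs_link_pointer != INVALID_GPA)
		size += VMCS12_SIZE;                   /* plus the shadow vmcs12 */
	return size;
}

int main(void)
{
	struct toy_nested v = { .vmcs_link_pointer = INVALID_GPA, .has_shadow_vmcs = 1 };

	printf("size=%zu\n", nested_state_size(&v, 1));
	return 0;
}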
6232 struct vmcs12 *vmcs12; in vmx_set_nested_state() local
6308 if (kvm_state->size < sizeof(*kvm_state) + sizeof(*vmcs12)) { in vmx_set_nested_state()
6345 vmcs12 = get_vmcs12(vcpu); in vmx_set_nested_state()
6346 if (copy_from_user(vmcs12, user_vmx_nested_state->vmcs12, sizeof(*vmcs12))) in vmx_set_nested_state()
6349 if (vmcs12->hdr.revision_id != VMCS12_REVISION) in vmx_set_nested_state()
6362 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_set_nested_state()
6363 vmcs12->vmcs_link_pointer != INVALID_GPA) { in vmx_set_nested_state()
6364 struct vmcs12 *shadow_vmcs12 = get_shadow_vmcs12(vcpu); in vmx_set_nested_state()
6368 sizeof(user_vmx_nested_state->vmcs12) + sizeof(*shadow_vmcs12)) in vmx_set_nested_state()
6390 if (nested_vmx_check_controls(vcpu, vmcs12) || in vmx_set_nested_state()
6391 nested_vmx_check_host_state(vcpu, vmcs12) || in vmx_set_nested_state()
6392 nested_vmx_check_guest_state(vcpu, vmcs12, &ignored)) in vmx_set_nested_state()
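The vmx_set_nested_state() lines above accept a userspace-provided vmcs12 only after a series of checks: the blob must be large enough, the vmcs12 revision id must match, a shadow vmcs12 requires additional space, and the same control/host-state/guest-state checks used on a real nested VM-entry must pass. A condensed model of that "reject early, validate late" ordering; the sizes, revision constant and struct are stand-ins.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define VMCS12_REVISION_MODEL 0x11e57ed0u   /* stand-in revision id */

struct toy_vmcs12 { uint32_t hdr_revision_id; /* ... guest/host/control fields ... */ };

/* Returns 0 on success, -1 on rejection, in the same order as the listing:
 * size check, then revision check, then the consistency checks. */
static int set_nested_state(const void *user_buf, size_t user_size)
{
	struct toy_vmcs12 vmcs12;

	if (user_size < sizeof(vmcs12))                 /* blob too small */
		return -1;
	memcpy(&vmcs12, user_buf, sizeof(vmcs12));      /* copy_from_user() stand-in */
	if (vmcs12.hdr_revision_id != VMCS12_REVISION_MODEL)
		return -1;
	/* ...the controls/host-state/guest-state checks would run here
	 * before the state is actually adopted. */
	return 0;
}

int main(void)
{
	struct toy_vmcs12 good = { .hdr_revision_id = VMCS12_REVISION_MODEL };

	printf("good=%d short=%d\n",
	       set_nested_state(&good, sizeof(good)),
	       set_nested_state(&good, 1));
	return 0;
}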