/xen/xen/arch/arm/ |
A D | guest_walk.c |
      39  short_desc_t pte;    in guest_walk_sd() local
     102  switch ( pte.walk.dt )    in guest_walk_sd()
     136  if ( !pte.pg.xn )    in guest_walk_sd()
     145  if ( !pte.lpg.xn )    in guest_walk_sd()
     150  if ( !pte.pg.ro )    in guest_walk_sd()
     172  if ( !pte.sec.ro )    in guest_walk_sd()
     174  if ( !pte.sec.xn )    in guest_walk_sd()
     370  lpae_t pte;    in guest_walk_ld() local
     549  if ( level == 3 || !lpae_is_valid(pte) || lpae_is_superpage(pte, level) )    in guest_walk_ld()
     569  if ( !lpae_is_valid(pte) || !lpae_is_mapping(pte, level) )    in guest_walk_ld()
          [all …]
|
A D | mm.c |
     267  if ( level == 3 || !pte.walk.valid || !pte.walk.table )    in dump_pt_walk()
     394  lpae_t pte, *p;    in create_mappings() local
     432  lpae_t pte;    in map_domain_page() local
     604  lpae_t pte = {0};    in remove_early_mappings() local
     607  pte);    in remove_early_mappings()
     642  lpae_t pte, *p;    in setup_pagetables() local
     697  pte.pt.table = 1;    in setup_pagetables()
     702  pte.pt.table = 1;    in setup_pagetables()
     965  lpae_t pte;    in create_xen_table() local
     972  pte.pt.table = 1;    in create_xen_table()
          [all …]
|
A D | p2m.c |
     275  return p2m_is_valid(pte) && lpae_is_mapping(pte, level);    in p2m_is_mapping()
     280  return p2m_is_valid(pte) && lpae_is_superpage(pte, level);    in p2m_is_superpage()
     645  write_pte(p, pte);    in p2m_write_pte()
     652  lpae_t pte;    in p2m_remove_pte() local
     654  memset(&pte, 0x00, sizeof(pte));    in p2m_remove_pte()
     721  ASSERT(p2m_is_valid(pte));    in p2m_put_l3_page()
     795  lpae_t pte, *table;    in p2m_split_superpage() local
     829  pte = *entry;    in p2m_split_superpage()
    1138  lpae_t pte = table[i];    in p2m_invalidate_table() local
    1145  if ( !pte.p2m.valid )    in p2m_invalidate_table()
          [all …]
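A minimal sketch (not code from the tree) of the removal pattern the p2m.c
lines above show: an entry is removed by writing an all-zero, hence invalid,
descriptor through write_pte() (the arm helper listed further down); the
p2m_write_pte() wrapper additionally cleans the cache when required. The
helper name demo_zap_entry is hypothetical.

    static void demo_zap_entry(lpae_t *p)
    {
        lpae_t pte;

        /* An all-zero descriptor has .valid == 0, so the entry unmaps. */
        memset(&pte, 0x00, sizeof(pte));
        write_pte(p, pte);
    }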
|
/xen/xen/include/asm-arm/ |
A D | lpae.h |
     105  #define P2M_CLEAR_PERM(pte) ((pte).bits & ~P2M_PERM_MASK)    argument
     131  static inline bool lpae_is_valid(lpae_t pte)    in lpae_is_valid() argument
     133  return pte.walk.valid;    in lpae_is_valid()
     141  static inline bool lpae_is_table(lpae_t pte, unsigned int level)    in lpae_is_table() argument
     143  return (level < 3) && pte.walk.table;    in lpae_is_table()
     146  static inline bool lpae_is_mapping(lpae_t pte, unsigned int level)    in lpae_is_mapping() argument
     149  return pte.walk.table;    in lpae_is_mapping()
     151  return !pte.walk.table;    in lpae_is_mapping()
     156  return (level < 3) && lpae_is_mapping(pte, level);    in lpae_is_superpage()
     159  #define lpae_get_mfn(pte) (_mfn((pte).walk.base))    argument
          [all …]
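An illustrative sketch (hypothetical helper, not in the tree) of how these
lpae.h predicates compose during a page-table walk: lpae_is_table() is only
true below level 3, lpae_is_mapping() is true for block descriptors below
level 3 and page descriptors at level 3, and lpae_is_superpage() restricts
the latter to levels below 3.

    static bool demo_walk_should_recurse(lpae_t pte, unsigned int level)
    {
        if ( !lpae_is_valid(pte) )
            return false;                      /* invalid entry: stop */

        if ( lpae_is_mapping(pte, level) )
            return false;                      /* leaf block/page mapping */

        return lpae_is_table(pte, level);      /* descend to level + 1 */
    }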
|
/xen/xen/arch/x86/pv/ |
A D | ro-page-fault.c |
      35  l1_pgentry_t pte;    member
      66  l1_pgentry_t pte, ol1e, nl1e, *pl1e;    in ptwr_emulated_update() local
     112  pte = ptwr_ctxt->pte;    in ptwr_emulated_update()
     113  mfn = l1e_get_mfn(pte);    in ptwr_emulated_update()
     262  .pte = pte,    in ptwr_do_page_fault()
     267  page = get_page_from_mfn(l1e_get_mfn(pte), current->domain);    in ptwr_do_page_fault()
     316  mfn_t mfn = l1e_get_mfn(pte);    in mmio_ro_do_page_fault()
     338  l1_pgentry_t pte;    in pv_ro_page_fault() local
     352  pte = guest_get_eff_l1e(addr);    in pv_ro_page_fault()
     361  rc = mmio_ro_do_page_fault(&ctxt, addr, pte);    in pv_ro_page_fault()
          [all …]
|
/xen/xen/include/asm-x86/ |
A D | shadow.h |
     168  static inline bool is_l1tf_safe_maddr(intpte_t pte)    in is_l1tf_safe_maddr() argument
     170  paddr_t maddr = pte & l1tf_addr_mask;    in is_l1tf_safe_maddr()
     178  intpte_t pte)    in pv_l1tf_check_pte() argument
     181  ASSERT(!(pte & _PAGE_PRESENT));    in pv_l1tf_check_pte()
     184  (((level > 1) && (pte & _PAGE_PSE)) || !is_l1tf_safe_maddr(pte)) )    in pv_l1tf_check_pte()
     191  d->domain_id, level, pte);    in pv_l1tf_check_pte()
     205  d->domain_id, level, pte);    in pv_l1tf_check_pte()
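A sketch restating the address-based part of the condition visible at line
184 above (the surrounding guards elided in the excerpt are assumed to be
handled by the caller; the demo_ name is hypothetical): a not-present PTE is
only a concern if it would be a superpage above L1 or if the address bits it
carries are not L1TF-safe.

    static bool demo_l1tf_pte_needs_shadowing(unsigned int level, intpte_t pte)
    {
        return ((level > 1) && (pte & _PAGE_PSE)) || !is_l1tf_safe_maddr(pte);
    }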
|
/xen/tools/libxc/ |
A D | xc_sr_common_x86_pv.h |
      53  static inline xen_pfn_t pte_to_frame(uint64_t pte)    in pte_to_frame() argument
      55  uint64_t frame = (pte & PTE_FRAME_MASK) >> PAGE_SHIFT;    in pte_to_frame()
      68  static inline uint64_t merge_pte(uint64_t pte, xen_pfn_t mfn)    in merge_pte() argument
      70  return (pte & ~PTE_FRAME_MASK) | ((uint64_t)mfn << PAGE_SHIFT);    in merge_pte()
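A standalone sketch of the frame <-> PTE round trip that pte_to_frame() and
merge_pte() implement. The DEMO_ constants are assumptions chosen for
illustration (4K pages, 52-bit physical addresses), not values copied from
the header.

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_PAGE_SHIFT     12
    #define DEMO_PTE_FRAME_MASK 0x000ffffffffff000ULL

    static uint64_t demo_pte_to_frame(uint64_t pte)
    {
        return (pte & DEMO_PTE_FRAME_MASK) >> DEMO_PAGE_SHIFT;
    }

    static uint64_t demo_merge_pte(uint64_t pte, uint64_t frame)
    {
        /* Keep the flag bits, replace the frame number. */
        return (pte & ~DEMO_PTE_FRAME_MASK) | (frame << DEMO_PAGE_SHIFT);
    }

    int main(void)
    {
        uint64_t pte = 0x00000000abcde067ULL;   /* frame 0xabcde, flags 0x067 */

        printf("frame     = %#llx\n",
               (unsigned long long)demo_pte_to_frame(pte));
        printf("rewritten = %#llx\n",
               (unsigned long long)demo_merge_pte(pte, 0x12345));
        return 0;
    }

This is the same swap the save/restore callers further down perform when they
replace MFNs with PFNs in saved pagetables and put MFNs back on restore.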
|
A D | xc_pagetab.c |
      33  uint64_t paddr, mask, pte = 0;    in xc_translate_foreign_address() local
      89  memcpy(&pte, map + (paddr & (PAGE_SIZE - 1)), size);    in xc_translate_foreign_address()
      91  if (!(pte & 1)) {    in xc_translate_foreign_address()
      95  paddr = pte & 0x000ffffffffff000ull;    in xc_translate_foreign_address()
      96  if ((level == 2 || (level == 3 && pt_levels == 4)) && (pte & PTE_PSE)) {    in xc_translate_foreign_address()
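A sketch (names with a demo_ prefix are hypothetical) of the per-level
decision those xc_translate_foreign_address() lines make once the PTE for the
current level has been read: bail out if the present bit is clear, otherwise
take the next table's physical address from bits 51:12 and treat the entry as
a leaf when it is the last level or a large page.

    #include <stdint.h>
    #include <stdbool.h>

    #define DEMO_PTE_PRESENT 0x1ULL
    #define DEMO_PTE_PSE     0x80ULL                 /* large-page bit */
    #define DEMO_ADDR_MASK   0x000ffffffffff000ULL   /* physical bits 51:12 */

    /* Returns false when the entry is not present, i.e. translation fails. */
    bool demo_walk_step(uint64_t pte, unsigned int level, unsigned int pt_levels,
                        uint64_t *next_paddr, bool *is_leaf)
    {
        if ( !(pte & DEMO_PTE_PRESENT) )
            return false;

        *next_paddr = pte & DEMO_ADDR_MASK;
        *is_leaf = (level == 1) ||
                   ((level == 2 || (level == 3 && pt_levels == 4)) &&
                    (pte & DEMO_PTE_PSE));
        return true;
    }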
|
A D | xc_offline_page.c |
     206  uint64_t pte, uint64_t *new_pte,
     212  uint64_t pte, uint64_t *new_pte,    in __clear_pte() argument
     221  if ( !(pte & _PAGE_PRESENT))    in __clear_pte()
     228  *new_pte = pte & ~_PAGE_PRESENT;    in __clear_pte()
     237  uint64_t pte, uint64_t *new_pte,    in __update_pte() argument
     254  if (pte & _PAGE_PRESENT)    in __update_pte()
     256  pte &= ~MFN_MASK_X86;    in __update_pte()
     258  *new_pte = pte;    in __update_pte()
     282  uint64_t pte, new_pte;    in change_pte() local
     300  pte = ((const uint32_t*)content)[j];    in change_pte()
          [all …]
|
A D | xc_sr_save_x86_pv.c |
     874  uint64_t pte;    in normalise_pagetable() local
     925  pte = src[i];    in normalise_pagetable()
     929  pte = 0;    in normalise_pagetable()
     939  if ( pte & _PAGE_PRESENT )    in normalise_pagetable()
     941  mfn = pte_to_frame(pte);    in normalise_pagetable()
     951  type >> XEN_DOMCTL_PFINFO_LTAB_SHIFT, i, pte);    in normalise_pagetable()
     958  if ( (type > XEN_DOMCTL_PFINFO_L1TAB) && (pte & _PAGE_PSE) )    in normalise_pagetable()
     961  type >> XEN_DOMCTL_PFINFO_LTAB_SHIFT, i, pte);    in normalise_pagetable()
     980  pte = merge_pte(pte, mfn_to_pfn(ctx, mfn));    in normalise_pagetable()
     983  dst[i] = pte;    in normalise_pagetable()
|
A D | xc_sr_restore_x86_pv.c |
     972  uint64_t pte;    in x86_pv_localise_page() local
     985  pte = table[i];    in x86_pv_localise_page()
     987  if ( pte & _PAGE_PRESENT )    in x86_pv_localise_page()
     989  xen_pfn_t pfn = pte_to_frame(pte);    in x86_pv_localise_page()
     995  type >> XEN_DOMCTL_PFINFO_LTAB_SHIFT, i, pte);    in x86_pv_localise_page()
    1011  pte = table[i];    in x86_pv_localise_page()
    1013  if ( pte & _PAGE_PRESENT )    in x86_pv_localise_page()
    1017  pfn = pte_to_frame(pte);    in x86_pv_localise_page()
    1023  type >> XEN_DOMCTL_PFINFO_LTAB_SHIFT, i, pte);    in x86_pv_localise_page()
    1029  table[i] = merge_pte(pte, mfn);    in x86_pv_localise_page()
|
/xen/xen/arch/x86/ |
A D | trace.c |
     134  l1_pgentry_t pte;    in __trace_ptwr_emulation() member
     139  d.pte = npte;    in __trace_ptwr_emulation()
     146  l1_pgentry_t pte;    in __trace_ptwr_emulation() member
     153  d.pte = npte;    in __trace_ptwr_emulation()
|
A D | tboot.c |
     157  struct dma_pte *pt_vaddr, *pte;    in update_iommu_mac() local
     168  pte = &pt_vaddr[i];    in update_iommu_mac()
     169  if ( !dma_pte_present(*pte) )    in update_iommu_mac()
     173  update_iommu_mac(ctx, dma_pte_addr(*pte), next_level);    in update_iommu_mac()
|
/xen/xen/include/asm-x86/x86_64/ |
A D | page.h |
      99  #define pte_write_atomic(ptep, pte) write_atomic(ptep, pte)    argument
     100  #define pte_write(ptep, pte) write_atomic(ptep, pte)    argument
|
/xen/xen/drivers/passthrough/vtd/ |
A D | utils.c |
      94  struct dma_pte pte;    in print_vtd_entries() local
     160  pte.val = l[l_index];    in print_vtd_entries()
     162  printk(" l%u[%03x] = %"PRIx64"\n", level, l_index, pte.val);    in print_vtd_entries()
     164  if ( !dma_pte_present(pte) )    in print_vtd_entries()
     169  if ( dma_pte_superpage(pte) )    in print_vtd_entries()
     171  val = dma_pte_addr(pte);    in print_vtd_entries()
|
A D | iommu.c |
     280  pte = &parent[offset];    in addr_to_dma_page_maddr()
     665  if ( !dma_pte_present(*pte) )    in dma_pte_clear_one()
     672  dma_clear_pte(*pte);    in dma_pte_clear_one()
     698  struct dma_pte *pt_vaddr, *pte;    in iommu_free_page_table() local
     705  pte = &pt_vaddr[i];    in iommu_free_page_table()
     706  if ( !dma_pte_present(*pte) )    in iommu_free_page_table()
     712  dma_clear_pte(*pte);    in iommu_free_page_table()
    1803  old = *pte;    in intel_iommu_map_page()
    1821  *pte = new;    in intel_iommu_map_page()
    2632  struct dma_pte *pt_vaddr, *pte;    in vtd_dump_p2m_table_level() local
          [all …]
|
/xen/xen/drivers/passthrough/amd/ |
A D | iommu_map.c |
      37  union amd_iommu_pte *table, *pte;    in clear_iommu_pte_present() local
      41  pte = &table[pfn_to_pde_idx(dfn, 1)];    in clear_iommu_pte_present()
      43  flush_flags = pte->pr ? IOMMU_FLUSHF_modified : 0;    in clear_iommu_pte_present()
      44  write_atomic(&pte->raw, 0);    in clear_iommu_pte_present()
      51  static unsigned int set_iommu_pde_present(union amd_iommu_pte *pte,    in set_iommu_pde_present() argument
      71  old.raw = read_atomic(&pte->raw);    in set_iommu_pde_present()
      79  write_atomic(&pte->raw, new.raw);    in set_iommu_pde_present()
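A sketch (hypothetical demo_ helper, not the tree's code) of the update
pattern these clear_iommu_pte_present() / set_iommu_pde_present() lines show:
entries are read and written through their .raw view with read_atomic() /
write_atomic(), and the caller learns from the returned flush flag whether a
previously present entry was changed.

    static unsigned int demo_clear_entry(union amd_iommu_pte *pte)
    {
        union amd_iommu_pte old;

        old.raw = read_atomic(&pte->raw);   /* snapshot the live entry */
        write_atomic(&pte->raw, 0);         /* atomically zero it */

        return old.pr ? IOMMU_FLUSHF_modified : 0;
    }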
|
/xen/xen/include/asm-arm/arm64/ |
A D | page.h |
       9  static inline void write_pte(lpae_t *p, lpae_t pte)    in write_pte() argument
      16  : : "r" (pte.bits), "r" (p) : "memory");    in write_pte()
|
/xen/xen/include/asm-arm/arm32/ |
A D | page.h |
      11  static inline void write_pte(lpae_t *p, lpae_t pte)    in write_pte() argument
      19  : : "r" (pte.bits), "r" (p) : "memory");    in write_pte()
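An illustrative sketch (hypothetical helper, not in the tree) of how callers
use the arm32/arm64 write_pte() shown above: build a descriptor, set the bits
of interest (here the table bit, as in the mm.c lines earlier), and publish
it through write_pte(), which wraps the store in the necessary barriers. Any
TLB maintenance real callers perform afterwards is omitted.

    static void demo_install_table_entry(lpae_t *slot, lpae_t pte)
    {
        pte.pt.table = 1;        /* mark the entry as a table descriptor */
        write_pte(slot, pte);    /* barrier, store, barrier */
    }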
|
/xen/tools/misc/ |
A D | xen-mfndump.c |
     309  uint64_t pte = ((const uint64_t*)page)[j];    in lookup_pte_func() local
     313  if ( ((pte >> PAGE_SHIFT_X86) & __MFN_MASK_X86) == mfn)    in lookup_pte_func()
     315  mfn, minfo.p2m_table[i], j, pte);    in lookup_pte_func()
|
/xen/xen/xsm/flask/include/ |
A D | avc.h |
      54  unsigned long pte;    member
|
/xen/xen/drivers/passthrough/arm/ |
A D | smmu.c |
    1591  pte_t *pte, *start;
    1635  pte = start;
    1666  } else if (pte_val(*pte) &
    1679  sizeof(*pte) *
    1684  *pte = pfn_pte(pfn, __pgprot(pteval));
    1685  } while (pte++, pfn++, addr += PAGE_SIZE, --i);
    1688  arm_smmu_flush_pgtable(smmu, start, sizeof(*pte) * (pte - start));
    1842  pte_t pte;
    1862  pte = *(pmd_page_vaddr(pmd) + pte_index(iova));
    1863  if (pte_none(pte))
          [all …]
|
/xen/xen/arch/x86/efi/ |
A D | efi-boot.h |
     625  l2_pgentry_t pte = l2e_from_paddr(i << L2_PAGETABLE_SHIFT,    in efi_arch_memory_setup() local
     628  l2_bootmap[i] = pte;    in efi_arch_memory_setup()
     631  l2e_add_flags(pte, PAGE_HYPERVISOR);    in efi_arch_memory_setup()
     633  l2_directmap[i] = pte;    in efi_arch_memory_setup()
|
/xen/xen/include/xsm/ |
A D | xsm.h |
     175  int (*update_va_mapping) (struct domain *d, struct domain *f, l1_pgentry_t pte);
     666  l1_pgentry_t pte)    in xsm_update_va_mapping() argument
     668  return xsm_ops->update_va_mapping(d, f, pte);    in xsm_update_va_mapping()
|
/xen/xen/xsm/flask/ |
A D | hooks.c |
    1627  l1_pgentry_t pte)    in flask_update_va_mapping() argument
    1630  if ( !(l1e_get_flags(pte) & _PAGE_PRESENT) )    in flask_update_va_mapping()
    1632  if ( l1e_get_flags(pte) & _PAGE_RW )    in flask_update_va_mapping()
|