Searched refs:gfn_x (results 1–25 of 44, sorted by relevance)


/xen/xen/arch/x86/mm/hap/
guest_walk.c
72 return gfn_x(INVALID_GFN); in hap_p2m_ga_to_gfn()
79 return gfn_x(INVALID_GFN); in hap_p2m_ga_to_gfn()
114 return gfn_x(INVALID_GFN); in hap_p2m_ga_to_gfn()
119 return gfn_x(INVALID_GFN); in hap_p2m_ga_to_gfn()
125 return gfn_x(gfn); in hap_p2m_ga_to_gfn()
138 return gfn_x(INVALID_GFN); in hap_p2m_ga_to_gfn()
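
Every failure path in hap_p2m_ga_to_gfn() above bails out with gfn_x(INVALID_GFN) rather than a raw constant. A minimal standalone sketch of that sentinel pattern, assuming Xen's definition of INVALID_GFN as the all-ones value (the tiny gfn_t wrapper here is an illustrative re-creation, not the real header):

    #include <assert.h>

    /* Illustrative stand-in for Xen's typesafe gfn_t (see the mm.h
     * hits further down for the real definition site). */
    typedef struct { unsigned long gfn; } gfn_t;
    static gfn_t _gfn(unsigned long n) { return (gfn_t){ n }; }
    static unsigned long gfn_x(gfn_t g) { return g.gfn; }

    #define INVALID_GFN _gfn(~0UL)   /* all-ones sentinel, as in Xen */

    /* A translation that can fail returns the unwrapped sentinel. */
    static unsigned long translate(int ok, gfn_t gfn)
    {
        return ok ? gfn_x(gfn) : gfn_x(INVALID_GFN);
    }

    int main(void)
    {
        assert(translate(0, _gfn(42)) == ~0UL);
        assert(translate(1, _gfn(42)) == 42);
        return 0;
    }
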
nested_ept.c
185 start = _gfn((gfn_x(start) & ~gfn_lvl_mask) + in nept_walk_tables()
189 gw->lxe[0].epte = (gfn_x(start) << PAGE_SHIFT) | gflags; in nept_walk_tables()
233 *l1gfn = gfn_x(INVALID_GFN); in nept_translate_l2ga()
/xen/xen/include/asm-x86/
guest_pt.h
38 return ((paddr_t)gfn_x(gfn)) << PAGE_SHIFT; in gfn_to_paddr()
43 #define get_gfn(d, g, t) get_gfn_type((d), gfn_x(g), (t), P2M_ALLOC)
93 { return (guest_l1e_t) { (gfn_x(gfn) << PAGE_SHIFT) | flags }; } in guest_l1e_from_gfn()
95 { return (guest_l2e_t) { (gfn_x(gfn) << PAGE_SHIFT) | flags }; } in guest_l2e_from_gfn()
180 { return l1e_from_pfn(gfn_x(gfn), flags); } in guest_l1e_from_gfn()
182 { return l2e_from_pfn(gfn_x(gfn), flags); } in guest_l2e_from_gfn()
184 { return l3e_from_pfn(gfn_x(gfn), flags); } in guest_l3e_from_gfn()
187 { return l4e_from_pfn(gfn_x(gfn), flags); } in guest_l4e_from_gfn()
387 return (gfn_x(gfn) << PAGE_SHIFT) | (gw->va & ~PAGE_MASK); in guest_walk_to_gpa()
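
The guest_pt.h hits are the frame-number/byte-address conversions: gfn_to_paddr() at line 38 shifts left by PAGE_SHIFT, and guest_walk_to_gpa() at line 387 splices the page offset of the VA back in. A standalone check of that arithmetic, assuming 4 KiB pages (the function names mirror the hits, but the code is a sketch):

    #include <assert.h>
    #include <stdint.h>

    #define PAGE_SHIFT 12                    /* 4 KiB pages */
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    typedef uint64_t paddr_t;

    /* gfn -> guest physical address of the start of the frame. */
    static paddr_t gfn_to_paddr(unsigned long gfn)
    {
        return (paddr_t)gfn << PAGE_SHIFT;
    }

    /* Recombine the translated frame with the low bits of the VA,
     * as guest_walk_to_gpa() does. */
    static paddr_t walk_to_gpa(unsigned long gfn, unsigned long va)
    {
        return ((paddr_t)gfn << PAGE_SHIFT) | (va & ~PAGE_MASK);
    }

    int main(void)
    {
        assert(gfn_to_paddr(0x1234) == 0x1234000);
        assert(walk_to_gpa(0x1234, 0xdeadbeef) == 0x1234eef);
        return 0;
    }
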
p2m.h
332 #define POD_LAST_SUPERPAGE (gfn_x(INVALID_GFN) & ~(gfn_x(INVALID_GFN) >> 1))
556 ((rd == ld) && (gfn_x(rgfn) <= gfn_x(lgfn))) ) in get_two_gfns()
569 gfn_x(rval->first_gfn), first_t, first_a, q, NULL, lock); in get_two_gfns()
571 gfn_x(rval->second_gfn), second_t, second_a, q, NULL, lock); in get_two_gfns()
579 put_gfn(arg->second_domain, gfn_x(arg->second_gfn)); in put_two_gfns()
580 put_gfn(arg->first_domain, gfn_x(arg->first_gfn)); in put_two_gfns()
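
Two things are visible in the asm-x86/p2m.h hits: get_two_gfns() at line 556 compares the raw gfn values so the two p2m locks are always taken in a consistent order, and POD_LAST_SUPERPAGE at line 332 masks the all-ones INVALID_GFN value against the complement of its own right-shift, which leaves only the most significant bit set. A worked check of the latter, assuming a 64-bit unsigned long:

    #include <assert.h>

    int main(void)
    {
        unsigned long invalid_gfn = ~0UL;           /* gfn_x(INVALID_GFN) */

        /* ~0 >> 1 clears only the top bit, so ~(~0 >> 1) keeps only it. */
        unsigned long pod_last_superpage =
            invalid_gfn & ~(invalid_gfn >> 1);

        assert(pod_last_superpage == 1UL << 63);    /* on LP64 targets */
        return 0;
    }
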
/xen/xen/arch/x86/mm/
p2m.c
611 put_gfn(p2m->domain, gfn_x(gfn)); in p2m_get_page_from_gfn()
633 fn_mask |= gfn_x(gfn) | todo; in p2m_set_entry()
985 gfn_x(gfn_add(gfn, i))); in guest_physmap_add_entry()
1190 unsigned long gfn = gfn_x(first_gfn); in finish_type_change()
1789 if ( l2_gfn == gfn_x(INVALID_GFN) ) in paging_gva_to_gfn()
1790 return gfn_x(INVALID_GFN); in paging_gva_to_gfn()
1799 return gfn_x(INVALID_GFN); in paging_gva_to_gfn()
2121 p2m->min_remapped_gfn = gfn_x(INVALID_GFN); in p2m_reset_altp2m()
2402 __put_gfn(p2m, gfn_x(gfn)); in p2m_altp2m_propagate_change()
2426 __put_gfn(p2m, gfn_x(gfn)); in p2m_altp2m_propagate_change()
[all …]
mem_paging.c
43 .u.mem_paging.gfn = gfn_x(gfn) in p2m_mem_paging_drop_page()
96 .u.mem_paging.gfn = gfn_x(gfn) in p2m_mem_paging_populate()
108 d, gfn_x(gfn)); in p2m_mem_paging_populate()
196 set_gpfn_from_mfn(mfn_x(mfn), gfn_x(gfn)); in p2m_mem_paging_resume()
390 d, gfn_x(gfn)); in prepare()
418 set_gpfn_from_mfn(mfn_x(mfn), gfn_x(gfn)); in prepare()
p2m-pod.c
698 if ( !superpage_aligned(gfn_x(gfn)) ) in p2m_pod_zero_check_superpage()
823 t.gfn = gfn_x(gfn); in p2m_pod_zero_check_superpage()
989 t.gfn = gfn_x(gfns[i]); in p2m_pod_zero_check()
1026 start = gfn_x(p2m->pod.reclaim_single); in p2m_pod_emergency_sweep()
1036 for ( i = gfn_x(p2m->pod.reclaim_single); i > 0 ; i-- ) in p2m_pod_emergency_sweep()
1088 if ( gfn_x(gfn) & POD_LAST_SUPERPAGE ) in pod_eager_reclaim()
1090 gfn = _gfn(gfn_x(gfn) & ~POD_LAST_SUPERPAGE); in pod_eager_reclaim()
1103 mrp->list[idx] = gfn_x(INVALID_GFN); in pod_eager_reclaim()
1217 t.gfn = gfn_x(gfn); in p2m_pod_demand_populate()
1260 t.gfn = gfn_x(gfn); in p2m_pod_demand_populate()
[all …]
guest_walk.c
218 if ( !(gfn_x(start) & 1) ) in guest_walk_tables()
223 start = _gfn((gfn_x(start) & ~GUEST_L3_GFN_MASK) + in guest_walk_tables()
335 if ( !(gfn_x(start) & 1) ) in guest_walk_tables()
340 start = _gfn((gfn_x(start) & ~GUEST_L2_GFN_MASK) + in guest_walk_tables()
344 gw->el1e = (gfn_x(start) << PAGE_SHIFT) | flags; in guest_walk_tables()
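
The guest_walk.c hits (like the nested_ept.c ones above) reconstruct the final gfn for a superpage mapping: mask off the low bits of the PTE's frame number and add the page index taken from the faulting VA. A standalone sketch, with a hypothetical 2 MiB-superpage mask standing in for GUEST_L2_GFN_MASK (whose real value is not shown in the hits):

    #include <assert.h>

    #define PAGE_SHIFT   12
    /* Low 9 bits of the frame number select the 4 KiB page inside a
     * 2 MiB superpage (hypothetical stand-in for GUEST_L2_GFN_MASK). */
    #define L2_GFN_MASK  0x1ffUL

    /* Reconstruct the 4 KiB frame for 'va' inside a superpage whose
     * PTE holds frame 'start', as the guest_walk_tables() hits do. */
    static unsigned long splice_superpage(unsigned long start,
                                          unsigned long va)
    {
        return (start & ~L2_GFN_MASK) + ((va >> PAGE_SHIFT) & L2_GFN_MASK);
    }

    int main(void)
    {
        /* Superpage base frame 0x40000, VA 0x5b pages into it. */
        assert(splice_superpage(0x40000, 0x5b000) == 0x4005b);
        return 0;
    }
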
mem_sharing.c
789 mfn = get_gfn_query(d, gfn_x(gfn), &p2mt); in debug_gfn()
792 d, gfn_x(gfn)); in debug_gfn()
795 put_gfn(d, gfn_x(gfn)); in debug_gfn()
833 mfn = get_gfn_type_access(hp2m, gfn_x(gfn), &p2mt, &p2ma, 0, NULL); in nominate_page()
878 amfn = __get_gfn_type_access(ap2m, gfn_x(gfn), &ap2mt, &ap2ma, in nominate_page()
919 if ( !mem_sharing_gfn_alloc(page, d, gfn_x(gfn)) ) in nominate_page()
928 BUG_ON(p2m_change_type_one(d, gfn_x(gfn), p2mt, p2m_ram_shared)); in nominate_page()
942 put_gfn(d, gfn_x(gfn)); in nominate_page()
1477 unsigned long gfn_l = gfn_x(gfn); in mem_sharing_fork_page()
1595 unsigned long gfn_l = gfn_x(gfn); in copy_vcpu_settings()
mem_access.c
236 req->u.mem_access.gfn = gfn_x(gfn); in p2m_mem_access_check()
308 mfn_t mfn = __get_gfn_type_access(p2m, gfn_x(gfn), &t, &_a, in set_mem_access()
393 for ( gfn_l = gfn_x(gfn) + start; nr > start; ++gfn_l ) in p2m_set_mem_access()
/xen/xen/include/xen/
mm.h
112 #define gfn_x macro
115 #undef gfn_x
120 return _gfn(gfn_x(gfn) + i); in gfn_add()
125 return _gfn(max(gfn_x(x), gfn_x(y))); in gfn_max()
130 return _gfn(min(gfn_x(x), gfn_x(y))); in gfn_min()
135 return gfn_x(x) == gfn_x(y); in gfn_eq()
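
The xen/mm.h hits are the definition site of gfn_x itself: Xen wraps frame numbers in single-member structs so a gfn cannot be silently mixed with an mfn or pfn, with _gfn()/gfn_x() boxing and unboxing the raw value (the #define/#undef pair at lines 112/115 is the build-time toggle between the typesafe and plain flavours, generated by a TYPE_SAFE-style macro). A minimal sketch of the idiom, assuming the struct-based debug flavour:

    #include <assert.h>
    #include <stdbool.h>

    /* Single-member struct: no implicit conversion to/from the raw
     * integer, so type confusion becomes a compile error. */
    typedef struct { unsigned long gfn; } gfn_t;

    static gfn_t _gfn(unsigned long n) { return (gfn_t){ n }; }
    static unsigned long gfn_x(gfn_t g) { return g.gfn; }

    /* Helpers in the style of the mm.h hits: unbox, operate, re-box. */
    static gfn_t gfn_add(gfn_t g, unsigned long i) { return _gfn(gfn_x(g) + i); }
    static bool gfn_eq(gfn_t x, gfn_t y) { return gfn_x(x) == gfn_x(y); }

    int main(void)
    {
        gfn_t g = _gfn(100);
        assert(gfn_eq(gfn_add(g, 5), _gfn(105)));
        assert(!gfn_eq(g, _gfn(101)));
        return 0;
    }
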
/xen/xen/arch/arm/
p2m.c
253 ptr = radix_tree_lookup(&p2m->mem_access_settings, gfn_x(gfn)); in p2m_mem_access_radix_get()
377 if ( gfn_x(gfn) > gfn_x(p2m->max_mapped_gfn) ) in p2m_get_entry()
381 gfn_x(p2m->max_mapped_gfn) ) in p2m_get_entry()
695 radix_tree_delete(&p2m->mem_access_settings, gfn_x(gfn)); in p2m_mem_access_radix_set()
699 rc = radix_tree_insert(&p2m->mem_access_settings, gfn_x(gfn), in p2m_mem_access_radix_set()
706 &p2m->mem_access_settings, gfn_x(gfn)), in p2m_mem_access_radix_set()
1058 rc = iommu_iotlb_flush(p2m->domain, _dfn(gfn_x(sgfn)), in __p2m_set_entry()
1100 mask |= gfn_x(sgfn) | nr; in p2m_set_entry()
1191 if ( gfn_x(gfn) > gfn_x(p2m->max_mapped_gfn) ) in p2m_resolve_translation_fault()
1581 for ( ; gfn_x(start) < gfn_x(end); in relinquish_p2m_mapping()
[all …]
domctl.c
62 if ( gfn_x(e) < gfn_x(s) ) in arch_do_domctl()
mem_access.c
66 i = radix_tree_lookup(&p2m->mem_access_settings, gfn_x(gfn)); in __p2m_get_mem_access()
420 start += gfn_x(gfn_next_boundary(gfn, order)) - gfn_x(gfn); in p2m_set_mem_access()
/xen/xen/arch/x86/
debug.c
40 mfn = get_gfn(dp, gfn_x(*gfn), &gfntype); in dbg_hvm_va2mfn()
46 put_gfn(dp, gfn_x(*gfn)); in dbg_hvm_va2mfn()
146 put_gfn(dp, gfn_x(gfn)); in dbg_rw_guest_mem()
domain.c
761 if ( !mfn_eq(get_gfn_query(d, gfn_x(gfn), &p2mt), mfn) ) in arch_domain_soft_reset()
765 d, gfn_x(gfn)); in arch_domain_soft_reset()
775 d, gfn_x(gfn)); in arch_domain_soft_reset()
785 d, gfn_x(gfn)); in arch_domain_soft_reset()
796 d, gfn_x(gfn)); in arch_domain_soft_reset()
800 put_gfn(d, gfn_x(gfn)); in arch_domain_soft_reset()
/xen/xen/arch/x86/mm/shadow/
multi.c
2700 d.gfn=gfn_x(gfn); in trace_shadow_emulate_other()
3046 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3092 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3101 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3128 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3242 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3318 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3495 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3509 put_gfn(d, gfn_x(gfn)); in sh_page_fault()
3683 return gfn_x(gfn); in sh_gva_to_gfn()
[all …]
none.c
50 return gfn_x(INVALID_GFN); in _gva_to_gfn()
types.h
198 #define get_gfn_query(d, g, t) get_gfn_type((d), gfn_x(g), (t), 0)
326 | MASK_INSR(gfn_x(gfn), SH_L1E_MMIO_GFN_MASK) in sh_l1e_mmio()
/xen/xen/arch/x86/hvm/
mtrr.c
572 if ( ((gfn_x(gfn) & mask) >= range->start) && in hvm_get_mem_pinned_cacheattr()
573 ((gfn_x(gfn) | ~mask) <= range->end) ) in hvm_get_mem_pinned_cacheattr()
578 if ( ((gfn_x(gfn) & mask) <= range->end) && in hvm_get_mem_pinned_cacheattr()
579 ((gfn_x(gfn) | ~mask) >= range->start) ) in hvm_get_mem_pinned_cacheattr()
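
The mtrr.c test at lines 572-579 checks a 2^order-sized, mask-aligned gfn block against a pinned range: with mask covering the high bits, gfn & mask is the first frame of the block and gfn | ~mask is the last, so the first pair of comparisons tests full containment and the second pair tests mere overlap. A standalone check with concrete numbers (the helper names are mine, not Xen's):

    #include <assert.h>
    #include <stdbool.h>

    /* mask selects the bits above 'order'; the block spans
     * [gfn & mask, gfn | ~mask]. */
    static bool block_inside(unsigned long gfn, unsigned long mask,
                             unsigned long start, unsigned long end)
    {
        return ((gfn & mask) >= start) && ((gfn | ~mask) <= end);
    }

    static bool block_overlaps(unsigned long gfn, unsigned long mask,
                               unsigned long start, unsigned long end)
    {
        return ((gfn & mask) <= end) && ((gfn | ~mask) >= start);
    }

    int main(void)
    {
        unsigned long mask = ~0xfUL;        /* order-4 block: 16 frames */

        /* Block 0x120..0x12f lies inside range 0x100..0x1ff. */
        assert(block_inside(0x123, mask, 0x100, 0x1ff));

        /* Block 0x1f0..0x1ff is only partially covered by 0x100..0x1f7. */
        assert(!block_inside(0x1f3, mask, 0x100, 0x1f7));
        assert(block_overlaps(0x1f3, mask, 0x100, 0x1f7));
        return 0;
    }
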
ioreq.c
298 unsigned int i = gfn_x(gfn) - d->arch.hvm.ioreq_gfn.base; in hvm_free_ioreq_gfn()
350 rc = prepare_ring_for_helper(d, gfn_x(iorp->gfn), &iorp->page, in hvm_map_ioreq_gfn()
475 paging_mark_pfn_dirty(d, _pfn(gfn_x(iorp->gfn))); in hvm_add_ioreq_gfn()
922 *ioreq_gfn = gfn_x(s->ioreq.gfn); in hvm_get_ioreq_server_info()
927 *bufioreq_gfn = gfn_x(s->bufioreq.gfn); in hvm_get_ioreq_server_info()
monitor.c
296 req.u.mem_access.gfn = gfn_x(gfn); in hvm_monitor_check_p2m()
/xen/xen/arch/x86/pv/
descriptor-tables.c
201 page = get_page_from_gfn(currd, gfn_x(gfn), NULL, P2M_ALLOC); in do_update_descriptor()
/xen/xen/include/asm-arm/
p2m.h
390 gfn = _gfn(gfn_x(gfn) & ~((1UL << order) - 1)); in gfn_next_boundary()
mm.h
233 #define gfn_to_gaddr(gfn) pfn_to_paddr(gfn_x(gfn))
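
Finally, the asm-arm helpers: the gfn_next_boundary() hit at p2m.h line 390 is the align-down step (clearing the low 'order' bits snaps the gfn to a 2^order boundary before stepping onward), and gfn_to_gaddr() is the usual shift from frame number to byte address. A quick standalone check, assuming 4 KiB pages for the address half:

    #include <assert.h>

    /* Align a frame number down to a 2^order boundary, as the
     * gfn_next_boundary() hit above does. */
    static unsigned long align_down(unsigned long gfn, unsigned int order)
    {
        return gfn & ~((1UL << order) - 1);
    }

    int main(void)
    {
        assert(align_down(0x1237, 4) == 0x1230);
        assert(align_down(0x1230, 4) == 0x1230);

        /* gfn_to_gaddr(): frame number -> byte address, 4 KiB pages. */
        assert((align_down(0x1237, 4) << 12) == 0x1230000);
        return 0;
    }
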
