Searched refs:pfn (Results 1 – 25 of 72) sorted by relevance

/xen/xen/drivers/passthrough/x86/
iommu.c
152 unsigned long pfn, in hwdom_iommu_map() argument
155 mfn_t mfn = _mfn(pfn); in hwdom_iommu_map()
164 if ( (pfn > max_pfn && !mfn_valid(mfn)) || xen_in_range(pfn) ) in hwdom_iommu_map()
188 if ( pfn >= 0xfee00 && pfn <= 0xfeeff ) in hwdom_iommu_map()
192 if ( pfn == PFN_DOWN(domain_vioapic(d, i)->base_address) ) in hwdom_iommu_map()
199 if ( has_vpci(d) && vpci_is_mmcfg_address(d, pfn_to_paddr(pfn)) ) in hwdom_iommu_map()
237 unsigned long pfn = pdx_to_pfn(i); in arch_iommu_hwdom_init() local
240 if ( !hwdom_iommu_map(d, pfn, max_pfn) ) in arch_iommu_hwdom_init()
243 rc = set_identity_p2m_entry(d, pfn, p2m_access_rw, 0); in arch_iommu_hwdom_init()
245 rc = iommu_map(d, _dfn(pfn), _mfn(pfn), PAGE_ORDER_4K, in arch_iommu_hwdom_init()
[all …]
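
The hwdom_iommu_map() hits above decide which pfns the hardware domain gets identity-mapped through the IOMMU. Below is a minimal standalone sketch of that filtering idea; hwdom_identity_map_ok() and its two checks are a simplification (the real code also excludes Xen's own ranges, IO-APIC pages and MMCFG regions, as the hits show).

    #include <stdbool.h>

    /* Sketch: should this hardware-domain pfn get a 1:1 IOMMU mapping? */
    static bool hwdom_identity_map_ok(unsigned long pfn, unsigned long max_pfn)
    {
        /* Nothing beyond the highest pfn the caller cares about. */
        if (pfn > max_pfn)
            return false;

        /* Local APIC MMIO window, pfns 0xfee00-0xfeeff: never identity-map. */
        if (pfn >= 0xfee00 && pfn <= 0xfeeff)
            return false;

        return true;
    }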
/xen/stubdom/grub/
kexec.c
217 xen_pfn_t pfn; in kexec() local
345 for (pfn = 0; pfn < allocated; pfn++) in kexec()
386 for (nr_m2p_updates = pfn = 0; pfn < start_info.nr_pages; pfn++) in kexec()
387 if (dom->pv_p2m[pfn] != pfn_to_mfn(pfn)) in kexec()
391 for (i = pfn = 0; pfn < start_info.nr_pages; pfn++) in kexec()
392 if (dom->pv_p2m[pfn] != pfn_to_mfn(pfn)) { in kexec()
394 m2p_updates[i].val = pfn; in kexec()
420 for (pfn = 0; pfn < allocated; pfn++) in kexec()
421 do_map_frames(pages[pfn], &pages_mfns[pfn], 1, 0, 0, DOMID_SELF, 0, L1_PROT); in kexec()
424 for (pfn = 0; pfn < allocated; pfn++) in kexec()
[all …]
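
The kexec() hits above collect machine-to-phys (M2P) updates in two passes: first counting the pfns whose rebuilt p2m entry disagrees with the live pfn_to_mfn() translation, then filling in one update record per mismatch. A hedged sketch of that pattern follows; the record layout and the pfn_to_mfn callback are stand-ins, and the real hypercall records additionally tag .ptr with MMU_MACHPHYS_UPDATE.

    #include <stdint.h>
    #include <stdlib.h>

    struct m2p_update {
        uint64_t ptr;   /* machine address of the frame being renamed */
        uint64_t val;   /* new pfn for that frame */
    };

    static struct m2p_update *collect_m2p_updates(const uint64_t *p2m,
                                                  uint64_t (*pfn_to_mfn)(uint64_t pfn),
                                                  uint64_t nr_pages, size_t *nr_out)
    {
        uint64_t pfn;
        size_t i, n = 0;
        struct m2p_update *upd;

        for (pfn = 0; pfn < nr_pages; pfn++)          /* pass 1: count mismatches */
            if (p2m[pfn] != pfn_to_mfn(pfn))
                n++;

        upd = calloc(n ? n : 1, sizeof(*upd));
        if (!upd)
            return NULL;

        for (i = 0, pfn = 0; pfn < nr_pages; pfn++)   /* pass 2: fill records */
            if (p2m[pfn] != pfn_to_mfn(pfn)) {
                upd[i].ptr = p2m[pfn] << 12;          /* mfn -> machine address */
                upd[i].val = pfn;
                i++;
            }

        *nr_out = n;
        return upd;
    }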
/xen/tools/libs/foreignmemory/
compat.c
28 xen_pfn_t *pfn; in osdep_xenforeignmemory_map() local
37 pfn = malloc(num * sizeof(*pfn)); in osdep_xenforeignmemory_map()
38 if (!pfn) { in osdep_xenforeignmemory_map()
43 memcpy(pfn, arr, num * sizeof(*arr)); in osdep_xenforeignmemory_map()
44 ret = osdep_map_foreign_batch(fmem, dom, addr, prot, flags, pfn, num); in osdep_xenforeignmemory_map()
48 switch (pfn[i] ^ arr[i]) { in osdep_xenforeignmemory_map()
59 free(pfn); in osdep_xenforeignmemory_map()
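
In the compat path above, osdep_xenforeignmemory_map() copies the caller's pfn array before handing it to the legacy batch-map interface, because that interface rewrites array entries in place to report per-page status; XORing each returned entry against the original then shows exactly which bits were flipped. A simplified sketch of the copy-and-compare pattern, with do_batch() and the error convention as assumptions:

    #include <stdlib.h>
    #include <string.h>

    typedef unsigned long xen_pfn_t;

    /* A zero XOR means the entry was untouched, i.e. the page mapped cleanly;
     * anything else is reported as a per-page error in err[]. */
    static int map_batch_with_private_copy(const xen_pfn_t *arr, size_t num,
                                           int (*do_batch)(xen_pfn_t *, size_t),
                                           int *err /* one slot per page */)
    {
        xen_pfn_t *pfn = malloc(num * sizeof(*pfn));
        size_t i;
        int ret;

        if (!pfn)
            return -1;

        memcpy(pfn, arr, num * sizeof(*arr));
        ret = do_batch(pfn, num);            /* may rewrite pfn[] in place */

        for (i = 0; i < num; i++)
            err[i] = (pfn[i] ^ arr[i]) ? -1 : 0;

        free(pfn);
        return ret;
    }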
linux.c
204 xen_pfn_t *pfn; in osdep_xenforeignmemory_map() local
205 unsigned int pfn_arr_size = ROUNDUP((num * sizeof(*pfn)), PAGE_SHIFT); in osdep_xenforeignmemory_map()
208 pfn = alloca(num * sizeof(*pfn)); in osdep_xenforeignmemory_map()
211 pfn = mmap(NULL, pfn_arr_size, PROT_READ | PROT_WRITE, in osdep_xenforeignmemory_map()
213 if ( pfn == MAP_FAILED ) in osdep_xenforeignmemory_map()
221 memcpy(pfn, arr, num * sizeof(*arr)); in osdep_xenforeignmemory_map()
226 ioctlx.arr = pfn; in osdep_xenforeignmemory_map()
234 switch ( pfn[i] ^ arr[i] ) in osdep_xenforeignmemory_map()
248 rc = map_foreign_batch_single(fd, dom, pfn + i, in osdep_xenforeignmemory_map()
262 munmap(pfn, pfn_arr_size); in osdep_xenforeignmemory_map()
/xen/tools/libxc/
xc_sr_restore_x86_pv.c
7 assert(pfn <= ctx->x86.pv.max_pfn); in pfn_to_mfn()
137 xen_pfn_t pfn, mfn; in process_start_info() local
143 if ( pfn > ctx->x86.pv.max_pfn ) in process_start_info()
157 mfn = pfn_to_mfn(ctx, pfn); in process_start_info()
182 mfn = pfn_to_mfn(ctx, pfn); in process_start_info()
203 mfn = pfn_to_mfn(ctx, pfn); in process_start_info()
240 xen_pfn_t pfn, mfn; in process_vcpu_basic() local
285 mfn = pfn_to_mfn(ctx, pfn); in process_vcpu_basic()
315 mfn = pfn_to_mfn(ctx, pfn); in process_vcpu_basic()
540 pfn, i); in update_guest_p2m()
[all …]
xc_mem_access.c
37 .pfn = first_pfn, in xc_set_mem_access()
60 .pfn = ~0UL, /* Invalid GFN */ in xc_set_mem_access_multi()
84 uint64_t pfn, in xc_get_mem_access() argument
92 .pfn = pfn in xc_get_mem_access()
xc_dom_x86.c
93 xen_pfn_t pfn; member
263 map->area.pfn = 0; in alloc_pgtables_pv()
294 #define pfn_to_paddr(pfn) ((xen_paddr_t)(pfn) << PAGE_SHIFT_X86) argument
398 if ( pfn >= pfn_s && pfn < pfn_e ) in get_pg_prot()
411 xen_pfn_t pfn, p, p_s, p_e; in setup_pgtables_pv() local
443 pfn++; in setup_pgtables_pv()
1257 for ( pfn = 0; pfn < dom->p2m_size; pfn++ ) in meminit_pv()
1277 for ( pfn = pfn_base; pfn < pfn_base+pages; pfn++ ) in meminit_pv()
1278 dom->pv_p2m[pfn] = pfn; in meminit_pv()
1296 pfn = pfn_base_idx; in meminit_pv()
[all …]
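
Two of the xc_dom_x86.c hits above are worth spelling out: pfn_to_paddr() converts a frame number to a physical address with a PAGE_SHIFT_X86-sized shift, and meminit_pv() seeds the PV p2m with an identity layout (dom->pv_p2m[pfn] = pfn). A small sketch of both, where PAGE_SHIFT_X86 = 12 for 4 KiB pages and p2m_init_identity() is an illustrative helper:

    #include <stdint.h>

    #define PAGE_SHIFT_X86 12
    typedef uint64_t xen_paddr_t;
    typedef unsigned long xen_pfn_t;

    #define pfn_to_paddr(pfn) ((xen_paddr_t)(pfn) << PAGE_SHIFT_X86)
    #define paddr_to_pfn(pa)  ((xen_pfn_t)((pa) >> PAGE_SHIFT_X86))

    /* Identity-initialise a p2m window, as the meminit_pv() hit above does. */
    static void p2m_init_identity(xen_pfn_t *p2m, xen_pfn_t base, xen_pfn_t pages)
    {
        for (xen_pfn_t pfn = base; pfn < base + pages; pfn++)
            p2m[pfn] = pfn;
    }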
xc_sr_common_x86_pv.c
22 xen_pfn_t pfn = ~0UL; in dump_bad_pseudophysmap_entry() local
28 pfn = ctx->x86.pv.m2p[mfn]; in dump_bad_pseudophysmap_entry()
30 mfn, pfn, ctx->x86.pv.max_pfn); in dump_bad_pseudophysmap_entry()
33 if ( (pfn != ~0UL) && (pfn <= ctx->x86.pv.max_pfn) ) in dump_bad_pseudophysmap_entry()
35 pfn, xc_pfn_to_mfn(pfn, ctx->x86.pv.p2m, ctx->x86.pv.width)); in dump_bad_pseudophysmap_entry()
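
dump_bad_pseudophysmap_entry() above diagnoses a broken translation by going mfn -> pfn through the M2P table and then pfn -> mfn back through the guest's P2M, to see whether the round trip agrees. A simplified sketch with flat arrays standing in for the real context structures:

    #include <stdio.h>

    typedef unsigned long xen_pfn_t;
    #define INVALID_PFN (~0UL)

    static void dump_bad_p2m_entry(xen_pfn_t mfn,
                                   const xen_pfn_t *m2p, xen_pfn_t max_mfn,
                                   const xen_pfn_t *p2m, xen_pfn_t max_pfn)
    {
        xen_pfn_t pfn = (mfn <= max_mfn) ? m2p[mfn] : INVALID_PFN;

        fprintf(stderr, "mfn %#lx -> pfn %#lx (max_pfn %#lx)\n", mfn, pfn, max_pfn);

        if (pfn != INVALID_PFN && pfn <= max_pfn)
            fprintf(stderr, "  p2m[%#lx] = %#lx%s\n", pfn, p2m[pfn],
                    p2m[pfn] == mfn ? "" : "  <- mismatch");
    }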
xc_dom_core.c
431 offset = pfn - dom->rambase_pfn; in xc_dom_pfn_to_ptr_retcount()
451 if ( (pfn < phys->first) || in xc_dom_pfn_to_ptr_retcount()
467 if ( pfn < phys->first ) in xc_dom_pfn_to_ptr_retcount()
481 __FUNCTION__, pfn); in xc_dom_pfn_to_ptr_retcount()
490 phys->first = pfn; in xc_dom_pfn_to_ptr_retcount()
594 seg->pfn = dom->pfn_alloc_end; in xc_dom_alloc_segment()
619 xen_pfn_t pfn; in xc_dom_alloc_page() local
628 __FUNCTION__, name, start, pfn); in xc_dom_alloc_page()
629 return pfn; in xc_dom_alloc_page()
639 if ( (pfn >= phys->first) && (pfn < (phys->first + phys->count)) ) in xc_dom_unmap_one()
[all …]
xc_sr_restore.c
79 if ( pfn > ctx->restore.max_populated_pfn ) in pfn_is_populated()
81 return test_bit(pfn, ctx->restore.populated_pfns); in pfn_is_populated()
93 if ( pfn > ctx->restore.max_populated_pfn ) in pfn_set_populated()
100 new_max = pfn; in pfn_set_populated()
126 assert(!test_bit(pfn, ctx->restore.populated_pfns)); in pfn_set_populated()
127 set_bit(pfn, ctx->restore.populated_pfns); in pfn_set_populated()
342 xen_pfn_t *pfns = NULL, pfn; in handle_page_data() local
401 pfn = pages->pfn[i] & PAGE_DATA_PFN_MASK; in handle_page_data()
413 type, pfn, i); in handle_page_data()
422 pfns[i] = pfn; in handle_page_data()
[all …]
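
pfn_is_populated()/pfn_set_populated() above track restored pfns in a bitmap that grows on demand whenever a pfn beyond the current limit appears. A condensed sketch of that structure, assuming the bitmap has already been allocated to cover at least pfn 0; the word-level bit twiddling stands in for Xen's test_bit()/set_bit() helpers:

    #include <stdbool.h>
    #include <stdlib.h>
    #include <string.h>

    #define BITS_PER_LONG (8 * sizeof(unsigned long))

    struct populated {
        unsigned long *bits;      /* one bit per pfn, 0..max_pfn */
        unsigned long max_pfn;    /* highest pfn the bitmap can describe */
    };

    static bool pfn_is_populated(const struct populated *p, unsigned long pfn)
    {
        if (pfn > p->max_pfn)
            return false;
        return p->bits[pfn / BITS_PER_LONG] & (1UL << (pfn % BITS_PER_LONG));
    }

    static int pfn_set_populated(struct populated *p, unsigned long pfn)
    {
        if (pfn > p->max_pfn) {
            unsigned long new_max = pfn | (BITS_PER_LONG - 1); /* round to word */
            size_t old_words = p->max_pfn / BITS_PER_LONG + 1;
            size_t new_words = new_max / BITS_PER_LONG + 1;
            unsigned long *tmp = realloc(p->bits, new_words * sizeof(*tmp));

            if (!tmp)
                return -1;
            memset(tmp + old_words, 0, (new_words - old_words) * sizeof(*tmp));
            p->bits = tmp;
            p->max_pfn = new_max;
        }
        p->bits[pfn / BITS_PER_LONG] |= 1UL << (pfn % BITS_PER_LONG);
        return 0;
    }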
xc_dom_arm.c
292 xen_pfn_t allocsz, pfn, *extents; in populate_guest_memory() local
307 for ( pfn = 0; pfn < nr_pfns; pfn += allocsz ) in populate_guest_memory()
309 allocsz = min_t(int, 1024*1024, nr_pfns - pfn); in populate_guest_memory()
312 if ( pfn == 0 ) in populate_guest_memory()
316 base_pfn + pfn, &allocsz, extents); in populate_guest_memory()
325 base_pfn + pfn, &allocsz, extents); in populate_guest_memory()
330 base_pfn + pfn, &allocsz, extents); in populate_guest_memory()
335 base_pfn + pfn, &allocsz, extents); in populate_guest_memory()
340 base_pfn + pfn, &allocsz, extents); in populate_guest_memory()
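
populate_guest_memory() above allocates guest RAM in chunks so that no single populate call asks for more than 1024*1024 extents. A sketch of that batching loop; populate() is an assumed stand-in for the real xc_domain_populate_physmap() call and is taken to return how many extents it actually allocated:

    typedef unsigned long xen_pfn_t;

    #define MAX_CHUNK (1024UL * 1024UL)   /* pages per call, as in the hit above */

    static int populate_in_chunks(xen_pfn_t base_pfn, xen_pfn_t nr_pfns,
                                  long (*populate)(xen_pfn_t first, unsigned long count))
    {
        for (xen_pfn_t pfn = 0; pfn < nr_pfns; ) {
            unsigned long allocsz = nr_pfns - pfn;

            if (allocsz > MAX_CHUNK)
                allocsz = MAX_CHUNK;

            long done = populate(base_pfn + pfn, allocsz);
            if (done <= 0)
                return -1;        /* nothing allocated: give up */

            pfn += done;          /* partial success: retry the remainder */
        }
        return 0;
    }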
xc_core_x86.c
28 unsigned long pfn) in xc_core_arch_gpfn_may_present() argument
30 if ((pfn >= 0xa0 && pfn < 0xc0) /* VGA hole */ in xc_core_arch_gpfn_may_present()
31 || (pfn >= (HVM_BELOW_4G_MMIO_START >> PAGE_SHIFT) in xc_core_arch_gpfn_may_present()
32 && pfn < (1ULL<<32) >> PAGE_SHIFT)) /* MMIO */ in xc_core_arch_gpfn_may_present()
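
xc_core_arch_gpfn_may_present() above skips guest-physical ranges that can never contain RAM. The same test with the constants written out; the HVM_BELOW_4G_MMIO_START default of 0xf0000000 is an assumption here:

    #include <stdbool.h>

    #define PAGE_SHIFT 12
    #define BELOW_4G_MMIO_START 0xf0000000UL   /* assumed default */

    static bool gpfn_may_present(unsigned long pfn)
    {
        if (pfn >= 0xa0 && pfn < 0xc0)                      /* VGA hole */
            return false;
        if (pfn >= (BELOW_4G_MMIO_START >> PAGE_SHIFT) &&
            pfn < ((1ULL << 32) >> PAGE_SHIFT))             /* below-4G MMIO hole */
            return false;
        return true;
    }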
xc_sr_common.h
31 xen_pfn_t (*pfn_to_gfn)(const struct xc_sr_context *ctx, xen_pfn_t pfn);
117 xen_pfn_t (*pfn_to_gfn)(const struct xc_sr_context *ctx, xen_pfn_t pfn);
120 bool (*pfn_is_valid)(const struct xc_sr_context *ctx, xen_pfn_t pfn);
123 void (*set_gfn)(struct xc_sr_context *ctx, xen_pfn_t pfn, xen_pfn_t gfn);
126 void (*set_page_type)(struct xc_sr_context *ctx, xen_pfn_t pfn,
xc_vm_event.c
46 uint64_t pfn; in xc_vm_event_enable() local
66 rc1 = xc_hvm_param_get(xch, domain_id, param, &pfn); in xc_vm_event_enable()
73 ring_pfn = pfn; in xc_vm_event_enable()
74 mmap_pfn = pfn; in xc_vm_event_enable()
/xen/xen/common/
mem_access.c
63 if ( (mao.pfn != ~0ull) && in mem_access_memop()
65 ((mao.pfn + mao.nr - 1) < mao.pfn) || in mem_access_memop()
66 ((mao.pfn + mao.nr - 1) > domain_get_maximum_gpfn(d))) ) in mem_access_memop()
69 rc = p2m_set_mem_access(d, _gfn(mao.pfn), mao.nr, start_iter, in mem_access_memop()
99 if ( (mao.pfn > domain_get_maximum_gpfn(d)) && mao.pfn != ~0ull ) in mem_access_memop()
102 rc = p2m_get_mem_access(d, _gfn(mao.pfn), &access, 0); in mem_access_memop()
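
mem_access_memop() above validates the (pfn, nr) range before touching the p2m: ~0 is the "no pfn supplied" sentinel, and the (pfn + nr - 1) < pfn comparison catches unsigned wraparound. A standalone sketch of that check:

    #include <stdbool.h>
    #include <stdint.h>

    #define INVALID_PFN (~0ULL)

    static bool pfn_range_ok(uint64_t pfn, uint32_t nr, uint64_t max_gpfn)
    {
        if (pfn == INVALID_PFN)
            return true;                    /* "whole domain" / default case */
        if (nr && (pfn + nr - 1) < pfn)
            return false;                   /* wrapped past 2^64 */
        if (nr && (pfn + nr - 1) > max_gpfn)
            return false;                   /* beyond the guest's last gpfn */
        return true;
    }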
/xen/tools/misc/
xen-mfndump.c
157 xen_pfn_t pfn, mfn, *m2p_table; in dump_ptes_func() local
192 pfn = m2p_table[mfn]; in dump_ptes_func()
193 if ( pfn >= minfo.p2m_size ) in dump_ptes_func()
200 if ( !(minfo.pfn_type[pfn] & XEN_DOMCTL_PFINFO_LTABTYPE_MASK) ) in dump_ptes_func()
202 ERROR("pfn 0x%lx for domain %d is not a PT\n", pfn, domid); in dump_ptes_func()
208 minfo.p2m_table[pfn]); in dump_ptes_func()
211 ERROR("Failed to map 0x%lx\n", minfo.p2m_table[pfn]); in dump_ptes_func()
222 pfn, minfo.p2m_table[pfn]); in dump_ptes_func()
238 if ( minfo.pfn_type[pfn] & XEN_DOMCTL_PFINFO_LPINTAB ) in dump_ptes_func()
240 if ( is_mapped(minfo.p2m_table[pfn]) ) in dump_ptes_func()
[all …]
/xen/xen/arch/x86/pv/
dom0_build.c
42 set_gpfn_from_mfn(mfn, pfn); in dom0_update_physmap()
293 unsigned long pfn, mfn; in dom0_construct_pv() local
620 pfn = alloc_spfn; in dom0_construct_pv()
659 mfn = pfn++; in dom0_construct_pv()
776 for ( pfn = 0; pfn < count; pfn++ ) in dom0_construct_pv()
788 if ( pfn > REVERSE_START && (vinitrd_start || pfn < initrd_pfn) ) in dom0_construct_pv()
795 si->first_p2m_pfn = pfn; in dom0_construct_pv()
809 ++pfn; in dom0_construct_pv()
826 #define pfn (nr_pages - 1 - (pfn - (alloc_epfn - alloc_spfn))) in dom0_construct_pv() macro
829 #undef pfn in dom0_construct_pv()
[all …]
/xen/xen/include/asm-x86/
page.h
115 #define l1e_from_pfn(pfn, flags) \ argument
117 #define l2e_from_pfn(pfn, flags) \ argument
119 #define l3e_from_pfn(pfn, flags) \ argument
121 #define l4e_from_pfn(pfn, flags) \ argument
236 typedef struct { u64 pfn; } pagetable_t; member
239 #define pagetable_get_pfn(x) ((x).pfn)
240 #define pagetable_get_mfn(x) _mfn(((x).pfn))
241 #define pagetable_is_null(x) ((x).pfn == 0)
242 #define pagetable_from_pfn(pfn) ((pagetable_t) { (pfn) }) argument
271 #define __pfn_to_paddr(pfn) ((paddr_t)(pfn) << PAGE_SHIFT) argument
[all …]
paging.h
172 void paging_mark_pfn_dirty(struct domain *d, pfn_t pfn);
188 #define L1_LOGDIRTY_IDX(pfn) (pfn_x(pfn) & ((1 << (PAGE_SHIFT + 3)) - 1)) argument
189 #define L2_LOGDIRTY_IDX(pfn) ((pfn_x(pfn) >> (PAGE_SHIFT + 3)) & \ argument
191 #define L3_LOGDIRTY_IDX(pfn) ((pfn_x(pfn) >> (PAGE_SHIFT + 3 + PAGETABLE_ORDER)) & \ argument
193 #define L4_LOGDIRTY_IDX(pfn) ((pfn_x(pfn) >> (PAGE_SHIFT + 3 + PAGETABLE_ORDER * 2)) & \ argument
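
The L1-L4_LOGDIRTY_IDX() macros above split a pfn into indices for the four-level log-dirty bitmap tree: a leaf page of the bitmap holds PAGE_SIZE * 8 = 32768 bits, so the low PAGE_SHIFT + 3 = 15 bits of the pfn select a bit within a leaf, and each intermediate level consumes another PAGETABLE_ORDER = 9 bits. A self-contained worked example (constants assumed for x86 4 KiB pages):

    #include <stdio.h>

    #define PAGE_SHIFT      12
    #define PAGETABLE_ORDER 9

    #define L1_IDX(pfn) ((pfn) & ((1UL << (PAGE_SHIFT + 3)) - 1))
    #define L2_IDX(pfn) (((pfn) >> (PAGE_SHIFT + 3)) & ((1UL << PAGETABLE_ORDER) - 1))
    #define L3_IDX(pfn) (((pfn) >> (PAGE_SHIFT + 3 + PAGETABLE_ORDER)) & \
                         ((1UL << PAGETABLE_ORDER) - 1))
    #define L4_IDX(pfn) ((pfn) >> (PAGE_SHIFT + 3 + PAGETABLE_ORDER * 2))

    int main(void)
    {
        unsigned long pfn = 0x123456;

        printf("pfn %#lx -> L4 %lu, L3 %lu, L2 %lu, L1 bit %lu\n",
               pfn, L4_IDX(pfn), L3_IDX(pfn), L2_IDX(pfn), L1_IDX(pfn));
        return 0;
    }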
/xen/xen/arch/x86/boot/
x86_64.S
87 pfn = 0 define
90 .if pfn >= 0xa0 && pfn < 0xc0
91 .quad (pfn << PAGE_SHIFT) | __PAGE_HYPERVISOR_UCMINUS | _PAGE_GLOBAL | MAP_SMALL_PAGES
93 .quad (pfn << PAGE_SHIFT) | PAGE_HYPERVISOR_RWX | MAP_SMALL_PAGES
95 pfn = pfn + 1 define
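
The x86_64.S fragment above is an assembler-time loop emitting one page-table entry per pfn, with the VGA window (pfns 0xa0-0xbf) mapped uncacheable and everything else mapped RWX. A C rendering of the same idea; the flag constants are placeholders, not Xen's actual _PAGE_*/PAGE_HYPERVISOR encodings:

    #include <stdint.h>

    #define PAGE_SHIFT         12
    #define PTE_NORMAL_FLAGS   0x1UL    /* placeholder for PAGE_HYPERVISOR_RWX */
    #define PTE_UNCACHED_FLAGS 0x19UL   /* placeholder for UC- + global */

    /* Fill an L1 table with 1:1 mappings of the first nr_pfns frames. */
    static void build_identity_l1(uint64_t *l1, unsigned long nr_pfns)
    {
        for (unsigned long pfn = 0; pfn < nr_pfns; pfn++) {
            uint64_t flags = (pfn >= 0xa0 && pfn < 0xc0) ? PTE_UNCACHED_FLAGS
                                                         : PTE_NORMAL_FLAGS;
            l1[pfn] = ((uint64_t)pfn << PAGE_SHIFT) | flags;
        }
    }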
/xen/xen/arch/x86/mm/
paging.c
281 i1 = L1_LOGDIRTY_IDX(pfn); in paging_mark_pfn_dirty()
282 i2 = L2_LOGDIRTY_IDX(pfn); in paging_mark_pfn_dirty()
283 i3 = L3_LOGDIRTY_IDX(pfn); in paging_mark_pfn_dirty()
284 i4 = L4_LOGDIRTY_IDX(pfn); in paging_mark_pfn_dirty()
340 pfn_t pfn; in paging_mark_dirty() local
349 paging_mark_pfn_dirty(d, pfn); in paging_mark_dirty()
356 pfn_t pfn; in paging_mfn_is_dirty() local
375 mfn = l4[L4_LOGDIRTY_IDX(pfn)]; in paging_mfn_is_dirty()
381 mfn = l3[L3_LOGDIRTY_IDX(pfn)]; in paging_mfn_is_dirty()
596 unsigned long pfn; in paging_log_dirty_range() local
[all …]
/xen/xen/include/xen/
pdx.h
26 static inline unsigned long pfn_to_pdx(unsigned long pfn) in pfn_to_pdx() argument
28 return (pfn & pfn_pdx_bottom_mask) | in pfn_to_pdx()
29 ((pfn & pfn_top_mask) >> pfn_pdx_hole_shift); in pfn_to_pdx()
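
pfn_to_pdx() above compresses a hole of always-zero address bits out of a pfn so that frame-indexed arrays stay dense. A sketch with the inverse transform; the masks and shift are illustrative globals that are computed at boot from the observed memory layout:

    /* Bits below the hole are kept in place; bits above it are shifted down. */
    static unsigned long pfn_pdx_bottom_mask, pfn_top_mask;
    static unsigned int pfn_pdx_hole_shift;

    static inline unsigned long pfn_to_pdx(unsigned long pfn)
    {
        return (pfn & pfn_pdx_bottom_mask) |
               ((pfn & pfn_top_mask) >> pfn_pdx_hole_shift);
    }

    static inline unsigned long pdx_to_pfn(unsigned long pdx)
    {
        return (pdx & pfn_pdx_bottom_mask) |
               ((pdx << pfn_pdx_hole_shift) & pfn_top_mask);
    }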
/xen/xen/arch/x86/x86_64/
traps.c
187 unsigned long pfn, mfn = read_cr3() >> PAGE_SHIFT; in show_page_walk() local
201 pfn = mfn_valid(_mfn(mfn)) && machine_to_phys_mapping_valid ? in show_page_walk()
204 l4_table_offset(addr), l4e_get_intpte(l4e), pfn); in show_page_walk()
213 pfn = mfn_valid(_mfn(mfn)) && machine_to_phys_mapping_valid ? in show_page_walk()
216 l3_table_offset(addr), l3e_get_intpte(l3e), pfn, in show_page_walk()
227 pfn = mfn_valid(_mfn(mfn)) && machine_to_phys_mapping_valid ? in show_page_walk()
230 l2_table_offset(addr), l2e_get_intpte(l2e), pfn, in show_page_walk()
241 pfn = mfn_valid(_mfn(mfn)) && machine_to_phys_mapping_valid ? in show_page_walk()
244 l1_table_offset(addr), l1e_get_intpte(l1e), pfn); in show_page_walk()
/xen/xen/arch/x86/hvm/
dm.c
152 xen_pfn_t pfn, end_pfn; in modified_memory() local
170 pfn = extent.first_pfn + *pfns_done; in modified_memory()
177 end_pfn = pfn + batch_nr; in modified_memory()
187 for ( ; pfn < end_pfn; pfn++ ) in modified_memory()
191 page = get_page_from_gfn(d, pfn, NULL, P2M_UNSHARE); in modified_memory()
194 paging_mark_pfn_dirty(d, _pfn(pfn)); in modified_memory()
274 unsigned long pfn = data->first_pfn + iter; in set_mem_type() local
277 get_gfn_unshare(d, pfn, &t); in set_mem_type()
280 put_gfn(d, pfn); in set_mem_type()
281 p2m_mem_paging_populate(d, _gfn(pfn)); in set_mem_type()
[all …]
/xen/tools/libxc/include/
xc_dom.h
37 xen_pfn_t pfn; member
332 void *xc_dom_boot_domU_map(struct xc_dom_image *dom, xen_pfn_t pfn,
386 void xc_dom_unmap_one(struct xc_dom_image *dom, xen_pfn_t pfn);
395 retval = xc_dom_pfn_to_ptr(dom, seg->pfn, seg->pages); in xc_dom_seg_to_ptr_pages()
427 static inline xen_pfn_t xc_dom_p2m(struct xc_dom_image *dom, xen_pfn_t pfn) in xc_dom_p2m() argument
430 return pfn; in xc_dom_p2m()
433 if ( pfn >= dom->total_pages ) in xc_dom_p2m()
436 return dom->pv_p2m[pfn]; in xc_dom_p2m()
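
xc_dom_p2m() above is the pfn-to-mfn translation used by the domain builder: auto-translated guests use the pfn unchanged, while PV guests are looked up in pv_p2m[] after a bounds check. A condensed sketch; the translated flag, the struct name and the INVALID_PFN value are stand-ins for the real xc_dom_image fields:

    typedef unsigned long xen_pfn_t;
    #define INVALID_PFN ((xen_pfn_t)-1)

    struct dom_image {
        int translated;           /* HVM / auto-translated guest? */
        xen_pfn_t total_pages;
        xen_pfn_t *pv_p2m;        /* PV guests only: pfn -> mfn */
    };

    static inline xen_pfn_t dom_p2m(const struct dom_image *dom, xen_pfn_t pfn)
    {
        if (dom->translated)
            return pfn;           /* gfn == pfn for translated guests */
        if (pfn >= dom->total_pages)
            return INVALID_PFN;
        return dom->pv_p2m[pfn];
    }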
