Searched refs:end_pfn (Results 1 – 13 of 13) sorted by relevance
/xen/xen/arch/x86/
numa.c
  170  unsigned long start_pfn, end_pfn;  in setup_node_bootmem() (local)
  173  end_pfn = end >> PAGE_SHIFT;  in setup_node_bootmem()
  176  NODE_DATA(nodeid)->node_spanned_pages = end_pfn - start_pfn;  in setup_node_bootmem()
  204  static int __init numa_emulation(u64 start_pfn, u64 end_pfn)  in numa_emulation() (argument)
  208  u64 sz = ((end_pfn - start_pfn)<<PAGE_SHIFT) / numa_fake;  in numa_emulation()
  226  sz = (end_pfn<<PAGE_SHIFT) - nodes[i].start;  in numa_emulation()
  249  void __init numa_initmem_init(unsigned long start_pfn, unsigned long end_pfn)  in numa_initmem_init() (argument)
  254  if ( numa_fake && !numa_emulation(start_pfn, end_pfn) )  in numa_initmem_init()
  260  (u64)end_pfn << PAGE_SHIFT) )  in numa_initmem_init()
  269  (u64)end_pfn << PAGE_SHIFT);  in numa_initmem_init()
  [all …]
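For context, the numa.c hits follow a common idiom: a [start, end) byte range is converted to page-frame-number bounds by shifting with PAGE_SHIFT, and the node's spanned pages are the difference. A minimal, self-contained sketch of that arithmetic (hypothetical helper name; PAGE_SHIFT assumed to be 12, the x86 value):

    #include <stdint.h>

    #define PAGE_SHIFT 12   /* assumed x86 value: 4 KiB pages */

    /* Hypothetical helper mirroring the setup_node_bootmem() hits above:
     * the [start, end) byte range becomes pfn bounds, and the node's
     * spanned pages are the difference (end_pfn is exclusive). */
    static unsigned long spanned_pages(uint64_t start, uint64_t end)
    {
        unsigned long start_pfn = start >> PAGE_SHIFT;
        unsigned long end_pfn   = end >> PAGE_SHIFT;

        return end_pfn - start_pfn;
    }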
/xen/xen/arch/x86/hvm/
dm.c
  152  xen_pfn_t pfn, end_pfn;  in modified_memory() (local)
  161  end_pfn = extent.first_pfn + extent.nr;  in modified_memory()
  163  if ( end_pfn <= extent.first_pfn ||  in modified_memory()
  164  end_pfn > domain_get_maximum_gpfn(d) )  in modified_memory()
  177  end_pfn = pfn + batch_nr;  in modified_memory()
  187  for ( ; pfn < end_pfn; pfn++ )  in modified_memory()
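The dm.c hits show the usual validation of a caller-supplied pfn extent: end_pfn is first_pfn + nr, and the request is rejected if the addition wrapped (end_pfn <= first_pfn) or if it runs past the guest's maximum gpfn. A hedged sketch of that check, with hypothetical names standing in for the hypercall structures:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t xen_pfn_t;   /* stand-in for the Xen typedef */

    /* Hypothetical validation mirroring modified_memory(): the range is
     * [first_pfn, first_pfn + nr), and both overflow and the domain's
     * maximum gpfn are checked before iterating over the pfns. */
    static bool pfn_extent_valid(xen_pfn_t first_pfn, xen_pfn_t nr,
                                 xen_pfn_t max_gpfn)
    {
        xen_pfn_t end_pfn = first_pfn + nr;

        if ( end_pfn <= first_pfn || end_pfn > max_gpfn )
            return false;

        return true;
    }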
/xen/xen/include/asm-x86/
setup.h
   20  void numa_initmem_init(unsigned long start_pfn, unsigned long end_pfn);
paging.h
  199  unsigned long end_pfn;  (member)
/xen/tools/libxc/
xc_dom_arm.c
  251  xen_pfn_t end_pfn = base_pfn + *nr_pfns;  in populate_one_size() (local)
  265  if ( (base_pfn & next_mask) && end_pfn > next_boundary )  in populate_one_size()
  266  end_pfn = next_boundary;  in populate_one_size()
  268  count = ( end_pfn - base_pfn ) >> pfn_shift;  in populate_one_size()
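In populate_one_size(), end_pfn is clamped down to the next allocation-order boundary when base_pfn is not aligned, so each batch only covers pfns that can be expressed as same-sized extents. A simplified sketch of that clamping, assuming the mask and boundary are precomputed from the next-larger extent order:

    #include <stdint.h>

    typedef uint64_t xen_pfn_t;   /* stand-in for the libxc typedef */

    /* Hypothetical clamp mirroring the xc_dom_arm.c hits: if base_pfn is not
     * aligned to the next-larger extent size, stop this batch at that boundary,
     * then return how many extents of (1 << pfn_shift) pages fit in the range. */
    static xen_pfn_t extents_in_range(xen_pfn_t base_pfn, xen_pfn_t nr_pfns,
                                      xen_pfn_t next_mask, xen_pfn_t next_boundary,
                                      unsigned int pfn_shift)
    {
        xen_pfn_t end_pfn = base_pfn + nr_pfns;

        if ( (base_pfn & next_mask) && end_pfn > next_boundary )
            end_pfn = next_boundary;

        return (end_pfn - base_pfn) >> pfn_shift;
    }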
xc_sr_stream_format.h
  105  uint32_t end_pfn;  (member)
xc_sr_restore_x86_pv.c
  712  if ( data->start_pfn > data->end_pfn )  in handle_x86_pv_p2m_frames()
  715  data->end_pfn, data->start_pfn);  in handle_x86_pv_p2m_frames()
  720  end = data->end_pfn / fpp + 1;  in handle_x86_pv_p2m_frames()
  726  data->start_pfn, data->end_pfn, rec->length,  in handle_x86_pv_p2m_frames()
  731  if ( data->end_pfn > ctx->x86.pv.max_pfn )  in handle_x86_pv_p2m_frames()
  733  rc = expand_p2m(ctx, data->end_pfn);  in handle_x86_pv_p2m_frames()
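Here, unlike most of the other hits, start_pfn/end_pfn form an inclusive range, which handle_x86_pv_p2m_frames() converts into a span of p2m frames, where fpp is the number of pfn entries per frame. A rough sketch of that conversion (hypothetical helper name):

    #include <stdint.h>

    /* Hypothetical helper mirroring the restore-side hits: with an inclusive
     * [start_pfn, end_pfn] range and fpp p2m entries per frame, the record
     * spans frames start_pfn / fpp through end_pfn / fpp inclusive. */
    static unsigned int p2m_frames_spanned(uint64_t start_pfn, uint64_t end_pfn,
                                           unsigned int fpp)
    {
        unsigned int start = start_pfn / fpp;
        unsigned int end   = end_pfn / fpp + 1;   /* exclusive frame index */

        return end - start;
    }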
xc_sr_save_x86_pv.c
  820  .end_pfn = ctx->x86.pv.max_pfn,  in write_x86_pv_p2m_frames()
/xen/xen/arch/x86/mm/hap/
hap.c
  100  begin_pfn + nr != dirty_vram->end_pfn )  in hap_track_dirty_vram()
  103  unsigned long oend = dirty_vram->end_pfn;  in hap_track_dirty_vram()
  106  dirty_vram->end_pfn = begin_pfn + nr;  in hap_track_dirty_vram()
  156  nr = dirty_vram->end_pfn - dirty_vram->begin_pfn;  in hap_track_dirty_vram()
/xen/xen/drivers/passthrough/vtd/
dmar.c
   841  unsigned long base_pfn, end_pfn;  (member)
   851  #define ERMRRU_ARG(eru) eru.base_pfn, eru.end_pfn
   863  end = user_rmrrs[i].end_pfn;  in add_user_rmrr()
   946  rmrr->end_address = pfn_to_paddr(user_rmrrs[i].end_pfn) | ~PAGE_MASK;  in add_user_rmrr()
  1098  user_rmrrs[nr_rmrr].end_pfn = end;  in parse_rmrr_param()
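The dmar.c hits store user-supplied RMRRs as pfn bounds and later convert them back to byte addresses; the inclusive end address is rebuilt by ORing in the low page-offset bits. A hedged sketch of that conversion, with pfn_to_paddr() and PAGE_MASK spelled out locally and 4 KiB pages assumed:

    #include <stdint.h>

    #define PAGE_SHIFT 12                        /* assumed x86 value */
    #define PAGE_SIZE  (1ULL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    #define pfn_to_paddr(pfn) ((uint64_t)(pfn) << PAGE_SHIFT)

    /* Hypothetical conversion mirroring add_user_rmrr(): the stored end_pfn is
     * inclusive, so the last byte of that page becomes the RMRR end address. */
    static uint64_t rmrr_end_address(uint64_t end_pfn)
    {
        return pfn_to_paddr(end_pfn) | ~PAGE_MASK;
    }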
iommu.c
  1920  unsigned long end_pfn = PAGE_ALIGN_4K(rmrr->end_address) >> PAGE_SHIFT_4K;  in rmrr_identity_mapping() (local)
  1947  while ( base_pfn < end_pfn )  in rmrr_identity_mapping()
  1963  while ( base_pfn < end_pfn )  in rmrr_identity_mapping()
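rmrr_identity_mapping() derives a 4K pfn range from the RMRR byte range, rounding the end address up so the final partial page is still covered, then walks base_pfn toward the exclusive end_pfn. A minimal sketch of the rounding step, assuming 4 KiB pages and locally defined macros:

    #include <stdint.h>

    #define PAGE_SHIFT_4K  12
    #define PAGE_SIZE_4K   (1ULL << PAGE_SHIFT_4K)
    #define PAGE_ALIGN_4K(addr)  (((addr) + PAGE_SIZE_4K - 1) & ~(PAGE_SIZE_4K - 1))

    /* Hypothetical helper mirroring rmrr_identity_mapping(): the exclusive
     * end_pfn is the rounded-up end address shifted down, so a loop of the form
     *     while ( base_pfn < end_pfn ) { ...; base_pfn++; }
     * visits every 4K frame touched by the RMRR range. */
    static uint64_t rmrr_end_pfn(uint64_t end_address)
    {
        return PAGE_ALIGN_4K(end_address) >> PAGE_SHIFT_4K;
    }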
/xen/xen/arch/x86/mm/shadow/
common.c
  3173  unsigned long end_pfn = begin_pfn + nr;  in shadow_track_dirty_vram() (local)
  3182  if ( end_pfn < begin_pfn || end_pfn > p2m->max_mapped_pfn + 1 )  in shadow_track_dirty_vram()
  3193  || end_pfn != dirty_vram->end_pfn )) )  in shadow_track_dirty_vram()
  3196  …ntk(XENLOG_INFO, "stopping tracking VRAM %lx - %lx\n", dirty_vram->begin_pfn, dirty_vram->end_pfn);  in shadow_track_dirty_vram()
  3220  gdprintk(XENLOG_INFO, "tracking VRAM %lx - %lx\n", begin_pfn, end_pfn);  in shadow_track_dirty_vram()
  3226  dirty_vram->end_pfn = end_pfn;  in shadow_track_dirty_vram()
  3327  for ( i = begin_pfn; i < end_pfn; i++ )  in shadow_track_dirty_vram()
multi.c
   655  && gfn_x(target_gfn) < dirty_vram->end_pfn) )  in _sh_propagate()
  1094  if ( (gfn >= dirty_vram->begin_pfn) && (gfn < dirty_vram->end_pfn) )  in shadow_vram_get_l1e()
  1125  if ( (gfn >= dirty_vram->begin_pfn) && (gfn < dirty_vram->end_pfn) )  in shadow_vram_put_l1e()
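The hap.c, common.c and multi.c hits all use the same half-open convention for dirty-VRAM tracking: the tracked region is [begin_pfn, end_pfn) with end_pfn set to begin_pfn + nr, and a gfn belongs to it only if begin_pfn <= gfn < end_pfn. A small sketch of that membership test (hypothetical helper name):

    #include <stdbool.h>

    /* Hypothetical membership test mirroring the shadow/HAP dirty-VRAM hits:
     * the tracked region is the half-open pfn range [begin_pfn, end_pfn),
     * where end_pfn was set to begin_pfn + nr when tracking started. */
    static bool gfn_in_dirty_vram(unsigned long gfn,
                                  unsigned long begin_pfn, unsigned long end_pfn)
    {
        return gfn >= begin_pfn && gfn < end_pfn;
    }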
Completed in 40 milliseconds