
Searched refs:pfn (Results 1 – 25 of 2895) sorted by relevance


/linux/arch/x86/xen/
p2m.c
242 for (pfn = 0; pfn < xen_max_p2m_pfn && pfn < MAX_P2M_PFN; in xen_build_mfn_list_list()
297 unsigned long pfn; in xen_build_dynamic_phys_to_machine() local
302 for (pfn = xen_start_info->nr_pages; pfn < xen_p2m_size; pfn++) in xen_build_dynamic_phys_to_machine()
334 unsigned long pfn; in xen_rebuild_p2m_list() local
356 for (pfn = 0; pfn < xen_max_p2m_pfn; pfn += chunk) { in xen_rebuild_p2m_list()
633 unsigned long pfn; in set_phys_range_identity() local
644 for (pfn = pfn_s; pfn < pfn_e; pfn++) in set_phys_range_identity()
645 xen_p2m_addr[pfn] = IDENTITY_FRAME(pfn); in set_phys_range_identity()
647 return pfn - pfn_s; in set_phys_range_identity()
818 for (pfn = 0; pfn < xen_p2m_size; pfn++) { in p2m_dump_show()
[all …]
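
The set_phys_range_identity() hits above show a recurring p2m pattern: walk [pfn_s, pfn_e), mark every frame identity-mapped, and return how many frames were touched. A minimal sketch of that pattern, assuming a caller-supplied p2m array as an illustrative stand-in for the kernel's own xen_p2m_addr:

    #include <asm/xen/page.h>

    /* Sketch: mark [pfn_s, pfn_e) as identity-mapped in a p2m array.
     * Mirrors the loop in set_phys_range_identity() above; the p2m
     * parameter is an illustrative stand-in for xen_p2m_addr. */
    static unsigned long mark_identity_range(unsigned long *p2m,
                                             unsigned long pfn_s,
                                             unsigned long pfn_e)
    {
            unsigned long pfn;

            for (pfn = pfn_s; pfn < pfn_e; pfn++)
                    p2m[pfn] = IDENTITY_FRAME(pfn);  /* pfn with the identity bit set */

            return pfn - pfn_s;  /* number of frames marked */
    }
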
setup.c
174 return IDENTITY_FRAME(pfn); in xen_chk_extra_mem()
190 for (pfn = pfn_s; pfn < pfn_e; pfn++) in xen_inv_extra_mem()
257 unsigned long pfn, end; in xen_set_identity_and_release_chunk() local
264 for (pfn = start_pfn; pfn < end; pfn++) { in xen_set_identity_and_release_chunk()
292 .val = pfn in xen_update_mem_tables()
298 pfn, mfn); in xen_update_mem_tables()
305 mfn, pfn); in xen_update_mem_tables()
312 mfn, pfn); in xen_update_mem_tables()
387 unsigned long pfn; in xen_set_identity_and_remap_chunk() local
431 for (pfn = start_pfn; pfn <= max_pfn_mapped && pfn < end_pfn; pfn++) in xen_set_identity_and_remap_chunk()
[all …]
/linux/include/linux/
pfn_t.h
39 return __pfn_to_pfn_t(pfn, 0); in pfn_to_pfn_t()
49 return (pfn.val & PFN_MAP) == PFN_MAP || (pfn.val & PFN_DEV) == 0; in pfn_t_has_page()
54 return pfn.val & ~PFN_FLAGS_MASK; in pfn_t_to_pfn()
59 if (pfn_t_has_page(pfn)) in pfn_t_to_page()
60 return pfn_to_page(pfn_t_to_pfn(pfn)); in pfn_t_to_page()
66 return PFN_PHYS(pfn_t_to_pfn(pfn)); in pfn_t_to_phys()
74 static inline int pfn_t_valid(pfn_t pfn) in pfn_t_valid() argument
76 return pfn_valid(pfn_t_to_pfn(pfn)); in pfn_t_valid()
101 static inline bool pfn_t_devmap(pfn_t pfn) in pfn_t_devmap() argument
105 return (pfn.val & flags) == flags; in pfn_t_devmap()
[all …]
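
The pfn_t.h hits above are the pfn_t conversion helpers (pfn_to_pfn_t(), pfn_t_to_pfn(), pfn_t_to_page(), pfn_t_to_phys()). A minimal sketch of how they compose; the wrapper function itself is an illustrative assumption, not part of the header:

    #include <linux/pfn_t.h>

    /* Sketch: wrap a raw pfn in a pfn_t, then recover its physical
     * address and, if it is backed by a struct page, the page itself,
     * using the same guard pfn_t_to_page() relies on above. */
    static struct page *pfn_round_trip(unsigned long raw_pfn,
                                       phys_addr_t *phys)
    {
            pfn_t pfn = pfn_to_pfn_t(raw_pfn);      /* no PFN_DEV/PFN_MAP flags set */

            *phys = pfn_t_to_phys(pfn);             /* PFN_PHYS(pfn_t_to_pfn(pfn)) */

            if (!pfn_t_has_page(pfn))
                    return NULL;

            return pfn_t_to_page(pfn);              /* pfn_to_page() on the raw pfn */
    }
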
mmzone.h
708 return zone->zone_start_pfn <= pfn && pfn < zone_end_pfn(zone); in zone_spans_pfn()
1266 #define pfn_to_nid(pfn) (0) argument
1292 return pfn >> PFN_SECTION_SHIFT; in pfn_to_section_nr()
1299 #define SECTION_ALIGN_UP(pfn) (((pfn) + PAGES_PER_SECTION - 1) & PAGE_SECTION_MASK) argument
1300 #define SECTION_ALIGN_DOWN(pfn) ((pfn) & PAGE_SECTION_MASK) argument
1315 #define SUBSECTION_ALIGN_UP(pfn) ALIGN((pfn), PAGES_PER_SUBSECTION) argument
1316 #define SUBSECTION_ALIGN_DOWN(pfn) ((pfn) & PAGE_SUBSECTION_MASK) argument
1517 if (PHYS_PFN(PFN_PHYS(pfn)) != pfn) in pfn_valid()
1522 ms = __pfn_to_section(pfn); in pfn_valid()
1556 #define pfn_to_nid(pfn) \ argument
[all …]
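
The mmzone.h hits are the sparsemem section arithmetic (PFN_SECTION_SHIFT, SECTION_ALIGN_UP/DOWN, available under CONFIG_SPARSEMEM). A small sketch of how an arbitrary pfn range gets widened to whole sections before section-granular code sees it; the helper name is an assumption:

    #include <linux/mmzone.h>

    /* Sketch: round [start_pfn, end_pfn) out to section boundaries and
     * report which section the start falls in, using the macros and
     * pfn_to_section_nr() shown above. */
    static void section_bounds(unsigned long start_pfn, unsigned long end_pfn,
                               unsigned long *first_pfn, unsigned long *last_pfn,
                               unsigned long *section_nr)
    {
            *first_pfn = SECTION_ALIGN_DOWN(start_pfn);   /* & PAGE_SECTION_MASK */
            *last_pfn = SECTION_ALIGN_UP(end_pfn);        /* round up to PAGES_PER_SECTION */
            *section_nr = pfn_to_section_nr(start_pfn);   /* pfn >> PFN_SECTION_SHIFT */
    }
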
/linux/arch/x86/include/asm/xen/
page.h
160 return pfn; in __pfn_to_mfn()
174 return pfn; in pfn_to_mfn()
194 unsigned long pfn; in mfn_to_pfn_no_overrides() local
209 return pfn; in mfn_to_pfn_no_overrides()
214 unsigned long pfn; in mfn_to_pfn() local
226 pfn = ~0; in mfn_to_pfn()
233 pfn = mfn; in mfn_to_pfn()
235 return pfn; in mfn_to_pfn()
254 return pfn; in pfn_to_gfn()
268 #define pfn_to_bfn(pfn) pfn_to_gfn(pfn) argument
[all …]
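
The x86 asm/xen/page.h hits are the pfn/mfn/gfn translators; mfn_to_pfn() falls back to ~0 when no reverse mapping exists (line 226 above). A hedged sketch of a checked round trip; the function name is illustrative and the ~0 convention is taken from the snippet above:

    #include <asm/xen/page.h>

    /* Sketch: translate a guest pfn to a machine frame and verify the
     * reverse mapping, treating ~0 as "no mapping". On auto-translated
     * guests both helpers are simply the identity. */
    static bool pfn_mfn_round_trip(unsigned long pfn, unsigned long *mfn_out)
    {
            unsigned long mfn = pfn_to_mfn(pfn);

            if (mfn == ~0UL)
                    return false;

            *mfn_out = mfn;
            return mfn_to_pfn(mfn) == pfn;  /* reverse mapping agrees */
    }
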
/linux/mm/
page_isolation.c
190 unsigned long pfn; in start_isolate_page_range() local
196 for (pfn = start_pfn; in start_isolate_page_range()
197 pfn < end_pfn; in start_isolate_page_range()
214 unsigned long pfn; in undo_isolate_page_range() local
220 for (pfn = start_pfn; in undo_isolate_page_range()
221 pfn < end_pfn; in undo_isolate_page_range()
242 while (pfn < end_pfn) { in __test_page_isolated_in_pageblock()
253 pfn++; in __test_page_isolated_in_pageblock()
261 pfn++; in __test_page_isolated_in_pageblock()
266 return pfn; in __test_page_isolated_in_pageblock()
[all …]
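
The page_isolation.c hits all walk a pfn range at pageblock granularity. A sketch of just that walking pattern; it deliberately does not call the isolation API itself, whose signature differs between kernel versions, and the helper name is an assumption:

    #include <linux/mm.h>

    /* Sketch: visit the first valid page of every pageblock in
     * [start_pfn, end_pfn), the pageblock stride the isolation loops
     * above iterate with. */
    static void walk_pageblocks(unsigned long start_pfn, unsigned long end_pfn)
    {
            unsigned long pfn;

            for (pfn = start_pfn; pfn < end_pfn; pfn += pageblock_nr_pages) {
                    if (!pfn_valid(pfn))
                            continue;
                    /* struct page *page = pfn_to_page(pfn); inspect or isolate it */
            }
    }
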
page_ext.c
267 pfn &= PAGE_SECTION_MASK; in init_section_page_ext()
294 ms = __pfn_to_section(pfn); in __free_page_ext()
322 for (pfn = start; !fail && pfn < end; pfn += PAGES_PER_SECTION) in online_page_ext()
328 for (pfn = start; pfn < end; pfn += PAGES_PER_SECTION) in online_page_ext()
329 __free_page_ext(pfn); in online_page_ext()
342 for (pfn = start; pfn < end; pfn += PAGES_PER_SECTION) in offline_page_ext()
343 __free_page_ext(pfn); in offline_page_ext()
379 unsigned long pfn; in page_ext_init() local
395 for (pfn = start_pfn; pfn < end_pfn; in page_ext_init()
396 pfn = ALIGN(pfn + 1, PAGES_PER_SECTION)) { in page_ext_init()
[all …]
memory_hotplug.c
244 reason, pfn, pfn + nr_pages - 1); in check_pfn_span()
333 for (; pfn < end_pfn; pfn += cur_nr_pages) { in __add_pages()
336 SECTION_ALIGN_UP(pfn + 1) - pfn); in __add_pages()
469 for (pfn = start_pfn; pfn < end_pfn; pfn += cur_nr_pages) { in remove_pfn_range_from_zone()
474 min(end_pfn - pfn, SECTION_ALIGN_UP(pfn + 1) - pfn); in remove_pfn_range_from_zone()
530 for (; pfn < end_pfn; pfn += cur_nr_pages) { in __remove_pages()
534 SECTION_ALIGN_UP(pfn + 1) - pfn); in __remove_pages()
605 for (pfn = start_pfn; pfn < end_pfn;) { in online_pages_range()
920 pfn = ALIGN_DOWN(pfn, group->d.unit_pages); in auto_movable_zone_for_pfn()
1640 for (pfn = start; pfn < end; pfn++) { in scan_movable_pages()
[all …]
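
__add_pages() and __remove_pages() above split a range into chunks that never cross a section boundary: cur_nr_pages = min(end_pfn - pfn, SECTION_ALIGN_UP(pfn + 1) - pfn). A sketch of just that chunking arithmetic; with 32768-page sections (the x86-64 default), pfn = 32760 and end_pfn = 100000 produce chunks of 8, 32768, 32768 and 1696 pages:

    #include <linux/minmax.h>
    #include <linux/mmzone.h>

    /* Sketch: iterate [pfn, end_pfn) in per-section chunks, mirroring
     * the cur_nr_pages computation in __add_pages()/__remove_pages(). */
    static void for_each_section_chunk(unsigned long pfn, unsigned long end_pfn)
    {
            unsigned long cur_nr_pages;

            for (; pfn < end_pfn; pfn += cur_nr_pages) {
                    /* stop at the next section boundary or at end_pfn */
                    cur_nr_pages = min(end_pfn - pfn,
                                       SECTION_ALIGN_UP(pfn + 1) - pfn);
                    /* ... operate on [pfn, pfn + cur_nr_pages) ... */
            }
    }
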
sparse.c
207 - (pfn & ~PAGE_SECTION_MASK)); in subsection_map_init()
215 pfn += pfns; in subsection_map_init()
228 unsigned long pfn; in memory_present() local
245 for (pfn = start; pfn < end; pfn += PAGES_PER_SECTION) { in memory_present()
594 unsigned long pfn; in online_mem_sections() local
596 for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in online_mem_sections()
612 unsigned long pfn; in offline_mem_sections() local
614 for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in offline_mem_sections()
667 pfn, nr_pages)) in clear_subsection_map()
712 kvfree(pfn_to_page(pfn)); in depopulate_section_memmap()
[all …]
memory-failure.c
596 if (!pfn || pfn != poisoned_pfn) in check_hwpoisoned_entry()
614 if (pfn <= hwp->pfn && hwp->pfn < pfn + HPAGE_PMD_NR) { in check_hwpoisoned_pmd_entry()
700 .pfn = pfn, in kill_accessing_process()
787 pfn, err); in truncate_error_page()
791 pfn); in truncate_error_page()
804 pfn); in truncate_error_page()
1329 pfn); in hwpoison_user_mappings()
1348 pfn); in hwpoison_user_mappings()
1658 pfn); in memory_failure()
1672 pfn); in memory_failure()
[all …]
page_owner.c
269 pfn = ALIGN(pfn + 1, MAX_ORDER_NR_PAGES); in pagetypeinfo_showmixedcount_print()
278 for (; pfn < block_end_pfn; pfn++) { in pagetypeinfo_showmixedcount_print()
312 pfn = block_end_pfn; in pagetypeinfo_showmixedcount_print()
353 pfn, in print_page_owner()
442 unsigned long pfn; in read_page_owner() local
455 while (!pfn_valid(pfn) && (pfn & (MAX_ORDER_NR_PAGES - 1)) != 0) in read_page_owner()
456 pfn++; in read_page_owner()
461 for (; pfn < max_pfn; pfn++) { in read_page_owner()
466 if ((pfn & (MAX_ORDER_NR_PAGES - 1)) == 0 && !pfn_valid(pfn)) { in read_page_owner()
540 pfn = ALIGN(pfn + 1, MAX_ORDER_NR_PAGES); in init_pages_in_zone()
[all …]
cma.c
111 for (pfn = base_pfn + 1; pfn < base_pfn + cma->count; pfn++) { in cma_activate_area()
117 for (pfn = base_pfn; pfn < base_pfn + cma->count; in cma_activate_area()
134 for (pfn = base_pfn; pfn < base_pfn + cma->count; pfn++) in cma_activate_area()
430 unsigned long pfn = -1; in cma_alloc() local
474 ret = alloc_contig_range(pfn, pfn + count, MIGRATE_CMA, in cma_alloc()
489 trace_cma_alloc_busy_retry(cma->name, pfn, pfn_to_page(pfn), in cma_alloc()
530 unsigned long pfn; in cma_pages_valid() local
535 pfn = page_to_pfn(pages); in cma_pages_valid()
537 if (pfn < cma->base_pfn || pfn >= cma->base_pfn + cma->count) { in cma_pages_valid()
559 unsigned long pfn; in cma_release() local
[all …]
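
cma_pages_valid() above checks that the first page of a block falls inside [base_pfn, base_pfn + count) before a release is honoured. A sketch of that bounds test with a stand-in descriptor, since struct cma itself is private to mm/cma.h; the struct and function names here are illustrative:

    #include <linux/mm.h>

    /* Sketch: an illustrative stand-in for the private struct cma. */
    struct cma_stub {
            unsigned long base_pfn;   /* first pfn of the CMA area */
            unsigned long count;      /* size of the area in pages */
    };

    static bool cma_stub_contains(const struct cma_stub *cma, struct page *page)
    {
            unsigned long pfn = page_to_pfn(page);

            /* same bounds test as cma_pages_valid() at line 537 above */
            return pfn >= cma->base_pfn &&
                   pfn < cma->base_pfn + cma->count;
    }
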
page_idle.c
120 unsigned long pfn, end_pfn; in page_idle_bitmap_read() local
126 pfn = pos * BITS_PER_BYTE; in page_idle_bitmap_read()
127 if (pfn >= max_pfn) in page_idle_bitmap_read()
134 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_read()
135 bit = pfn % BITMAP_CHUNK_BITS; in page_idle_bitmap_read()
138 page = page_idle_get_page(pfn); in page_idle_bitmap_read()
165 unsigned long pfn, end_pfn; in page_idle_bitmap_write() local
171 pfn = pos * BITS_PER_BYTE; in page_idle_bitmap_write()
172 if (pfn >= max_pfn) in page_idle_bitmap_write()
179 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_write()
[all …]
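
page_idle_bitmap_read()/write() above map a byte offset in the idle-pages bitmap file to page frames: one bit per pfn, grouped into 64-bit chunks (BITMAP_CHUNK_BITS in page_idle.c). A small sketch of the inverse arithmetic; the helper name is an assumption:

    #include <linux/types.h>
    #include <linux/bits.h>

    /* Sketch: locate a pfn inside the idle bitmap: the byte offset that
     * covers it and its bit index within the containing u64 chunk,
     * inverting pfn = pos * BITS_PER_BYTE from the code above. */
    static void idle_bitmap_position(unsigned long pfn,
                                     loff_t *byte_pos, unsigned int *bit)
    {
            *byte_pos = pfn / BITS_PER_BYTE;               /* one bit per frame */
            *bit = pfn % (sizeof(u64) * BITS_PER_BYTE);    /* bit in the u64 chunk */
    }
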
/linux/tools/testing/scatterlist/
main.c
11 unsigned *pfn; member
28 #define pfn(...) (unsigned []){ __VA_ARGS__ } macro
42 printf(" %x", test->pfn[i]); in fail()
56 { -EINVAL, 1, pfn(0), NULL, PAGE_SIZE, 0, 1 }, in main()
58 { 0, 1, pfn(0), NULL, PAGE_SIZE, sgmax, 1 }, in main()
59 { 0, 1, pfn(0), NULL, 1, sgmax, 1 }, in main()
60 { 0, 2, pfn(0, 1), NULL, 2 * PAGE_SIZE, sgmax, 1 }, in main()
64 { 0, 3, pfn(0, 1, 2), pfn(3, 4, 5), 3 * PAGE_SIZE, sgmax, 1 }, in main()
65 { 0, 3, pfn(0, 1, 2), pfn(4, 5, 6), 3 * PAGE_SIZE, sgmax, 2 }, in main()
80 { 0, 6, pfn(0, 1, 3, 4, 5, 6), pfn(7, 8, 9, 10, 11, 12), in main()
[all …]
/linux/arch/arm/xen/
p2m.c
44 if (new->pfn == entry->pfn) in xen_add_phys_to_mach_entry()
47 if (new->pfn < entry->pfn) in xen_add_phys_to_mach_entry()
59 __func__, &new->pfn, &new->mfn, &entry->pfn, &entry->mfn); in xen_add_phys_to_mach_entry()
73 if (entry->pfn <= pfn && in __pfn_to_mfn()
74 entry->pfn + entry->nr_pages > pfn) { in __pfn_to_mfn()
75 unsigned long mfn = entry->mfn + (pfn - entry->pfn); in __pfn_to_mfn()
79 if (pfn < entry->pfn) in __pfn_to_mfn()
161 if (p2m_entry->pfn <= pfn && in __set_phys_to_machine_multi()
162 p2m_entry->pfn + p2m_entry->nr_pages > pfn) { in __set_phys_to_machine_multi()
168 if (pfn < p2m_entry->pfn) in __set_phys_to_machine_multi()
[all …]
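
The ARM p2m code above keeps pfn-to-mfn ranges in an rbtree and tests containment with entry->pfn <= pfn && entry->pfn + entry->nr_pages > pfn, then adds the offset into the range. A self-contained sketch of that check; the struct is an illustrative stand-in for the entry type in p2m.c:

    /* Sketch: one pfn-to-mfn range, as the ARM p2m rbtree stores it. */
    struct p2m_range {
            unsigned long pfn;        /* first guest frame of the range */
            unsigned long mfn;        /* first machine frame of the range */
            unsigned long nr_pages;   /* length of the range in frames */
    };

    /* Return the mfn backing @pfn, or ~0UL if @pfn is outside the range. */
    static unsigned long p2m_range_lookup(const struct p2m_range *e,
                                          unsigned long pfn)
    {
            if (e->pfn <= pfn && e->pfn + e->nr_pages > pfn)
                    return e->mfn + (pfn - e->pfn);   /* same offset into the range */
            return ~0UL;
    }
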
/linux/drivers/gpu/drm/i915/selftests/
scatterlist.c
50 pfn = pt->start; in expect_pfn_sg()
70 pfn += npages; in expect_pfn_sg()
86 unsigned long pfn; in expect_pfn_sg_page_iter() local
88 pfn = pt->start; in expect_pfn_sg_page_iter()
101 pfn++; in expect_pfn_sg_page_iter()
118 unsigned long pfn; in expect_pfn_sgtiter() local
120 pfn = pt->start; in expect_pfn_sgtiter()
131 pfn++; in expect_pfn_sgtiter()
235 pfn = pt->start; in alloc_table()
256 pfn += npages; in alloc_table()
[all …]
/linux/include/trace/events/
cma.h
13 TP_PROTO(const char *name, unsigned long pfn, const struct page *page,
16 TP_ARGS(name, pfn, page, count, align),
20 __field(unsigned long, pfn)
28 __entry->pfn = pfn;
36 __entry->pfn,
47 TP_ARGS(name, pfn, page, count),
51 __field(unsigned long, pfn)
58 __entry->pfn = pfn;
65 __entry->pfn,
99 TP_ARGS(name, pfn, page, count, align)
[all …]
kmem.h
177 pfn_to_page(__entry->pfn),
178 __entry->pfn,
197 pfn_to_page(__entry->pfn),
198 __entry->pfn)
223 __entry->pfn != -1UL ? pfn_to_page(__entry->pfn) : NULL,
224 __entry->pfn != -1UL ? __entry->pfn : 0,
249 __entry->pfn != -1UL ? pfn_to_page(__entry->pfn) : NULL,
250 __entry->pfn != -1UL ? __entry->pfn : 0,
282 pfn_to_page(__entry->pfn), __entry->pfn,
316 pfn_to_page(__entry->pfn),
[all …]
/linux/arch/arm/mach-omap2/
io.c
71 .pfn = __phys_to_pfn(L3_24XX_PHYS),
77 .pfn = __phys_to_pfn(L4_24XX_PHYS),
111 .pfn = __phys_to_pfn(L4_WK_243X_PHYS),
141 .pfn = __phys_to_pfn(L3_34XX_PHYS),
147 .pfn = __phys_to_pfn(L4_34XX_PHYS),
188 .pfn = __phys_to_pfn(L4_34XX_PHYS),
199 .pfn = __phys_to_pfn(L4_34XX_PHYS),
216 .pfn = __phys_to_pfn(L3_44XX_PHYS),
222 .pfn = __phys_to_pfn(L4_44XX_PHYS),
239 .pfn = __phys_to_pfn(L3_54XX_PHYS),
[all …]
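
The OMAP io.c hits are static struct map_desc tables; .pfn = __phys_to_pfn(phys) converts each physical base into a frame number for the fixed kernel mapping. A hedged sketch of one such entry; the EXAMPLE_* addresses are made up for illustration and are not real OMAP constants:

    #include <linux/init.h>
    #include <linux/sizes.h>
    #include <asm/memory.h>
    #include <asm/mach/map.h>

    #define EXAMPLE_IO_PHYS     0x48000000UL    /* hypothetical physical base */
    #define EXAMPLE_IO_VIRT     0xfa000000UL    /* hypothetical virtual base */

    /* Sketch: one static I/O mapping in the style of the tables above;
     * such a table would be registered early with iotable_init(). */
    static struct map_desc example_io_desc[] __initdata = {
            {
                    .virtual    = EXAMPLE_IO_VIRT,
                    .pfn        = __phys_to_pfn(EXAMPLE_IO_PHYS),
                    .length     = SZ_1M,
                    .type       = MT_DEVICE,
            },
    };
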
/linux/kernel/power/
snapshot.c
749 if (pfn >= zone->start_pfn && pfn < zone->end_pfn) in memory_bm_find_bit()
756 if (pfn >= curr->start_pfn && pfn < curr->end_pfn) { in memory_bm_find_bit()
928 return pfn; in memory_bm_next_pfn()
1091 for (pfn = region->start_pfn; pfn < region->end_pfn; pfn++) in mark_nosave_pages()
1192 unsigned long pfn; in clear_or_poison_free_pages() local
1301 for (pfn = zone->zone_start_pfn; pfn < max_zone_pfn; pfn++) in count_highmem_pages()
1368 for (pfn = zone->zone_start_pfn; pfn < max_zone_pfn; pfn++) in count_data_pages()
1410 saveable_highmem_page(zone, pfn) : saveable_page(zone, pfn); in page_is_saveable()
1442 #define page_is_saveable(zone, pfn) saveable_page(zone, pfn) argument
1455 unsigned long pfn; in copy_data_pages() local
[all …]
/linux/include/xen/arm/
page.h
15 #define phys_to_machine_mapping_valid(pfn) (1) argument
43 unsigned long __pfn_to_mfn(unsigned long pfn);
47 static inline unsigned long pfn_to_gfn(unsigned long pfn) in pfn_to_gfn() argument
49 return pfn; in pfn_to_gfn()
58 static inline unsigned long pfn_to_bfn(unsigned long pfn) in pfn_to_bfn() argument
63 mfn = __pfn_to_mfn(pfn); in pfn_to_bfn()
68 return pfn; in pfn_to_bfn()
103 bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
104 bool __set_phys_to_machine_multi(unsigned long pfn, unsigned long mfn,
107 static inline bool set_phys_to_machine(unsigned long pfn, unsigned long mfn) in set_phys_to_machine() argument
[all …]
/linux/arch/x86/mm/
init_32.c
264 unsigned long pfn; in kernel_physical_mapping_init() local
295 pfn = start_pfn; in kernel_physical_mapping_init()
301 if (pfn >= end_pfn) in kernel_physical_mapping_init()
328 pfn &= PMD_MASK >> PAGE_SHIFT; in kernel_physical_mapping_init()
340 set_pmd(pmd, pfn_pmd(pfn, prot)); in kernel_physical_mapping_init()
342 pfn += PTRS_PER_PTE; in kernel_physical_mapping_init()
414 for ( ; pfn < e_pfn; pfn++) in add_highpages_with_active_regions()
415 if (pfn_valid(pfn)) in add_highpages_with_active_regions()
442 unsigned long pfn, va; in native_pagetable_init() local
458 for (pfn = max_low_pfn; pfn < 1<<(32-PAGE_SHIFT); pfn++) { in native_pagetable_init()
[all …]
/linux/arch/arm/mm/
flush.c
43 set_top_pte(to, pfn_pte(pfn, PAGE_KERNEL)); in flush_pfn_alias()
58 set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL)); in flush_icache_alias()
106 flush_pfn_alias(pfn, user_addr); in flush_cache_page()
271 unsigned long pfn; in __sync_icache_dcache() local
278 pfn = pte_pfn(pteval); in __sync_icache_dcache()
279 if (!pfn_valid(pfn)) in __sync_icache_dcache()
282 page = pfn_to_page(pfn); in __sync_icache_dcache()
359 unsigned long pfn; in __flush_anon_page() local
368 pfn = page_to_pfn(page); in __flush_anon_page()
370 flush_cache_page(vma, vmaddr, pfn); in __flush_anon_page()
[all …]
ioremap.c
197 pmd[0] = __pmd(__pfn_to_phys(pfn) | type->prot_sect); in remap_area_sections()
198 pfn += SZ_1M >> PAGE_SHIFT; in remap_area_sections()
199 pmd[1] = __pmd(__pfn_to_phys(pfn) | type->prot_sect); in remap_area_sections()
200 pfn += SZ_1M >> PAGE_SHIFT; in remap_area_sections()
238 pfn += SUPERSECTION_SIZE >> PAGE_SHIFT; in remap_area_supersections()
252 phys_addr_t paddr = __pfn_to_phys(pfn); in __arm_ioremap_pfn_caller()
258 if (pfn >= 0x100000 && (paddr & ~SUPERSECTION_MASK)) in __arm_ioremap_pfn_caller()
289 if (WARN_ON(memblock_is_map_memory(PFN_PHYS(pfn)) && in __arm_ioremap_pfn_caller()
302 cpu_is_xsc3()) && pfn >= 0x100000 && in __arm_ioremap_pfn_caller()
308 err = remap_area_sections(addr, pfn, size, type); in __arm_ioremap_pfn_caller()
[all …]
/linux/arch/powerpc/platforms/powernv/
memtrace.c
94 unsigned long pfn; in memtrace_clear_range() local
97 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) { in memtrace_clear_range()
98 if (IS_ALIGNED(pfn, PAGES_PER_SECTION)) in memtrace_clear_range()
100 clear_page(__va(PFN_PHYS(pfn))); in memtrace_clear_range()
114 unsigned long pfn, start_pfn; in memtrace_alloc_node() local
138 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_alloc_node()
139 __SetPageOffline(pfn_to_page(pfn)); in memtrace_alloc_node()
218 unsigned long pfn; in memtrace_free() local
225 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_free()
226 __ClearPageOffline(pfn_to_page(pfn)); in memtrace_free()
