Searched refs:start_pfn (Results 1 – 25 of 91) sorted by relevance

/linux/arch/x86/xen/
setup.c
106 xen_extra_mem[i].start_pfn = start_pfn; in xen_add_extra_mem()
112 start_pfn) { in xen_add_extra_mem()
130 start_r = xen_extra_mem[i].start_pfn; in xen_del_extra_mem()
134 if (start_r == start_pfn) { in xen_del_extra_mem()
136 xen_extra_mem[i].start_pfn += n_pfns; in xen_del_extra_mem()
147 if (start_pfn > start_r && start_pfn < start_r + size_r) { in xen_del_extra_mem()
152 (start_pfn + n_pfns)); in xen_del_extra_mem()
188 pfn_s = xen_extra_mem[i].start_pfn; in xen_inv_extra_mem()
260 WARN_ON(start_pfn > end_pfn); in xen_set_identity_and_release_chunk()
443 if (start_pfn >= nr_pages) in xen_count_remap_pages()
[all …]
/linux/mm/
memory_hotplug.c
348 unsigned long start_pfn, in find_smallest_section_pfn() argument
351 for (; start_pfn < end_pfn; start_pfn += PAGES_PER_SUBSECTION) { in find_smallest_section_pfn()
361 return start_pfn; in find_smallest_section_pfn()
369 unsigned long start_pfn, in find_biggest_section_pfn() argument
422 start_pfn); in shrink_zone_span()
461 unsigned long start_pfn, in remove_pfn_range_from_zone() argument
489 shrink_zone_span(zone, start_pfn, start_pfn + nr_pages); in remove_pfn_range_from_zone()
1087 arg.start_pfn = pfn; in online_pages()
1603 for (pfn = start_pfn, sec_end_pfn = SECTION_ALIGN_UP(start_pfn + 1); in test_pages_in_a_zone()
1913 arg.start_pfn = start_pfn; in offline_pages()
[all …]
page_isolation.c
187 int start_isolate_page_range(unsigned long start_pfn, unsigned long end_pfn, in start_isolate_page_range() argument
193 BUG_ON(!IS_ALIGNED(start_pfn, pageblock_nr_pages)); in start_isolate_page_range()
196 for (pfn = start_pfn; in start_isolate_page_range()
201 undo_isolate_page_range(start_pfn, pfn, migratetype); in start_isolate_page_range()
217 BUG_ON(!IS_ALIGNED(start_pfn, pageblock_nr_pages)); in undo_isolate_page_range()
220 for (pfn = start_pfn; in undo_isolate_page_range()
270 int test_pages_isolated(unsigned long start_pfn, unsigned long end_pfn, in test_pages_isolated() argument
283 for (pfn = start_pfn; pfn < end_pfn; pfn += pageblock_nr_pages) { in test_pages_isolated()
288 page = __first_valid_page(start_pfn, end_pfn - start_pfn); in test_pages_isolated()
297 pfn = __test_page_isolated_in_pageblock(start_pfn, end_pfn, isol_flags); in test_pages_isolated()
[all …]
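
The page_isolation.c hits above all walk a [start_pfn, end_pfn) range in pageblock-sized steps after checking alignment. The standalone C sketch below illustrates that stride pattern; PAGEBLOCK_NR_PAGES and the sample range are assumed stand-ins for the kernel's pageblock_nr_pages and a real caller-supplied range, not kernel definitions.

/*
 * Standalone illustration (not kernel code) of the pageblock-stride walk
 * used by start_isolate_page_range()/undo_isolate_page_range() above.
 */
#include <assert.h>
#include <stdio.h>

#define PAGEBLOCK_NR_PAGES 512UL   /* assumed pageblock size, must be a power of two */

static int is_aligned(unsigned long pfn, unsigned long step)
{
	return (pfn & (step - 1)) == 0;
}

int main(void)
{
	unsigned long start_pfn = 0x10000, end_pfn = 0x10800, pfn;

	/* Callers are expected to pass pageblock-aligned range ends. */
	assert(is_aligned(start_pfn, PAGEBLOCK_NR_PAGES));
	assert(is_aligned(end_pfn, PAGEBLOCK_NR_PAGES));

	/* Visit one pageblock per iteration, exactly as the loops above do. */
	for (pfn = start_pfn; pfn < end_pfn; pfn += PAGEBLOCK_NR_PAGES)
		printf("pageblock starting at pfn %#lx\n", pfn);

	return 0;
}
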
page_ext.c
309 start = SECTION_ALIGN_DOWN(start_pfn); in online_page_ext()
310 end = SECTION_ALIGN_UP(start_pfn + nr_pages); in online_page_ext()
318 nid = pfn_to_nid(start_pfn); in online_page_ext()
339 start = SECTION_ALIGN_DOWN(start_pfn); in offline_page_ext()
340 end = SECTION_ALIGN_UP(start_pfn + nr_pages); in offline_page_ext()
356 ret = online_page_ext(mn->start_pfn, in page_ext_callback()
360 offline_page_ext(mn->start_pfn, in page_ext_callback()
364 offline_page_ext(mn->start_pfn, in page_ext_callback()
386 unsigned long start_pfn, end_pfn; in page_ext_init() local
388 start_pfn = node_start_pfn(nid); in page_ext_init()
[all …]
page_alloc.c
590 start_pfn, start_pfn + sp); in page_outside_zone_boundaries()
1608 for (; start_pfn < end_pfn; start_pfn++) { in reserve_bootmem_region()
2546 start_pfn = pfn; in move_freepages_block()
6696 start_pfn = clamp(start_pfn, zone_start_pfn, zone_end_pfn); in memmap_init_zone_range()
6702 memmap_init_range(end_pfn - start_pfn, nid, zone_id, start_pfn, in memmap_init_zone_range()
7050 *start_pfn = -1UL; in get_pfn_range_for_nid()
7054 *start_pfn = min(*start_pfn, this_start_pfn); in get_pfn_range_for_nid()
7059 *start_pfn = 0; in get_pfn_range_for_nid()
7169 start_pfn = clamp(start_pfn, range_start_pfn, range_end_pfn); in __absent_pages_in_range()
7846 start_pfn = max(start_pfn, zone_movable_pfn[nid]); in find_zone_movable_pfns_for_nodes()
[all …]
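
Several of the page_alloc.c hits (memmap_init_zone_range(), __absent_pages_in_range()) clamp an incoming PFN span to the zone's own boundaries before walking it. The sketch below illustrates that intersection step; clamp_ul() and all the PFN values are local stand-ins rather than kernel definitions.

/*
 * Standalone illustration of the clamp-to-zone step seen in
 * memmap_init_zone_range() and __absent_pages_in_range() above.
 */
#include <stdio.h>

static unsigned long clamp_ul(unsigned long val, unsigned long lo, unsigned long hi)
{
	if (val < lo)
		return lo;
	if (val > hi)
		return hi;
	return val;
}

int main(void)
{
	unsigned long zone_start_pfn = 0x1000, zone_end_pfn = 0x8000;
	unsigned long start_pfn = 0x0800, end_pfn = 0x9000;

	/* Intersect the requested span with the zone's own PFN span. */
	start_pfn = clamp_ul(start_pfn, zone_start_pfn, zone_end_pfn);
	end_pfn = clamp_ul(end_pfn, zone_start_pfn, zone_end_pfn);

	if (start_pfn < end_pfn)
		printf("init pfns [%#lx, %#lx) within the zone\n", start_pfn, end_pfn);

	return 0;
}
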
sparse.c
129 void __meminit mminit_validate_memmodel_limits(unsigned long *start_pfn, in mminit_validate_memmodel_limits() argument
138 if (*start_pfn > max_sparsemem_pfn) { in mminit_validate_memmodel_limits()
141 *start_pfn, *end_pfn, max_sparsemem_pfn); in mminit_validate_memmodel_limits()
143 *start_pfn = max_sparsemem_pfn; in mminit_validate_memmodel_limits()
148 *start_pfn, *end_pfn, max_sparsemem_pfn); in mminit_validate_memmodel_limits()
596 for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in online_mem_sections()
614 for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in offline_mem_sections()
885 int __meminit sparse_add_section(int nid, unsigned long start_pfn, in sparse_add_section() argument
888 unsigned long section_nr = pfn_to_section_nr(start_pfn); in sparse_add_section()
897 memmap = section_activate(nid, start_pfn, nr_pages, altmap); in sparse_add_section()
[all …]
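
The sparse.c hits work at section granularity: online_mem_sections() and offline_mem_sections() stride by PAGES_PER_SECTION, and sparse_add_section() derives a section index from start_pfn with pfn_to_section_nr(). A minimal userspace sketch of that mapping follows; PFN_SECTION_SHIFT here is an assumed value (128 MiB sections with 4 KiB pages), the real constant is architecture-specific.

/*
 * Standalone illustration of the section-granular walk visible in
 * online_mem_sections()/sparse_add_section() above.
 */
#include <stdio.h>

#define PFN_SECTION_SHIFT 15UL                        /* assumed, per-architecture in reality */
#define PAGES_PER_SECTION (1UL << PFN_SECTION_SHIFT)

static unsigned long pfn_to_section_nr(unsigned long pfn)
{
	return pfn >> PFN_SECTION_SHIFT;
}

int main(void)
{
	unsigned long start_pfn = 0x40000, end_pfn = 0x60000, pfn;

	/* Stride the range one memory section at a time. */
	for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION)
		printf("pfn %#lx is in section %lu\n", pfn, pfn_to_section_nr(pfn));

	return 0;
}
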
shuffle.c
84 unsigned long start_pfn = z->zone_start_pfn; in __shuffle_zone() local
90 start_pfn = ALIGN(start_pfn, order_pages); in __shuffle_zone()
91 for (i = start_pfn; i < end_pfn; i += order_pages) { in __shuffle_zone()
bootmem_info.c
42 static void __init register_page_bootmem_info_section(unsigned long start_pfn) in register_page_bootmem_info_section() argument
49 section_nr = pfn_to_section_nr(start_pfn); in register_page_bootmem_info_section()
77 static void __init register_page_bootmem_info_section(unsigned long start_pfn) in register_page_bootmem_info_section() argument
84 section_nr = pfn_to_section_nr(start_pfn); in register_page_bootmem_info_section()
/linux/arch/x86/mm/
init.c
306 if (start_pfn < end_pfn) { in save_mr()
388 pfn = start_pfn = PFN_DOWN(start); in split_mem_range()
405 if (start_pfn < end_pfn) { in split_mem_range()
420 if (start_pfn < end_pfn) { in split_mem_range()
430 if (start_pfn < end_pfn) { in split_mem_range()
440 if (start_pfn < end_pfn) { in split_mem_range()
448 start_pfn = pfn; in split_mem_range()
550 unsigned long start_pfn, end_pfn; in init_range_memory_mapping() local
972 unsigned long start_pfn, end_pfn; in memblock_find_dma_reserve() local
983 start_pfn = min(start_pfn, MAX_DMA_PFN); in memblock_find_dma_reserve()
[all …]
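
The arch/x86/mm/init.c hits convert byte addresses to frame numbers with PFN_DOWN()/PFN_UP() before comparing ranges. The sketch below re-derives those conversions for illustration, assuming 4 KiB pages; the macro bodies are written locally and only follow the usual round-down/round-up semantics, they are not copied from kernel headers.

/*
 * Standalone illustration of the address/frame conversions used by
 * split_mem_range() and early_reserve_mem() in the hits above.
 */
#include <stdio.h>

#define PAGE_SHIFT 12UL
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)                    /* frame containing address x (round down) */
#define PFN_UP(x)   (((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)  /* round address x up to a frame number */
#define PFN_PHYS(x) ((x) << PAGE_SHIFT)                    /* frame number back to a byte address */

int main(void)
{
	unsigned long start = 0x100500, end = 0x180000;
	unsigned long start_pfn = PFN_DOWN(start);
	unsigned long end_pfn = PFN_UP(end);

	printf("bytes [%#lx, %#lx) -> pfns [%#lx, %#lx) -> phys [%#lx, %#lx)\n",
	       start, end, start_pfn, end_pfn,
	       PFN_PHYS(start_pfn), PFN_PHYS(end_pfn));

	return 0;
}
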
/linux/arch/powerpc/platforms/powernv/
memtrace.c
91 static void memtrace_clear_range(unsigned long start_pfn, in memtrace_clear_range() argument
97 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) { in memtrace_clear_range()
114 unsigned long pfn, start_pfn; in memtrace_alloc_node() local
125 start_pfn = page_to_pfn(page); in memtrace_alloc_node()
132 memtrace_clear_range(start_pfn, nr_pages); in memtrace_alloc_node()
138 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_alloc_node()
141 arch_remove_linear_mapping(PFN_PHYS(start_pfn), size); in memtrace_alloc_node()
143 return PFN_PHYS(start_pfn); in memtrace_alloc_node()
217 const unsigned long start_pfn = PHYS_PFN(start); in memtrace_free() local
225 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_free()
[all …]
/linux/arch/sparc/mm/
init_32.c
65 unsigned long start_pfn = sp_banks[i].base_addr >> PAGE_SHIFT; in calc_highpages() local
71 if (start_pfn < max_low_pfn) in calc_highpages()
72 start_pfn = max_low_pfn; in calc_highpages()
74 nr += end_pfn - start_pfn; in calc_highpages()
134 unsigned long start_pfn, bytes_avail, size; in bootmem_init() local
175 start_pfn >>= PAGE_SHIFT; in bootmem_init()
193 size = (start_pfn << PAGE_SHIFT) - phys_base; in bootmem_init()
244 for (tmp = start_pfn; tmp < end_pfn; tmp++) in map_high_region()
290 if (start_pfn < highstart_pfn) in mem_init()
291 start_pfn = highstart_pfn; in mem_init()
[all …]
/linux/include/trace/events/
page_isolation.h
13 unsigned long start_pfn,
17 TP_ARGS(start_pfn, end_pfn, fin_pfn),
20 __field(unsigned long, start_pfn)
26 __entry->start_pfn = start_pfn;
32 __entry->start_pfn, __entry->end_pfn, __entry->fin_pfn,
compaction.h
17 unsigned long start_pfn,
22 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken),
25 __field(unsigned long, start_pfn)
32 __entry->start_pfn = start_pfn;
39 __entry->start_pfn,
48 unsigned long start_pfn,
53 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken)
59 unsigned long start_pfn,
64 TP_ARGS(start_pfn, end_pfn, nr_scanned, nr_taken)
/linux/arch/sh/mm/
init.c
210 unsigned long start_pfn, end_pfn; in allocate_pgdat() local
212 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in allocate_pgdat()
223 NODE_DATA(nid)->node_start_pfn = start_pfn; in allocate_pgdat()
229 unsigned long start_pfn, end_pfn; in do_init_bootmem() local
234 __add_active_range(0, start_pfn, end_pfn); in do_init_bootmem()
247 unsigned long start_pfn; in early_reserve_mem() local
255 start_pfn = PFN_UP(__pa(_end)); in early_reserve_mem()
402 unsigned long start_pfn = PFN_DOWN(start); in arch_add_memory() local
410 ret = __add_pages(nid, start_pfn, nr_pages, params); in arch_add_memory()
419 unsigned long start_pfn = PFN_DOWN(start); in arch_remove_memory() local
[all …]
numa.c
28 unsigned long start_pfn, end_pfn; in setup_bootmem_node() local
33 start_pfn = PFN_DOWN(start); in setup_bootmem_node()
41 __add_active_range(nid, start_pfn, end_pfn); in setup_bootmem_node()
51 NODE_DATA(nid)->node_start_pfn = start_pfn; in setup_bootmem_node()
52 NODE_DATA(nid)->node_spanned_pages = end_pfn - start_pfn; in setup_bootmem_node()
/linux/include/linux/
memory_hotplug.h
110 extern struct zone *test_pages_in_a_zone(unsigned long start_pfn,
112 extern void __offline_isolated_pages(unsigned long start_pfn,
139 extern void __remove_pages(unsigned long start_pfn, unsigned long nr_pages,
143 extern int __add_pages(int nid, unsigned long start_pfn, unsigned long nr_pages,
147 static inline int add_pages(int nid, unsigned long start_pfn, in add_pages() argument
150 return __add_pages(nid, start_pfn, nr_pages, params); in add_pages()
153 int add_pages(int nid, unsigned long start_pfn, unsigned long nr_pages,
299 extern int offline_pages(unsigned long start_pfn, unsigned long nr_pages,
334 extern void move_pfn_range_to_zone(struct zone *zone, unsigned long start_pfn,
338 unsigned long start_pfn,
[all …]
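
The memory_hotplug.h prototypes above mix two range conventions: some helpers take (start_pfn, nr_pages), others (start_pfn, end_pfn), and the inline add_pages() shown is a thin wrapper over __add_pages(). The sketch below mirrors that wrapper shape to show how the two conventions relate; add_pages_sketch() and do_add_pages() are hypothetical names used only for this illustration.

/*
 * Standalone illustration of the (start_pfn, nr_pages) vs
 * (start_pfn, end_pfn) conventions seen in memory_hotplug.h above.
 */
#include <stdio.h>

/* Back end that wants an explicit, exclusive end_pfn. */
static int do_add_pages(int nid, unsigned long start_pfn, unsigned long end_pfn)
{
	printf("node %d: add pfns [%#lx, %#lx)\n", nid, start_pfn, end_pfn);
	return 0;
}

/* (start_pfn, nr_pages) front end, mirroring the inline-wrapper shape. */
static int add_pages_sketch(int nid, unsigned long start_pfn, unsigned long nr_pages)
{
	return do_add_pages(nid, start_pfn, start_pfn + nr_pages);
}

int main(void)
{
	return add_pages_sketch(0, 0x100000, 0x8000);
}
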
node.h
102 void link_mem_sections(int nid, unsigned long start_pfn,
106 static inline void link_mem_sections(int nid, unsigned long start_pfn, in link_mem_sections() argument
125 unsigned long start_pfn = pgdat->node_start_pfn; in register_one_node() local
126 unsigned long end_pfn = start_pfn + pgdat->node_spanned_pages; in register_one_node()
132 link_mem_sections(nid, start_pfn, end_pfn, MEMINIT_EARLY); in register_one_node()
page-isolation.h
46 start_isolate_page_range(unsigned long start_pfn, unsigned long end_pfn,
54 undo_isolate_page_range(unsigned long start_pfn, unsigned long end_pfn,
60 int test_pages_isolated(unsigned long start_pfn, unsigned long end_pfn,
/linux/drivers/hv/
hv_balloon.c
434 unsigned long start_pfn; member
447 unsigned long start_pfn; member
608 while ((pfn >= has->start_pfn) && in hv_page_offline_check()
706 unsigned long start_pfn; in hv_mem_hot_add() local
775 if ((pfn < has->start_pfn) || in hv_online_page()
799 if (start_pfn < has->start_pfn || start_pfn >= has->end_pfn) in pfn_covered()
815 gap->end_pfn = start_pfn; in pfn_covered()
818 has->covered_end_pfn = start_pfn; in pfn_covered()
865 if (start_pfn < has->start_pfn || start_pfn >= has->end_pfn) in handle_pg_range()
890 if (start_pfn > has->start_pfn && in handle_pg_range()
[all …]
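
In the hv_balloon.c hits, each hot-add region records start_pfn/end_pfn bounds and incoming PFNs are checked for containment (pfn_covered(), handle_pg_range(), hv_online_page()). A minimal sketch of that containment test follows; struct ha_region_sketch is a stand-in, not the driver's real per-region state.

/*
 * Standalone illustration of the [start_pfn, end_pfn) containment test
 * used by the hv_balloon.c hot-add paths above.
 */
#include <stdbool.h>
#include <stdio.h>

struct ha_region_sketch {
	unsigned long start_pfn;
	unsigned long end_pfn;	/* exclusive */
};

static bool region_covers(const struct ha_region_sketch *has, unsigned long pfn)
{
	return pfn >= has->start_pfn && pfn < has->end_pfn;
}

int main(void)
{
	struct ha_region_sketch has = { .start_pfn = 0x100000, .end_pfn = 0x140000 };

	printf("pfn 0x120000 covered: %d\n", region_covers(&has, 0x120000));
	printf("pfn 0x150000 covered: %d\n", region_covers(&has, 0x150000));

	return 0;
}
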
/linux/drivers/base/
memory.c
187 start_pfn, nr_pages); in memory_block_online()
202 ret = online_pages(start_pfn + nr_vmemmap_pages, in memory_block_online()
206 mhp_deinit_memmap_on_memory(start_pfn, nr_vmemmap_pages); in memory_block_online()
236 ret = offline_pages(start_pfn + nr_vmemmap_pages, in memory_block_offline()
241 adjust_present_page_count(pfn_to_page(start_pfn), in memory_block_offline()
247 mhp_deinit_memmap_on_memory(start_pfn, nr_vmemmap_pages); in memory_block_offline()
379 arch_get_memory_phys_device(start_pfn)); in phys_device_show()
385 unsigned long start_pfn, unsigned long nr_pages, in print_allowed_zone() argument
417 default_zone = test_pages_in_a_zone(start_pfn, in valid_zones_show()
418 start_pfn + nr_pages); in valid_zones_show()
[all …]
/linux/arch/parisc/mm/
init.c
130 if (pmem_ranges[j-1].start_pfn < in setup_bootmem()
131 pmem_ranges[j].start_pfn) { in setup_bootmem()
146 if (pmem_ranges[i].start_pfn - in setup_bootmem()
147 (pmem_ranges[i-1].start_pfn + in setup_bootmem()
152 pmem_ranges[i].start_pfn - in setup_bootmem()
153 (pmem_ranges[i-1].start_pfn + in setup_bootmem()
246 unsigned long start_pfn; in setup_bootmem() local
251 start_pfn = pmem_ranges[i].start_pfn; in setup_bootmem()
254 start = start_pfn << PAGE_SHIFT; in setup_bootmem()
260 if ((start_pfn + npages) > max_pfn) in setup_bootmem()
[all …]
/linux/arch/mips/loongson64/
numa.c
88 unsigned long start_pfn, end_pfn; in node_mem_init() local
97 get_pfn_range_for_nid(node, &start_pfn, &end_pfn); in node_mem_init()
99 node, start_pfn, end_pfn); in node_mem_init()
111 NODE_DATA(node)->node_start_pfn = start_pfn; in node_mem_init()
112 NODE_DATA(node)->node_spanned_pages = end_pfn - start_pfn; in node_mem_init()
134 memblock_reserve(0, PAGE_SIZE * start_pfn); in node_mem_init()
init.c
53 u64 node_id, node_psize, start_pfn, end_pfn, mem_start, mem_size; in szmem() local
72 start_pfn = ((node_id << 44) + mem_start) >> PAGE_SHIFT; in szmem()
74 end_pfn = start_pfn + node_psize; in szmem()
79 start_pfn, end_pfn, num_physpages); in szmem()
80 memblock_add_node(PFN_PHYS(start_pfn), in szmem()
/linux/arch/powerpc/mm/
init_64.c
74 unsigned long start_pfn; in vmemmap_subsection_start() local
78 start_pfn = (offset / sizeof(struct page)) & PAGE_SUBSECTION_MASK; in vmemmap_subsection_start()
79 return pfn_to_page(start_pfn); in vmemmap_subsection_start()
189 unsigned long start_pfn = page_to_pfn((struct page *)start); in altmap_cross_boundary() local
191 if ((start_pfn + nr_pfn) > altmap->end_pfn) in altmap_cross_boundary()
194 if (start_pfn < altmap->base_pfn) in altmap_cross_boundary()
/linux/arch/sh/kernel/
setup.c
197 void __init __add_active_range(unsigned int nid, unsigned long start_pfn, in __add_active_range() argument
205 start = start_pfn << PAGE_SHIFT; in __add_active_range()
215 start_pfn, end_pfn); in __add_active_range()
239 memblock_set_node(PFN_PHYS(start_pfn), PFN_PHYS(end_pfn - start_pfn), in __add_active_range()
