/linux/arch/riscv/mm/kasan_init.c
    30   pfn_pmd(PFN_DOWN  in kasan_early_init()
    37   pfn_pgd(PFN_DOWN  in kasan_early_init()
    47   pfn_pgd(PFN_DOWN  in kasan_early_init()
    69   set_pte(ptep, pfn_pte(PFN_DOWN(phys_addr), PAGE_KERNEL));  in kasan_populate_pte()
    73   set_pmd(pmd, pfn_pmd(PFN_DOWN(__pa(base_pte)), PAGE_TABLE));  in kasan_populate_pte()
    94   set_pmd(pmdp, pfn_pmd(PFN_DOWN(phys_addr), PAGE_KERNEL));  in kasan_populate_pmd()
    108  set_pgd(pgd, pfn_pgd(PFN_DOWN(__pa(base_pmd)), PAGE_TABLE));  in kasan_populate_pmd()
    129  set_pgd(pgdp, pfn_pgd(PFN_DOWN(phys_addr), PAGE_KERNEL));  in kasan_populate_pgd()
    159  set_pgd(pgd_k, pfn_pgd(PFN_DOWN(__pa(p)), PAGE_TABLE));  in kasan_shallow_populate_pgd()
/linux/arch/riscv/mm/init.c
    72   max_zone_pfns[ZONE_DMA32] = PFN_DOWN(dma32_phys_limit);  in zone_sizes_init()
    122  max_pfn > PFN_DOWN(dma32_phys_limit))  in mem_init()
    209  max_low_pfn = max_pfn = PFN_DOWN(phys_ram_end);  in setup_bootmem()
    321  ptep[pte_idx] = pfn_pte(PFN_DOWN(pa), prot);  in create_pte_mapping()
    384  pmdp[pmd_idx] = pfn_pmd(PFN_DOWN(pa), prot);  in create_pmd_mapping()
    390  pmdp[pmd_idx] = pfn_pmd(PFN_DOWN(pte_phys), PAGE_TABLE);  in create_pmd_mapping()
    427  pgdp[pgd_idx] = pfn_pgd(PFN_DOWN(pa), prot);  in create_pgd_mapping()
    433  pgdp[pgd_idx] = pfn_pgd(PFN_DOWN(next_phys), PAGE_TABLE);  in create_pgd_mapping()
    613  riscv_pfn_base = PFN_DOWN(kernel_map.phys_addr);  in setup_vm()
    748  csr_write(CSR_SATP, PFN_DOWN(__pa_symbol(swapper_pg_dir)) | SATP_MODE);  in setup_vm_final()
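Both RISC-V files use PFN_DOWN() in the same two ways: a physical address is shifted down to a page frame number before being packed into a page-table entry via pfn_pte()/pfn_pmd()/pfn_pgd(), and pfn limits such as max_pfn are derived from physical addresses. Below is a minimal user-space sketch of that arithmetic; the macros mirror the definitions in include/linux/pfn.h and a 4 KiB page size is assumed, so this is an illustration rather than kernel code.

```c
/* Minimal user-space sketch of the pfn helpers used above.
 * PAGE_SHIFT is assumed to be 12 (4 KiB pages); the macros mirror
 * the definitions in include/linux/pfn.h. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)                    /* round address down to a pfn */
#define PFN_UP(x)   (((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)  /* round address up to a pfn   */
#define PFN_PHYS(x) ((uint64_t)(x) << PAGE_SHIFT)          /* pfn back to a physical addr */

int main(void)
{
        uint64_t phys_addr = 0x80201234;  /* hypothetical physical address */

        /* create_pte_mapping()-style use: the pfn, not the address,
         * is what gets packed into a page-table entry. */
        uint64_t pfn = PFN_DOWN(phys_addr);

        printf("addr 0x%llx -> pfn 0x%llx (page starts at 0x%llx)\n",
               (unsigned long long)phys_addr,
               (unsigned long long)pfn,
               (unsigned long long)PFN_PHYS(pfn));
        return 0;
}
```

The same shift is behind the CSR_SATP write above: on RISC-V the register's PPN field takes the root page table's frame number, not its address.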
/linux/arch/x86/mm/init.c
    385  limit_pfn = PFN_DOWN(end);  in split_mem_range()
    388  pfn = start_pfn = PFN_DOWN(start);  in split_mem_range()
    397  end_pfn = PFN_DOWN(PMD_SIZE);  in split_mem_range()
    399  end_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE));  in split_mem_range()
    401  end_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE));  in split_mem_range()
    411  start_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE));  in split_mem_range()
    413  end_pfn = round_down(limit_pfn, PFN_DOWN(PMD_SIZE));  in split_mem_range()
    415  end_pfn = round_up(pfn, PFN_DOWN(PUD_SIZE));  in split_mem_range()
    428  start_pfn = round_up(pfn, PFN_DOWN(PUD_SIZE));  in split_mem_range()
    429  end_pfn = round_down(limit_pfn, PFN_DOWN(PUD_SIZE));  in split_mem_range()
    [all …]
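split_mem_range() in the x86 listing works entirely in pfn units, so the huge-page sizes are converted once with PFN_DOWN(PMD_SIZE) and PFN_DOWN(PUD_SIZE) and then used as the alignment for round_up()/round_down() on pfns. A hedged sketch of that alignment step follows; the page sizes and rounding macros are simplified local stand-ins (power-of-two alignment only), not the kernel's definitions.

```c
/* Sketch: aligning a pfn range to 2 MiB / 1 GiB boundaries, as
 * split_mem_range() does. Macros are simplified local stand-ins. */
#include <stdio.h>

#define PAGE_SHIFT 12
#define PMD_SIZE   (1UL << 21)   /* 2 MiB, assuming x86-64 defaults */
#define PUD_SIZE   (1UL << 30)   /* 1 GiB */
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)

/* power-of-two rounding only; the kernel versions live in math.h */
#define round_down(x, y) ((x) & ~((y) - 1))
#define round_up(x, y)   round_down((x) + (y) - 1, (y))

int main(void)
{
        unsigned long start_pfn = PFN_DOWN(0x00234000UL);   /* made-up range */
        unsigned long limit_pfn = PFN_DOWN(0x7ff56000UL);

        /* Interior region that could be mapped with 2 MiB pages. */
        unsigned long big_start = round_up(start_pfn, PFN_DOWN(PMD_SIZE));
        unsigned long big_end   = round_down(limit_pfn, PFN_DOWN(PMD_SIZE));

        printf("4K head: %lx-%lx, 2M body: %lx-%lx, 4K tail: %lx-%lx\n",
               start_pfn, big_start, big_start, big_end, big_end, limit_pfn);
        return 0;
}
```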
/linux/arch/x86/xen/p2m.c
    351  pfn_pte(PFN_DOWN(__pa(p2m_missing)), PAGE_KERNEL_RO));  in xen_rebuild_p2m_list()
    386  pfn_pte(PFN_DOWN(__pa(mfns)), PAGE_KERNEL));  in xen_rebuild_p2m_list()
    396  pfn_pte(PFN_DOWN(__pa(mfns)), PAGE_KERNEL_RO));  in xen_rebuild_p2m_list()
    455  if (pte_pfn(*ptep) == PFN_DOWN(__pa(p2m_identity)))  in get_phys_to_machine()
    586  if (p2m_pfn == PFN_DOWN(__pa(p2m_identity)) ||  in xen_alloc_p2m_entry()
    587  p2m_pfn == PFN_DOWN(__pa(p2m_missing))) {  in xen_alloc_p2m_entry()
    595  if (p2m_pfn == PFN_DOWN(__pa(p2m_missing)))  in xen_alloc_p2m_entry()
    606  pfn_pte(PFN_DOWN(__pa(p2m)), PAGE_KERNEL));  in xen_alloc_p2m_entry()
    669  if (pte_pfn(*ptep) == PFN_DOWN(__pa(p2m_missing)))  in __set_phys_to_machine()
    672  if (pte_pfn(*ptep) == PFN_DOWN(__pa(p2m_identity)))  in __set_phys_to_machine()
    [all …]
/linux/arch/x86/xen/mmu_pv.c
    743   PFN_DOWN(__pa(user_pgd)));  in __xen_pgd_pin()
    852   PFN_DOWN(__pa(user_pgd)));  in __xen_pgd_unpin()
    1316  mfn = pfn_to_mfn(PFN_DOWN(cr3));  in __xen_write_cr3()
    1672  if (*pt_base == PFN_DOWN(__pa(addr))) {  in check_pt_base()
    1677  if (*pt_end == PFN_DOWN(__pa(addr))) {  in check_pt_base()
    1779  PFN_DOWN(__pa_symbol(init_top_pgt)));  in xen_setup_kernel_pagetable()
    1788  PFN_DOWN(__pa_symbol(level3_user_vsyscall)));  in xen_setup_kernel_pagetable()
    1915  p2m_pfn = PFN_DOWN(pt_phys) + n_pt;  in xen_relocate_p2m()
    1941  PFN_DOWN(pt_phys));  in xen_relocate_p2m()
    1949  PFN_DOWN(pmd_phys));  in xen_relocate_p2m()
    [all …]
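In both Xen files, PFN_DOWN(__pa(...)) turns a kernel page into its frame number, either to hand it to the hypervisor (the pin/unpin paths in mmu_pv.c pass frame numbers rather than addresses) or to compare it against the frame of a sentinel page such as p2m_missing or p2m_identity in p2m.c. The sketch below shows only the comparison pattern, with a stub standing in for __pa() and plain values standing in for pte_pfn() results.

```c
/* Sketch of the frame-number comparison pattern from p2m.c: a leaf is
 * recognised as a shared sentinel page by comparing pfns, not pointers.
 * __pa() is stubbed as an identity mapping for the sketch. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)

/* Stand-in for __pa(): pretend virtual == physical here. */
static uint64_t pa_stub(const void *v)
{
        return (uint64_t)(uintptr_t)v;
}

/* Page-sized sentinels, roughly as p2m_missing/p2m_identity are. */
static long p2m_missing_stub[PAGE_SIZE / sizeof(long)]
        __attribute__((aligned(4096)));
static long p2m_identity_stub[PAGE_SIZE / sizeof(long)]
        __attribute__((aligned(4096)));

static const char *classify_leaf(uint64_t leaf_pfn)
{
        if (leaf_pfn == PFN_DOWN(pa_stub(p2m_missing_stub)))
                return "missing";
        if (leaf_pfn == PFN_DOWN(pa_stub(p2m_identity_stub)))
                return "identity";
        return "regular";
}

int main(void)
{
        /* Pretend this pfn came from pte_pfn() on an existing mapping. */
        uint64_t leaf_pfn = PFN_DOWN(pa_stub(p2m_identity_stub));

        printf("leaf is: %s\n", classify_leaf(leaf_pfn));
        return 0;
}
```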
/linux/arch/csky/kernel/setup.c
    29   unsigned long lowmem_size = PFN_DOWN(LOWMEM_LIMIT - PHYS_OFFSET_OFFSET);  in csky_memblock_init()
    30   unsigned long sseg_size = PFN_DOWN(SSEG_SIZE - PHYS_OFFSET_OFFSET);  in csky_memblock_init()
    42   max_low_pfn = max_pfn = PFN_DOWN(memblock_end_of_DRAM());  in csky_memblock_init()
/linux/arch/mips/kernel/setup.c
    304  max_pfn = PFN_DOWN(ramend);  in bootmem_init()
    312  if (start >= PFN_DOWN(HIGHMEM_START))  in bootmem_init()
    314  if (end > PFN_DOWN(HIGHMEM_START))  in bootmem_init()
    315  end = PFN_DOWN(HIGHMEM_START);  in bootmem_init()
    323  if (max_pfn > PFN_DOWN(HIGHMEM_START)) {  in bootmem_init()
    325  highstart_pfn = PFN_DOWN(HIGHMEM_START);  in bootmem_init()
    328  max_low_pfn = PFN_DOWN(HIGHMEM_START);  in bootmem_init()
/linux/arch/csky/include/asm/page.h
    51   #define phys_to_page(paddr) (pfn_to_page(PFN_DOWN(paddr)))
    77   #define ARCH_PFN_OFFSET PFN_DOWN(va_pa_offset + PHYS_OFFSET_OFFSET)
    84   #define MAP_NR(x) PFN_DOWN((unsigned long)(x) - PAGE_OFFSET - \
/linux/arch/m68k/mm/mcfmmu.c
    82   max_zone_pfn[ZONE_DMA] = PFN_DOWN(_ramend);  in paging_init()
    181  num_pages = PFN_DOWN(_ramend - _rambase);  in cf_bootmem_alloc()
    185  min_low_pfn = PFN_DOWN(_rambase);  in cf_bootmem_alloc()
    186  max_pfn = max_low_pfn = PFN_DOWN(_ramend);  in cf_bootmem_alloc()
/linux/arch/hexagon/mm/init.c
    132  bootmem_lastpg = PFN_DOWN(size);  in early_mem()
    157  bootmem_lastpg = PFN_DOWN((bootmem_lastpg << PAGE_SHIFT) &  in setup_arch_memory()
    170  max_low_pfn = bootmem_lastpg - PFN_DOWN(DMA_RESERVED_BYTES);  in setup_arch_memory()
/linux/arch/arc/mm/init.c
    98   max_low_pfn = max_pfn = PFN_DOWN(low_mem_start + low_mem_sz);  in setup_arch_memory()
    143  min_high_pfn = PFN_DOWN(high_mem_start);  in setup_arch_memory()
    144  max_high_pfn = PFN_DOWN(high_mem_start + high_mem_sz);  in setup_arch_memory()
/linux/arch/sparc/power/hibernate.c
    24   unsigned long nosave_begin_pfn = PFN_DOWN((unsigned long)&__nosave_begin);  in pfn_is_nosave()
    25   unsigned long nosave_end_pfn = PFN_DOWN((unsigned long)&__nosave_end);  in pfn_is_nosave()
/linux/arch/arm64/mm/init.c
    149  max_zone_pfns[ZONE_DMA] = PFN_DOWN(arm64_dma_phys_limit);  in zone_sizes_init()
    152  max_zone_pfns[ZONE_DMA32] = PFN_DOWN(dma32_phys_limit);  in zone_sizes_init()
    326  max = PFN_DOWN(memblock_end_of_DRAM());  in bootmem_init()
    377  max_pfn > PFN_DOWN(arm64_dma_phys_limit))  in mem_init()
/linux/arch/powerpc/mm/pgtable_32.c
    136  PFN_DOWN((unsigned long)_sinittext);  in mark_initmem_nx()
    156  PFN_DOWN((unsigned long)_stext);  in mark_rodata_ro()
    164  PFN_DOWN((unsigned long)__start_rodata);  in mark_rodata_ro()
/linux/arch/sh/mm/numa.c
    33   start_pfn = PFN_DOWN(start);  in setup_bootmem_node()
    34   end_pfn = PFN_DOWN(end);  in setup_bootmem_node()
/linux/arch/x86/kernel/cpu/sgx/encl.c
    31   page_index = PFN_DOWN(encl_page->desc - encl_page->encl->base);  in __sgx_encl_eldu()
    33   page_index = PFN_DOWN(encl->size);  in __sgx_encl_eldu()
    101  entry = xa_load(&encl->page_array, PFN_DOWN(addr));  in sgx_encl_load_page()
    170  ret = vmf_insert_pfn(vma, addr, PFN_DOWN(phys_addr));  in sgx_vma_fault()
    226  XA_STATE(xas, &encl->page_array, PFN_DOWN(start));  in sgx_encl_may_map()
    237  xas_for_each(&xas, page, PFN_DOWN(end - 1)) {  in sgx_encl_may_map()
    580  pgoff_t pcmd_index = PFN_DOWN(encl->size) + 1 + (page_index >> 5);  in sgx_encl_get_backing()
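encl.c also applies PFN_DOWN() to plain byte offsets rather than physical addresses: a page's distance from the enclave base, or the enclave size, is shifted down by PAGE_SHIFT to get a page index into the backing store or the encl->page_array xarray. A hedged sketch of that offset-to-index arithmetic follows; the enclave base, the fault address and the small array standing in for the xarray are made up.

```c
/* Sketch: PFN_DOWN() applied to byte offsets, as in __sgx_encl_eldu().
 * The enclave layout and the lookup table are illustrative only. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)

struct encl_page { int present; };

int main(void)
{
        uint64_t encl_base = 0x700000000000ULL;             /* hypothetical */
        struct encl_page pages[16] = { [3] = { .present = 1 } };

        uint64_t addr = encl_base + 3 * PAGE_SIZE + 0x80;   /* inside page 3 */

        /* Byte offset into the enclave -> page index, the same arithmetic
         * as page_index = PFN_DOWN(desc - base) above. */
        uint64_t page_index = PFN_DOWN(addr - encl_base);

        printf("page index %llu, present=%d\n",
               (unsigned long long)page_index,
               pages[page_index].present);
        return 0;
}
```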
/linux/arch/arm/include/asm/dma-direct.h
    15   pfn = PFN_DOWN(translate_phys_to_dma(dev, PFN_PHYS(pfn)));  in pfn_to_dma()
    24   pfn = PFN_DOWN(translate_dma_to_phys(dev, PFN_PHYS(pfn)));  in dma_to_pfn()
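The ARM dma-direct helpers bracket the bus-address translation with PFN_PHYS()/PFN_DOWN(): the pfn is widened to a physical address, translated, then shifted back down to a frame number. A sketch of that round trip, with a fixed offset standing in for translate_phys_to_dma():

```c
/* Sketch of the pfn_to_dma() round trip. The fixed BUS_OFFSET is a
 * made-up stand-in for the device's real bus translation. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)
#define PFN_PHYS(x) ((uint64_t)(x) << PAGE_SHIFT)

#define BUS_OFFSET 0x80000000ULL   /* hypothetical RAM-to-bus offset */

static uint64_t phys_to_dma_stub(uint64_t paddr)
{
        return paddr - BUS_OFFSET;
}

int main(void)
{
        uint64_t cpu_pfn = 0x80234;
        uint64_t dma_pfn = PFN_DOWN(phys_to_dma_stub(PFN_PHYS(cpu_pfn)));

        printf("cpu pfn 0x%llx -> dma pfn 0x%llx\n",
               (unsigned long long)cpu_pfn, (unsigned long long)dma_pfn);
        return 0;
}
```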
/linux/arch/riscv/include/asm/kfence.h
    14   unsigned long pfn = PFN_DOWN(__pa((addr & PMD_MASK)));  in split_pmd_page()
    23   set_pmd(pmd, pfn_pmd(PFN_DOWN(__pa(pte)), PAGE_TABLE));  in split_pmd_page()
/linux/arch/nios2/include/asm/page.h
    98   # define virt_to_page(vaddr) pfn_to_page(PFN_DOWN(virt_to_phys(vaddr)))
    99   # define virt_addr_valid(vaddr) pfn_valid(PFN_DOWN(virt_to_phys(vaddr)))
/linux/arch/s390/mm/init.c
    128  max_zone_pfns[ZONE_DMA] = PFN_DOWN(MAX_DMA_ADDRESS);  in paging_init()
    286  unsigned long start_pfn = PFN_DOWN(start);  in arch_add_memory()
    287  unsigned long size_pages = PFN_DOWN(size);  in arch_add_memory()
/linux/arch/mips/mm/ioremap.c
    79   pfn = PFN_DOWN(phys_addr);  in ioremap_prot()
    80   last_pfn = PFN_DOWN(last_addr);  in ioremap_prot()
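ioremap_prot() checks the requested region in pfn units: the start address and the address of the mapping's last byte are both rounded down with PFN_DOWN(), giving an inclusive frame range to validate. A small sketch of deriving that range; the addresses are made up.

```c
/* Sketch: deriving an inclusive pfn range for an I/O mapping, as in
 * ioremap_prot(). last_addr is the address of the mapping's final byte. */
#include <stdio.h>

#define PAGE_SHIFT 12
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)

int main(void)
{
        unsigned long phys_addr = 0x1f000100UL;   /* hypothetical MMIO base */
        unsigned long size      = 0x2300UL;
        unsigned long last_addr = phys_addr + size - 1;

        unsigned long pfn      = PFN_DOWN(phys_addr);
        unsigned long last_pfn = PFN_DOWN(last_addr);

        /* Every frame from pfn to last_pfn (inclusive) is touched by the
         * mapping and would be checked against RAM here. */
        printf("checking pfns 0x%lx..0x%lx\n", pfn, last_pfn);
        return 0;
}
```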
/linux/arch/nios2/kernel/setup.c
    145  *max_low = PFN_DOWN(memblock_get_current_limit());  in find_limits()
    147  *max_high = PFN_DOWN(memblock_end_of_DRAM());  in find_limits()
/linux/drivers/xen/swiotlb-xen.c
    102  if (pfn_valid(PFN_DOWN(paddr)))  in is_xen_swiotlb_buffer()
    401  if (pfn_valid(PFN_DOWN(dma_to_phys(dev, dev_addr))))  in xen_swiotlb_map_page()
    425  if (pfn_valid(PFN_DOWN(dma_to_phys(hwdev, dev_addr))))  in xen_swiotlb_unmap_page()
    443  if (pfn_valid(PFN_DOWN(dma_to_phys(dev, dma_addr))))  in xen_swiotlb_sync_single_for_cpu()
    463  if (pfn_valid(PFN_DOWN(dma_to_phys(dev, dma_addr))))  in xen_swiotlb_sync_single_for_device()
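swiotlb-xen repeatedly asks pfn_valid(PFN_DOWN(paddr)) whether a (possibly dma-translated) physical address lands in a page covered by the kernel's memory map before deciding how to treat the buffer. A sketch of that test, with pfn_valid() stubbed as a simple range check against a made-up RAM window:

```c
/* Sketch: pfn_valid(PFN_DOWN(paddr)) as used above. pfn_valid() is
 * stubbed as a range check; the real helper consults the memory map. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)

#define RAM_START_PFN 0x80000ULL   /* hypothetical RAM window */
#define RAM_END_PFN   0xC0000ULL

static int pfn_valid_stub(uint64_t pfn)
{
        return pfn >= RAM_START_PFN && pfn < RAM_END_PFN;
}

int main(void)
{
        uint64_t paddr = 0x80123456ULL;   /* made-up physical address */

        if (pfn_valid_stub(PFN_DOWN(paddr)))
                printf("0x%llx is covered by the memory map\n",
                       (unsigned long long)paddr);
        else
                printf("0x%llx is not covered\n",
                       (unsigned long long)paddr);
        return 0;
}
```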
/linux/arch/x86/include/asm/xen/page.h
    241  return XMADDR(PFN_PHYS(pfn_to_mfn(PFN_DOWN(phys.paddr))) | offset);  in phys_to_machine()
    247  return XPADDR(PFN_PHYS(mfn_to_pfn(PFN_DOWN(machine.maddr))) | offset);  in machine_to_phys()
    306  #define virt_to_pfn(v) (PFN_DOWN(__pa(v)))
/linux/arch/m68k/kernel/setup_no.c
    159  min_low_pfn = PFN_DOWN(memory_start);  in setup_arch()
    160  max_pfn = max_low_pfn = PFN_DOWN(memory_end);  in setup_arch()