/linux/arch/x86/mm/ioremap.c
    222  retval = memtype_reserve(phys_addr, (u64)phys_addr + size,  [in __ioremap_caller()]
    278  area->phys_addr = phys_addr;  [in __ioremap_caller()]
    301  memtype_free(phys_addr, phys_addr + size);  [in __ioremap_caller()]
    477  memtype_free(p->phys_addr, p->phys_addr + get_vm_area_size(p));  [in iounmap()]
    553  switch (e820__get_entry_type(phys_addr, phys_addr + size - 1)) {  [in memremap_should_map_decrypted()]
    588  if (phys_addr == paddr)  [in memremap_is_efi_data()]
    594  if (phys_addr == paddr)  [in memremap_is_efi_data()]
    625  if (phys_addr == paddr)  [in memremap_is_setup_data()]
    634  if ((phys_addr > paddr) && (phys_addr < (paddr + len))) {  [in memremap_is_setup_data()]
    647  if ((phys_addr > paddr) && (phys_addr < (paddr + len)))  [in memremap_is_setup_data()]
    [all …]

/linux/arch/ia64/mm/ioremap.c
    17   __ioremap_uc(unsigned long phys_addr)  [in __ioremap_uc() argument]
    26   attr = kern_mem_attribute(phys_addr, size);  [in early_ioremap()]
    29   return __ioremap_uc(phys_addr);  [in early_ioremap()]
    52   return __ioremap_uc(phys_addr);  [in ioremap()]
    58   gran_base = GRANULEROUNDDOWN(phys_addr);  [in ioremap()]
    69   page_base = phys_addr & PAGE_MASK;  [in ioremap()]
    77   offset = phys_addr & ~PAGE_MASK;  [in ioremap()]
    78   phys_addr &= PAGE_MASK;  [in ioremap()]
    87   area->phys_addr = phys_addr;  [in ioremap()]
    98   return __ioremap_uc(phys_addr);  [in ioremap()]
    [all …]

/linux/mm/early_ioremap.c
    120  __func__, &phys_addr, size))  [in __early_ioremap()]
    124  last_addr = phys_addr + size - 1;  [in __early_ioremap()]
    125  if (WARN_ON(!size || last_addr < phys_addr))  [in __early_ioremap()]
    132  offset = offset_in_page(phys_addr);  [in __early_ioremap()]
    133  phys_addr &= PAGE_MASK;  [in __early_ioremap()]
    149  __late_set_fixmap(idx, phys_addr, prot);  [in __early_ioremap()]
    151  __early_set_fixmap(idx, phys_addr, prot);  [in __early_ioremap()]
    152  phys_addr += PAGE_SIZE;  [in __early_ioremap()]
    273  return (__force void __iomem *)phys_addr;  [in early_ioremap()]
    280  return (void *)phys_addr;  [in early_memremap()]
    [all …]

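The mm/early_ioremap.c hits above show the usual shape of an early mapping: split the request into a page-aligned base plus an in-page offset, then install one fixmap slot per page, advancing the physical address by one page per slot. A minimal userspace sketch of that arithmetic, assuming 4 KiB pages; it is not the kernel's fixmap code, only the address math:

/* Illustrative userspace sketch (not kernel code): split a physical
 * range into a page-aligned base plus an in-page offset, then walk it
 * one page at a time. PAGE_SIZE is assumed to be 4 KiB. */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

int main(void)
{
	uint64_t phys_addr = 0xfed00123;   /* arbitrary example address */
	uint64_t size      = 0x2100;       /* arbitrary example length  */

	uint64_t last_addr = phys_addr + size - 1;
	if (!size || last_addr < phys_addr)        /* reject empty or wrapping ranges */
		return 1;

	uint64_t offset = phys_addr & ~PAGE_MASK;  /* byte offset inside the first page */
	phys_addr &= PAGE_MASK;                    /* page-aligned base to map          */

	/* number of pages needed to cover offset + size bytes */
	uint64_t nrpages = (offset + size + PAGE_SIZE - 1) / PAGE_SIZE;

	for (uint64_t i = 0; i < nrpages; i++) {
		printf("map slot %llu -> phys 0x%llx\n",
		       (unsigned long long)i, (unsigned long long)phys_addr);
		phys_addr += PAGE_SIZE;            /* one page per mapping slot */
	}
	printf("caller's pointer = mapped base + 0x%llx\n",
	       (unsigned long long)offset);
	return 0;
}

The offset is split off before aligning because the caller's returned pointer is the mapped base plus that saved offset.
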
/linux/arch/arm64/mm/ioremap.c
    24   unsigned long offset = phys_addr & ~PAGE_MASK;  [in __ioremap_caller()]
    33   phys_addr &= PAGE_MASK;  [in __ioremap_caller()]
    39   last_addr = phys_addr + size - 1;  [in __ioremap_caller()]
    40   if (!size || last_addr < phys_addr || (last_addr & ~PHYS_MASK))  [in __ioremap_caller()]
    46   if (WARN_ON(pfn_is_map_memory(__phys_to_pfn(phys_addr))))  [in __ioremap_caller()]
    53   area->phys_addr = phys_addr;  [in __ioremap_caller()]
    55   err = ioremap_page_range(addr, addr + size, phys_addr, prot);  [in __ioremap_caller()]
    66   return __ioremap_caller(phys_addr, size, prot,  [in __ioremap()]
    84   void __iomem *ioremap_cache(phys_addr_t phys_addr, size_t size)  [in ioremap_cache() argument]
    87   if (pfn_is_map_memory(__phys_to_pfn(phys_addr)))  [in ioremap_cache()]
    [all …]

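The arm64 entry above adds a third check to the usual prologue: besides rejecting zero-length and wrapping requests, it rejects ranges whose end exceeds the architecture's physical address mask (PHYS_MASK). A hedged sketch of those checks, with a made-up 48-bit width standing in for PHYS_MASK:

/* Illustrative sketch (userspace, not the kernel implementation): the
 * sanity checks an ioremap()-style prologue performs on a physical
 * range. PHYS_WIDTH_BITS = 48 is an assumption, not the real PHYS_MASK. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PHYS_WIDTH_BITS 48
#define PHYS_MASK       ((1ULL << PHYS_WIDTH_BITS) - 1)

static bool phys_range_is_valid(uint64_t phys_addr, uint64_t size)
{
	uint64_t last_addr = phys_addr + size - 1;

	if (!size)                      /* empty request */
		return false;
	if (last_addr < phys_addr)      /* arithmetic wrapped around */
		return false;
	if (last_addr & ~PHYS_MASK)     /* end lies beyond the physical address space */
		return false;
	return true;
}

int main(void)
{
	printf("%d\n", phys_range_is_valid(0x80000000ULL, 0x1000));        /* 1              */
	printf("%d\n", phys_range_is_valid(0xffffffffffffff00ULL, 0x200)); /* 0: wraps       */
	printf("%d\n", phys_range_is_valid(0x1000000000000ULL, 0x1000));   /* 0: beyond mask */
	return 0;
}
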
/linux/arch/nios2/mm/ioremap.c
    37   pfn = PFN_DOWN(phys_addr);  [in remap_area_pte()]
    60   phys_addr -= address;  [in remap_area_pmd()]
    83   phys_addr -= address;  [in remap_area_pages()]
    104  phys_addr + address, flags))  [in remap_area_pages()]
    127  last_addr = phys_addr + size - 1;  [in ioremap()]
    129  if (!size || last_addr < phys_addr)  [in ioremap()]
    133  if (phys_addr > PHYS_OFFSET && phys_addr < virt_to_phys(high_memory)) {  [in ioremap()]
    137  t_addr = __va(phys_addr);  [in ioremap()]
    149  if (IS_MAPPABLE_UNCACHEABLE(phys_addr) &&  [in ioremap()]
    154  offset = phys_addr & ~PAGE_MASK;  [in ioremap()]
    [all …]

/linux/arch/parisc/mm/ioremap.c
    36   unsigned long end = phys_addr + size - 1;  [in ioremap()]
    39   (phys_addr >= 0x00500000 && end < 0x03bfffff))  [in ioremap()]
    40   phys_addr |= F_EXTEND(0xfc000000);  [in ioremap()]
    44   last_addr = phys_addr + size - 1;  [in ioremap()]
    45   if (!size || last_addr < phys_addr)  [in ioremap()]
    51   if (phys_addr < virt_to_phys(high_memory)) {  [in ioremap()]
    55   t_addr = __va(phys_addr);  [in ioremap()]
    71   offset = phys_addr & ~PAGE_MASK;  [in ioremap()]
    72   phys_addr &= PAGE_MASK;  [in ioremap()]
    73   size = PAGE_ALIGN(last_addr + 1) - phys_addr;  [in ioremap()]
    [all …]

/linux/arch/mips/mm/ioremap.c
    56   cpu_addr = plat_ioremap(phys_addr, size, flags);  [in ioremap_prot()]
    60   phys_addr = fixup_bigphys_addr(phys_addr, size);  [in ioremap_prot()]
    63   last_addr = phys_addr + size - 1;  [in ioremap_prot()]
    64   if (!size || last_addr < phys_addr)  [in ioremap_prot()]
    71   if (IS_LOW512(phys_addr) && IS_LOW512(last_addr) &&  [in ioremap_prot()]
    73   return (void __iomem *) CKSEG1ADDR(phys_addr);  [in ioremap_prot()]
    79   pfn = PFN_DOWN(phys_addr);  [in ioremap_prot()]
    84   &phys_addr, &last_addr);  [in ioremap_prot()]
    91   offset = phys_addr & ~PAGE_MASK;  [in ioremap_prot()]
    92   phys_addr &= PAGE_MASK;  [in ioremap_prot()]
    [all …]

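The MIPS entry above takes a shortcut before building any page tables: if the whole range lies in the first 512 MB of physical space, it returns a KSEG1 alias instead of mapping anything. The sketch below assumes the conventional 32-bit MIPS constants (KSEG1 at 0xA0000000 mirroring physical 0x00000000-0x1FFFFFFF uncached); they are quoted from memory, not taken from this file:

/* Sketch of the low-512MB shortcut: a low physical address can be
 * "mapped" with pure arithmetic through the uncached KSEG1 window.
 * Constants are the classic 32-bit MIPS layout, used here as an
 * assumption for illustration. */
#include <stdint.h>
#include <stdio.h>

#define KSEG1_BASE  0xA0000000UL
#define LOW512_MASK 0x1FFFFFFFUL                 /* first 512 MB */

#define IS_LOW512(a) (((a) & ~LOW512_MASK) == 0)

static unsigned long uncached_window_addr(unsigned long phys_addr,
					  unsigned long size)
{
	unsigned long last_addr = phys_addr + size - 1;

	if (IS_LOW512(phys_addr) && IS_LOW512(last_addr))
		return KSEG1_BASE | phys_addr;   /* direct, uncached alias */
	return 0;                                /* would need a real mapping */
}

int main(void)
{
	printf("0x%lx\n", uncached_window_addr(0x1fc00000UL, 0x1000)); /* low range: aliased */
	printf("0x%lx\n", uncached_window_addr(0x20000000UL, 0x1000)); /* too high: 0        */
	return 0;
}
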
/linux/arch/ia64/kernel/efi.c
    423   return __va(md->phys_addr);  [in efi_get_pal_addr()]
    592   md->phys_addr,  [in efi_init()]
    702   return md->phys_addr;  [in efi_get_iobase()]
    734   if (phys_addr - md->phys_addr < efi_md_size(md))  [in efi_memory_descriptor()]
    752   end = phys_addr + size;  [in efi_memmap_intersects()]
    756   if (md->phys_addr < end && efi_md_end(md) > phys_addr)  [in efi_memmap_intersects()]
    1070  k->start = md->phys_addr;  [in efi_memmap_init()]
    1105  (lim - md->phys_addr)  [in efi_memmap_init()]
    1109  k->start = md->phys_addr;  [in efi_memmap_init()]
    1117  as = md->phys_addr;  [in efi_memmap_init()]
    [all …]

/linux/arch/sh/mm/ioremap.c
    85   __ioremap_caller(phys_addr_t phys_addr, unsigned long size,  [in __ioremap_caller() argument]
    92   mapped = __ioremap_trapped(phys_addr, size);  [in __ioremap_caller()]
    96   mapped = __ioremap_29bit(phys_addr, size, pgprot);  [in __ioremap_caller()]
    101  last_addr = phys_addr + size - 1;  [in __ioremap_caller()]
    102  if (!size || last_addr < phys_addr)  [in __ioremap_caller()]
    109  return ioremap_fixed(phys_addr, size, pgprot);  [in __ioremap_caller()]
    115  mapped = pmb_remap_caller(phys_addr, size, pgprot, caller);  [in __ioremap_caller()]
    122  offset = phys_addr & ~PAGE_MASK;  [in __ioremap_caller()]
    123  phys_addr &= PAGE_MASK;  [in __ioremap_caller()]
    124  size = PAGE_ALIGN(last_addr+1) - phys_addr;  [in __ioremap_caller()]
    [all …]

/linux/arch/sh/mm/ioremap_fixed.c
    48   ioremap_fixed(phys_addr_t phys_addr, unsigned long size, pgprot_t prot)  [in ioremap_fixed() argument]
    59   offset = phys_addr & ~PAGE_MASK;  [in ioremap_fixed()]
    60   phys_addr &= PAGE_MASK;  [in ioremap_fixed()]
    61   size = PAGE_ALIGN(phys_addr + size) - phys_addr;  [in ioremap_fixed()]
    90   __set_fixmap(idx, phys_addr, prot);  [in ioremap_fixed()]
    91   phys_addr += PAGE_SIZE;  [in ioremap_fixed()]

/linux/tools/perf/scripts/python/mem-phys-addr.py
    62   def is_system_ram(phys_addr):  [argument]
    64   position = bisect.bisect(system_ram, phys_addr)
    69   def is_persistent_mem(phys_addr):  [argument]
    70   position = bisect.bisect(pmem, phys_addr)
    75   def find_memory_type(phys_addr):  [argument]
    76   if phys_addr == 0:
    78   if is_system_ram(phys_addr):
    81   if is_persistent_mem(phys_addr):
    88   if int(m[0], 16) <= phys_addr <= int(m[1], 16):
    95   phys_addr = sample["phys_addr"]
    [all …]

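The perf script above classifies each sampled phys_addr by bisecting sorted address ranges parsed from /proc/iomem. A C rendition of the same idea, assuming sorted, non-overlapping inclusive ranges (the script's exact list layout may differ):

/* Illustrative sketch: classify a physical address against sorted,
 * non-overlapping [start, end] ranges using binary search. The example
 * ranges are made up, not parsed from a real /proc/iomem. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct range { uint64_t start, end; };   /* inclusive bounds */

static const char *classify(uint64_t phys_addr,
			    const struct range *r, size_t n,
			    const char *label)
{
	size_t lo = 0, hi = n;

	while (lo < hi) {                    /* find first range with start > addr */
		size_t mid = lo + (hi - lo) / 2;
		if (r[mid].start <= phys_addr)
			lo = mid + 1;
		else
			hi = mid;
	}
	if (lo && phys_addr <= r[lo - 1].end)
		return label;
	return "N/A";
}

int main(void)
{
	static const struct range system_ram[] = {
		{ 0x00001000, 0x0009ffff },      /* made-up example ranges */
		{ 0x00100000, 0x7fffffff },
	};

	printf("%s\n", classify(0x00200000, system_ram, 2, "System RAM")); /* System RAM */
	printf("%s\n", classify(0x000b0000, system_ram, 2, "System RAM")); /* N/A        */
	return 0;
}
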
/linux/arch/mips/alchemy/common/setup.c
    82   phys_addr_t fixup_bigphys_addr(phys_addr_t phys_addr, phys_addr_t size)  [in fixup_bigphys_addr() argument]
    88   if ((phys_addr >> 32) != 0)  [in fixup_bigphys_addr()]
    89   return phys_addr;  [in fixup_bigphys_addr()]
    92   if (phys_addr >= start && (phys_addr + size - 1) <= end)  [in fixup_bigphys_addr()]
    93   return (phys_addr_t)(AU1500_PCI_MEM_PHYS_ADDR + phys_addr);  [in fixup_bigphys_addr()]
    96   return phys_addr;  [in fixup_bigphys_addr()]
    102  phys_addr_t phys_addr = fixup_bigphys_addr(pfn << PAGE_SHIFT, size);  [in io_remap_pfn_range() local]
    104  return remap_pfn_range(vma, vaddr, phys_addr >> PAGE_SHIFT, size, prot);  [in io_remap_pfn_range()]

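fixup_bigphys_addr() above is a self-contained example of window relocation: a 32-bit physical address whose range falls entirely inside the PCI memory window is shifted by a fixed offset onto the wide bus address, and anything else passes through unchanged. A sketch with placeholder window bounds and offset (not the Au1500 values):

/* Sketch of the window-relocation idea in plain C. The window bounds
 * and offset are hypothetical placeholders for illustration only. */
#include <stdint.h>
#include <stdio.h>

#define PCI_WINDOW_START  0x40000000ULL   /* hypothetical window start  */
#define PCI_WINDOW_END    0x4FFFFFFFULL   /* hypothetical window end    */
#define PCI_WINDOW_OFFSET 0x400000000ULL  /* hypothetical bus offset    */

static uint64_t fixup_addr(uint64_t phys_addr, uint64_t size)
{
	if (phys_addr >> 32)                              /* already a wide address      */
		return phys_addr;
	if (phys_addr >= PCI_WINDOW_START &&
	    phys_addr + size - 1 <= PCI_WINDOW_END)       /* whole range inside window   */
		return PCI_WINDOW_OFFSET + phys_addr;
	return phys_addr;                                 /* outside: pass through as-is */
}

int main(void)
{
	printf("0x%llx\n", (unsigned long long)fixup_addr(0x40001000, 0x100)); /* relocated */
	printf("0x%llx\n", (unsigned long long)fixup_addr(0x20000000, 0x100)); /* untouched */
	return 0;
}
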
/linux/include/uapi/linux/cec-funcs.h
    19   msg->msg[2] = phys_addr >> 8;  [in cec_msg_active_source()]
    24   __u16 *phys_addr)  [in cec_ops_active_source() argument]
    44   __u16 phys_addr)  [in cec_msg_inactive_source() argument]
    53   __u16 *phys_addr)  [in cec_ops_inactive_source() argument]
    68   __u16 phys_addr)  [in cec_msg_routing_information() argument]
    116  __u16 *phys_addr)  [in cec_ops_set_stream_path() argument]
    185  __u16 phys_addr;  [member]
    545  __u16 phys_addr)  [in cec_msg_clear_ext_timer() argument]
    701  __u16 phys_addr)  [in cec_msg_set_ext_timer() argument]
    730  __u16 *phys_addr)  [in cec_ops_set_ext_timer() argument]
    [all …]

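The CEC helpers above carry a 16-bit HDMI-CEC physical address (four 4-bit fields, printed a.b.c.d) in two message bytes, high byte first; that is what the "phys_addr >> 8" store corresponds to. A small pack/unpack sketch; the operand offsets are illustrative rather than the header's exact msg[] indices:

/* Sketch of packing/unpacking a CEC physical address into a two-byte
 * operand buffer, high byte first, plus the a.b.c.d nibble formatting. */
#include <stdint.h>
#include <stdio.h>

static void pack_phys_addr(uint8_t *operands, uint16_t phys_addr)
{
	operands[0] = phys_addr >> 8;      /* high byte first on the wire */
	operands[1] = phys_addr & 0xff;
}

static uint16_t unpack_phys_addr(const uint8_t *operands)
{
	return (uint16_t)((operands[0] << 8) | operands[1]);
}

int main(void)
{
	uint8_t op[2];
	uint16_t pa = 0x1200;              /* nibbles 1.2.0.0 */

	pack_phys_addr(op, pa);
	printf("bytes: %02x %02x\n", op[0], op[1]);

	pa = unpack_phys_addr(op);
	printf("phys addr: %x.%x.%x.%x\n",
	       pa >> 12, (pa >> 8) & 0xf, (pa >> 4) & 0xf, pa & 0xf);
	return 0;
}
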
/linux/arch/riscv/mm/kasan_init.c
    56   phys_addr_t phys_addr;  [in kasan_populate_pte() local]
    68   phys_addr = memblock_phys_alloc(PAGE_SIZE, PAGE_SIZE);  [in kasan_populate_pte()]
    69   set_pte(ptep, pfn_pte(PFN_DOWN(phys_addr), PAGE_KERNEL));  [in kasan_populate_pte()]
    78   phys_addr_t phys_addr;  [in kasan_populate_pmd() local]
    92   phys_addr = memblock_phys_alloc(PMD_SIZE, PMD_SIZE);  [in kasan_populate_pmd()]
    93   if (phys_addr) {  [in kasan_populate_pmd()]
    94   set_pmd(pmdp, pfn_pmd(PFN_DOWN(phys_addr), PAGE_KERNEL));  [in kasan_populate_pmd()]
    113  phys_addr_t phys_addr;  [in kasan_populate_pgd() local]
    127  phys_addr = memblock_phys_alloc(PGDIR_SIZE, PGDIR_SIZE);  [in kasan_populate_pgd()]
    128  if (phys_addr) {  [in kasan_populate_pgd()]
    [all …]

/linux/include/linux/io-mapping.h
    68   resource_size_t phys_addr;  [in io_mapping_map_atomic_wc() local]
    71   phys_addr = mapping->base + offset;  [in io_mapping_map_atomic_wc()]
    74   return __iomap_local_pfn_prot(PHYS_PFN(phys_addr), mapping->prot);  [in io_mapping_map_atomic_wc()]
    88   resource_size_t phys_addr;  [in io_mapping_map_local_wc() local]
    91   phys_addr = mapping->base + offset;  [in io_mapping_map_local_wc()]
    92   return __iomap_local_pfn_prot(PHYS_PFN(phys_addr), mapping->prot);  [in io_mapping_map_local_wc()]
    105  resource_size_t phys_addr;  [in io_mapping_map_wc() local]
    108  phys_addr = mapping->base + offset;  [in io_mapping_map_wc()]
    110  return ioremap_wc(phys_addr, size);  [in io_mapping_map_wc()]

/linux/arch/arm/mm/ioremap.c
    62   if (vm->phys_addr > paddr ||  [in find_static_vm_paddr()]
    63   paddr + size - 1 > vm->phys_addr + vm->size - 1)  [in find_static_vm_paddr()]
    280  addr += paddr - svm->vm.phys_addr;  [in __arm_ioremap_pfn_caller()]
    297  area->phys_addr = paddr;  [in __arm_ioremap_pfn_caller()]
    327  unsigned long offset = phys_addr & ~PAGE_MASK;  [in __arm_ioremap_caller()]
    328  unsigned long pfn = __phys_to_pfn(phys_addr);  [in __arm_ioremap_caller()]
    333  last_addr = phys_addr + size - 1;  [in __arm_ioremap_caller()]
    334  if (!size || last_addr < phys_addr)  [in __arm_ioremap_caller()]
    401  return __arm_ioremap_caller(phys_addr, size, mtype,  [in __arm_ioremap_exec()]
    410  void *arch_memremap_wb(phys_addr_t phys_addr, size_t size)  [in arch_memremap_wb() argument]
    [all …]

/linux/drivers/cxl/core/regs.c
    188  resource_size_t phys_addr;  [in cxl_map_component_regs() local]
    191  phys_addr = pci_resource_start(pdev, map->barno);  [in cxl_map_component_regs()]
    192  phys_addr += map->block_offset;  [in cxl_map_component_regs()]
    194  phys_addr += map->component_map.hdm_decoder.offset;  [in cxl_map_component_regs()]
    196  regs->hdm_decoder = devm_cxl_iomap_block(dev, phys_addr, length);  [in cxl_map_component_regs()]
    209  resource_size_t phys_addr;  [in cxl_map_device_regs() local]
    211  phys_addr = pci_resource_start(pdev, map->barno);  [in cxl_map_device_regs()]
    212  phys_addr += map->block_offset;  [in cxl_map_device_regs()]
    218  addr = phys_addr + map->device_map.status.offset;  [in cxl_map_device_regs()]
    229  addr = phys_addr + map->device_map.mbox.offset;  [in cxl_map_device_regs()]
    [all …]

/linux/drivers/firmware/efi/memmap.c
    256  start = md->phys_addr;  [in efi_memmap_split_count()]
    321  start = md->phys_addr;  [in efi_memmap_insert()]
    331  md->num_pages = (m_end - md->phys_addr + 1) >>  [in efi_memmap_insert()]
    337  md->phys_addr = m_end + 1;  [in efi_memmap_insert()]
    338  md->num_pages = (end - md->phys_addr + 1) >>  [in efi_memmap_insert()]
    344  md->num_pages = (m_start - md->phys_addr) >>  [in efi_memmap_insert()]
    351  md->phys_addr = m_start;  [in efi_memmap_insert()]
    358  md->phys_addr = m_end + 1;  [in efi_memmap_insert()]
    366  md->num_pages = (m_start - md->phys_addr) >>  [in efi_memmap_insert()]
    372  md->phys_addr = m_start;  [in efi_memmap_insert()]
    [all …]

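efi_memmap_insert() above carves an existing descriptor around an inserted range, producing up to three pieces whose sizes are expressed as page counts. A standalone sketch of that splitting arithmetic, assuming the inserted range lies inside the descriptor and that pages are the 4 KiB EFI page size; it mirrors the arithmetic visible above but is not the kernel's descriptor handling itself:

/* Sketch: split an existing region [start, end] around an inserted
 * sub-range [m_start, m_end] into head / overlap / tail pieces and
 * report each piece's size as a page count. */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12   /* 4 KiB EFI pages */

static void emit(const char *tag, uint64_t start, uint64_t end)
{
	if (end < start)
		return;                         /* this piece is empty */
	printf("%s: [0x%llx, 0x%llx] = %llu pages\n", tag,
	       (unsigned long long)start, (unsigned long long)end,
	       (unsigned long long)((end - start + 1) >> PAGE_SHIFT));
}

int main(void)
{
	/* existing region and the sub-range being inserted (example values,
	 * with the sub-range assumed to lie entirely inside the region) */
	uint64_t start = 0x100000, end = 0x1fffff;
	uint64_t m_start = 0x140000, m_end = 0x17ffff;

	emit("before ", start, m_start - 1);    /* untouched head  */
	emit("overlap", m_start, m_end);        /* re-typed middle */
	emit("after  ", m_end + 1, end);        /* untouched tail  */
	return 0;
}
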
/linux/arch/powerpc/boot/cuboot-pq2.c
    33   u32 phys_addr;  [member]
    178  if (mem->phys_addr + mem->size[1] == mmio->phys_addr)  [in fixup_pci()]
    180  else if (mmio->phys_addr + mmio->size[1] == mem->phys_addr)  [in fixup_pci()]
    185  out_be32(&pci_regs[1][0], mem_base->phys_addr | 1);  [in fixup_pci()]
    188  out_be32(&pci_regs[1][1], io->phys_addr | 1);  [in fixup_pci()]
    192  out_le32(&pci_regs[0][2], mem->phys_addr >> 12);  [in fixup_pci()]
    196  out_le32(&pci_regs[0][8], mmio->phys_addr >> 12);  [in fixup_pci()]
    200  out_le32(&pci_regs[0][14], io->phys_addr >> 12);  [in fixup_pci()]

/linux/arch/hexagon/mm/ioremap.c
    12   void __iomem *ioremap(unsigned long phys_addr, unsigned long size)  [in ioremap() argument]
    15   unsigned long offset = phys_addr & ~PAGE_MASK;  [in ioremap()]
    21   last_addr = phys_addr + size - 1;  [in ioremap()]
    24   if (!size || (last_addr < phys_addr))  [in ioremap()]
    33   if (ioremap_page_range(addr, addr+size, phys_addr, prot)) {  [in ioremap()]

/linux/drivers/input/serio/xilinx_ps2.c
    236  resource_size_t remap_size, phys_addr;  [in xps2_of_probe() local]
    268  phys_addr = r_mem.start;  [in xps2_of_probe()]
    270  if (!request_mem_region(phys_addr, remap_size, DRIVER_NAME)) {  [in xps2_of_probe()]
    272  (unsigned long long)phys_addr);  [in xps2_of_probe()]
    278  drvdata->base_address = ioremap(phys_addr, remap_size);  [in xps2_of_probe()]
    281  (unsigned long long)phys_addr);  [in xps2_of_probe()]
    296  (unsigned long long)phys_addr, drvdata->base_address,  [in xps2_of_probe()]
    306  "Xilinx XPS PS/2 at %08llX", (unsigned long long)phys_addr);  [in xps2_of_probe()]
    308  "xilinxps2/serio at %08llX", (unsigned long long)phys_addr);  [in xps2_of_probe()]
    316  release_mem_region(phys_addr, remap_size);  [in xps2_of_probe()]

/linux/drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
    267  u64 phys_addr;  [in nx_fw_cmd_create_rx_ctx() local]
    346  phys_addr = hostrq_phys_addr;  [in nx_fw_cmd_create_rx_ctx()]
    348  cmd.req.arg1 = (u32)(phys_addr >> 32);  [in nx_fw_cmd_create_rx_ctx()]
    427  u64 offset, phys_addr;  [in nx_fw_cmd_create_tx_ctx() local]
    472  phys_addr = rq_phys_addr;  [in nx_fw_cmd_create_tx_ctx()]
    704  cpu_to_le64(rds_ring->phys_addr);  [in netxen_init_old_ctx()]
    726  lower32(recv_ctx->phys_addr));  [in netxen_init_old_ctx()]
    728  upper32(recv_ctx->phys_addr));  [in netxen_init_old_ctx()]
    762  cpu_to_le64(recv_ctx->phys_addr +  [in netxen_alloc_hw_resources()]
    901  rds_ring->phys_addr);  [in netxen_free_hw_resources()]
    [all …]

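The netxen hits above show a common driver chore: a 64-bit DMA/physical address has to be handed to hardware as two 32-bit register writes (the lower32()/upper32() split). A minimal sketch, with an array standing in for the real MMIO registers:

/* Sketch of splitting a 64-bit address into low/high 32-bit halves for
 * hardware with 32-bit registers. regs[] is a stand-in for MMIO writes. */
#include <stdint.h>
#include <stdio.h>

#define lower32(x) ((uint32_t)((x) & 0xffffffffULL))
#define upper32(x) ((uint32_t)((x) >> 32))

int main(void)
{
	uint64_t phys_addr = 0x0000001fe0004000ULL;  /* example DMA address */
	uint32_t regs[2];

	regs[0] = lower32(phys_addr);   /* low-address register  */
	regs[1] = upper32(phys_addr);   /* high-address register */

	printf("low = 0x%08x, high = 0x%08x\n", regs[0], regs[1]);
	printf("recombined = 0x%llx\n",
	       (unsigned long long)(((uint64_t)regs[1] << 32) | regs[0]));
	return 0;
}
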
/linux/arch/x86/platform/efi/efi_32.c
    42   start_pfn = PFN_DOWN(md->phys_addr);  [in efi_map_region()]
    44   end = md->phys_addr + size;  [in efi_map_region()]
    48   va = __va(md->phys_addr);  [in efi_map_region()]
    53   va = ioremap_cache(md->phys_addr, size);  [in efi_map_region()]
    58   pr_err("ioremap of 0x%llX failed!\n", md->phys_addr);  [in efi_map_region()]
    88   void __init parse_efi_setup(u64 phys_addr, u32 data_len) {}  [in parse_efi_setup() argument]

/linux/drivers/pci/endpoint/pci-epc-mem.c
    169  phys_addr_t *phys_addr, size_t size)  [in pci_epc_mem_alloc_addr() argument]
    189  *phys_addr = mem->window.phys_base +  [in pci_epc_mem_alloc_addr()]
    191  virt_addr = ioremap(*phys_addr, align_size);  [in pci_epc_mem_alloc_addr()]
    209  phys_addr_t phys_addr)  [in pci_epc_get_matching_window() argument]
    217  if (phys_addr >= mem->window.phys_base &&  [in pci_epc_get_matching_window()]
    218  phys_addr < (mem->window.phys_base + mem->window.size))  [in pci_epc_get_matching_window()]
    234  void pci_epc_mem_free_addr(struct pci_epc *epc, phys_addr_t phys_addr,  [in pci_epc_mem_free_addr() argument]
    243  mem = pci_epc_get_matching_window(epc, phys_addr);  [in pci_epc_mem_free_addr()]
    252  pageno = (phys_addr - mem->window.phys_base) >> page_shift;  [in pci_epc_mem_free_addr()]

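pci_epc_get_matching_window() and pci_epc_mem_free_addr() above recover which address window an allocation came from and turn the physical address back into a page index within that window. A sketch of the same bookkeeping with an invented window table and page sizes:

/* Sketch: find the window whose physical range contains an address and
 * convert the address into a page number within that window. The
 * window table and page shifts are made up for the example. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct mem_window {
	uint64_t phys_base;
	uint64_t size;
	unsigned int page_shift;
};

static const struct mem_window *find_window(const struct mem_window *w,
					    size_t n, uint64_t phys_addr)
{
	for (size_t i = 0; i < n; i++)
		if (phys_addr >= w[i].phys_base &&
		    phys_addr < w[i].phys_base + w[i].size)
			return &w[i];
	return NULL;                             /* address not in any window */
}

int main(void)
{
	static const struct mem_window windows[] = {
		{ 0x90000000, 0x00100000, 12 },  /* 1 MB of 4 KiB pages  */
		{ 0xa0000000, 0x00400000, 16 },  /* 4 MB of 64 KiB pages */
	};
	uint64_t phys_addr = 0xa0020000;

	const struct mem_window *w = find_window(windows, 2, phys_addr);
	if (w) {
		uint64_t pageno = (phys_addr - w->phys_base) >> w->page_shift;
		printf("window base 0x%llx, page %llu\n",
		       (unsigned long long)w->phys_base,
		       (unsigned long long)pageno);
	}
	return 0;
}
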
/linux/drivers/misc/sgi-xp/xp_uv.c
    91   xp_expand_memprotect_uv(unsigned long phys_addr, unsigned long size)  [in xp_expand_memprotect_uv() argument]
    96   ret = uv_bios_change_memprotect(phys_addr, size, UV_MEMPROT_ALLOW_RW);  [in xp_expand_memprotect_uv()]
    106  ret = sn_change_memprotect(phys_addr, size, SN_MEMPROT_ACCESS_CLASS_1,  [in xp_expand_memprotect_uv()]
    120  xp_restrict_memprotect_uv(unsigned long phys_addr, unsigned long size)  [in xp_restrict_memprotect_uv() argument]
    125  ret = uv_bios_change_memprotect(phys_addr, size,  [in xp_restrict_memprotect_uv()]
    136  ret = sn_change_memprotect(phys_addr, size, SN_MEMPROT_ACCESS_CLASS_0,  [in xp_restrict_memprotect_uv()]