Searched refs: PAGE_OFFSET (results 1 – 25 of 232), sorted by relevance

/linux/arch/alpha/include/asm/
setup.h
23 #define KERNEL_START (PAGE_OFFSET+KERNEL_START_PHYS)
25 #define INIT_STACK (PAGE_OFFSET+KERNEL_START_PHYS+0x02000)
26 #define EMPTY_PGT (PAGE_OFFSET+KERNEL_START_PHYS+0x04000)
27 #define EMPTY_PGE (PAGE_OFFSET+KERNEL_START_PHYS+0x08000)
28 #define ZERO_PGE (PAGE_OFFSET+KERNEL_START_PHYS+0x0A000)
30 #define START_ADDR (PAGE_OFFSET+KERNEL_START_PHYS+0x10000)
page.h
69 #define PAGE_OFFSET 0xffff800000000000UL macro
71 #define PAGE_OFFSET 0xfffffc0000000000UL macro
77 #define PAGE_OFFSET 0xffff800000000000 macro
79 #define PAGE_OFFSET 0xfffffc0000000000 macro
84 #define __pa(x) ((unsigned long) (x) - PAGE_OFFSET)
85 #define __va(x) ((void *)((unsigned long) (x) + PAGE_OFFSET))
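
The __pa()/__va() pair above is the canonical linear ("direct") mapping: a kernel virtual address and its physical address differ by the constant PAGE_OFFSET. A minimal userspace sketch of that arithmetic, assuming a 64-bit host, using the 0xfffffc0000000000UL value from the listing and a made-up physical address purely for illustration:

#include <stdio.h>

/* Macros copied from the alpha page.h hits above; the physical address in
 * main() is hypothetical.  Assumes unsigned long is 64 bits. */
#define PAGE_OFFSET 0xfffffc0000000000UL
#define __pa(x) ((unsigned long) (x) - PAGE_OFFSET)
#define __va(x) ((void *)((unsigned long) (x) + PAGE_OFFSET))

int main(void)
{
	unsigned long phys = 0x310000UL;	/* hypothetical physical address */
	void *virt = __va(phys);		/* 0xfffffc0000310000 */

	printf("phys 0x%lx -> virt %p -> phys 0x%lx\n", phys, virt, __pa(virt));
	return 0;
}

The same round trip underlies every __pa()/__va() variant in the results below; the per-architecture differences are mostly extra offsets and validity checks.
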
/linux/arch/arm/include/asm/
memory.h
29 #define PAGE_OFFSET UL(CONFIG_PAGE_OFFSET) macro
30 #define KERNEL_OFFSET (PAGE_OFFSET)
55 #define MODULES_VADDR (PAGE_OFFSET - SZ_16M)
58 #define MODULES_VADDR (PAGE_OFFSET - SZ_8M)
69 #define MODULES_END (PAGE_OFFSET - PMD_SIZE)
71 #define MODULES_END (PAGE_OFFSET)
122 #define MODULES_VADDR PAGE_OFFSET
281 return (phys_addr_t)x - PAGE_OFFSET + PHYS_OFFSET; in __virt_to_phys_nodebug()
286 return x - PHYS_OFFSET + PAGE_OFFSET; in __phys_to_virt()
292 ((((unsigned long)(kaddr) - PAGE_OFFSET) >> PAGE_SHIFT) + \
[all …]
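
On 32-bit ARM the linear map starts at CONFIG_PAGE_OFFSET while RAM itself may begin at a non-zero physical address, so __virt_to_phys_nodebug() and __phys_to_virt() above carry a second constant, PHYS_OFFSET. A sketch of that translation with assumed example values (0xC0000000 and 0x80000000 are hypothetical board choices, not taken from the listing):

#include <stdio.h>

/* Assumed example values, purely illustrative. */
#define PAGE_OFFSET 0xC0000000UL	/* CONFIG_PAGE_OFFSET: start of the kernel linear map */
#define PHYS_OFFSET 0x80000000UL	/* physical address where this board's RAM begins */

static unsigned long virt_to_phys(unsigned long x)
{
	return x - PAGE_OFFSET + PHYS_OFFSET;	/* mirrors __virt_to_phys_nodebug() */
}

static unsigned long phys_to_virt(unsigned long x)
{
	return x - PHYS_OFFSET + PAGE_OFFSET;	/* mirrors __phys_to_virt() */
}

int main(void)
{
	unsigned long virt = 0xC0008000UL;	/* a kernel address inside the linear map */

	printf("virt 0x%lx -> phys 0x%lx -> virt 0x%lx\n",
	       virt, virt_to_phys(virt), phys_to_virt(virt_to_phys(virt)));
	return 0;
}
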
/linux/arch/powerpc/include/asm/
page.h
81 #define PAGE_OFFSET ASM_CONST(CONFIG_PAGE_OFFSET) macro
115 #define MEMORY_START (PHYSICAL_START + PAGE_OFFSET - KERNELBASE)
222 VIRTUAL_BUG_ON((unsigned long)(x) >= PAGE_OFFSET); \
223 (void *)(unsigned long)((phys_addr_t)(x) | PAGE_OFFSET); \
228 VIRTUAL_BUG_ON((unsigned long)(x) < PAGE_OFFSET); \
233 #define __va(x) ((void *)(unsigned long)((phys_addr_t)(x) + PAGE_OFFSET - MEMORY_START))
234 #define __pa(x) ((unsigned long)(x) - PAGE_OFFSET + MEMORY_START)
259 #define is_kernel_addr(x) ((x) >= PAGE_OFFSET)
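
The __va() at line 223 of page.h above builds the virtual address with a bitwise OR rather than an addition: (phys_addr_t)(x) | PAGE_OFFSET. That works because PAGE_OFFSET only has high bits set and real physical addresses stay far below it, so the two operands never share a set bit and OR equals ADD. A small sketch of the equivalence, assuming a 64-bit host and a Book3S-style PAGE_OFFSET (the real value comes from CONFIG_PAGE_OFFSET):

#include <assert.h>
#include <stdio.h>

/* Assumed 64-bit Book3S-style constant, illustrative only. */
#define PAGE_OFFSET 0xc000000000000000UL

int main(void)
{
	unsigned long phys = 0x12345000UL;	/* hypothetical physical address */

	/* OR and ADD agree because phys and PAGE_OFFSET set disjoint bits:
	 * real physical addresses never reach the top two bits. */
	assert((phys | PAGE_OFFSET) == (phys + PAGE_OFFSET));
	printf("__va(0x%lx) = 0x%lx\n", phys, phys | PAGE_OFFSET);
	return 0;
}
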
/linux/arch/x86/mm/
physaddr.c
25 x = y + (__START_KERNEL_map - PAGE_OFFSET); in __phys_addr()
58 x = y + (__START_KERNEL_map - PAGE_OFFSET); in __virt_addr_valid()
74 unsigned long phys_addr = x - PAGE_OFFSET; in __phys_addr()
76 VIRTUAL_BUG_ON(x < PAGE_OFFSET); in __phys_addr()
90 if (x < PAGE_OFFSET) in __virt_addr_valid()
96 return pfn_valid((x - PAGE_OFFSET) >> PAGE_SHIFT); in __virt_addr_valid()
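
x86-64 keeps two kernel virtual mappings of memory, the direct map starting at PAGE_OFFSET and the kernel-image mapping at __START_KERNEL_map, and __phys_addr() above handles both. A simplified sketch of that two-branch translation, assuming non-KASLR constants and treating the kernel relocation offset phys_base as zero (the real function also performs the VIRTUAL_BUG_ON and pfn_valid checks shown above):

#include <stdio.h>

/* Assumed non-KASLR constants; phys_base (kernel relocation) is taken as 0 here. */
#define PAGE_OFFSET        0xffff888000000000UL	/* start of the direct map */
#define __START_KERNEL_map 0xffffffff80000000UL	/* start of the kernel-image mapping */
static const unsigned long phys_base = 0;

static unsigned long phys_addr(unsigned long x)
{
	unsigned long y = x - __START_KERNEL_map;

	if (x > y)		/* no wrap-around: x was a kernel-image address */
		return y + phys_base;
	/* direct-map address: y + (__START_KERNEL_map - PAGE_OFFSET) == x - PAGE_OFFSET */
	return y + (__START_KERNEL_map - PAGE_OFFSET);
}

int main(void)
{
	printf("image  -> phys 0x%lx\n", phys_addr(__START_KERNEL_map + 0x1000000UL));
	printf("direct -> phys 0x%lx\n", phys_addr(PAGE_OFFSET + 0x1000000UL));
	return 0;
}
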
/linux/arch/nds32/include/asm/
memory.h
24 #define PAGE_OFFSET (CONFIG_PAGE_OFFSET) macro
32 #define __virt_to_phys(x) ((x) - PAGE_OFFSET + PHYS_OFFSET)
33 #define __phys_to_virt(x) ((x) - PHYS_OFFSET + PAGE_OFFSET)
40 #define MODULES_END (PAGE_OFFSET)
83 #define virt_addr_valid(kaddr) ((unsigned long)(kaddr) >= PAGE_OFFSET && (unsigned long)(kaddr) < (…
/linux/arch/powerpc/mm/book3s32/
hash_low.S
52 lis r8, (mmu_hash_lock - PAGE_OFFSET)@h
53 ori r8, r8, (mmu_hash_lock - PAGE_OFFSET)@l
149 lis r8, (mmu_hash_lock - PAGE_OFFSET)@ha
151 stw r0, (mmu_hash_lock - PAGE_OFFSET)@l(r8)
158 lis r8, (mmu_hash_lock - PAGE_OFFSET)@ha
160 stw r0, (mmu_hash_lock - PAGE_OFFSET)@l(r8)
199 lis r6, (mmu_hash_lock - PAGE_OFFSET)@ha
200 addi r6, r6, (mmu_hash_lock - PAGE_OFFSET)@l
251 lis r6, (mmu_hash_lock - PAGE_OFFSET)@ha
402 lwz r6, (next_slot - PAGE_OFFSET)@l(r4)
[all …]
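
The assembly above computes real (physical) addresses of kernel symbols such as mmu_hash_lock by subtracting PAGE_OFFSET from their link-time virtual addresses, then loads each 32-bit constant in two 16-bit halves: lis/ori with @h/@l, or @ha paired with a sign-extending 16-bit displacement. A sketch of what the @h, @l and @ha operators do to such a constant, using a hypothetical symbol address and an assumed CONFIG_PAGE_OFFSET:

#include <stdio.h>
#include <stdint.h>

/* What the powerpc @h, @l and @ha relocation operators do to a 32-bit constant. */
static uint16_t hi(uint32_t x) { return x >> 16; }
static uint16_t lo(uint32_t x) { return x & 0xffff; }
static uint16_t ha(uint32_t x) { return (x + 0x8000) >> 16; }	/* compensates for a sign-extended @l */

int main(void)
{
	uint32_t page_offset = 0xc0000000;	/* assumed CONFIG_PAGE_OFFSET */
	uint32_t vaddr = 0xc0a89f40;		/* hypothetical address of mmu_hash_lock */
	uint32_t paddr = vaddr - page_offset;

	/* lis rX,paddr@h ; ori rX,rX,paddr@l rebuilds paddr exactly: */
	printf("@h/@l : 0x%08x\n", (unsigned)(((uint32_t)hi(paddr) << 16) | lo(paddr)));

	/* @ha pairs with a sign-extending 16-bit displacement, e.g. stw ...@l(rX): */
	printf("@ha/@l: 0x%08x\n", (unsigned)(((uint32_t)ha(paddr) << 16) + (int16_t)lo(paddr)));
	return 0;
}
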
mmu.c
152 setbat(idx, PAGE_OFFSET + base, base, size, PAGE_KERNEL_X); in __mmu_mapin_ram()
162 unsigned long border = (unsigned long)__init_begin - PAGE_OFFSET; in mmu_mapin_ram()
198 unsigned long base = (unsigned long)_stext - PAGE_OFFSET; in mmu_mark_initmem_nx()
199 unsigned long top = (unsigned long)_etext - PAGE_OFFSET; in mmu_mark_initmem_nx()
200 unsigned long border = (unsigned long)__init_begin - PAGE_OFFSET; in mmu_mark_initmem_nx()
205 setibat(i++, PAGE_OFFSET + base, base, size, PAGE_KERNEL_TEXT); in mmu_mark_initmem_nx()
217 setibat(i++, PAGE_OFFSET + base, base, size, PAGE_KERNEL_TEXT); in mmu_mark_initmem_nx()
400 unsigned int hash = (unsigned int)Hash - PAGE_OFFSET; in MMU_init_hw_patch()
/linux/arch/sh/include/asm/
page.h
123 #define PAGE_OFFSET CONFIG_PAGE_OFFSET macro
135 #define ___pa(x) ((x)-PAGE_OFFSET+__MEMORY_START)
136 #define ___va(x) ((x)+PAGE_OFFSET-__MEMORY_START)
138 #define ___pa(x) ((x)-PAGE_OFFSET)
139 #define ___va(x) ((x)+PAGE_OFFSET)
152 #define UNCAC_ADDR(addr) ((addr) - PAGE_OFFSET + uncached_start)
153 #define CAC_ADDR(addr) ((addr) - uncached_start + PAGE_OFFSET)
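
UNCAC_ADDR()/CAC_ADDR() above convert between two virtual aliases of the same RAM: the cached linear map at PAGE_OFFSET and an uncached window starting at uncached_start. A sketch of that rebasing with hypothetical values for both bases (neither is taken from the listing):

#include <stdio.h>

/* Hypothetical bases: PAGE_OFFSET comes from CONFIG_PAGE_OFFSET, uncached_start
 * is set up at boot; the values here are illustrative only. */
#define PAGE_OFFSET 0x80000000UL
static unsigned long uncached_start = 0xb0000000UL;

static unsigned long uncac_addr(unsigned long addr)
{
	return addr - PAGE_OFFSET + uncached_start;	/* same RAM, uncached alias */
}

static unsigned long cac_addr(unsigned long addr)
{
	return addr - uncached_start + PAGE_OFFSET;	/* back to the cached linear map */
}

int main(void)
{
	unsigned long v = PAGE_OFFSET + 0x2000UL;

	printf("cached 0x%lx <-> uncached 0x%lx\n", v, uncac_addr(v));
	return cac_addr(uncac_addr(v)) == v ? 0 : 1;
}
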
/linux/arch/openrisc/include/asm/
page.h
29 #define PAGE_OFFSET 0xc0000000 macro
30 #define KERNELBASE PAGE_OFFSET
72 #define __va(x) ((void *)((unsigned long)(x) + PAGE_OFFSET))
73 #define __pa(x) ((unsigned long) (x) - PAGE_OFFSET)
79 (mem_map + (((unsigned long)(addr)-PAGE_OFFSET) >> PAGE_SHIFT))
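
The last line above is the classic flatmem virt_to_page(): shift the linear-map offset down to a page-frame index and use it to index mem_map. A toy sketch of that indexing, with an invented struct page and a small mem_map array:

#include <stdio.h>

/* Toy flatmem model; struct page and the array size are invented for illustration. */
#define PAGE_OFFSET 0xc0000000UL
#define PAGE_SHIFT  12

struct page { int dummy; };
static struct page mem_map[1024];		/* one entry per physical page frame */

static struct page *virt_to_page(unsigned long addr)
{
	return mem_map + ((addr - PAGE_OFFSET) >> PAGE_SHIFT);
}

int main(void)
{
	unsigned long addr = PAGE_OFFSET + 5 * 4096 + 0x123;	/* somewhere inside page 5 */

	printf("page index = %ld\n", (long)(virt_to_page(addr) - mem_map));	/* prints 5 */
	return 0;
}
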
/linux/arch/arm64/include/asm/
memory.h
34 #define VMEMMAP_SIZE ((_PAGE_END(VA_BITS_MIN) - PAGE_OFFSET) >> VMEMMAP_SHIFT)
45 #define PAGE_OFFSET (_PAGE_OFFSET(VA_BITS)) macro
266 #define __is_lm_address(addr) (((u64)(addr) - PAGE_OFFSET) < (PAGE_END - PAGE_OFFSET))
268 #define __lm_to_phys(addr) (((addr) - PAGE_OFFSET) + PHYS_OFFSET)
286 #define __phys_to_virt(x) ((unsigned long)((x) - PHYS_OFFSET) | PAGE_OFFSET)
340 u64 __addr = PAGE_OFFSET + (__idx * PAGE_SIZE); \
345 u64 __idx = (__tag_reset((u64)x) - PAGE_OFFSET) / PAGE_SIZE; \
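
__is_lm_address() above checks both bounds of the linear map with a single unsigned comparison: if the address is below PAGE_OFFSET the subtraction wraps around to a huge value, so one compare rejects it along with anything at or past PAGE_END. A sketch of the trick with illustrative 48-bit-VA style bounds (the real values depend on VA_BITS):

#include <stdio.h>
#include <stdint.h>

/* Illustrative bounds; the real ones depend on VA_BITS. */
#define PAGE_OFFSET 0xffff000000000000ULL	/* start of the linear map */
#define PAGE_END    0xffff800000000000ULL	/* first address past it */

static int is_lm_address(uint64_t addr)
{
	/* Below PAGE_OFFSET the subtraction wraps to a huge value, so one
	 * unsigned compare checks both bounds at once. */
	return (addr - PAGE_OFFSET) < (PAGE_END - PAGE_OFFSET);
}

int main(void)
{
	printf("%d %d %d\n",
	       is_lm_address(0xffff000000001000ULL),	/* 1: inside the linear map */
	       is_lm_address(0xffffffffffff0000ULL),	/* 0: above PAGE_END */
	       is_lm_address(0x0000000000001000ULL));	/* 0: user address, subtraction wraps */
	return 0;
}
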
/linux/arch/csky/include/asm/
page.h
27 #define PAGE_OFFSET CONFIG_PAGE_OFFSET macro
40 #define virt_addr_valid(kaddr) ((void *)(kaddr) >= (void *)PAGE_OFFSET && \
79 #define __pa(x) ((unsigned long)(x) - PAGE_OFFSET + va_pa_offset)
80 #define __va(x) ((void *)((unsigned long)(x) + PAGE_OFFSET - va_pa_offset))
84 #define MAP_NR(x) PFN_DOWN((unsigned long)(x) - PAGE_OFFSET - \
/linux/arch/powerpc/kernel/
head_8xx.S
48 cmpli cr0, \scratch, PAGE_OFFSET@h
209 oris r10, r10, (swapper_pg_dir - PAGE_OFFSET)@ha
241 stw r10, (itlb_miss_counter - PAGE_OFFSET)@l(0)
300 stw r10, (dtlb_miss_counter - PAGE_OFFSET)@l(0)
358 cmplwi cr1, r11, (.Ldtlbie - PAGE_OFFSET)@l
359 cmplwi cr7, r11, (.Litlbie - PAGE_OFFSET)@l
406 cmpli cr1, r11, PAGE_OFFSET@h
691 lis r9, (1f - PAGE_OFFSET)@h
692 ori r9, r9, (1f - PAGE_OFFSET)@l
712 LOAD_REG_IMMEDIATE(r6, PAGE_OFFSET)
[all …]
/linux/tools/testing/selftests/powerpc/mm/
bad_accesses.c
24 #define PAGE_OFFSET (0xcul << 60) macro
60 (fault_addr < PAGE_OFFSET || fault_addr >= kernel_virt_end)); in bad_access()
92 kernel_virt_end = PAGE_OFFSET + (7 * (512ul << 40)); in test()
97 kernel_virt_end = PAGE_OFFSET + (7 * (64ul << 40)); in test()
/linux/arch/nios2/include/asm/
page.h
31 #define PAGE_OFFSET \ macro
80 ((unsigned long)(x) - PAGE_OFFSET + PHYS_OFFSET)
82 ((void *)((unsigned long)(x) + PAGE_OFFSET - PHYS_OFFSET))
85 ((void *)(((page) - mem_map) << PAGE_SHIFT) + PAGE_OFFSET)
/linux/arch/xtensa/include/asm/
page.h
28 #define PAGE_OFFSET XCHAL_KSEG_CACHED_VADDR macro
33 #define PAGE_OFFSET _AC(CONFIG_DEFAULT_MEM_START, UL) macro
167 unsigned long off = va - PAGE_OFFSET; in ___pa()
188 ((unsigned long) (x) - PAGE_OFFSET + PHYS_OFFSET)
191 ((void *)((unsigned long) (x) - PHYS_OFFSET + PAGE_OFFSET))
/linux/arch/m68k/include/asm/
page_mm.h
100 if(x >= PAGE_OFFSET) in ___pa()
101 return (x-PAGE_OFFSET); in ___pa()
112 return (void *)(x+PAGE_OFFSET); in __va()
139 #define virt_addr_valid(kaddr) ((unsigned long)(kaddr) >= PAGE_OFFSET && (unsigned long)(kaddr) < (…
page_no.h
26 #define virt_to_page(addr) (mem_map + (((unsigned long)(addr)-PAGE_OFFSET) >> PAGE_SHIFT))
27 #define page_to_virt(page) __va(((((page) - mem_map) << PAGE_SHIFT) + PAGE_OFFSET))
33 #define virt_addr_valid(kaddr) (((unsigned long)(kaddr) >= PAGE_OFFSET) && \
/linux/arch/riscv/include/asm/
page.h
34 #define PAGE_OFFSET _AC(CONFIG_PAGE_OFFSET, UL) macro
36 #define KERN_VIRT_SIZE (-PAGE_OFFSET)
85 #define ARCH_PFN_OFFSET (PAGE_OFFSET >> PAGE_SHIFT)
110 ((x) >= PAGE_OFFSET && (!IS_ENABLED(CONFIG_64BIT) || (x) < kernel_map.virt_addr))
171 (unsigned long)(_addr) >= PAGE_OFFSET && pfn_valid(virt_to_pfn(_addr)); \
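
KERN_VIRT_SIZE above is simply -PAGE_OFFSET: in unsigned arithmetic the negation yields 2^64 - PAGE_OFFSET, i.e. the number of bytes from PAGE_OFFSET up to the top of the address space. A one-line check of that identity, assuming an sv39-style PAGE_OFFSET value (illustrative only; other MMU modes use different constants):

#include <stdio.h>
#include <stdint.h>

/* Assumed sv39-style CONFIG_PAGE_OFFSET, illustrative only. */
#define PAGE_OFFSET 0xffffffe000000000ULL

int main(void)
{
	/* In unsigned arithmetic -PAGE_OFFSET == 2^64 - PAGE_OFFSET: the number
	 * of bytes from PAGE_OFFSET up to the top of the address space. */
	uint64_t kern_virt_size = -PAGE_OFFSET;

	printf("KERN_VIRT_SIZE = 0x%llx bytes (%llu GiB)\n",
	       (unsigned long long)kern_virt_size,
	       (unsigned long long)(kern_virt_size >> 30));
	return 0;
}
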
/linux/arch/hexagon/mm/
init.c
21 #define bootmem_startpg (PFN_UP(((unsigned long) _end) - PAGE_OFFSET + PHYS_OFFSET))
186 segtable = segtable + (PAGE_OFFSET >> 22); in setup_arch_memory()
219 (unsigned long) _K_init_devicetable-PAGE_OFFSET); in setup_arch_memory()
220 *segtable = ((u32) (unsigned long) _K_init_devicetable-PAGE_OFFSET) | in setup_arch_memory()
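
The segtable + (PAGE_OFFSET >> 22) expression above picks the first-level page-table entry that maps the start of the kernel: each L1 entry covers 1 << 22 bytes (the 22-bit L1 shift noted in mem-layout.h below), so with the 0xc0000000 PAGE_OFFSET that is entry 768. A trivial check of the index arithmetic:

#include <stdio.h>

#define PAGE_OFFSET 0xc0000000UL	/* value from mem-layout.h below */

int main(void)
{
	/* Each first-level entry covers 1 << 22 bytes (4 MiB), so the entry that
	 * maps the first kernel page sits at index PAGE_OFFSET >> 22. */
	printf("L1 index of PAGE_OFFSET = %lu\n", PAGE_OFFSET >> 22);	/* prints 768 */
	return 0;
}
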
/linux/arch/hexagon/include/asm/
mem-layout.h
19 #define PAGE_OFFSET _AC(0xc0000000, UL) macro
41 #define TASK_SIZE (PAGE_OFFSET)
54 #define MIN_KERNEL_SEG (PAGE_OFFSET >> PGDIR_SHIFT) /* L1 shift is 22 bits */
/linux/arch/sparc/include/asm/
page_32.h
119 #define PAGE_OFFSET 0xf0000000 macro
124 #define __pa(x) ((unsigned long)(x) - PAGE_OFFSET + phys_base)
125 #define __va(x) ((void *)((unsigned long) (x) - phys_base + PAGE_OFFSET))
134 #define virt_addr_valid(kaddr) ((((unsigned long)(kaddr)-PAGE_OFFSET)>>PAGE_SHIFT) < max_mapnr)
/linux/arch/arm/mach-footbridge/
common.c
236 WARN_ON(res < PAGE_OFFSET || res >= (unsigned long)high_memory); in __virt_to_bus()
238 return res + (fb_bus_sdram_offset() - PAGE_OFFSET); in __virt_to_bus()
244 res = res - (fb_bus_sdram_offset() - PAGE_OFFSET); in __bus_to_virt()
246 WARN_ON(res < PAGE_OFFSET || res >= (unsigned long)high_memory); in __bus_to_virt()
/linux/drivers/pci/controller/
pci-versatile.c
95 writel(__pa(PAGE_OFFSET) >> 28, PCI_SMAP(mem)); in versatile_pci_probe()
132 writel(__pa(PAGE_OFFSET), local_pci_cfg_base + PCI_BASE_ADDRESS_0); in versatile_pci_probe()
133 writel(__pa(PAGE_OFFSET), local_pci_cfg_base + PCI_BASE_ADDRESS_1); in versatile_pci_probe()
134 writel(__pa(PAGE_OFFSET), local_pci_cfg_base + PCI_BASE_ADDRESS_2); in versatile_pci_probe()
/linux/arch/powerpc/mm/nohash/
8xx.c
37 if (va >= PAGE_OFFSET && va < PAGE_OFFSET + block_mapped_ram) in v_block_mapped()
132 unsigned long v = PAGE_OFFSET + offset; in mmu_mapin_ram_chunk()
145 flush_tlb_kernel_range(PAGE_OFFSET + v, PAGE_OFFSET + top); in mmu_mapin_ram_chunk()
