Lines Matching refs:vaddr
All 18 matches fall inside dvma_map_cpu(); the leading number is the line in the source file.

  79          unsigned long vaddr, int len)                               (declared as argument)
  88  vaddr &= PAGE_MASK;
  90  end = PAGE_ALIGN(vaddr + len);
  92  pr_debug("dvma: mapping kern %08lx to virt %08lx\n", kaddr, vaddr);
  93  pgd = pgd_offset_k(vaddr);
  94  p4d = p4d_offset(pgd, vaddr);
  95  pud = pud_offset(p4d, vaddr);
 101  if((pmd = pmd_alloc(&init_mm, pud, vaddr)) == NULL) {
 106  if((end & PGDIR_MASK) > (vaddr & PGDIR_MASK))
 107          end2 = (vaddr + (PGDIR_SIZE-1)) & PGDIR_MASK;
 115  if((pte = pte_alloc_kernel(pmd, vaddr)) == NULL) {
 120  if((end2 & PMD_MASK) > (vaddr & PMD_MASK))
 121          end3 = (vaddr + (PMD_SIZE-1)) & PMD_MASK;
 127          __pa(kaddr), vaddr);
 132  vaddr += PAGE_SIZE;
 133  } while(vaddr < end3);
 135  } while(vaddr < end2);
 137  } while(vaddr < end);
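For context, the matched lines are the pieces of dvma_map_cpu()'s three-level page-table walk: round the addresses to page boundaries, locate the top of the kernel page tables for the DVMA virtual address, then allocate a pmd and a pte table as needed and map one page per innermost iteration. The sketch below is a reconstruction around those fragments, assuming the file is arch/m68k/sun3x/dvma.c; the parts that do not appear in the listing above (the set_pte()/pfn_pte() call, the -ENOMEM error paths and the out: label, the else branches of the clamps) are filled in from the mainline kernel and should be checked against the exact tree being browsed.

/*
 * Hedged sketch of dvma_map_cpu(), reconstructed around the matched lines.
 * Kernel-internal code: relies on <linux/mm.h> / <asm/pgalloc.h> machinery,
 * not buildable as a standalone userspace program.
 */
static inline int dvma_map_cpu(unsigned long kaddr,
			       unsigned long vaddr, int len)
{
	pgd_t *pgd;
	p4d_t *p4d;
	pud_t *pud;
	unsigned long end;
	int ret = 0;

	/* Work on whole pages: round both addresses down, the end up. */
	kaddr &= PAGE_MASK;
	vaddr &= PAGE_MASK;
	end = PAGE_ALIGN(vaddr + len);

	pr_debug("dvma: mapping kern %08lx to virt %08lx\n", kaddr, vaddr);

	/* Top of the kernel page-table walk for the DVMA virtual address. */
	pgd = pgd_offset_k(vaddr);
	p4d = p4d_offset(pgd, vaddr);
	pud = pud_offset(p4d, vaddr);

	do {		/* one pass per PGDIR-sized region */
		pmd_t *pmd;
		unsigned long end2;

		if ((pmd = pmd_alloc(&init_mm, pud, vaddr)) == NULL) {
			ret = -ENOMEM;
			goto out;
		}

		/* Clamp this pass to the end of the current PGDIR region. */
		if ((end & PGDIR_MASK) > (vaddr & PGDIR_MASK))
			end2 = (vaddr + (PGDIR_SIZE - 1)) & PGDIR_MASK;
		else
			end2 = end;

		do {	/* one pass per PMD-sized region */
			pte_t *pte;
			unsigned long end3;

			if ((pte = pte_alloc_kernel(pmd, vaddr)) == NULL) {
				ret = -ENOMEM;
				goto out;
			}

			/* Clamp again to the end of the current PMD region. */
			if ((end2 & PMD_MASK) > (vaddr & PMD_MASK))
				end3 = (vaddr + (PMD_SIZE - 1)) & PMD_MASK;
			else
				end3 = end2;

			do {	/* map one page per iteration */
				pr_debug("mapping %08lx phys to %08lx\n",
					 __pa(kaddr), vaddr);
				set_pte(pte, pfn_pte(virt_to_pfn((void *)kaddr),
						     PAGE_KERNEL));
				pte++;
				kaddr += PAGE_SIZE;
				vaddr += PAGE_SIZE;
			} while (vaddr < end3);
		} while (vaddr < end2);
	} while (vaddr < end);
out:
	return ret;
}

The end2/end3 clamps (listing lines 106-107 and 120-121) simply cap each inner loop at the next PGDIR_SIZE or PMD_SIZE boundary, so pmd_alloc() and pte_alloc_kernel() are called once per table rather than once per mapped page.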