Lines Matching refs:vaddr

177 void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,  in copy_user_page()  argument
180 cpu_dcache_wbinval_page((unsigned long)vaddr); in copy_user_page()
181 cpu_icache_inval_page((unsigned long)vaddr); in copy_user_page()
187 void clear_user_page(void *addr, unsigned long vaddr, struct page *page) in clear_user_page() argument
189 cpu_dcache_wbinval_page((unsigned long)vaddr); in clear_user_page()
190 cpu_icache_inval_page((unsigned long)vaddr); in clear_user_page()
197 unsigned long vaddr, struct vm_area_struct *vma) in copy_user_highpage() argument
206 if (aliasing(vaddr, (unsigned long)kfrom)) in copy_user_highpage()
208 vto = kremap0(vaddr, pto); in copy_user_highpage()
209 vfrom = kremap1(vaddr, pfrom); in copy_user_highpage()
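
Line 206 asks whether the user alias at vaddr and the kernel's own mapping of the source page can fall into different cache sets, before lines 208-209 remap both pages at addresses congruent to vaddr. The aliasing() macro itself is not part of this listing; the following is a minimal, self-contained sketch of the underlying colour test, with PAGE_SHIFT and DCACHE_WAY_SIZE as illustrative stand-ins rather than the real nds32 values:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative constants; the real page and D-cache way sizes come from
 * the nds32 cache configuration, not from this listing. */
#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define DCACHE_WAY_SIZE (4 * PAGE_SIZE)          /* hypothetical 16 KiB way */
#define COLOUR_BITS     ((DCACHE_WAY_SIZE - 1) & ~(PAGE_SIZE - 1))

/* In a virtually indexed cache, two mappings of the same physical page can
 * sit in different cache sets (and hold stale data) when the index bits
 * above the page offset, their "colour", differ. */
static bool would_alias(unsigned long vaddr, unsigned long kaddr)
{
	return (vaddr & COLOUR_BITS) != (kaddr & COLOUR_BITS);
}

int main(void)
{
	unsigned long user_va   = 0x00403000UL;  /* colour 3 */
	unsigned long kernel_va = 0xc0081000UL;  /* colour 1 */

	printf("aliasing: %s\n", would_alias(user_va, kernel_va) ? "yes" : "no");
	return 0;
}
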
218 void clear_user_highpage(struct page *page, unsigned long vaddr) in clear_user_highpage() argument
225 if (aliasing(kto, vaddr) && kto != 0) { in clear_user_highpage()
229 vto = kremap0(vaddr, page_to_phys(page)); in clear_user_highpage()
251 unsigned long vaddr, kto; in flush_dcache_page() local
253 vaddr = page->index << PAGE_SHIFT; in flush_dcache_page()
254 if (aliasing(vaddr, kaddr)) { in flush_dcache_page()
255 kto = kremap0(vaddr, page_to_phys(page)); in flush_dcache_page()
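
flush_dcache_page() has no user virtual address to hand, so line 253 reconstructs one from the page-cache index: on architectures with aliasing caches, shared user mappings are normally placed colour-aligned (SHMLBA-style) with their file offset, so page->index << PAGE_SHIFT carries the same colour bits as any user mapping of the page, which is all the aliasing() test on line 254 needs. A small stand-alone illustration of that congruence, with made-up values for the index, base address and colour granularity:

#include <stdio.h>

#define PAGE_SHIFT  12
#define PAGE_SIZE   (1UL << PAGE_SHIFT)
#define SHMLBA_DEMO (4 * PAGE_SIZE)   /* hypothetical colour granularity */
#define COLOUR(x)   ((x) & (SHMLBA_DEMO - 1) & ~(PAGE_SIZE - 1))

int main(void)
{
	unsigned long pgoff    = 0x25;                   /* hypothetical page->index */
	unsigned long file_off = pgoff << PAGE_SHIFT;    /* byte offset: 0x25000 */

	/* A colour-aware arch_get_unmapped_area() places shared mappings so
	 * that the user address is congruent to the file offset modulo the
	 * colour granularity ... */
	unsigned long user_va = 0x40000000UL + (file_off & (SHMLBA_DEMO - 1));

	/* ... so the index-derived offset is a valid stand-in for vaddr in
	 * the colour comparison against the kernel mapping. */
	printf("file offset colour: %#lx\n", COLOUR(file_off)); /* 0x1000 */
	printf("user va colour    : %#lx\n", COLOUR(user_va));  /* 0x1000 */
	return 0;
}
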
266 unsigned long vaddr, void *dst, void *src, int len) in copy_to_user_page() argument
271 vto = kremap0(vaddr, page_to_phys(page)); in copy_to_user_page()
272 dst = (void *)(vto | (vaddr & (PAGE_SIZE - 1))); in copy_to_user_page()
287 unsigned long vaddr, void *dst, void *src, int len) in copy_from_user_page() argument
292 vto = kremap0(vaddr, page_to_phys(page)); in copy_from_user_page()
293 src = (void *)(vto | (vaddr & (PAGE_SIZE - 1))); in copy_from_user_page()
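
In copy_to_user_page() and copy_from_user_page() (lines 272 and 293) the low bits of vaddr are OR-ed into the temporary kernel alias so the copy starts at the same byte within the page that the user-space pointer refers to. A tiny stand-alone demo of that arithmetic, assuming for the sake of the example a page-aligned vto and made-up addresses:

#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
	/* Pretend kremap0() handed back this page-aligned kernel alias ... */
	unsigned long vto   = 0xfff00000UL;
	/* ... for a user buffer that starts part-way into the page. */
	unsigned long vaddr = 0x00403a48UL;

	/* With no bits set below PAGE_SHIFT in vto, OR behaves like ADD here:
	 * the page offset of vaddr is simply appended to the alias. */
	unsigned long dst = vto | (vaddr & (PAGE_SIZE - 1));

	printf("offset in page: %#lx\n", vaddr & (PAGE_SIZE - 1)); /* 0xa48 */
	printf("dst           : %#lx\n", dst);                     /* 0xfff00a48 */
	return 0;
}
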
300 struct page *page, unsigned long vaddr) in flush_anon_page() argument
311 cpu_icache_inval_page(vaddr & PAGE_MASK); in flush_anon_page()
313 if (aliasing(vaddr, kaddr)) { in flush_anon_page()
314 ktmp = kremap0(vaddr, page_to_phys(page)); in flush_anon_page()
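
The common thread through these matches is the pair aliasing() and kremap0()/kremap1(): the first detects that a user view at vaddr and a kernel address could index different cache sets, and the second sets up a temporary kernel mapping congruent to vaddr so the kernel touches the same cache lines the user mapping will see. kremap0()'s implementation is not part of this listing; the sketch below only illustrates how such a congruent slot could be chosen, with an invented FIXMAP_BASE and way size standing in for the real remap window:

#include <stdio.h>

/* Illustrative constants only; the real slot addresses and D-cache way size
 * come from the nds32 cache and fixmap configuration, not from this listing. */
#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define DCACHE_WAY_SIZE (4 * PAGE_SIZE)
#define COLOUR_BITS     ((DCACHE_WAY_SIZE - 1) & ~(PAGE_SIZE - 1))
#define FIXMAP_BASE     0xfe000000UL   /* hypothetical, way-size aligned */

/* Choose a temporary kernel virtual address with the same cache colour as
 * vaddr; mapping the page's physical frame there (as kremap0(vaddr, pa)
 * would) makes the kernel's accesses index into the same cache sets as the
 * user mapping, so no extra alias is created. */
static unsigned long congruent_slot(unsigned long vaddr)
{
	return FIXMAP_BASE + (vaddr & COLOUR_BITS);
}

int main(void)
{
	unsigned long user_va = 0x00403a48UL;

	printf("user colour bits : %#lx\n", user_va & COLOUR_BITS);    /* 0x3000 */
	printf("kernel alias base: %#lx\n", congruent_slot(user_va));  /* 0xfe003000 */
	return 0;
}
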