Home
last modified time | relevance | path

Searched refs:vmf (Results 1 – 25 of 134) sorted by relevance

123456

/linux/mm/
memory.c:3022 vmf->pte = pte_offset_map_lock(mm, vmf->pmd, vmf->address, &vmf->ptl); in wp_page_copy()
3140 vmf->pte = pte_offset_map_lock(vmf->vma->vm_mm, vmf->pmd, vmf->address, in finish_mkwrite_fault()
3147 update_mmu_tlb(vmf->vma, vmf->address, vmf->pte); in finish_mkwrite_fault()
4390 vmf->pte = pte_offset_map(vmf->pmd, vmf->address); in do_numa_page()
4464 __split_huge_pud(vmf->vma, vmf->pud, vmf->address); in create_huge_pud()
4529 vmf->pte = pte_offset_map(vmf->pmd, vmf->address); in handle_pte_fault()
4530 vmf->orig_pte = *vmf->pte; in handle_pte_fault()
4560 vmf->ptl = pte_lockptr(vmf->vma->vm_mm, vmf->pmd); in handle_pte_fault()
4564 update_mmu_tlb(vmf->vma, vmf->address, vmf->pte); in handle_pte_fault()
4575 update_mmu_cache(vmf->vma, vmf->address, vmf->pte); in handle_pte_fault()
[all …]
huge_memory.c:628 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in __do_huge_pmd_anonymous_page()
654 update_mmu_cache_pmd(vma, vmf->address, vmf->pmd); in __do_huge_pmd_anonymous_page()
752 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_anonymous_page()
1245 vmf->ptl = pud_lock(vmf->vma->vm_mm, vmf->pud); in huge_pud_set_accessed()
1254 update_mmu_cache_pud(vmf->vma, vmf->address, vmf->pud); in huge_pud_set_accessed()
1268 vmf->ptl = pmd_lock(vmf->vma->vm_mm, vmf->pmd); in huge_pmd_set_accessed()
1277 update_mmu_cache_pmd(vmf->vma, vmf->address, vmf->pmd); in huge_pmd_set_accessed()
1290 vmf->ptl = pmd_lockptr(vma->vm_mm, vmf->pmd); in do_huge_pmd_wp_page()
1437 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_numa_page()
1470 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_numa_page()
[all …]
filemap.c:3013 pgoff_t offset = vmf->pgoff; in do_async_mmap_readahead()
3059 pgoff_t offset = vmf->pgoff; in filemap_fault()
3101 vmf->gfp_mask); in filemap_fault()
3163 vmf->page = page; in filemap_fault()
3221 if (pmd_none(*vmf->pmd)) in filemap_map_pmd()
3222 pmd_install(mm, vmf->pmd, &vmf->prealloc_pte); in filemap_map_pmd()
3315 vmf->pte = pte_offset_map_lock(vma->vm_mm, vmf->pmd, addr, &vmf->ptl); in filemap_map_pages()
3328 if (!pte_none(*vmf->pte)) in filemap_map_pages()
3332 if (vmf->address == addr) in filemap_map_pages()
3335 do_set_pte(vmf, page, addr); in filemap_map_pages()
[all …]
secretmem.c:50 static vm_fault_t secretmem_fault(struct vm_fault *vmf) in secretmem_fault() argument
52 struct address_space *mapping = vmf->vma->vm_file->f_mapping; in secretmem_fault()
53 struct inode *inode = file_inode(vmf->vma->vm_file); in secretmem_fault()
54 pgoff_t offset = vmf->pgoff; in secretmem_fault()
55 gfp_t gfp = vmf->gfp_mask; in secretmem_fault()
60 if (((loff_t)vmf->pgoff << PAGE_SHIFT) >= i_size_read(inode)) in secretmem_fault()
96 vmf->page = page; in secretmem_fault()
swap_state.c:614 struct vm_fault *vmf) in swap_cluster_readahead() argument
624 struct vm_area_struct *vma = vmf->vma; in swap_cluster_readahead()
625 unsigned long addr = vmf->address; in swap_cluster_readahead()
715 struct vm_area_struct *vma = vmf->vma; in swap_ra_info()
732 faddr = vmf->address; in swap_ra_info()
789 struct vm_fault *vmf) in swap_vma_readahead() argument
792 struct vm_area_struct *vma = vmf->vma; in swap_vma_readahead()
802 swap_ra_info(vmf, &ra_info); in swap_vma_readahead()
818 vmf->address, &page_allocated); in swap_vma_readahead()
850 struct vm_fault *vmf) in swapin_readahead() argument
[all …]
/linux/include/trace/events/
fs_dax.h:13 TP_ARGS(inode, vmf, max_pgoff, result),
30 __entry->vm_end = vmf->vma->vm_end;
32 __entry->address = vmf->address;
33 __entry->flags = vmf->flags;
34 __entry->pgoff = vmf->pgoff;
80 __entry->address = vmf->address;
123 __entry->address = vmf->address;
155 TP_ARGS(inode, vmf, result),
170 __entry->flags = vmf->flags;
171 __entry->pgoff = vmf->pgoff;
[all …]
/linux/fs/
dax.c:769 dax_associate_entry(new_entry, mapping, vmf->vma, vmf->address); in dax_insert_entry()
1054 struct vm_fault *vmf) in dax_load_hole() argument
1098 ptl = pmd_lock(vmf->vma->vm_mm, vmf->pmd); in dax_pmd_load_hole()
1110 set_pmd_at(vmf->vma->vm_mm, pmd_addr, vmf->pmd, pmd_entry); in dax_pmd_load_hole()
1420 return vmf_insert_mixed_mkwrite(vmf->vma, vmf->address, pfn); in dax_fault_iter()
1421 return vmf_insert_mixed(vmf->vma, vmf->address, pfn); in dax_fault_iter()
1450 if ((vmf->flags & FAULT_FLAG_WRITE) && !vmf->cow_page) in dax_iomap_pte_fault()
1465 if (pmd_trans_huge(*vmf->pmd) || pmd_devmap(*vmf->pmd)) { in dax_iomap_pte_fault()
1587 if (!pmd_none(*vmf->pmd) && !pmd_trans_huge(*vmf->pmd) && in dax_iomap_pmd_fault()
1607 split_huge_pmd(vmf->vma, vmf->pmd, vmf->address); in dax_iomap_pmd_fault()
[all …]
/linux/drivers/dax/
device.c:77 struct vm_fault *vmf, pfn_t *pfn) in __dev_dax_pte_fault() argument
103 return vmf_insert_mixed(vmf->vma, vmf->address, *pfn); in __dev_dax_pte_fault()
107 struct vm_fault *vmf, pfn_t *pfn) in __dev_dax_pmd_fault() argument
130 if (pmd_addr < vmf->vma->vm_start || in __dev_dax_pmd_fault()
143 return vmf_insert_pfn_pmd(vmf, *pfn, vmf->flags & FAULT_FLAG_WRITE); in __dev_dax_pmd_fault()
148 struct vm_fault *vmf, pfn_t *pfn) in __dev_dax_pud_fault() argument
172 if (pud_addr < vmf->vma->vm_start || in __dev_dax_pud_fault()
185 return vmf_insert_pfn_pud(vmf, *pfn, vmf->flags & FAULT_FLAG_WRITE); in __dev_dax_pud_fault()
189 struct vm_fault *vmf, pfn_t *pfn) in __dev_dax_pud_fault() argument
207 vmf->vma->vm_start, vmf->vma->vm_end, pe_size); in dev_dax_huge_fault()
[all …]
/linux/drivers/gpu/drm/ttm/
ttm_bo_vm.c:47 struct vm_fault *vmf) in ttm_bo_vm_fault_idle() argument
72 mmap_read_unlock(vmf->vma->vm_mm); in ttm_bo_vm_fault_idle()
130 struct vm_fault *vmf) in ttm_bo_vm_reserve() argument
147 mmap_read_unlock(vmf->vma->vm_mm); in ttm_bo_vm_reserve()
198 struct vm_area_struct *vma = vmf->vma; in ttm_bo_vm_fault_reserved()
209 unsigned long address = vmf->address; in ttm_bo_vm_fault_reserved()
215 ret = ttm_bo_vm_fault_idle(bo, vmf); in ttm_bo_vm_fault_reserved()
299 struct vm_area_struct *vma = vmf->vma; in ttm_bo_vm_dummy_page()
329 struct vm_area_struct *vma = vmf->vma; in ttm_bo_vm_fault()
336 ret = ttm_bo_vm_reserve(bo, vmf); in ttm_bo_vm_fault()
[all …]
/linux/drivers/video/fbdev/core/
fb_defio.c:40 static vm_fault_t fb_deferred_io_fault(struct vm_fault *vmf) in fb_deferred_io_fault() argument
44 struct fb_info *info = vmf->vma->vm_private_data; in fb_deferred_io_fault()
46 offset = vmf->pgoff << PAGE_SHIFT; in fb_deferred_io_fault()
56 if (vmf->vma->vm_file) in fb_deferred_io_fault()
57 page->mapping = vmf->vma->vm_file->f_mapping; in fb_deferred_io_fault()
62 page->index = vmf->pgoff; in fb_deferred_io_fault()
64 vmf->page = page; in fb_deferred_io_fault()
93 static vm_fault_t fb_deferred_io_mkwrite(struct vm_fault *vmf) in fb_deferred_io_mkwrite() argument
95 struct page *page = vmf->page; in fb_deferred_io_mkwrite()
96 struct fb_info *info = vmf->vma->vm_private_data; in fb_deferred_io_mkwrite()
[all …]
/linux/fs/ocfs2/
mmap.c:31 static vm_fault_t ocfs2_fault(struct vm_fault *vmf) in ocfs2_fault() argument
33 struct vm_area_struct *vma = vmf->vma; in ocfs2_fault()
38 ret = filemap_fault(vmf); in ocfs2_fault()
42 vma, vmf->page, vmf->pgoff); in ocfs2_fault()
113 static vm_fault_t ocfs2_page_mkwrite(struct vm_fault *vmf) in ocfs2_page_mkwrite() argument
115 struct page *page = vmf->page; in ocfs2_page_mkwrite()
116 struct inode *inode = file_inode(vmf->vma->vm_file); in ocfs2_page_mkwrite()
144 ret = __ocfs2_page_mkwrite(vmf->vma->vm_file, di_bh, page); in ocfs2_page_mkwrite()
/linux/include/linux/
huge_mm.h:10 vm_fault_t do_huge_pmd_anonymous_page(struct vm_fault *vmf);
14 void huge_pmd_set_accessed(struct vm_fault *vmf);
20 void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud);
22 static inline void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud) in huge_pud_set_accessed() argument
27 vm_fault_t do_huge_pmd_wp_page(struct vm_fault *vmf);
41 vm_fault_t vmf_insert_pfn_pmd_prot(struct vm_fault *vmf, pfn_t pfn,
58 return vmf_insert_pfn_pmd_prot(vmf, pfn, vmf->vma->vm_page_prot, write); in vmf_insert_pfn_pmd()
60 vm_fault_t vmf_insert_pfn_pud_prot(struct vm_fault *vmf, pfn_t pfn,
77 return vmf_insert_pfn_pud_prot(vmf, pfn, vmf->vma->vm_page_prot, write); in vmf_insert_pfn_pud()
282 vm_fault_t do_huge_pmd_numa_page(struct vm_fault *vmf);
[all …]
/linux/arch/x86/entry/vdso/
vma.c:60 struct vm_area_struct *vma, struct vm_fault *vmf) in vdso_fault() argument
64 if (!image || (vmf->pgoff << PAGE_SHIFT) >= image->size) in vdso_fault()
67 vmf->page = virt_to_page(image->data + (vmf->pgoff << PAGE_SHIFT)); in vdso_fault()
68 get_page(vmf->page); in vdso_fault()
151 struct vm_area_struct *vma, struct vm_fault *vmf) in vvar_fault() argument
160 sym_offset = (long)(vmf->pgoff << PAGE_SHIFT) + in vvar_fault()
195 addr = vmf->address + (image->sym_timens_page - sym_offset); in vvar_fault()
203 return vmf_insert_pfn(vma, vmf->address, pfn); in vvar_fault()
208 return vmf_insert_pfn_prot(vma, vmf->address, in vvar_fault()
216 return vmf_insert_pfn(vma, vmf->address, in vvar_fault()
[all …]
/linux/drivers/gpu/drm/
drm_vm.c:112 struct vm_area_struct *vma = vmf->vma; in drm_vm_fault()
169 vmf->page = page; in drm_vm_fault()
201 struct vm_area_struct *vma = vmf->vma; in drm_vm_shm_fault()
210 offset = vmf->address - vma->vm_start; in drm_vm_shm_fault()
216 vmf->page = page; in drm_vm_shm_fault()
301 struct vm_area_struct *vma = vmf->vma; in drm_vm_dma_fault()
314 offset = vmf->address - vma->vm_start; in drm_vm_dma_fault()
320 vmf->page = page; in drm_vm_dma_fault()
336 struct vm_area_struct *vma = vmf->vma; in drm_vm_sg_fault()
351 offset = vmf->address - vma->vm_start; in drm_vm_sg_fault()
[all …]
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_page_dirty.c:393 vm_fault_t vmw_bo_vm_mkwrite(struct vm_fault *vmf) in vmw_bo_vm_mkwrite() argument
395 struct vm_area_struct *vma = vmf->vma; in vmw_bo_vm_mkwrite()
408 save_flags = vmf->flags; in vmw_bo_vm_mkwrite()
409 vmf->flags &= ~FAULT_FLAG_ALLOW_RETRY; in vmw_bo_vm_mkwrite()
410 ret = ttm_bo_vm_reserve(bo, vmf); in vmw_bo_vm_mkwrite()
411 vmf->flags = save_flags; in vmw_bo_vm_mkwrite()
435 vm_fault_t vmw_bo_vm_fault(struct vm_fault *vmf) in vmw_bo_vm_fault() argument
437 struct vm_area_struct *vma = vmf->vma; in vmw_bo_vm_fault()
446 ret = ttm_bo_vm_reserve(bo, vmf); in vmw_bo_vm_fault()
457 page_offset = vmf->pgoff - in vmw_bo_vm_fault()
[all …]
/linux/fs/xfs/
xfs_file.c:1311 struct vm_fault *vmf, in __xfs_filemap_fault() argument
1340 ret = iomap_page_mkwrite(vmf, in __xfs_filemap_fault()
1344 ret = filemap_fault(vmf); in __xfs_filemap_fault()
1355 struct vm_fault *vmf) in xfs_is_write_fault() argument
1363 struct vm_fault *vmf) in xfs_filemap_fault() argument
1368 xfs_is_write_fault(vmf)); in xfs_filemap_fault()
1373 struct vm_fault *vmf, in xfs_filemap_huge_fault() argument
1381 xfs_is_write_fault(vmf)); in xfs_filemap_huge_fault()
1386 struct vm_fault *vmf) in xfs_filemap_page_mkwrite() argument
1398 struct vm_fault *vmf) in xfs_filemap_pfn_mkwrite() argument
[all …]
/linux/drivers/xen/
privcmd-buf.c:117 static vm_fault_t privcmd_buf_vma_fault(struct vm_fault *vmf) in privcmd_buf_vma_fault() argument
120 vmf->vma, vmf->vma->vm_start, vmf->vma->vm_end, in privcmd_buf_vma_fault()
121 vmf->pgoff, (void *)vmf->address); in privcmd_buf_vma_fault()
/linux/fs/ext2/
file.c:91 static vm_fault_t ext2_dax_fault(struct vm_fault *vmf) in ext2_dax_fault() argument
93 struct inode *inode = file_inode(vmf->vma->vm_file); in ext2_dax_fault()
95 bool write = (vmf->flags & FAULT_FLAG_WRITE) && in ext2_dax_fault()
96 (vmf->vma->vm_flags & VM_SHARED); in ext2_dax_fault()
100 file_update_time(vmf->vma->vm_file); in ext2_dax_fault()
104 ret = dax_iomap_fault(vmf, PE_SIZE_PTE, NULL, NULL, &ext2_iomap_ops); in ext2_dax_fault()
/linux/drivers/gpu/drm/gma500/
gem.c:22 static vm_fault_t psb_gem_fault(struct vm_fault *vmf);
136 static vm_fault_t psb_gem_fault(struct vm_fault *vmf) in psb_gem_fault() argument
138 struct vm_area_struct *vma = vmf->vma; in psb_gem_fault()
172 page_offset = (vmf->address - vma->vm_start) >> PAGE_SHIFT; in psb_gem_fault()
179 ret = vmf_insert_pfn(vma, vmf->address, pfn); in psb_gem_fault()
/linux/sound/usb/usx2y/
usX2Yhwdep.c:21 static vm_fault_t snd_us428ctls_vm_fault(struct vm_fault *vmf) in snd_us428ctls_vm_fault() argument
28 vmf->vma->vm_start, in snd_us428ctls_vm_fault()
29 vmf->pgoff); in snd_us428ctls_vm_fault()
31 offset = vmf->pgoff << PAGE_SHIFT; in snd_us428ctls_vm_fault()
32 vaddr = (char *)((struct usx2ydev *)vmf->vma->vm_private_data)->us428ctls_sharedmem + offset; in snd_us428ctls_vm_fault()
35 vmf->page = page; in snd_us428ctls_vm_fault()
/linux/drivers/char/
mspec.c:137 mspec_fault(struct vm_fault *vmf) in mspec_fault() argument
141 pgoff_t index = vmf->pgoff; in mspec_fault()
142 struct vma_data *vdata = vmf->vma->vm_private_data; in mspec_fault()
164 return vmf_insert_pfn(vmf->vma, vmf->address, pfn); in mspec_fault()
/linux/fs/nilfs2/
file.c:45 static vm_fault_t nilfs_page_mkwrite(struct vm_fault *vmf) in nilfs_page_mkwrite() argument
47 struct vm_area_struct *vma = vmf->vma; in nilfs_page_mkwrite()
48 struct page *page = vmf->page; in nilfs_page_mkwrite()
99 ret = block_page_mkwrite(vma, vmf, nilfs_get_block); in nilfs_page_mkwrite()
/linux/drivers/misc/ocxl/
sysfs.c:109 static vm_fault_t global_mmio_fault(struct vm_fault *vmf) in global_mmio_fault() argument
111 struct vm_area_struct *vma = vmf->vma; in global_mmio_fault()
115 if (vmf->pgoff >= (afu->config.global_mmio_size >> PAGE_SHIFT)) in global_mmio_fault()
118 offset = vmf->pgoff; in global_mmio_fault()
120 return vmf_insert_pfn(vma, vmf->address, offset); in global_mmio_fault()
context.c:139 static vm_fault_t ocxl_mmap_fault(struct vm_fault *vmf) in ocxl_mmap_fault() argument
141 struct vm_area_struct *vma = vmf->vma; in ocxl_mmap_fault()
146 offset = vmf->pgoff << PAGE_SHIFT; in ocxl_mmap_fault()
148 ctx->pasid, vmf->address, offset); in ocxl_mmap_fault()
151 ret = map_pp_mmio(vma, vmf->address, offset, ctx); in ocxl_mmap_fault()
153 ret = map_afu_irq(vma, vmf->address, offset, ctx); in ocxl_mmap_fault()
/linux/drivers/misc/cxl/
context.c:126 static vm_fault_t cxl_mmap_fault(struct vm_fault *vmf) in cxl_mmap_fault() argument
128 struct vm_area_struct *vma = vmf->vma; in cxl_mmap_fault()
133 offset = vmf->pgoff << PAGE_SHIFT; in cxl_mmap_fault()
136 __func__, ctx->pe, vmf->address, offset); in cxl_mmap_fault()
161 vmf->page = ctx->ff_page; in cxl_mmap_fault()
168 ret = vmf_insert_pfn(vma, vmf->address, (area + offset) >> PAGE_SHIFT); in cxl_mmap_fault()

Completed in 62 milliseconds

123456