Identifier search results for "dpage" across the Linux source tree.

/linux/fs/nilfs2/
  page.c
    107  kaddr1 = kmap_atomic(dpage);   in nilfs_copy_buffer()
    124  SetPageUptodate(dpage);   in nilfs_copy_buffer()
    126  ClearPageUptodate(dpage);   in nilfs_copy_buffer()
    128  SetPageMappedToDisk(dpage);   in nilfs_copy_buffer()
    261  if (unlikely(!dpage)) {   in nilfs_copy_dirty_pages()
    274  unlock_page(dpage);   in nilfs_copy_dirty_pages()
    275  put_page(dpage);   in nilfs_copy_dirty_pages()
    313  if (dpage) {   in nilfs_copy_back_pages()
    315  WARN_ON(PageDirty(dpage));   in nilfs_copy_back_pages()
    317  unlock_page(dpage);   in nilfs_copy_back_pages()
    [all …]
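The nilfs2 hits all sit in the page/buffer copy helpers, which appear to map both pages, copy data between them, and then mirror the relevant page flags onto the destination. A rough sketch of that pattern, with hypothetical names rather than the actual nilfs_copy_buffer() body:

#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Illustrative only: copy one page of data and mirror a few page flags. */
static void demo_copy_page_data(struct page *dpage, struct page *spage)
{
	void *src, *dst;

	src = kmap_atomic(spage);
	dst = kmap_atomic(dpage);
	memcpy(dst, src, PAGE_SIZE);
	kunmap_atomic(dst);	/* unmap in reverse (LIFO) order */
	kunmap_atomic(src);

	if (PageUptodate(spage))
		SetPageUptodate(dpage);
	else
		ClearPageUptodate(dpage);
	if (PageMappedToDisk(spage))
		SetPageMappedToDisk(dpage);
}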
/linux/lib/
  test_hmm.c
    541  struct page *dpage = NULL;   in dmirror_devmem_alloc_page() (local)
    555  dpage = mdevice->free_pages;   in dmirror_devmem_alloc_page()
    566  get_page(dpage);   in dmirror_devmem_alloc_page()
    567  lock_page(dpage);   in dmirror_devmem_alloc_page()
    568  return dpage;   in dmirror_devmem_alloc_page()
    586  struct page *dpage;   in dmirror_migrate_alloc_and_copy() (local)
    599  if (!dpage)   in dmirror_migrate_alloc_and_copy()
    683  struct page *dpage;   in dmirror_migrate_finalize_and_map() (local)
    690  if (!dpage)   in dmirror_migrate_finalize_and_map()
    1133  if (!dpage)   in dmirror_devmem_fault_alloc_and_copy()
    [all …]
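The test_hmm.c hits cluster in dmirror_devmem_alloc_page(), which hands out device-private pages from a driver-maintained free list and returns them referenced and locked. A hedged sketch of that allocation shape, using made-up structure and function names rather than the real test_hmm.c ones:

#include <linux/mm.h>
#include <linux/pagemap.h>
#include <linux/spinlock.h>

/* Hypothetical device state; the real driver keeps more bookkeeping. */
struct demo_device {
	spinlock_t lock;
	struct page *free_pages;	/* chained through page->zone_device_data */
};

static struct page *demo_devmem_alloc_page(struct demo_device *mdev)
{
	struct page *dpage = NULL;

	spin_lock(&mdev->lock);
	if (mdev->free_pages) {
		dpage = mdev->free_pages;
		mdev->free_pages = dpage->zone_device_data;
	}
	spin_unlock(&mdev->lock);

	if (!dpage)
		return NULL;

	/* Hand the page out with a reference held and the page locked. */
	get_page(dpage);
	lock_page(dpage);
	return dpage;
}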
/linux/arch/powerpc/kvm/
  book3s_hv_uvmem.c
    542  if (!dpage) {   in __kvmppc_svm_page_out()
    547  lock_page(dpage);   in __kvmppc_svm_page_out()
    549  pfn = page_to_pfn(dpage);   in __kvmppc_svm_page_out()
    565  unlock_page(dpage);   in __kvmppc_svm_page_out()
    566  __free_page(dpage);   in __kvmppc_svm_page_out()
    686  struct page *dpage = NULL;   in kvmppc_uvmem_get_page() (local)
    715  get_page(dpage);   in kvmppc_uvmem_get_page()
    716  lock_page(dpage);   in kvmppc_uvmem_get_page()
    717  return dpage;   in kvmppc_uvmem_get_page()
    740  struct page *dpage;   in kvmppc_svm_page_in() (local)
    [all …]
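In __kvmppc_svm_page_out() the destination is an ordinary system page: the hits show it being locked, turned into a PFN for the copy out of secure memory, and unlocked and freed again if that copy fails. A generic, hypothetical sketch of that shape (do_copy_out() is a stand-in, not a real kernel call):

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/pagemap.h>

/* Stand-in for the real copy-out primitive; always "succeeds" here. */
static int do_copy_out(unsigned long pfn, unsigned long gpa)
{
	return 0;
}

static int demo_page_out(unsigned long gpa)
{
	struct page *dpage;
	unsigned long pfn;

	dpage = alloc_page(GFP_HIGHUSER);
	if (!dpage)
		return -ENOMEM;

	lock_page(dpage);
	pfn = page_to_pfn(dpage);

	if (do_copy_out(pfn, gpa)) {
		/* Error path seen in the hits: unlock and free the page. */
		unlock_page(dpage);
		__free_page(dpage);
		return -EFAULT;
	}

	/* On success the locked page is handed on to the migration code. */
	return 0;
}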
/linux/drivers/gpu/drm/nouveau/
  nouveau_dmem.c
    145  struct page *dpage, *spage;   in nouveau_dmem_fault_copy_one() (local)
    153  if (!dpage)   in nouveau_dmem_fault_copy_one()
    155  lock_page(dpage);   in nouveau_dmem_fault_copy_one()
    176  __free_page(dpage);   in nouveau_dmem_fault_copy_one()
    573  struct page *dpage, *spage;   in nouveau_dmem_migrate_copy_one() (local)
    580  dpage = nouveau_dmem_page_alloc_locked(drm);   in nouveau_dmem_migrate_copy_one()
    581  if (!dpage)   in nouveau_dmem_migrate_copy_one()
    584  paddr = nouveau_dmem_page_addr(dpage);   in nouveau_dmem_migrate_copy_one()
    600  dpage->zone_device_data = svmm;   in nouveau_dmem_migrate_copy_one()
    605  return migrate_pfn(page_to_pfn(dpage));   in nouveau_dmem_migrate_copy_one()
    [all …]
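The nouveau_dmem_migrate_copy_one() hits show the usual migrate_vma destination setup: allocate a locked device page, stash per-page driver data in zone_device_data, and report the page back as a migrate PFN. A hedged sketch with the allocator and driver handle left hypothetical:

#include <linux/migrate.h>
#include <linux/mm.h>

/* Placeholder for the driver's own VRAM page allocator (would return a locked page). */
static struct page *demo_dmem_page_alloc_locked(void *drm)
{
	return NULL;
}

static unsigned long demo_migrate_copy_one(void *drm, void *owner_data)
{
	struct page *dpage;

	dpage = demo_dmem_page_alloc_locked(drm);
	if (!dpage)
		return 0;

	/* Remember who owns this device page, for later fault handling. */
	dpage->zone_device_data = owner_data;

	/* Encode the destination page for migrate_vma. */
	return migrate_pfn(page_to_pfn(dpage));
}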
/linux/fs/f2fs/
  acl.c
    169  struct page *dpage)   in __f2fs_get_acl() (argument)
    179  retval = f2fs_getxattr(inode, name_index, "", NULL, 0, dpage);   in __f2fs_get_acl()
    185  retval, dpage);   in __f2fs_get_acl()
    360  struct page *dpage)   in f2fs_acl_create() (argument)
    372  p = __f2fs_get_acl(dir, ACL_TYPE_DEFAULT, dpage);   in f2fs_acl_create()
    410  struct page *dpage)   in f2fs_init_acl() (argument)
    415  error = f2fs_acl_create(dir, &inode->i_mode, &default_acl, &acl, dpage);   in f2fs_init_acl()
  acl.h
    46  struct page *ipage, struct page *dpage)   in f2fs_init_acl() (argument)
  xattr.h
    148  size_t buffer_size, struct page *dpage)   in f2fs_getxattr() (argument)
  dir.c
    565  const struct f2fs_filename *fname, struct page *dpage)   in f2fs_init_inode_metadata() (argument)
    586  err = f2fs_init_acl(inode, dir, page, dpage);   in f2fs_init_inode_metadata()
  f2fs.h
    3336  const struct f2fs_filename *fname, struct page *dpage);
/linux/drivers/dma/
  nbpfaxi.c
    702  if (!dpage)   in nbpf_desc_page_alloc()
    708  for (i = 0, ldesc = dpage->ldesc, hwdesc = dpage->hwdesc;   in nbpf_desc_page_alloc()
    709  i < ARRAY_SIZE(dpage->ldesc);   in nbpf_desc_page_alloc()
    720  for (i = 0, desc = dpage->desc;   in nbpf_desc_page_alloc()
    721  i < ARRAY_SIZE(dpage->desc);   in nbpf_desc_page_alloc()
    740  return ARRAY_SIZE(dpage->desc);   in nbpf_desc_page_alloc()
    1066  struct nbpf_desc_page *dpage, *tmp;   in nbpf_free_chan_resources() (local)
    1078  list_del(&dpage->node);   in nbpf_free_chan_resources()
    1079  for (i = 0, ldesc = dpage->ldesc;   in nbpf_free_chan_resources()
    1080  i < ARRAY_SIZE(dpage->ldesc);   in nbpf_free_chan_resources()
    [all …]
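nbpfaxi.c uses dpage for a descriptor page: one allocation holding fixed-size arrays of hardware and software descriptors, initialised in loops bounded by ARRAY_SIZE() and unlinked with list_del() when the channel is torn down. A rough sketch of that layout with invented types (the real struct nbpf_desc_page differs):

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>

struct demo_hwdesc {
	u32 word[4];
};

struct demo_desc {
	struct list_head node;
	struct demo_hwdesc *hw;
};

/* One allocation carrying a batch of descriptors, linked into a per-channel list. */
struct demo_desc_page {
	struct list_head node;
	struct demo_hwdesc hwdesc[16];
	struct demo_desc desc[16];
};

static int demo_desc_page_alloc(struct list_head *chan_pages)
{
	struct demo_desc_page *dpage = kzalloc(sizeof(*dpage), GFP_KERNEL);
	struct demo_desc *desc;
	int i;

	if (!dpage)
		return -ENOMEM;

	for (i = 0, desc = dpage->desc; i < ARRAY_SIZE(dpage->desc); i++, desc++) {
		desc->hw = &dpage->hwdesc[i];
		INIT_LIST_HEAD(&desc->node);
	}

	list_add_tail(&dpage->node, chan_pages);
	return ARRAY_SIZE(dpage->desc);
}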
/linux/drivers/gpu/drm/amd/amdkfd/
  kfd_migrate.c
    564  struct page *dpage;   in svm_migrate_copy_to_ram() (local)
    606  dpage = svm_migrate_get_sys_page(migrate->vma, addr);   in svm_migrate_copy_to_ram()
    607  if (!dpage) {   in svm_migrate_copy_to_ram()
    614  dst[i] = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_FROM_DEVICE);   in svm_migrate_copy_to_ram()
    622  dst[i] >> PAGE_SHIFT, page_to_pfn(dpage));   in svm_migrate_copy_to_ram()
    624  migrate->dst[i] = migrate_pfn(page_to_pfn(dpage));   in svm_migrate_copy_to_ram()
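In svm_migrate_copy_to_ram() the destinations are ordinary system pages: each one is looked up, DMA-mapped with DMA_FROM_DEVICE so the device can write into it, and then published to migrate_vma through migrate_pfn(). A hedged sketch of handling one destination page, assuming the page has already been looked up:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/migrate.h>
#include <linux/mm.h>

/* Illustrative only: prepare one system page as a device-to-RAM copy target. */
static int demo_map_one_dst(struct device *dev, struct page *dpage,
			    dma_addr_t *dma_addr, unsigned long *mpfn)
{
	*dma_addr = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, *dma_addr))
		return -EFAULT;

	/* Hand the page to migrate_vma as the destination for this entry. */
	*mpfn = migrate_pfn(page_to_pfn(dpage));
	return 0;
}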
/linux/drivers/net/ethernet/3com/
  typhoon.c
    1336  void *dpage;   in typhoon_download_firmware() (local)
    1357  dpage = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &dpage_dma, GFP_ATOMIC);   in typhoon_download_firmware()
    1358  if (!dpage) {   in typhoon_download_firmware()
    1422  dpage, len));   in typhoon_download_firmware()
    1462  dma_free_coherent(&pdev->dev, PAGE_SIZE, dpage, dpage_dma);   in typhoon_download_firmware()
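typhoon_download_firmware() uses dpage as a single coherent DMA page through which firmware chunks are staged on their way to the NIC. The general allocate/stage/free pattern, sketched here with a plain device pointer and GFP_KERNEL (the hit above shows the driver itself allocating with GFP_ATOMIC):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/string.h>

static int demo_download_firmware(struct device *dev, const u8 *fw, size_t len)
{
	dma_addr_t dpage_dma;
	void *dpage;
	size_t chunk;

	dpage = dma_alloc_coherent(dev, PAGE_SIZE, &dpage_dma, GFP_KERNEL);
	if (!dpage)
		return -ENOMEM;

	while (len) {
		chunk = min_t(size_t, len, PAGE_SIZE);
		memcpy(dpage, fw, chunk);	/* stage one chunk in the DMA page */
		/* ... kick the hardware to fetch 'chunk' bytes from dpage_dma ... */
		fw += chunk;
		len -= chunk;
	}

	dma_free_coherent(dev, PAGE_SIZE, dpage, dpage_dma);
	return 0;
}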
/linux/drivers/crypto/
  hifn_795x.c
    1326  struct page *spage, *dpage;   in hifn_setup_dma() (local)
    1347  dpage = sg_page(t);   in hifn_setup_dma()
    1352  dpage = sg_page(dst);   in hifn_setup_dma()
    1358  hifn_setup_dst_desc(dev, dpage, doff, len, n - len == 0);   in hifn_setup_dma()
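In hifn_setup_dma() dpage is simply the page behind a destination scatterlist entry, obtained with sg_page(). A minimal illustration of unpacking page, offset and length from a scatterlist element:

#include <linux/scatterlist.h>

/* Illustrative only: pull the destination page and its extent out of an sg entry. */
static void demo_get_dst(struct scatterlist *dst, struct page **dpage,
			 unsigned int *doff, unsigned int *dlen)
{
	*dpage = sg_page(dst);
	*doff = dst->offset;
	*dlen = dst->length;
}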
/linux/Documentation/vm/
  hmm.rst
    363  dst[i] = migrate_pfn(page_to_pfn(dpage));
/linux/drivers/scsi/
  st.c
    4065  struct page *dpage = st_bp->reserved_pages[dst_seg];   in move_buffer_data() (local)
    4069  memmove(page_address(dpage) + dst_offset,   in move_buffer_data()
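The st.c hits are in move_buffer_data(), which shifts data between pages of the tape driver's reserved buffer using memmove() on their kernel addresses. A hedged single-page version of that copy (assumes both pages have permanent kernel mappings, e.g. lowmem allocations):

#include <linux/mm.h>
#include <linux/string.h>

/* Illustrative only: move 'count' bytes between two mapped buffer pages. */
static void demo_move_within_buffer(struct page *dpage, size_t dst_offset,
				    struct page *spage, size_t src_offset,
				    size_t count)
{
	memmove(page_address(dpage) + dst_offset,
		page_address(spage) + src_offset, count);
}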