Searched refs:umem_dmabuf (Results 1 – 6 of 6) sorted by relevance
/linux/drivers/infiniband/core/
umem_dmabuf.c
    26  if (umem_dmabuf->sgt)  in ib_umem_dmabuf_map_pages()
    36  end = ALIGN(umem_dmabuf->umem.address + umem_dmabuf->umem.length,  in ib_umem_dmabuf_map_pages()
    61  umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg;  in ib_umem_dmabuf_map_pages()
   102  dma_buf_unmap_attachment(umem_dmabuf->attach, umem_dmabuf->sgt,  in ib_umem_dmabuf_unmap_pages()
   133  umem_dmabuf = kzalloc(sizeof(*umem_dmabuf), GFP_KERNEL);  in ib_umem_dmabuf_get()
   134  if (!umem_dmabuf) {  in ib_umem_dmabuf_get()
   153  umem_dmabuf);  in ib_umem_dmabuf_get()
   158  return umem_dmabuf;  in ib_umem_dmabuf_get()
   161  kfree(umem_dmabuf);  in ib_umem_dmabuf_get()
   207  return umem_dmabuf;  in ib_umem_dmabuf_get_pinned()
   [all …]
Makefile
    43  ib_uverbs-$(CONFIG_INFINIBAND_USER_MEM) += umem.o umem_dmabuf.o
/linux/include/rdma/
ib_umem.h
   147  int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf);
   148  void ib_umem_dmabuf_unmap_pages(struct ib_umem_dmabuf *umem_dmabuf);
   149  void ib_umem_dmabuf_release(struct ib_umem_dmabuf *umem_dmabuf);
   193  static inline int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf)  in ib_umem_dmabuf_map_pages()
   197  static inline void ib_umem_dmabuf_unmap_pages(struct ib_umem_dmabuf *umem_dmabuf) { }  in ib_umem_dmabuf_unmap_pages()
   198  static inline void ib_umem_dmabuf_release(struct ib_umem_dmabuf *umem_dmabuf) { }  in ib_umem_dmabuf_release()
/linux/drivers/infiniband/hw/mlx5/
odp.c
   692  struct ib_umem_dmabuf *umem_dmabuf = to_ib_umem_dmabuf(mr->umem);  in pagefault_dmabuf_mr()
   700  dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);  in pagefault_dmabuf_mr()
   701  err = ib_umem_dmabuf_map_pages(umem_dmabuf);  in pagefault_dmabuf_mr()
   703  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in pagefault_dmabuf_mr()
   707  page_size = mlx5_umem_find_best_pgsz(&umem_dmabuf->umem, mkc,  in pagefault_dmabuf_mr()
   709  umem_dmabuf->umem.iova);  in pagefault_dmabuf_mr()
   711  ib_umem_dmabuf_unmap_pages(umem_dmabuf);  in pagefault_dmabuf_mr()
   716  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in pagefault_dmabuf_mr()
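The odp.c hits above outline the dynamic mapping path: the driver takes the dma-buf reservation lock, maps the pages, does its device-specific setup, and drops the lock again, unmapping on failure. A minimal sketch of that pattern follows, assuming a hypothetical driver fault handler; my_fault_dmabuf_mr and my_post_map are illustrative names, not mlx5 code.

#include <linux/dma-resv.h>
#include <rdma/ib_umem.h>

/* Hypothetical device-specific step; a real driver would pick a page size
 * and program its MR here while the mapping is stable. */
static int my_post_map(struct ib_umem_dmabuf *umem_dmabuf)
{
        return 0;
}

/* Hedged sketch of the resv-locked map/unmap pattern shown in the listing. */
static int my_fault_dmabuf_mr(struct ib_umem_dmabuf *umem_dmabuf)
{
        int err;

        /* ib_umem_dmabuf_map_pages() must run under the dma-buf reservation lock. */
        dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);

        err = ib_umem_dmabuf_map_pages(umem_dmabuf);
        if (err)
                goto out_unlock;

        err = my_post_map(umem_dmabuf);
        if (err)
                ib_umem_dmabuf_unmap_pages(umem_dmabuf);

out_unlock:
        dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
        return err;
}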
mr.c
  1572  struct mlx5_ib_mr *mr = umem_dmabuf->private;  in mlx5_ib_dmabuf_invalidate_cb()
  1574  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in mlx5_ib_dmabuf_invalidate_cb()
  1576  if (!umem_dmabuf->sgt)  in mlx5_ib_dmabuf_invalidate_cb()
  1580  ib_umem_dmabuf_unmap_pages(umem_dmabuf);  in mlx5_ib_dmabuf_invalidate_cb()
  1595  struct ib_umem_dmabuf *umem_dmabuf;  in mlx5_ib_reg_user_mr_dmabuf()
  1613  if (IS_ERR(umem_dmabuf)) {  in mlx5_ib_reg_user_mr_dmabuf()
  1615  PTR_ERR(umem_dmabuf));  in mlx5_ib_reg_user_mr_dmabuf()
  1616  return ERR_CAST(umem_dmabuf);  in mlx5_ib_reg_user_mr_dmabuf()
  1619  mr = alloc_cacheable_mr(pd, &umem_dmabuf->umem, virt_addr,  in mlx5_ib_reg_user_mr_dmabuf()
  1622  ib_umem_release(&umem_dmabuf->umem);  in mlx5_ib_reg_user_mr_dmabuf()
  [all …]
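The mr.c hits show the other half of the dynamic path: the invalidation callback registered on the dma-buf attachment runs with the reservation lock already held and tears the mapping down. A hedged sketch of such a move_notify callback follows; my_invalidate_cb is an illustrative name, and recovering the umem through importer_priv is an assumption about how the attachment was set up, not something shown in the listing.

#include <linux/dma-buf.h>
#include <rdma/ib_umem.h>

/* Hedged sketch modeled on the mlx5_ib_dmabuf_invalidate_cb() hits above. */
static void my_invalidate_cb(struct dma_buf_attachment *attach)
{
        /* Assumption: the importer_priv of the attachment points at the
         * ib_umem_dmabuf; umem_dmabuf->private can carry the driver's MR. */
        struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;

        /* The exporter calls move_notify with the reservation lock held. */
        dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);

        if (!umem_dmabuf->sgt)
                return;

        /* Device-specific invalidation of the MR would go here, then: */
        ib_umem_dmabuf_unmap_pages(umem_dmabuf);
}

static const struct dma_buf_attach_ops my_attach_ops = {
        .allow_peer2peer = true,
        .move_notify = my_invalidate_cb,
};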
/linux/drivers/infiniband/hw/efa/
efa_verbs.c
  1649  struct ib_umem_dmabuf *umem_dmabuf;  in efa_reg_user_mr_dmabuf()
  1659  umem_dmabuf = ib_umem_dmabuf_get_pinned(ibpd->device, start, length, fd,  in efa_reg_user_mr_dmabuf()
  1661  if (IS_ERR(umem_dmabuf)) {  in efa_reg_user_mr_dmabuf()
  1662  err = PTR_ERR(umem_dmabuf);  in efa_reg_user_mr_dmabuf()
  1667  mr->umem = &umem_dmabuf->umem;  in efa_reg_user_mr_dmabuf()
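The efa hits show the simpler, pinned variant: the driver asks for the pages to stay resident via ib_umem_dmabuf_get_pinned(), so it never has to handle exporter-initiated moves. A hedged sketch of that registration flow follows; my_reg_mr_dmabuf is an illustrative name, and the trailing access-flags argument is assumed from the truncated call in the listing.

#include <rdma/ib_umem.h>
#include <rdma/ib_verbs.h>

/* Hedged sketch of the pinned dma-buf registration pattern above. */
static struct ib_umem *my_reg_mr_dmabuf(struct ib_pd *ibpd, u64 start,
                                        u64 length, int fd, int access_flags)
{
        struct ib_umem_dmabuf *umem_dmabuf;

        /* Pinned variant: the mapping is created once and stays in place,
         * so no move_notify callback is needed. */
        umem_dmabuf = ib_umem_dmabuf_get_pinned(ibpd->device, start, length,
                                                fd, access_flags);
        if (IS_ERR(umem_dmabuf))
                return ERR_CAST(umem_dmabuf);

        /* The embedded ib_umem is what the rest of the driver consumes;
         * it is torn down later with ib_umem_release(). */
        return &umem_dmabuf->umem;
}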
Completed in 17 milliseconds