Lines Matching refs:buf (drivers/media/common/videobuf2/videobuf2-dma-contig.c)
75 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_cookie() local
77 return &buf->dma_addr; in vb2_dc_cookie()
96 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_vaddr() local
98 if (buf->vaddr) in vb2_dc_vaddr()
99 return buf->vaddr; in vb2_dc_vaddr()
101 if (buf->db_attach) { in vb2_dc_vaddr()
104 if (!dma_buf_vmap(buf->db_attach->dmabuf, &map)) in vb2_dc_vaddr()
105 buf->vaddr = map.vaddr; in vb2_dc_vaddr()
107 return buf->vaddr; in vb2_dc_vaddr()
110 if (buf->non_coherent_mem) in vb2_dc_vaddr()
111 buf->vaddr = dma_vmap_noncontiguous(buf->dev, buf->size, in vb2_dc_vaddr()
112 buf->dma_sgt); in vb2_dc_vaddr()
113 return buf->vaddr; in vb2_dc_vaddr()
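The matches from vb2_dc_vaddr() show the kernel mapping being created lazily: a dma-buf import is mapped with dma_buf_vmap(), while a non-coherent MMAP buffer is mapped with dma_vmap_noncontiguous() over the sg_table obtained at allocation time. Below is a minimal, hypothetical sketch of that second pairing using only the generic DMA API; demo_alloc_and_vmap(), the fixed DMA_FROM_DEVICE direction and the zero attrs are placeholders, not driver code.

#include <linux/dma-mapping.h>

/* Hypothetical helper: allocate non-contiguous DMA memory and map it into
 * the kernel, mirroring the dma_sgt/vaddr pair kept in vb2_dc_buf. */
static void *demo_alloc_and_vmap(struct device *dev, size_t size,
				 struct sg_table **sgt_out)
{
	struct sg_table *sgt;
	void *vaddr;

	sgt = dma_alloc_noncontiguous(dev, size, DMA_FROM_DEVICE,
				      GFP_KERNEL, 0);
	if (!sgt)
		return NULL;

	vaddr = dma_vmap_noncontiguous(dev, size, sgt);
	if (!vaddr) {
		dma_free_noncontiguous(dev, size, sgt, DMA_FROM_DEVICE);
		return NULL;
	}

	*sgt_out = sgt;
	/* Tear down with dma_vunmap_noncontiguous() + dma_free_noncontiguous(). */
	return vaddr;
}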
118 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_num_users() local
120 return refcount_read(&buf->refcount); in vb2_dc_num_users()
125 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_prepare() local
126 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare()
129 if (buf->vb->skip_cache_sync_on_prepare) in vb2_dc_prepare()
132 if (!buf->non_coherent_mem) in vb2_dc_prepare()
136 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dc_prepare()
139 if (buf->vaddr) in vb2_dc_prepare()
140 flush_kernel_vmap_range(buf->vaddr, buf->size); in vb2_dc_prepare()
145 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_finish() local
146 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish()
149 if (buf->vb->skip_cache_sync_on_finish) in vb2_dc_finish()
152 if (!buf->non_coherent_mem) in vb2_dc_finish()
156 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dc_finish()
159 if (buf->vaddr) in vb2_dc_finish()
160 invalidate_kernel_vmap_range(buf->vaddr, buf->size); in vb2_dc_finish()
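vb2_dc_prepare() and vb2_dc_finish() only act on non-coherent buffers (and only when the skip_cache_sync_on_* flags are clear): they transfer cache ownership with dma_sync_sgtable_for_device()/dma_sync_sgtable_for_cpu(), and additionally flush or invalidate the kernel vmap alias when buf->vaddr exists. A minimal sketch of that ownership hand-over, with dev/sgt/dir standing in for the vb2_dc_buf fields:

#include <linux/dma-mapping.h>

/* Sketch only: cache maintenance around one DMA transfer on a
 * non-coherent buffer, as done by vb2_dc_prepare()/vb2_dc_finish(). */
static void demo_prepare(struct device *dev, struct sg_table *sgt,
			 enum dma_data_direction dir)
{
	/* Hand the pages over to the device before it starts DMA. */
	dma_sync_sgtable_for_device(dev, sgt, dir);
}

static void demo_finish(struct device *dev, struct sg_table *sgt,
			enum dma_data_direction dir)
{
	/* Give ownership back to the CPU once the device is done. */
	dma_sync_sgtable_for_cpu(dev, sgt, dir);
}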
169 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_put() local
171 if (!refcount_dec_and_test(&buf->refcount)) in vb2_dc_put()
174 if (buf->non_coherent_mem) { in vb2_dc_put()
175 if (buf->vaddr) in vb2_dc_put()
176 dma_vunmap_noncontiguous(buf->dev, buf->vaddr); in vb2_dc_put()
177 dma_free_noncontiguous(buf->dev, buf->size, in vb2_dc_put()
178 buf->dma_sgt, buf->dma_dir); in vb2_dc_put()
180 if (buf->sgt_base) { in vb2_dc_put()
181 sg_free_table(buf->sgt_base); in vb2_dc_put()
182 kfree(buf->sgt_base); in vb2_dc_put()
184 dma_free_attrs(buf->dev, buf->size, buf->cookie, in vb2_dc_put()
185 buf->dma_addr, buf->attrs); in vb2_dc_put()
187 put_device(buf->dev); in vb2_dc_put()
188 kfree(buf); in vb2_dc_put()
191 static int vb2_dc_alloc_coherent(struct vb2_dc_buf *buf) in vb2_dc_alloc_coherent() argument
193 struct vb2_queue *q = buf->vb->vb2_queue; in vb2_dc_alloc_coherent()
195 buf->cookie = dma_alloc_attrs(buf->dev, in vb2_dc_alloc_coherent()
196 buf->size, in vb2_dc_alloc_coherent()
197 &buf->dma_addr, in vb2_dc_alloc_coherent()
199 buf->attrs); in vb2_dc_alloc_coherent()
200 if (!buf->cookie) in vb2_dc_alloc_coherent()
206 buf->vaddr = buf->cookie; in vb2_dc_alloc_coherent()
210 static int vb2_dc_alloc_non_coherent(struct vb2_dc_buf *buf) in vb2_dc_alloc_non_coherent() argument
212 struct vb2_queue *q = buf->vb->vb2_queue; in vb2_dc_alloc_non_coherent()
214 buf->dma_sgt = dma_alloc_noncontiguous(buf->dev, in vb2_dc_alloc_non_coherent()
215 buf->size, in vb2_dc_alloc_non_coherent()
216 buf->dma_dir, in vb2_dc_alloc_non_coherent()
218 buf->attrs); in vb2_dc_alloc_non_coherent()
219 if (!buf->dma_sgt) in vb2_dc_alloc_non_coherent()
222 buf->dma_addr = sg_dma_address(buf->dma_sgt->sgl); in vb2_dc_alloc_non_coherent()
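The allocation path splits on buf->non_coherent_mem: vb2_dc_alloc_coherent() uses dma_alloc_attrs(), where the returned cookie doubles as the kernel virtual address, while vb2_dc_alloc_non_coherent() uses dma_alloc_noncontiguous() and takes the device address from the first sg entry. A sketch of the coherent path under those assumptions (demo_alloc_coherent() is illustrative, not the driver's helper):

#include <linux/dma-mapping.h>

/* Sketch of a coherent allocation; 'attrs' would come from the queue's
 * dma_attrs as in vb2_dc_alloc_coherent() above. */
static void *demo_alloc_coherent(struct device *dev, size_t size,
				 unsigned long attrs, dma_addr_t *dma_addr)
{
	void *cookie;

	cookie = dma_alloc_attrs(dev, size, dma_addr, GFP_KERNEL, attrs);
	if (!cookie)
		return NULL;

	/* Unless DMA_ATTR_NO_KERNEL_MAPPING was requested, the cookie is a
	 * usable kernel virtual address; free with dma_free_attrs(). */
	return cookie;
}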
235 struct vb2_dc_buf *buf; in vb2_dc_alloc() local
241 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dc_alloc()
242 if (!buf) in vb2_dc_alloc()
245 buf->attrs = vb->vb2_queue->dma_attrs; in vb2_dc_alloc()
246 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dc_alloc()
247 buf->vb = vb; in vb2_dc_alloc()
248 buf->non_coherent_mem = vb->vb2_queue->non_coherent_mem; in vb2_dc_alloc()
250 buf->size = size; in vb2_dc_alloc()
252 buf->dev = get_device(dev); in vb2_dc_alloc()
254 if (buf->non_coherent_mem) in vb2_dc_alloc()
255 ret = vb2_dc_alloc_non_coherent(buf); in vb2_dc_alloc()
257 ret = vb2_dc_alloc_coherent(buf); in vb2_dc_alloc()
261 kfree(buf); in vb2_dc_alloc()
265 buf->handler.refcount = &buf->refcount; in vb2_dc_alloc()
266 buf->handler.put = vb2_dc_put; in vb2_dc_alloc()
267 buf->handler.arg = buf; in vb2_dc_alloc()
269 refcount_set(&buf->refcount, 1); in vb2_dc_alloc()
271 return buf; in vb2_dc_alloc()
276 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_mmap() local
279 if (!buf) { in vb2_dc_mmap()
284 if (buf->non_coherent_mem) in vb2_dc_mmap()
285 ret = dma_mmap_noncontiguous(buf->dev, vma, buf->size, in vb2_dc_mmap()
286 buf->dma_sgt); in vb2_dc_mmap()
288 ret = dma_mmap_attrs(buf->dev, vma, buf->cookie, buf->dma_addr, in vb2_dc_mmap()
289 buf->size, buf->attrs); in vb2_dc_mmap()
296 vma->vm_private_data = &buf->handler; in vb2_dc_mmap()
302 __func__, (unsigned long)buf->dma_addr, vma->vm_start, in vb2_dc_mmap()
303 buf->size); in vb2_dc_mmap()
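vb2_dc_mmap() maps the buffer into the calling process with dma_mmap_noncontiguous() for non-coherent memory and dma_mmap_attrs() otherwise, then points vma->vm_private_data at the refcount handler so the mapping keeps the buffer alive. A hypothetical sketch of the coherent branch; cookie/dma_addr/size/attrs are placeholders for the vb2_dc_buf fields:

#include <linux/dma-mapping.h>
#include <linux/mm.h>

/* Sketch of remapping a coherent allocation into userspace. */
static int demo_mmap(struct device *dev, struct vm_area_struct *vma,
		     void *cookie, dma_addr_t dma_addr, size_t size,
		     unsigned long attrs)
{
	int ret;

	ret = dma_mmap_attrs(dev, vma, cookie, dma_addr, size, attrs);
	if (ret)
		return ret;

	/* A real handler also installs vm_ops and vm_private_data so the
	 * buffer's refcount follows the life of the mapping, as the
	 * vm_private_data match above shows. */
	return 0;
}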
324 struct vb2_dc_buf *buf = dbuf->priv; in vb2_dc_dmabuf_ops_attach() local
335 ret = sg_alloc_table(sgt, buf->sgt_base->orig_nents, GFP_KERNEL); in vb2_dc_dmabuf_ops_attach()
341 rd = buf->sgt_base->sgl; in vb2_dc_dmabuf_ops_attach()
451 struct vb2_dc_buf *buf; in vb2_dc_dmabuf_ops_vmap() local
454 buf = dbuf->priv; in vb2_dc_dmabuf_ops_vmap()
455 vaddr = vb2_dc_vaddr(buf->vb, buf); in vb2_dc_dmabuf_ops_vmap()
482 static struct sg_table *vb2_dc_get_base_sgt(struct vb2_dc_buf *buf) in vb2_dc_get_base_sgt() argument
487 if (buf->non_coherent_mem) in vb2_dc_get_base_sgt()
488 return buf->dma_sgt; in vb2_dc_get_base_sgt()
492 dev_err(buf->dev, "failed to alloc sg table\n"); in vb2_dc_get_base_sgt()
496 ret = dma_get_sgtable_attrs(buf->dev, sgt, buf->cookie, buf->dma_addr, in vb2_dc_get_base_sgt()
497 buf->size, buf->attrs); in vb2_dc_get_base_sgt()
499 dev_err(buf->dev, "failed to get scatterlist from DMA API\n"); in vb2_dc_get_base_sgt()
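For coherent memory, exporting requires an sg_table that describes the underlying pages; vb2_dc_get_base_sgt() builds one with dma_get_sgtable_attrs() (non-coherent buffers already own one in dma_sgt). A sketch under the same assumptions, with demo_get_sgt() as an illustrative name:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: build an sg_table for a coherent allocation so it can be
 * handed to dma-buf importers. */
static struct sg_table *demo_get_sgt(struct device *dev, void *cookie,
				     dma_addr_t dma_addr, size_t size,
				     unsigned long attrs)
{
	struct sg_table *sgt;
	int ret;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return NULL;

	ret = dma_get_sgtable_attrs(dev, sgt, cookie, dma_addr, size, attrs);
	if (ret < 0) {
		kfree(sgt);
		return NULL;
	}

	/* Release with sg_free_table() followed by kfree(), as vb2_dc_put()
	 * does for sgt_base above. */
	return sgt;
}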
511 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_get_dmabuf() local
516 exp_info.size = buf->size; in vb2_dc_get_dmabuf()
518 exp_info.priv = buf; in vb2_dc_get_dmabuf()
520 if (!buf->sgt_base) in vb2_dc_get_dmabuf()
521 buf->sgt_base = vb2_dc_get_base_sgt(buf); in vb2_dc_get_dmabuf()
523 if (WARN_ON(!buf->sgt_base)) in vb2_dc_get_dmabuf()
531 refcount_inc(&buf->refcount); in vb2_dc_get_dmabuf()
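vb2_dc_get_dmabuf() caches the base sg_table, fills a dma_buf_export_info and takes an extra reference on the buffer so the exported dma-buf keeps it alive. A sketch of the export step; demo_dmabuf_ops and demo_export() are hypothetical stand-ins for the driver's dma_buf_ops and helper:

#include <linux/dma-buf.h>
#include <linux/err.h>

/* Sketch of exporting a buffer as a dma-buf. */
static struct dma_buf *demo_export(const struct dma_buf_ops *demo_dmabuf_ops,
				   void *priv, size_t size, int flags)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
	struct dma_buf *dbuf;

	exp_info.ops = demo_dmabuf_ops;
	exp_info.size = size;
	exp_info.flags = flags;
	exp_info.priv = priv;

	dbuf = dma_buf_export(&exp_info);
	if (IS_ERR(dbuf))
		return NULL;

	/* The exporter must take its own reference on the backing buffer,
	 * as the refcount_inc() match above shows. */
	return dbuf;
}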
542 struct vb2_dc_buf *buf = buf_priv; in vb2_dc_put_userptr() local
543 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_put_userptr()
552 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dc_put_userptr()
554 pages = frame_vector_pages(buf->vec); in vb2_dc_put_userptr()
557 if (buf->dma_dir == DMA_FROM_DEVICE || in vb2_dc_put_userptr()
558 buf->dma_dir == DMA_BIDIRECTIONAL) in vb2_dc_put_userptr()
559 for (i = 0; i < frame_vector_count(buf->vec); i++) in vb2_dc_put_userptr()
564 dma_unmap_resource(buf->dev, buf->dma_addr, buf->size, in vb2_dc_put_userptr()
565 buf->dma_dir, 0); in vb2_dc_put_userptr()
567 vb2_destroy_framevec(buf->vec); in vb2_dc_put_userptr()
568 kfree(buf); in vb2_dc_put_userptr()
574 struct vb2_dc_buf *buf; in vb2_dc_get_userptr() local
597 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dc_get_userptr()
598 if (!buf) in vb2_dc_get_userptr()
601 buf->dev = dev; in vb2_dc_get_userptr()
602 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dc_get_userptr()
603 buf->vb = vb; in vb2_dc_get_userptr()
611 buf->vec = vec; in vb2_dc_get_userptr()
624 buf->dma_addr = dma_map_resource(buf->dev, in vb2_dc_get_userptr()
625 __pfn_to_phys(nums[0]), size, buf->dma_dir, 0); in vb2_dc_get_userptr()
626 if (dma_mapping_error(buf->dev, buf->dma_addr)) { in vb2_dc_get_userptr()
651 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dc_get_userptr()
666 buf->dma_addr = sg_dma_address(sgt->sgl); in vb2_dc_get_userptr()
667 buf->dma_sgt = sgt; in vb2_dc_get_userptr()
668 buf->non_coherent_mem = 1; in vb2_dc_get_userptr()
671 buf->size = size; in vb2_dc_get_userptr()
673 return buf; in vb2_dc_get_userptr()
676 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, DMA_ATTR_SKIP_CPU_SYNC); in vb2_dc_get_userptr()
688 kfree(buf); in vb2_dc_get_userptr()
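vb2_dc_get_userptr() pins the user range into a frame vector, then either maps a PFN-mapped (VM_PFNMAP) region directly with dma_map_resource() or builds an sg_table from the pinned pages and maps it with dma_map_sgtable(). The unmap match above passes DMA_ATTR_SKIP_CPU_SYNC; the sketch below assumes the map side uses the same attribute so that cache maintenance is deferred to the prepare()/finish() hooks:

#include <linux/dma-mapping.h>

/* Sketch: map an sg_table built from pinned user pages without an
 * implicit CPU cache sync at map time. */
static int demo_map_userptr(struct device *dev, struct sg_table *sgt,
			    enum dma_data_direction dir)
{
	int ret;

	ret = dma_map_sgtable(dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		return ret;

	/* Undo with dma_unmap_sgtable(dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC). */
	return 0;
}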
699 struct vb2_dc_buf *buf = mem_priv; in vb2_dc_map_dmabuf() local
703 if (WARN_ON(!buf->db_attach)) { in vb2_dc_map_dmabuf()
708 if (WARN_ON(buf->dma_sgt)) { in vb2_dc_map_dmabuf()
714 sgt = dma_buf_map_attachment(buf->db_attach, buf->dma_dir); in vb2_dc_map_dmabuf()
722 if (contig_size < buf->size) { in vb2_dc_map_dmabuf()
724 contig_size, buf->size); in vb2_dc_map_dmabuf()
725 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); in vb2_dc_map_dmabuf()
729 buf->dma_addr = sg_dma_address(sgt->sgl); in vb2_dc_map_dmabuf()
730 buf->dma_sgt = sgt; in vb2_dc_map_dmabuf()
731 buf->vaddr = NULL; in vb2_dc_map_dmabuf()
738 struct vb2_dc_buf *buf = mem_priv; in vb2_dc_unmap_dmabuf() local
739 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_unmap_dmabuf()
740 struct dma_buf_map map = DMA_BUF_MAP_INIT_VADDR(buf->vaddr); in vb2_dc_unmap_dmabuf()
742 if (WARN_ON(!buf->db_attach)) { in vb2_dc_unmap_dmabuf()
752 if (buf->vaddr) { in vb2_dc_unmap_dmabuf()
753 dma_buf_vunmap(buf->db_attach->dmabuf, &map); in vb2_dc_unmap_dmabuf()
754 buf->vaddr = NULL; in vb2_dc_unmap_dmabuf()
756 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); in vb2_dc_unmap_dmabuf()
758 buf->dma_addr = 0; in vb2_dc_unmap_dmabuf()
759 buf->dma_sgt = NULL; in vb2_dc_unmap_dmabuf()
764 struct vb2_dc_buf *buf = mem_priv; in vb2_dc_detach_dmabuf() local
767 if (WARN_ON(buf->dma_addr)) in vb2_dc_detach_dmabuf()
768 vb2_dc_unmap_dmabuf(buf); in vb2_dc_detach_dmabuf()
771 dma_buf_detach(buf->db_attach->dmabuf, buf->db_attach); in vb2_dc_detach_dmabuf()
772 kfree(buf); in vb2_dc_detach_dmabuf()
778 struct vb2_dc_buf *buf; in vb2_dc_attach_dmabuf() local
787 buf = kzalloc(sizeof(*buf), GFP_KERNEL); in vb2_dc_attach_dmabuf()
788 if (!buf) in vb2_dc_attach_dmabuf()
791 buf->dev = dev; in vb2_dc_attach_dmabuf()
792 buf->vb = vb; in vb2_dc_attach_dmabuf()
795 dba = dma_buf_attach(dbuf, buf->dev); in vb2_dc_attach_dmabuf()
798 kfree(buf); in vb2_dc_attach_dmabuf()
802 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dc_attach_dmabuf()
803 buf->size = size; in vb2_dc_attach_dmabuf()
804 buf->db_attach = dba; in vb2_dc_attach_dmabuf()
806 return buf; in vb2_dc_attach_dmabuf()
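The remaining matches implement the dma-buf importer side: vb2_dc_attach_dmabuf() attaches the device, vb2_dc_map_dmabuf() maps the attachment and checks that the result is contiguous and large enough, and the unmap/detach helpers reverse those steps. A compact, hypothetical sketch of that lifecycle (demo_import() is illustrative; error handling is trimmed to the essentials):

#include <linux/dma-buf.h>
#include <linux/err.h>

/* Sketch: attach a device to an imported dma-buf and map it for DMA. */
static struct sg_table *demo_import(struct device *dev, struct dma_buf *dbuf,
				    enum dma_data_direction dir,
				    struct dma_buf_attachment **attach_out)
{
	struct dma_buf_attachment *attach;
	struct sg_table *sgt;

	attach = dma_buf_attach(dbuf, dev);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	sgt = dma_buf_map_attachment(attach, dir);
	if (IS_ERR(sgt)) {
		dma_buf_detach(dbuf, attach);
		return sgt;
	}

	*attach_out = attach;
	/* Tear down with dma_buf_unmap_attachment() + dma_buf_detach(). */
	return sgt;
}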