Searched refs:sg_dma_len (Results 1 – 25 of 349) sorted by relevance
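sg_dma_len() is the scatterlist accessor for the DMA length of a mapped entry: after dma_map_sg(), the device-visible address/length pairs live in sg_dma_address()/sg_dma_len(), and sg_dma_len(sg) can differ from the CPU-side sg->length when an IOMMU coalesces entries. Most of the hits below are variations of the walk sketched here. A minimal sketch, assuming dev/sgl/nents are set up by the caller; program_hw_segment() is a hypothetical device hook, not a kernel API:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical device callback, for illustration only. */
static void program_hw_segment(dma_addr_t addr, unsigned int len);

static int example_map_and_walk(struct device *dev,
				struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int mapped, i;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	/*
	 * dma_map_sg() may merge entries (e.g. behind an IOMMU), so
	 * "mapped" can be smaller than "nents".  Only the first "mapped"
	 * entries carry valid DMA addresses, and their lengths must be
	 * read through sg_dma_len(), not sg->length.
	 */
	for_each_sg(sgl, sg, mapped, i)
		program_hw_segment(sg_dma_address(sg), sg_dma_len(sg));

	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}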


/linux/drivers/parisc/
iommu-helpers.h:47 BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg))); in iommu_fill_pdir()
51 dma_len = sg_dma_len(startsg); in iommu_fill_pdir()
52 sg_dma_len(startsg) = 0; in iommu_fill_pdir()
71 sg_dma_len(dma_sg) += startsg->length; in iommu_fill_pdir()
125 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
142 sg_dma_len(startsg) = 0; in iommu_coalesce_chunks()
171 sg_dma_len(contig_sg) = dma_len; in iommu_coalesce_chunks()
/linux/drivers/media/pci/tw68/
tw68-risc.c:57 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
58 offset -= sg_dma_len(sg); in tw68_risc_field()
61 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
76 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
84 while (todo > sg_dma_len(sg)) { in tw68_risc_field()
87 sg_dma_len(sg)); in tw68_risc_field()
89 todo -= sg_dma_len(sg); in tw68_risc_field()
91 done += sg_dma_len(sg); in tw68_risc_field()
/linux/drivers/infiniband/core/
umem_dmabuf.c:39 if (start < cur + sg_dma_len(sg) && cur < end) in ib_umem_dmabuf_map_pages()
41 if (cur <= start && start < cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
47 sg_dma_len(sg) -= offset; in ib_umem_dmabuf_map_pages()
50 if (cur < end && end <= cur + sg_dma_len(sg)) { in ib_umem_dmabuf_map_pages()
51 unsigned long trim = cur + sg_dma_len(sg) - end; in ib_umem_dmabuf_map_pages()
55 sg_dma_len(sg) -= trim; in ib_umem_dmabuf_map_pages()
58 cur += sg_dma_len(sg); in ib_umem_dmabuf_map_pages()
90 sg_dma_len(umem_dmabuf->first_sg) += in ib_umem_dmabuf_unmap_pages()
96 sg_dma_len(umem_dmabuf->last_sg) += in ib_umem_dmabuf_unmap_pages()
/linux/drivers/gpu/drm/i915/
i915_scatterlist.h:33 if (dma && s.sgp && sg_dma_len(s.sgp) == 0) { in __sgt_iter()
39 s.max += sg_dma_len(s.sgp); in __sgt_iter()
56 return sg_dma_len(sg) >> PAGE_SHIFT; in __sg_dma_page_count()
120 while (sg && sg_dma_len(sg)) { in i915_sg_dma_sizes()
122 GEM_BUG_ON(!IS_ALIGNED(sg_dma_len(sg), PAGE_SIZE)); in i915_sg_dma_sizes()
123 page_sizes |= sg_dma_len(sg); in i915_sg_dma_sizes()
i915_scatterlist.c:32 sg_dma_len(new_sg) = sg_dma_len(sg); in i915_sg_trim()
89 sg_dma_len(sg) = 0; in i915_sg_from_mm_node()
96 sg_dma_len(sg) += len; in i915_sg_from_mm_node()
165 sg_dma_len(sg) = 0; in i915_sg_from_buddy_resource()
172 sg_dma_len(sg) += len; in i915_sg_from_buddy_resource()
/linux/net/rds/
ib_frmr.c:134 ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_dma_len, in rds_ib_post_reg_frmr()
136 if (unlikely(ret != ibmr->sg_dma_len)) in rds_ib_post_reg_frmr()
205 ibmr->sg_dma_len = 0; in rds_ib_map_frmr()
207 WARN_ON(ibmr->sg_dma_len); in rds_ib_map_frmr()
208 ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
210 if (unlikely(!ibmr->sg_dma_len)) { in rds_ib_map_frmr()
220 for (i = 0; i < ibmr->sg_dma_len; ++i) { in rds_ib_map_frmr()
221 unsigned int dma_len = sg_dma_len(&ibmr->sg[i]); in rds_ib_map_frmr()
233 if (i < ibmr->sg_dma_len - 1) in rds_ib_map_frmr()
262 ibmr->sg_dma_len = 0; in rds_ib_map_frmr()
[all …]
ib.h:327 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_cpu() argument
333 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
335 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_cpu()
342 unsigned int sg_dma_len, in rds_ib_dma_sync_sg_for_device() argument
348 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
350 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_device()
/linux/lib/
sg_split.c:36 sglen = mapped ? sg_dma_len(sg) : sg->length; in sg_calculate_split()
95 sg_dma_len(out_sg) = 0; in sg_split_phys()
114 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
117 sg_dma_len(out_sg) -= split->skip_sg0; in sg_split_mapped()
121 sg_dma_len(--out_sg) = split->length_last_sg; in sg_split_mapped()
/linux/drivers/media/pci/bt8xx/
bttv-risc.c:74 offset -= sg_dma_len(sg); in bttv_risc_packed()
77 if (bpl <= sg_dma_len(sg)-offset) { in bttv_risc_packed()
87 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
89 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
92 while (todo > sg_dma_len(sg)) { in bttv_risc_packed()
94 sg_dma_len(sg)); in bttv_risc_packed()
96 todo -= sg_dma_len(sg); in bttv_risc_packed()
177 yoffset -= sg_dma_len(ysg); in bttv_risc_planar()
184 ylen = sg_dma_len(ysg) - yoffset; in bttv_risc_planar()
187 uoffset -= sg_dma_len(usg); in bttv_risc_planar()
[all …]
/linux/drivers/media/pci/cx25821/
cx25821-core.c:1017 offset -= sg_dma_len(sg); in cx25821_risc_field()
1031 (sg_dma_len(sg) - offset)); in cx25821_risc_field()
1034 todo -= (sg_dma_len(sg) - offset); in cx25821_risc_field()
1037 while (todo > sg_dma_len(sg)) { in cx25821_risc_field()
1039 sg_dma_len(sg)); in cx25821_risc_field()
1042 todo -= sg_dma_len(sg); in cx25821_risc_field()
1122 offset -= sg_dma_len(sg); in cx25821_risc_field_audio()
1142 (sg_dma_len(sg) - offset)); in cx25821_risc_field_audio()
1148 while (todo > sg_dma_len(sg)) { in cx25821_risc_field_audio()
1150 sg_dma_len(sg)); in cx25821_risc_field_audio()
[all …]
/linux/drivers/scsi/qla2xxx/
qla_dsd.h:15 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd32()
28 put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length); in append_dsd64()
/linux/drivers/crypto/gemini/
sl3516-ce-cipher.c:55 if ((sg_dma_len(sg) % 16) != 0) { in sl3516_ce_need_fallback()
71 if ((sg_dma_len(sg) % 16) != 0) { in sl3516_ce_need_fallback()
179 if (sg_dma_len(sg) == 0) in sl3516_ce_cipher()
182 todo = min(len, sg_dma_len(sg)); in sl3516_ce_cipher()
201 if (sg_dma_len(sg) == 0) in sl3516_ce_cipher()
204 todo = min(len, sg_dma_len(sg)); in sl3516_ce_cipher()
/linux/drivers/spi/
spi-bcm2835.c:490 bs->tx_prologue = sg_dma_len(&tfr->tx_sg.sgl[0]) & 3; in bcm2835_spi_transfer_prologue()
493 bs->rx_prologue = sg_dma_len(&tfr->rx_sg.sgl[0]) & 3; in bcm2835_spi_transfer_prologue()
501 !(sg_dma_len(&tfr->tx_sg.sgl[0]) & ~3); in bcm2835_spi_transfer_prologue()
527 sg_dma_len(&tfr->rx_sg.sgl[0]) -= bs->rx_prologue; in bcm2835_spi_transfer_prologue()
550 sg_dma_len(&tfr->tx_sg.sgl[0]) -= bs->tx_prologue; in bcm2835_spi_transfer_prologue()
552 sg_dma_len(&tfr->tx_sg.sgl[0]) = 0; in bcm2835_spi_transfer_prologue()
554 sg_dma_len(&tfr->tx_sg.sgl[1]) -= 4; in bcm2835_spi_transfer_prologue()
575 sg_dma_len(&tfr->rx_sg.sgl[0]) += bs->rx_prologue; in bcm2835_spi_undo_prologue()
583 sg_dma_len(&tfr->tx_sg.sgl[0]) += bs->tx_prologue; in bcm2835_spi_undo_prologue()
585 sg_dma_len(&tfr->tx_sg.sgl[0]) = bs->tx_prologue - 4; in bcm2835_spi_undo_prologue()
[all …]
/linux/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c:38 if ((sg_dma_len(sg) % 16) != 0) in sun8i_ss_need_fallback()
48 if ((sg_dma_len(sg) % 16) != 0) in sun8i_ss_need_fallback()
191 if (sg_dma_len(sg) == 0) in sun8i_ss_cipher()
194 todo = min(len, sg_dma_len(sg)); in sun8i_ss_cipher()
213 if (sg_dma_len(sg) == 0) in sun8i_ss_cipher()
216 todo = min(len, sg_dma_len(sg)); in sun8i_ss_cipher()
/linux/drivers/media/pci/cx88/
cx88-core.c:90 while (offset && offset >= sg_dma_len(sg)) { in cx88_risc_field()
91 offset -= sg_dma_len(sg); in cx88_risc_field()
98 if (bpl <= sg_dma_len(sg) - offset) { in cx88_risc_field()
108 (sg_dma_len(sg) - offset)); in cx88_risc_field()
110 todo -= (sg_dma_len(sg) - offset); in cx88_risc_field()
113 while (todo > sg_dma_len(sg)) { in cx88_risc_field()
115 sg_dma_len(sg)); in cx88_risc_field()
117 todo -= sg_dma_len(sg); in cx88_risc_field()
/linux/drivers/media/pci/ivtv/
ivtv-udma.c:68 dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg)); in ivtv_udma_fill_sg_array()
71 buffer_offset += sg_dma_len(sg); in ivtv_udma_fill_sg_array()
73 split -= sg_dma_len(sg); in ivtv_udma_fill_sg_array()
/linux/drivers/iommu/
dma-iommu.c:878 unsigned int s_length = sg_dma_len(s); in __finalise_sg()
884 sg_dma_len(s) = 0; in __finalise_sg()
907 sg_dma_len(cur) = cur_len; in __finalise_sg()
928 if (sg_dma_len(s)) in __invalidate_sg()
929 s->length = sg_dma_len(s); in __invalidate_sg()
931 sg_dma_len(s) = 0; in __invalidate_sg()
943 sg_dma_len(s), dir, attrs); in iommu_dma_unmap_sg_swiotlb()
957 sg_dma_len(s) = s->length; in iommu_dma_map_sg_swiotlb()
1012 sg_dma_len(s) = s_length; in iommu_dma_map_sg()
1086 if (sg_dma_len(tmp) == 0) in iommu_dma_unmap_sg()
[all …]
/linux/drivers/crypto/ccp/
ccp-dmaengine.c:384 src_len = sg_dma_len(src_sg); in ccp_create_desc()
387 dst_len = sg_dma_len(dst_sg); in ccp_create_desc()
400 src_len = sg_dma_len(src_sg); in ccp_create_desc()
414 dst_len = sg_dma_len(dst_sg); in ccp_create_desc()
493 sg_dma_len(&dst_sg) = len; in ccp_prep_dma_memcpy()
497 sg_dma_len(&src_sg) = len; in ccp_prep_dma_memcpy()
/linux/drivers/dma/
ste_dma40_ll.c:286 unsigned int len = sg_dma_len(current_sg); in d40_phy_sg_to_lli()
289 total_size += sg_dma_len(current_sg); in d40_phy_sg_to_lli()
435 unsigned int len = sg_dma_len(current_sg); in d40_log_sg_to_lli()
438 total_size += sg_dma_len(current_sg); in d40_log_sg_to_lli()
timb_dma.c:148 if (sg_dma_len(sg) > USHRT_MAX) { in td_fill_desc()
154 if (sg_dma_len(sg) % sizeof(u32)) { in td_fill_desc()
156 sg_dma_len(sg)); in td_fill_desc()
168 dma_desc[3] = (sg_dma_len(sg) >> 8) & 0xff; in td_fill_desc()
169 dma_desc[2] = (sg_dma_len(sg) >> 0) & 0xff; in td_fill_desc()
/linux/drivers/gpu/drm/i915/selftests/
i915_vma.c:391 if (sg_dma_len(sg) != PAGE_SIZE) { in assert_rotated()
393 sg_dma_len(sg), PAGE_SIZE, in assert_rotated()
418 if (sg_dma_len(sg) != left) { in assert_rotated()
420 sg_dma_len(sg), left, x, y); in assert_rotated()
466 left = sg_dma_len(sg); in assert_remapped()
474 sg_dma_len(sg), PAGE_SIZE, in assert_remapped()
511 if (sg_dma_len(sg) != left) { in assert_remapped()
513 sg_dma_len(sg), left, in assert_remapped()
/linux/drivers/gpu/drm/i915/gt/
gen8_ppgtt.c:424 GEM_BUG_ON(sg_dma_len(iter->sg) < I915_GTT_PAGE_SIZE); in gen8_ppgtt_insert_pte()
430 if (!iter->sg || sg_dma_len(iter->sg) == 0) { in gen8_ppgtt_insert_pte()
436 iter->max = iter->dma + sg_dma_len(iter->sg); in gen8_ppgtt_insert_pte()
463 unsigned int rem = sg_dma_len(iter->sg); in gen8_ppgtt_insert_huge()
506 GEM_BUG_ON(sg_dma_len(iter->sg) < page_size); in gen8_ppgtt_insert_huge()
517 rem = sg_dma_len(iter->sg); in gen8_ppgtt_insert_huge()
577 } while (iter->sg && sg_dma_len(iter->sg)); in gen8_ppgtt_insert_huge()
intel_migrate.c:248 return (struct sgt_dma){ sg, addr, addr + sg_dma_len(sg) }; in sg_sgt()
333 if (!it->sg || sg_dma_len(it->sg) == 0) in emit_pte()
337 it->max = it->dma + sg_dma_len(it->sg); in emit_pte()
483 if (err || !it_src.sg || !sg_dma_len(it_src.sg)) in intel_context_migrate_copy()
592 if (err || !it.sg || !sg_dma_len(it.sg)) in intel_context_migrate_clear()
/linux/drivers/media/pci/cobalt/
cobalt-omnitek.c:193 bytes = min(sg_dma_len(scatter_list) - offset, in descriptor_list_create()
236 bytes = min(sg_dma_len(scatter_list) - offset, in descriptor_list_create()
241 if (sg_dma_len(scatter_list) == offset) { in descriptor_list_create()
/linux/drivers/dma/qcom/
qcom_adm.c:233 u32 remainder = sg_dma_len(sg); in adm_process_fc_descriptors()
300 u32 remainder = sg_dma_len(sg); in adm_process_non_fc_descriptors()
392 box_count += DIV_ROUND_UP(sg_dma_len(sg) / burst, in adm_prep_slave_sg()
394 if (sg_dma_len(sg) % burst) in adm_prep_slave_sg()
397 single_count += DIV_ROUND_UP(sg_dma_len(sg), in adm_prep_slave_sg()
426 async_desc->length += sg_dma_len(sg); in adm_prep_slave_sg()
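A recurring detail across these results (umem_dmabuf.c, sg_split.c, i915_sg_from_mm_node()): sg_dma_len() expands to an lvalue, so drivers also assign through it to zero, trim, or grow mapped entries. A small illustrative sketch of that trim idiom, with hypothetical names:

#include <linux/scatterlist.h>

/*
 * Illustrative only: shrink a mapped entry by "offset" bytes from the
 * front, the way ib_umem_dmabuf_map_pages() trims its first entry.
 * sg_dma_address() and sg_dma_len() both expand to lvalues, so plain
 * compound assignment works.
 */
static void example_trim_front(struct scatterlist *sg, unsigned int offset)
{
	sg_dma_address(sg) += offset;
	sg_dma_len(sg) -= offset;
}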

Completed in 59 milliseconds
