Lines matching refs: xd — every reference to the descriptor pointer xd in the UniPhier XDMAC dmaengine driver; the leading number on each entry is the line number in the driver source.
89 struct uniphier_xdmac_desc *xd; member
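The line-89 member is the channel's pointer to the descriptor currently being executed; everything else in this listing reads or writes through it. From the accesses below (xd->nodes[...].src/dst/burst_size/nr_burst, xd->dir, xd->nr_node, xd->cur_node, xd->vd) the descriptor looks roughly like the sketch here; the field types and the node struct name are assumptions inferred from usage, not copied from the driver.

    /* Sketch of the descriptor implied by the accesses in this listing.
     * Types are educated guesses; vd being a struct virt_dma_desc is
     * suggested by the vchan_* calls further down. */
    struct uniphier_xdmac_desc_node {
        dma_addr_t src;                /* source bus address for this node */
        dma_addr_t dst;                /* destination bus address */
        u32 burst_size;                /* bytes moved per burst */
        u32 nr_burst;                  /* number of bursts in this node */
    };

    struct uniphier_xdmac_desc {
        struct virt_dma_desc vd;       /* virt-dma bookkeeping (cookie, list) */
        unsigned int nr_node;          /* entries in nodes[] */
        unsigned int cur_node;         /* node currently in flight */
        enum dma_transfer_direction dir;
        struct uniphier_xdmac_desc_node nodes[];   /* flexible array member */
    };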
132 struct uniphier_xdmac_desc *xd) in uniphier_xdmac_chan_start() argument
139 src_addr = xd->nodes[xd->cur_node].src; in uniphier_xdmac_chan_start()
140 dst_addr = xd->nodes[xd->cur_node].dst; in uniphier_xdmac_chan_start()
141 its = xd->nodes[xd->cur_node].burst_size; in uniphier_xdmac_chan_start()
142 tnum = xd->nodes[xd->cur_node].nr_burst; in uniphier_xdmac_chan_start()
148 if (xd->dir == DMA_DEV_TO_MEM) { in uniphier_xdmac_chan_start()
157 if (xd->dir == DMA_MEM_TO_DEV) { in uniphier_xdmac_chan_start()
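Lines 132-157 together show what uniphier_xdmac_chan_start() does with a descriptor: it reads the current node's source, destination, burst size and burst count, then branches on xd->dir to pick the address-update behaviour for each side of the transfer. A reduced sketch follows; the XDMAC register writes are not part of this listing, so they are only indicated by comments, and the fixed-vs-incrementing interpretation of the two branches is an assumption based on the usual dmaengine slave semantics.

    static void uniphier_xdmac_chan_start(struct uniphier_xdmac_chan *xc,
                                          struct uniphier_xdmac_desc *xd)
    {
        /* Parameters of the node about to run. */
        dma_addr_t src_addr = xd->nodes[xd->cur_node].src;
        dma_addr_t dst_addr = xd->nodes[xd->cur_node].dst;
        u32 its  = xd->nodes[xd->cur_node].burst_size;
        u32 tnum = xd->nodes[xd->cur_node].nr_burst;

        if (xd->dir == DMA_DEV_TO_MEM) {
            /* device FIFO -> memory: keep the source fixed,
             * increment the destination (assumed) */
        }
        if (xd->dir == DMA_MEM_TO_DEV) {
            /* memory -> device FIFO: increment the source,
             * keep the destination fixed (assumed) */
        }

        /* ... program src_addr, dst_addr, its, tnum and enable the channel ... */
    }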
219 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_start() local
221 xd = uniphier_xdmac_next_desc(xc); in uniphier_xdmac_start()
222 if (xd) in uniphier_xdmac_start()
223 uniphier_xdmac_chan_start(xc, xd); in uniphier_xdmac_start()
226 xc->xd = xd; in uniphier_xdmac_start()
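Lines 219-226 are uniphier_xdmac_start(): fetch the next issued descriptor, program its first node if there is one, and remember it in xc->xd so the interrupt handler and terminate_all know what is in flight. Roughly (uniphier_xdmac_next_desc() is assumed to pop the next descriptor from the virt-dma issued list and may return NULL):

    static void uniphier_xdmac_start(struct uniphier_xdmac_chan *xc)
    {
        struct uniphier_xdmac_desc *xd;

        xd = uniphier_xdmac_next_desc(xc);   /* NULL when nothing is queued */
        if (xd)
            uniphier_xdmac_chan_start(xc, xd);

        /* Record (or clear) the in-flight descriptor. */
        xc->xd = xd;
    }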
247 } else if ((stat & XDMAC_ID_ENDIDF) && xc->xd) { in uniphier_xdmac_chan_irq()
248 xc->xd->cur_node++; in uniphier_xdmac_chan_irq()
249 if (xc->xd->cur_node >= xc->xd->nr_node) { in uniphier_xdmac_chan_irq()
250 vchan_cookie_complete(&xc->xd->vd); in uniphier_xdmac_chan_irq()
253 uniphier_xdmac_chan_start(xc, xc->xd); in uniphier_xdmac_chan_irq()
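Lines 247-253 are the completion branch of the channel interrupt handler: with an end-of-transfer status and a descriptor in flight, advance cur_node; when every node has run, complete the cookie through the virt-dma layer, otherwise restart the hardware on the next node. Sketch of that branch only (the status handling around it, and the restart of the next queued descriptor, are assumptions about the omitted lines):

    /* Inside uniphier_xdmac_chan_irq(), after reading the status word. */
    if ((stat & XDMAC_ID_ENDIDF) && xc->xd) {
        xc->xd->cur_node++;
        if (xc->xd->cur_node >= xc->xd->nr_node) {
            /* All nodes done: report completion and (assumed)
             * move on to the next issued descriptor. */
            vchan_cookie_complete(&xc->xd->vd);
            uniphier_xdmac_start(xc);
        } else {
            /* More nodes left: program the next one. */
            uniphier_xdmac_chan_start(xc, xc->xd);
        }
    }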
284 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_prep_dma_memcpy() local
294 xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT); in uniphier_xdmac_prep_dma_memcpy()
295 if (!xd) in uniphier_xdmac_prep_dma_memcpy()
300 xd->nodes[i].src = src; in uniphier_xdmac_prep_dma_memcpy()
301 xd->nodes[i].dst = dst; in uniphier_xdmac_prep_dma_memcpy()
302 xd->nodes[i].burst_size = burst_size; in uniphier_xdmac_prep_dma_memcpy()
303 xd->nodes[i].nr_burst = len / burst_size; in uniphier_xdmac_prep_dma_memcpy()
310 xd->dir = DMA_MEM_TO_MEM; in uniphier_xdmac_prep_dma_memcpy()
311 xd->nr_node = nr; in uniphier_xdmac_prep_dma_memcpy()
312 xd->cur_node = 0; in uniphier_xdmac_prep_dma_memcpy()
314 return vchan_tx_prep(vc, &xd->vd, flags); in uniphier_xdmac_prep_dma_memcpy()
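Lines 284-314 are the memcpy preparation path: allocate a descriptor with struct_size() for nr nodes, fill each node with a source, destination, burst size and burst count, mark it MEM_TO_MEM and hand it to vchan_tx_prep(). How the copy is cut into nodes is in lines this listing omits, so the sizing below (the burst width, the per-chunk advance, and the limit of XDMAC_MAX_WORDS bursts per node) is an assumed placeholder, not the driver's actual policy:

    static struct dma_async_tx_descriptor *
    uniphier_xdmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
                                   dma_addr_t src, size_t len,
                                   unsigned long flags)
    {
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_desc *xd;
        size_t burst_size = 8;         /* placeholder burst width in bytes */
        unsigned int nr, i;

        /* Assumed split: each node carries at most XDMAC_MAX_WORDS bursts. */
        nr = DIV_ROUND_UP(len, (size_t)XDMAC_MAX_WORDS * burst_size);

        xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);
        if (!xd)
            return NULL;

        for (i = 0; i < nr; i++) {
            size_t chunk = min_t(size_t, len,
                                 (size_t)XDMAC_MAX_WORDS * burst_size);

            xd->nodes[i].src = src;
            xd->nodes[i].dst = dst;
            xd->nodes[i].burst_size = burst_size;
            xd->nodes[i].nr_burst = chunk / burst_size;

            src += chunk;              /* assumed advance to the next chunk */
            dst += chunk;
            len -= chunk;
        }

        xd->dir = DMA_MEM_TO_MEM;
        xd->nr_node = nr;
        xd->cur_node = 0;

        return vchan_tx_prep(vc, &xd->vd, flags);
    }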
325 struct uniphier_xdmac_desc *xd; in uniphier_xdmac_prep_slave_sg() local
350 xd = kzalloc(struct_size(xd, nodes, sg_len), GFP_NOWAIT); in uniphier_xdmac_prep_slave_sg()
351 if (!xd) in uniphier_xdmac_prep_slave_sg()
355 xd->nodes[i].src = (direction == DMA_DEV_TO_MEM) in uniphier_xdmac_prep_slave_sg()
357 xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV) in uniphier_xdmac_prep_slave_sg()
359 xd->nodes[i].burst_size = maxburst * buswidth; in uniphier_xdmac_prep_slave_sg()
360 xd->nodes[i].nr_burst = in uniphier_xdmac_prep_slave_sg()
361 sg_dma_len(sg) / xd->nodes[i].burst_size; in uniphier_xdmac_prep_slave_sg()
371 if (sg_dma_len(sg) % xd->nodes[i].burst_size) { in uniphier_xdmac_prep_slave_sg()
374 kfree(xd); in uniphier_xdmac_prep_slave_sg()
378 if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) { in uniphier_xdmac_prep_slave_sg()
381 kfree(xd); in uniphier_xdmac_prep_slave_sg()
386 xd->dir = direction; in uniphier_xdmac_prep_slave_sg()
387 xd->nr_node = sg_len; in uniphier_xdmac_prep_slave_sg()
388 xd->cur_node = 0; in uniphier_xdmac_prep_slave_sg()
390 return vchan_tx_prep(vc, &xd->vd, flags); in uniphier_xdmac_prep_slave_sg()
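Lines 325-390 show the slave (device) preparation path building one node per scatterlist entry: the device-side address presumably comes from the channel's slave config, the memory side from sg_dma_address(), the burst size from maxburst * buswidth, and two sanity checks drop the whole descriptor if a segment's length is not a multiple of the burst size or needs more than XDMAC_MAX_WORDS bursts. Below is a sketch of the per-entry loop; the sconfig field names, the maxburst/buswidth derivation and the surrounding allocation and error reporting are assumptions about the omitted lines:

    /* Per-scatterlist-entry node setup, as implied by lines 350-390. */
    for_each_sg(sgl, sg, sg_len, i) {
        /* Device side: fixed address from the slave config (assumed
         * xc->sconfig); memory side: the mapped segment address. */
        xd->nodes[i].src = (direction == DMA_DEV_TO_MEM)
                           ? xc->sconfig.src_addr : sg_dma_address(sg);
        xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV)
                           ? xc->sconfig.dst_addr : sg_dma_address(sg);

        xd->nodes[i].burst_size = maxburst * buswidth;
        xd->nodes[i].nr_burst =
                sg_dma_len(sg) / xd->nodes[i].burst_size;

        /* The hardware cannot express these cases, so bail out and
         * free the partially built descriptor. */
        if (sg_dma_len(sg) % xd->nodes[i].burst_size) {
            kfree(xd);
            return NULL;
        }
        if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) {
            kfree(xd);
            return NULL;
        }
    }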
414 if (xc->xd) { in uniphier_xdmac_terminate_all()
415 vchan_terminate_vdesc(&xc->xd->vd); in uniphier_xdmac_terminate_all()
416 xc->xd = NULL; in uniphier_xdmac_terminate_all()
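Lines 414-416 are the interesting part of terminate_all: an in-flight descriptor is handed back to the virt-dma layer with vchan_terminate_vdesc() and xc->xd is cleared, so a later issue_pending can start fresh. A sketch with the usual virt-dma boilerplate around it; the channel-abort step and the to_uniphier_xdmac_chan() helper are assumptions, since they do not appear in this listing:

    static int uniphier_xdmac_terminate_all(struct dma_chan *chan)
    {
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc);  /* assumed helper */
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&vc->lock, flags);

        /* ... stop/abort the hardware channel (not in this listing) ... */

        if (xc->xd) {
            vchan_terminate_vdesc(&xc->xd->vd);   /* give the vdesc back to virt-dma */
            xc->xd = NULL;                        /* channel is idle again */
        }

        /* Flush everything still queued on the virtual channel. */
        vchan_get_all_descriptors(vc, &head);
        spin_unlock_irqrestore(&vc->lock, flags);
        vchan_dma_desc_free_list(vc, &head);

        return 0;
    }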
442 if (vchan_issue_pending(vc) && !xc->xd) in uniphier_xdmac_issue_pending()
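Line 442 is the guard in issue_pending: kick the hardware only when new work has just been issued and nothing is in flight (xc->xd == NULL); if a descriptor is already running, the interrupt handler chains to the next one on completion. A sketch of the whole callback, using the same assumed to_uniphier_xdmac_chan() helper:

    static void uniphier_xdmac_issue_pending(struct dma_chan *chan)
    {
        struct virt_dma_chan *vc = to_virt_chan(chan);
        struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc);  /* assumed helper */
        unsigned long flags;

        spin_lock_irqsave(&vc->lock, flags);

        /* Move newly submitted descriptors to the issued list; start the
         * hardware only if the channel is currently idle. */
        if (vchan_issue_pending(vc) && !xc->xd)
            uniphier_xdmac_start(xc);

        spin_unlock_irqrestore(&vc->lock, flags);
    }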