Lines matching refs:chan — every reference to chan in the Linux STM32 DMA driver (drivers/dma/stm32-dma.c). Each entry gives the source line number, the matching line, the enclosing function, and whether the reference is a struct member, a function argument, or a local variable.
218 struct stm32_dma_chan chan[STM32_DMA_MAX_CHANNELS]; member
221 static struct stm32_dma_device *stm32_dma_get_dev(struct stm32_dma_chan *chan) in stm32_dma_get_dev() argument
223 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
229 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
237 static struct device *chan2dev(struct stm32_dma_chan *chan) in chan2dev() argument
239 return &chan->vchan.chan.dev->device; in chan2dev()
252 static int stm32_dma_get_width(struct stm32_dma_chan *chan, in stm32_dma_get_width() argument
263 dev_err(chan2dev(chan), "Dma bus width not supported\n"); in stm32_dma_get_width()
350 static int stm32_dma_get_burst(struct stm32_dma_chan *chan, u32 maxburst) in stm32_dma_get_burst() argument
363 dev_err(chan2dev(chan), "Dma burst size not supported\n"); in stm32_dma_get_burst()
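The two helpers above, stm32_dma_get_width() and stm32_dma_get_burst(), translate dmaengine bus widths and maxburst values into the 2-bit PSIZE/MSIZE and PBURST/MBURST encodings of the stream configuration register, rejecting anything else. A minimal standalone sketch of that mapping, assuming the standard STM32 DMA SxCR encodings; the helper names below are illustrative, not the driver's:

#include <stdio.h>

/* Illustrative SxCR field encodings: width 00 = byte, 01 = half-word,
 * 10 = word; burst 00 = single, 01 = INCR4, 10 = INCR8, 11 = INCR16.
 */
static int width_to_field(unsigned int bytes)
{
        switch (bytes) {
        case 1: return 0;       /* byte */
        case 2: return 1;       /* half-word */
        case 4: return 2;       /* word */
        default: return -1;     /* rejected, as in stm32_dma_get_width() */
        }
}

static int burst_to_field(unsigned int maxburst)
{
        switch (maxburst) {
        case 0:
        case 1:  return 0;      /* single transfer */
        case 4:  return 1;      /* INCR4 */
        case 8:  return 2;      /* INCR8 */
        case 16: return 3;      /* INCR16 */
        default: return -1;     /* rejected, as in stm32_dma_get_burst() */
        }
}

int main(void)
{
        printf("width 4 bytes -> field %d, maxburst 8 -> field %d\n",
               width_to_field(4), burst_to_field(8));
        return 0;
}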
368 static void stm32_dma_set_fifo_config(struct stm32_dma_chan *chan, in stm32_dma_set_fifo_config() argument
371 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_MASK; in stm32_dma_set_fifo_config()
372 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_DMEIE; in stm32_dma_set_fifo_config()
376 chan->chan_reg.dma_scr |= STM32_DMA_SCR_DMEIE; in stm32_dma_set_fifo_config()
379 chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK; in stm32_dma_set_fifo_config()
386 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_slave_config() local
388 memcpy(&chan->dma_sconfig, config, sizeof(*config)); in stm32_dma_slave_config()
390 chan->config_init = true; in stm32_dma_slave_config()
395 static u32 stm32_dma_irq_status(struct stm32_dma_chan *chan) in stm32_dma_irq_status() argument
397 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_irq_status()
408 if (chan->id & 4) in stm32_dma_irq_status()
413 flags = dma_isr >> (((chan->id & 2) << 3) | ((chan->id & 1) * 6)); in stm32_dma_irq_status()
418 static void stm32_dma_irq_clear(struct stm32_dma_chan *chan, u32 flags) in stm32_dma_irq_clear() argument
420 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_irq_clear()
431 dma_ifcr = flags << (((chan->id & 2) << 3) | ((chan->id & 1) * 6)); in stm32_dma_irq_clear()
433 if (chan->id & 4) in stm32_dma_irq_clear()
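stm32_dma_irq_status() and stm32_dma_irq_clear() both locate a stream's six flag bits (FEIF, DMEIF, TEIF, HTIF, TCIF) inside the shared status and clear registers: bit 2 of the stream id selects the high or low register, and the shift expression above yields bit offsets 0, 6, 16 and 22 for the remaining two id bits. A small standalone check of that offset computation (plain C reimplementation for illustration only, not driver code):

#include <stdio.h>

/* Bit offset of a stream's flag group inside the ISR/IFCR registers, as
 * computed in the two functions above: streams 0..3 land at offsets
 * 0, 6, 16 and 22; streams 4..7 use the same offsets in the high
 * register, which is selected by (id & 4).
 */
static unsigned int flag_offset(unsigned int id)
{
        return ((id & 2) << 3) | ((id & 1) * 6);
}

int main(void)
{
        for (unsigned int id = 0; id < 8; id++)
                printf("stream %u: %s register, bit offset %u\n",
                       id, (id & 4) ? "high" : "low", flag_offset(id));
        return 0;
}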
439 static int stm32_dma_disable_chan(struct stm32_dma_chan *chan) in stm32_dma_disable_chan() argument
441 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_disable_chan()
444 id = chan->id; in stm32_dma_disable_chan()
460 static void stm32_dma_stop(struct stm32_dma_chan *chan) in stm32_dma_stop() argument
462 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_stop()
467 dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_stop()
469 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr); in stm32_dma_stop()
470 dma_sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_stop()
472 stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), dma_sfcr); in stm32_dma_stop()
475 ret = stm32_dma_disable_chan(chan); in stm32_dma_stop()
480 status = stm32_dma_irq_status(chan); in stm32_dma_stop()
482 dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n", in stm32_dma_stop()
484 stm32_dma_irq_clear(chan, status); in stm32_dma_stop()
487 chan->busy = false; in stm32_dma_stop()
492 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_terminate_all() local
496 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
498 if (chan->desc) { in stm32_dma_terminate_all()
499 dma_cookie_complete(&chan->desc->vdesc.tx); in stm32_dma_terminate_all()
500 vchan_terminate_vdesc(&chan->desc->vdesc); in stm32_dma_terminate_all()
501 if (chan->busy) in stm32_dma_terminate_all()
502 stm32_dma_stop(chan); in stm32_dma_terminate_all()
503 chan->desc = NULL; in stm32_dma_terminate_all()
506 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
507 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
508 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
515 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_synchronize() local
517 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
520 static void stm32_dma_dump_reg(struct stm32_dma_chan *chan) in stm32_dma_dump_reg() argument
522 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_dump_reg()
523 u32 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_dump_reg()
524 u32 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id)); in stm32_dma_dump_reg()
525 u32 spar = stm32_dma_read(dmadev, STM32_DMA_SPAR(chan->id)); in stm32_dma_dump_reg()
526 u32 sm0ar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(chan->id)); in stm32_dma_dump_reg()
527 u32 sm1ar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(chan->id)); in stm32_dma_dump_reg()
528 u32 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_dump_reg()
530 dev_dbg(chan2dev(chan), "SCR: 0x%08x\n", scr); in stm32_dma_dump_reg()
531 dev_dbg(chan2dev(chan), "NDTR: 0x%08x\n", ndtr); in stm32_dma_dump_reg()
532 dev_dbg(chan2dev(chan), "SPAR: 0x%08x\n", spar); in stm32_dma_dump_reg()
533 dev_dbg(chan2dev(chan), "SM0AR: 0x%08x\n", sm0ar); in stm32_dma_dump_reg()
534 dev_dbg(chan2dev(chan), "SM1AR: 0x%08x\n", sm1ar); in stm32_dma_dump_reg()
535 dev_dbg(chan2dev(chan), "SFCR: 0x%08x\n", sfcr); in stm32_dma_dump_reg()
538 static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan);
540 static void stm32_dma_start_transfer(struct stm32_dma_chan *chan) in stm32_dma_start_transfer() argument
542 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_start_transfer()
549 ret = stm32_dma_disable_chan(chan); in stm32_dma_start_transfer()
553 if (!chan->desc) { in stm32_dma_start_transfer()
554 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
560 chan->desc = to_stm32_dma_desc(vdesc); in stm32_dma_start_transfer()
561 chan->next_sg = 0; in stm32_dma_start_transfer()
564 if (chan->next_sg == chan->desc->num_sgs) in stm32_dma_start_transfer()
565 chan->next_sg = 0; in stm32_dma_start_transfer()
567 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_start_transfer()
571 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr); in stm32_dma_start_transfer()
572 stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar); in stm32_dma_start_transfer()
573 stm32_dma_write(dmadev, STM32_DMA_SM0AR(chan->id), reg->dma_sm0ar); in stm32_dma_start_transfer()
574 stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), reg->dma_sfcr); in stm32_dma_start_transfer()
575 stm32_dma_write(dmadev, STM32_DMA_SM1AR(chan->id), reg->dma_sm1ar); in stm32_dma_start_transfer()
576 stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), reg->dma_sndtr); in stm32_dma_start_transfer()
578 chan->next_sg++; in stm32_dma_start_transfer()
581 status = stm32_dma_irq_status(chan); in stm32_dma_start_transfer()
583 stm32_dma_irq_clear(chan, status); in stm32_dma_start_transfer()
585 if (chan->desc->cyclic) in stm32_dma_start_transfer()
586 stm32_dma_configure_next_sg(chan); in stm32_dma_start_transfer()
588 stm32_dma_dump_reg(chan); in stm32_dma_start_transfer()
592 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr); in stm32_dma_start_transfer()
594 chan->busy = true; in stm32_dma_start_transfer()
596 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_dma_start_transfer()
599 static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan) in stm32_dma_configure_next_sg() argument
601 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_configure_next_sg()
605 id = chan->id; in stm32_dma_configure_next_sg()
609 if (chan->next_sg == chan->desc->num_sgs) in stm32_dma_configure_next_sg()
610 chan->next_sg = 0; in stm32_dma_configure_next_sg()
612 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_configure_next_sg()
617 dev_dbg(chan2dev(chan), "CT=1 <=> SM0AR: 0x%08x\n", in stm32_dma_configure_next_sg()
622 dev_dbg(chan2dev(chan), "CT=0 <=> SM1AR: 0x%08x\n", in stm32_dma_configure_next_sg()
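For cyclic transfers the driver runs the stream in double-buffer mode: the CT bit of SxCR tells which memory target the hardware is currently using, so stm32_dma_configure_next_sg() refills the other address register (SM0AR when CT=1, SM1AR when CT=0) with the next period's buffer. A minimal sketch of that ping-pong selection, assuming the usual SxCR bit layout (CT at bit 19); the helper below is hypothetical, not part of the driver:

#include <stdint.h>
#include <stdio.h>

#define SCR_CT (1u << 19)       /* SxCR current-target bit (double-buffer mode) */

static const char *next_target_reg(uint32_t scr)
{
        /* The hardware owns the target selected by CT; software refills
         * the other one, which is exactly the test made above in
         * stm32_dma_configure_next_sg().
         */
        return (scr & SCR_CT) ? "SM0AR" : "SM1AR";
}

int main(void)
{
        printf("CT=1 -> refill %s, CT=0 -> refill %s\n",
               next_target_reg(SCR_CT), next_target_reg(0));
        return 0;
}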
628 static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan) in stm32_dma_handle_chan_done() argument
630 if (chan->desc) { in stm32_dma_handle_chan_done()
631 if (chan->desc->cyclic) { in stm32_dma_handle_chan_done()
632 vchan_cyclic_callback(&chan->desc->vdesc); in stm32_dma_handle_chan_done()
633 chan->next_sg++; in stm32_dma_handle_chan_done()
634 stm32_dma_configure_next_sg(chan); in stm32_dma_handle_chan_done()
636 chan->busy = false; in stm32_dma_handle_chan_done()
637 if (chan->next_sg == chan->desc->num_sgs) { in stm32_dma_handle_chan_done()
638 vchan_cookie_complete(&chan->desc->vdesc); in stm32_dma_handle_chan_done()
639 chan->desc = NULL; in stm32_dma_handle_chan_done()
641 stm32_dma_start_transfer(chan); in stm32_dma_handle_chan_done()
648 struct stm32_dma_chan *chan = devid; in stm32_dma_chan_irq() local
649 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_chan_irq()
652 spin_lock(&chan->vchan.lock); in stm32_dma_chan_irq()
654 status = stm32_dma_irq_status(chan); in stm32_dma_chan_irq()
655 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_chan_irq()
656 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_chan_irq()
659 stm32_dma_irq_clear(chan, STM32_DMA_FEI); in stm32_dma_chan_irq()
664 dev_err(chan2dev(chan), "FIFO Error\n"); in stm32_dma_chan_irq()
666 dev_dbg(chan2dev(chan), "FIFO over/underrun\n"); in stm32_dma_chan_irq()
670 stm32_dma_irq_clear(chan, STM32_DMA_DMEI); in stm32_dma_chan_irq()
673 dev_dbg(chan2dev(chan), "Direct mode overrun\n"); in stm32_dma_chan_irq()
677 stm32_dma_irq_clear(chan, STM32_DMA_TCI); in stm32_dma_chan_irq()
679 stm32_dma_handle_chan_done(chan); in stm32_dma_chan_irq()
684 stm32_dma_irq_clear(chan, STM32_DMA_HTI); in stm32_dma_chan_irq()
689 stm32_dma_irq_clear(chan, status); in stm32_dma_chan_irq()
690 dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status); in stm32_dma_chan_irq()
692 dev_err(chan2dev(chan), "chan disabled by HW\n"); in stm32_dma_chan_irq()
695 spin_unlock(&chan->vchan.lock); in stm32_dma_chan_irq()
702 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_issue_pending() local
705 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_issue_pending()
706 if (vchan_issue_pending(&chan->vchan) && !chan->desc && !chan->busy) { in stm32_dma_issue_pending()
707 dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan); in stm32_dma_issue_pending()
708 stm32_dma_start_transfer(chan); in stm32_dma_issue_pending()
711 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_issue_pending()
714 static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan, in stm32_dma_set_xfer_param() argument
725 src_addr_width = chan->dma_sconfig.src_addr_width; in stm32_dma_set_xfer_param()
726 dst_addr_width = chan->dma_sconfig.dst_addr_width; in stm32_dma_set_xfer_param()
727 src_maxburst = chan->dma_sconfig.src_maxburst; in stm32_dma_set_xfer_param()
728 dst_maxburst = chan->dma_sconfig.dst_maxburst; in stm32_dma_set_xfer_param()
729 fifoth = chan->threshold; in stm32_dma_set_xfer_param()
734 dst_bus_width = stm32_dma_get_width(chan, dst_addr_width); in stm32_dma_set_xfer_param()
744 dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst); in stm32_dma_set_xfer_param()
751 chan->mem_width = src_addr_width; in stm32_dma_set_xfer_param()
752 src_bus_width = stm32_dma_get_width(chan, src_addr_width); in stm32_dma_set_xfer_param()
768 src_burst_size = stm32_dma_get_burst(chan, src_best_burst); in stm32_dma_set_xfer_param()
779 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK; in stm32_dma_set_xfer_param()
781 chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_FTH(fifoth); in stm32_dma_set_xfer_param()
784 chan->chan_reg.dma_spar = chan->dma_sconfig.dst_addr; in stm32_dma_set_xfer_param()
790 src_bus_width = stm32_dma_get_width(chan, src_addr_width); in stm32_dma_set_xfer_param()
799 chan->mem_burst = src_best_burst; in stm32_dma_set_xfer_param()
800 src_burst_size = stm32_dma_get_burst(chan, src_best_burst); in stm32_dma_set_xfer_param()
807 chan->mem_width = dst_addr_width; in stm32_dma_set_xfer_param()
808 dst_bus_width = stm32_dma_get_width(chan, dst_addr_width); in stm32_dma_set_xfer_param()
824 chan->mem_burst = dst_best_burst; in stm32_dma_set_xfer_param()
825 dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst); in stm32_dma_set_xfer_param()
836 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK; in stm32_dma_set_xfer_param()
838 chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_FTH(fifoth); in stm32_dma_set_xfer_param()
841 chan->chan_reg.dma_spar = chan->dma_sconfig.src_addr; in stm32_dma_set_xfer_param()
842 *buswidth = chan->dma_sconfig.src_addr_width; in stm32_dma_set_xfer_param()
846 dev_err(chan2dev(chan), "Dma direction is not supported\n"); in stm32_dma_set_xfer_param()
850 stm32_dma_set_fifo_config(chan, src_best_burst, dst_best_burst); in stm32_dma_set_xfer_param()
853 chan->chan_reg.dma_scr &= ~(STM32_DMA_SCR_DIR_MASK | in stm32_dma_set_xfer_param()
856 chan->chan_reg.dma_scr |= dma_scr; in stm32_dma_set_xfer_param()
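At the end of stm32_dma_set_xfer_param() the computed direction, burst and width bits are merged into dma_scr. The DIR field itself follows the STM32 DMA SxCR encoding (00 peripheral-to-memory, 01 memory-to-peripheral, 10 memory-to-memory). A tiny illustrative mapping, using a stand-in enum rather than the dmaengine dma_transfer_direction constants:

#include <stdio.h>

/* SxCR.DIR values: 00 peripheral-to-memory, 01 memory-to-peripheral,
 * 10 memory-to-memory. The enum below is an illustrative stand-in for
 * the dmaengine dma_transfer_direction constants, not their real values.
 */
enum xfer_dir { DEV_TO_MEM, MEM_TO_DEV, MEM_TO_MEM };

static int dir_field(enum xfer_dir dir)
{
        switch (dir) {
        case DEV_TO_MEM: return 0x0;
        case MEM_TO_DEV: return 0x1;
        case MEM_TO_MEM: return 0x2;
        }
        return -1;      /* unsupported direction, rejected as above */
}

int main(void)
{
        printf("MEM_TO_DEV -> DIR=%d, DEV_TO_MEM -> DIR=%d\n",
               dir_field(MEM_TO_DEV), dir_field(DEV_TO_MEM));
        return 0;
}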
871 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_slave_sg() local
878 if (!chan->config_init) { in stm32_dma_prep_slave_sg()
879 dev_err(chan2dev(chan), "dma channel is not configured\n"); in stm32_dma_prep_slave_sg()
884 dev_err(chan2dev(chan), "Invalid segment length %d\n", sg_len); in stm32_dma_prep_slave_sg()
893 if (chan->dma_sconfig.device_fc) in stm32_dma_prep_slave_sg()
894 chan->chan_reg.dma_scr |= STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_slave_sg()
896 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_slave_sg()
899 ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, in stm32_dma_prep_slave_sg()
909 dev_err(chan2dev(chan), "nb items not supported\n"); in stm32_dma_prep_slave_sg()
914 desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr; in stm32_dma_prep_slave_sg()
915 desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr; in stm32_dma_prep_slave_sg()
916 desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar; in stm32_dma_prep_slave_sg()
925 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_slave_sg()
937 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_dma_cyclic() local
944 dev_err(chan2dev(chan), "Invalid buffer/period len\n"); in stm32_dma_prep_dma_cyclic()
948 if (!chan->config_init) { in stm32_dma_prep_dma_cyclic()
949 dev_err(chan2dev(chan), "dma channel is not configured\n"); in stm32_dma_prep_dma_cyclic()
954 dev_err(chan2dev(chan), "buf_len not multiple of period_len\n"); in stm32_dma_prep_dma_cyclic()
964 if (chan->busy) { in stm32_dma_prep_dma_cyclic()
965 dev_err(chan2dev(chan), "Request not allowed when dma busy\n"); in stm32_dma_prep_dma_cyclic()
969 ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, period_len, in stm32_dma_prep_dma_cyclic()
976 dev_err(chan2dev(chan), "number of items not supported\n"); in stm32_dma_prep_dma_cyclic()
982 chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC; in stm32_dma_prep_dma_cyclic()
984 chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM; in stm32_dma_prep_dma_cyclic()
987 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_dma_cyclic()
999 desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr; in stm32_dma_prep_dma_cyclic()
1000 desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr; in stm32_dma_prep_dma_cyclic()
1001 desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar; in stm32_dma_prep_dma_cyclic()
1011 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_dma_cyclic()
1018 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_dma_memcpy() local
1030 threshold = chan->threshold; in stm32_dma_prep_dma_memcpy()
1040 dma_burst = stm32_dma_get_burst(chan, best_burst); in stm32_dma_prep_dma_memcpy()
1063 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_dma_memcpy()
1066 static u32 stm32_dma_get_remaining_bytes(struct stm32_dma_chan *chan) in stm32_dma_get_remaining_bytes() argument
1069 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_get_remaining_bytes()
1071 dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_get_remaining_bytes()
1073 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id)); in stm32_dma_get_remaining_bytes()
1090 static bool stm32_dma_is_current_sg(struct stm32_dma_chan *chan) in stm32_dma_is_current_sg() argument
1092 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_is_current_sg()
1096 id = chan->id; in stm32_dma_is_current_sg()
1102 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_is_current_sg()
1114 static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan, in stm32_dma_desc_residue() argument
1121 struct stm32_dma_sg_req *sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_desc_residue()
1149 residue = stm32_dma_get_remaining_bytes(chan); in stm32_dma_desc_residue()
1151 if (!stm32_dma_is_current_sg(chan)) { in stm32_dma_desc_residue()
1153 if (n_sg == chan->desc->num_sgs) in stm32_dma_desc_residue()
1165 if (!chan->desc->cyclic || n_sg != 0) in stm32_dma_desc_residue()
1169 if (!chan->mem_burst) in stm32_dma_desc_residue()
1172 burst_size = chan->mem_burst * chan->mem_width; in stm32_dma_desc_residue()
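stm32_dma_desc_residue() estimates how much of a descriptor is still outstanding: the bytes the hardware has left in the segment currently in flight (NDTR scaled by the programmed width) plus the full length of every segment not yet started, rounded up to a memory-burst boundary when bursts are used. A simplified standalone model of that computation, assuming a flat array of segment lengths; it deliberately ignores the cyclic and double-buffer race handling the real function deals with:

#include <stddef.h>
#include <stdio.h>

/* Simplified residue model: bytes still to move in the current segment
 * plus the lengths of all segments not started yet, rounded up to a
 * burst boundary when the memory side uses bursts (NDTR only decrements
 * per burst, so finer granularity is not visible).
 */
static size_t residue(const size_t *seg_len, size_t num_sgs,
                      size_t cur_sg, size_t bytes_left_in_cur,
                      size_t burst_bytes)
{
        size_t res = bytes_left_in_cur;

        for (size_t i = cur_sg + 1; i < num_sgs; i++)
                res += seg_len[i];

        if (burst_bytes && res % burst_bytes)
                res += burst_bytes - res % burst_bytes;

        return res;
}

int main(void)
{
        size_t segs[] = { 256, 256, 256 };

        /* 100 bytes left in segment 0, two untouched segments, 16-byte bursts */
        printf("residue = %zu bytes\n", residue(segs, 3, 0, 100, 16));
        return 0;
}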
1184 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_tx_status() local
1194 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_tx_status()
1195 vdesc = vchan_find_desc(&chan->vchan, cookie); in stm32_dma_tx_status()
1196 if (chan->desc && cookie == chan->desc->vdesc.tx.cookie) in stm32_dma_tx_status()
1197 residue = stm32_dma_desc_residue(chan, chan->desc, in stm32_dma_tx_status()
1198 chan->next_sg); in stm32_dma_tx_status()
1200 residue = stm32_dma_desc_residue(chan, in stm32_dma_tx_status()
1204 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_tx_status()
1211 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_alloc_chan_resources() local
1212 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_alloc_chan_resources()
1215 chan->config_init = false; in stm32_dma_alloc_chan_resources()
1221 ret = stm32_dma_disable_chan(chan); in stm32_dma_alloc_chan_resources()
1230 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_free_chan_resources() local
1231 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_free_chan_resources()
1234 dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id); in stm32_dma_free_chan_resources()
1236 if (chan->busy) { in stm32_dma_free_chan_resources()
1237 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_free_chan_resources()
1238 stm32_dma_stop(chan); in stm32_dma_free_chan_resources()
1239 chan->desc = NULL; in stm32_dma_free_chan_resources()
1240 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_free_chan_resources()
1246 stm32_dma_clear_reg(&chan->chan_reg); in stm32_dma_free_chan_resources()
1247 chan->threshold = 0; in stm32_dma_free_chan_resources()
1255 static void stm32_dma_set_config(struct stm32_dma_chan *chan, in stm32_dma_set_config() argument
1258 stm32_dma_clear_reg(&chan->chan_reg); in stm32_dma_set_config()
1260 chan->chan_reg.dma_scr = cfg->stream_config & STM32_DMA_SCR_CFG_MASK; in stm32_dma_set_config()
1261 chan->chan_reg.dma_scr |= STM32_DMA_SCR_REQ(cfg->request_line); in stm32_dma_set_config()
1264 chan->chan_reg.dma_scr |= STM32_DMA_SCR_TEIE | STM32_DMA_SCR_TCIE; in stm32_dma_set_config()
1266 chan->threshold = STM32_DMA_THRESHOLD_FTR_GET(cfg->features); in stm32_dma_set_config()
1268 chan->threshold = STM32_DMA_FIFO_THRESHOLD_NONE; in stm32_dma_set_config()
1270 chan->chan_reg.dma_scr |= STM32_DMA_SCR_TRBUFF; in stm32_dma_set_config()
1279 struct stm32_dma_chan *chan; in stm32_dma_of_xlate() local
1298 chan = &dmadev->chan[cfg.channel_id]; in stm32_dma_of_xlate()
1300 c = dma_get_slave_channel(&chan->vchan.chan); in stm32_dma_of_xlate()
1306 stm32_dma_set_config(chan, &cfg); in stm32_dma_of_xlate()
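stm32_dma_of_xlate() consumes the four #dma-cells of a consumer's dmas property and hands them to stm32_dma_set_config(): channel (stream) id, request line, SxCR stream-configuration mask, and a features bitfield (FIFO threshold and related flags). A small illustrative parser mirroring that layout; the struct, field names and example values below are hypothetical, not the driver's:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical mirror of the four #dma-cells read by stm32_dma_of_xlate()
 * and applied in stm32_dma_set_config() above.
 */
struct dma_cells {
        uint32_t channel_id;    /* selects dmadev->chan[channel_id] */
        uint32_t request_line;  /* programmed through STM32_DMA_SCR_REQ() */
        uint32_t stream_config; /* SxCR bits, masked with the SCR config mask */
        uint32_t features;      /* FIFO threshold and related feature flags */
};

static struct dma_cells parse_cells(const uint32_t args[4])
{
        struct dma_cells c = {
                .channel_id = args[0],
                .request_line = args[1],
                .stream_config = args[2],
                .features = args[3],
        };
        return c;
}

int main(void)
{
        const uint32_t args[4] = { 5, 3, 0x400, 0x1 };
        struct dma_cells c = parse_cells(args);

        printf("channel %u, request line %u, config 0x%x, features 0x%x\n",
               c.channel_id, c.request_line, c.stream_config, c.features);
        return 0;
}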
1319 struct stm32_dma_chan *chan; in stm32_dma_probe() local
1403 chan = &dmadev->chan[i]; in stm32_dma_probe()
1404 chan->id = i; in stm32_dma_probe()
1405 chan->vchan.desc_free = stm32_dma_desc_free; in stm32_dma_probe()
1406 vchan_init(&chan->vchan, dd); in stm32_dma_probe()
1414 chan = &dmadev->chan[i]; in stm32_dma_probe()
1418 chan->irq = ret; in stm32_dma_probe()
1420 ret = devm_request_irq(&pdev->dev, chan->irq, in stm32_dma_probe()
1422 dev_name(chan2dev(chan)), chan); in stm32_dma_probe()