Lines matching refs: dma (drivers/net/ethernet/lantiq_xrx200.c)

62 	struct ltq_dma_channel dma;  member
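The dma member above sits inside the driver's per-channel state. For orientation, here is a rough reconstruction of that context, built only from the fields referenced in this listing (dma, skb[], tx_free, priv, chan_rx/chan_tx, net_dev, dev). The napi member, the rx_buf_size field, the include set and the field order are assumptions, not taken from the listing:

#include <linux/interrupt.h>
#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/platform_device.h>
#include <linux/skbuff.h>

#include <xway_dma.h>	/* assumed include: ltq_dma_* API, struct ltq_dma_channel,
			 * struct ltq_dma_desc, LTQ_DESC_NUM, LTQ_DMA_OWN, LTQ_DMA_C */

struct xrx200_priv;

/* Per-DMA-channel state as implied by the references in this listing. */
struct xrx200_chan {
	int tx_free;			/* next TX descriptor to reclaim */
	struct napi_struct napi;	/* assumed: one NAPI context per channel */
	struct ltq_dma_channel dma;	/* the member this listing tracks */
	struct sk_buff *skb[LTQ_DESC_NUM];
	struct xrx200_priv *priv;
};

/* Driver-wide state, again limited to the fields this listing references. */
struct xrx200_priv {
	struct xrx200_chan chan_tx;
	struct xrx200_chan chan_rx;
	struct net_device *net_dev;
	struct device *dev;
	unsigned int rx_buf_size;	/* assumed: RX buffer length derived from MTU */
};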
118 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; in xrx200_flush_dma()
125 ch->dma.desc++; in xrx200_flush_dma()
126 ch->dma.desc %= LTQ_DESC_NUM; in xrx200_flush_dma()
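The three references above come from xrx200_flush_dma(), which walks the RX ring and re-arms completed descriptors. A minimal sketch of that walk: the desc lookup, the index increment and the wrap at LTQ_DESC_NUM are from the listing, while the completion test (LTQ_DMA_OWN | LTQ_DMA_C) and the re-armed ctl value are assumptions borrowed from the flags referenced later in xrx200_start_xmit():

static void xrx200_flush_dma(struct xrx200_chan *ch)
{
	int i;

	for (i = 0; i < LTQ_DESC_NUM; i++) {
		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];

		/* Stop at the first descriptor the hardware has not completed. */
		if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) != LTQ_DMA_C)
			break;

		/* Hand the buffer back to the DMA engine.  Assumed ctl layout:
		 * ownership flag plus buffer length in the low bits. */
		desc->ctl = LTQ_DMA_OWN | (ch->priv->rx_buf_size & 0xffff);

		ch->dma.desc++;
		ch->dma.desc %= LTQ_DESC_NUM;	/* ring index wraps at LTQ_DESC_NUM */
	}
}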
135 ltq_dma_open(&priv->chan_tx.dma); in xrx200_open()
136 ltq_dma_enable_irq(&priv->chan_tx.dma); in xrx200_open()
139 ltq_dma_open(&priv->chan_rx.dma); in xrx200_open()
148 ltq_dma_enable_irq(&priv->chan_rx.dma); in xrx200_open()
162 ltq_dma_close(&priv->chan_rx.dma); in xrx200_close()
165 ltq_dma_close(&priv->chan_tx.dma); in xrx200_close()
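The open/close references show the bring-up and tear-down order: the TX channel is opened first and each channel's interrupt is enabled only after ltq_dma_open(); on close, RX is shut down before TX. A sketch under those assumptions; netdev_priv(), netif_wake_queue()/netif_stop_queue() and the omitted NAPI enable/disable calls are not shown in the listing:

static int xrx200_open(struct net_device *net_dev)
{
	struct xrx200_priv *priv = netdev_priv(net_dev);

	ltq_dma_open(&priv->chan_tx.dma);
	ltq_dma_enable_irq(&priv->chan_tx.dma);

	ltq_dma_open(&priv->chan_rx.dma);
	/* The gap between lines 139 and 148 suggests the driver flushes stale
	 * RX descriptors here (see xrx200_flush_dma() above) before enabling
	 * the RX interrupt. */
	ltq_dma_enable_irq(&priv->chan_rx.dma);

	netif_wake_queue(net_dev);
	return 0;
}

static int xrx200_close(struct net_device *net_dev)
{
	struct xrx200_priv *priv = netdev_priv(net_dev);

	netif_stop_queue(net_dev);
	ltq_dma_close(&priv->chan_rx.dma);
	ltq_dma_close(&priv->chan_tx.dma);
	return 0;
}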
172 struct sk_buff *skb = ch->skb[ch->dma.desc]; in xrx200_alloc_skb()
177 ch->skb[ch->dma.desc] = netdev_alloc_skb_ip_align(priv->net_dev, in xrx200_alloc_skb()
179 if (!ch->skb[ch->dma.desc]) { in xrx200_alloc_skb()
184 mapping = dma_map_single(priv->dev, ch->skb[ch->dma.desc]->data, in xrx200_alloc_skb()
187 dev_kfree_skb_any(ch->skb[ch->dma.desc]); in xrx200_alloc_skb()
188 ch->skb[ch->dma.desc] = skb; in xrx200_alloc_skb()
193 ch->dma.desc_base[ch->dma.desc].addr = mapping; in xrx200_alloc_skb()
197 ch->dma.desc_base[ch->dma.desc].ctl = in xrx200_alloc_skb()
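xrx200_alloc_skb() refills the RX descriptor at ch->dma.desc: allocate an IP-aligned skb, map it for the device, and only then publish the mapping into the descriptor; on failure the previous skb is kept so the ring never holds a NULL buffer. A sketch of that flow; the rx_buf_size field, the wmb() barrier and the ctl bit layout are assumptions:

static int xrx200_alloc_skb(struct xrx200_chan *ch)
{
	struct sk_buff *skb = ch->skb[ch->dma.desc];	/* remember the old buffer */
	struct xrx200_priv *priv = ch->priv;
	dma_addr_t mapping;
	int ret = 0;

	ch->skb[ch->dma.desc] = netdev_alloc_skb_ip_align(priv->net_dev,
							  priv->rx_buf_size);
	if (!ch->skb[ch->dma.desc]) {
		ret = -ENOMEM;
		goto skip;
	}

	mapping = dma_map_single(priv->dev, ch->skb[ch->dma.desc]->data,
				 priv->rx_buf_size, DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(priv->dev, mapping))) {
		dev_kfree_skb_any(ch->skb[ch->dma.desc]);
		ch->skb[ch->dma.desc] = skb;	/* fall back to the old buffer */
		ret = -ENOMEM;
		goto skip;
	}

	ch->dma.desc_base[ch->dma.desc].addr = mapping;
	wmb();	/* publish the address before handing ownership to the hardware */
skip:
	/* Re-arm the descriptor even on failure so the ring stays valid
	 * (assumed ctl layout: ownership flag plus buffer length). */
	ch->dma.desc_base[ch->dma.desc].ctl =
		LTQ_DMA_OWN | (priv->rx_buf_size & 0xffff);

	return ret;
}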
206 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; in xrx200_hw_receive()
207 struct sk_buff *skb = ch->skb[ch->dma.desc]; in xrx200_hw_receive()
214 ch->dma.desc++; in xrx200_hw_receive()
215 ch->dma.desc %= LTQ_DESC_NUM; in xrx200_hw_receive()
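xrx200_hw_receive() consumes one completed RX descriptor: refill the slot first (on failure the old skb stays in the ring and the frame is dropped), then advance the ring index and push the received skb up the stack. A sketch; reading the byte count from the low ctl bits, the statistics handling, and the omission of any DMA sync/unmap of the old buffer are all simplifications and assumptions:

static int xrx200_hw_receive(struct xrx200_chan *ch)
{
	struct xrx200_priv *priv = ch->priv;
	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
	struct sk_buff *skb = ch->skb[ch->dma.desc];
	int len = desc->ctl & 0xffff;	/* assumed: low ctl bits hold the byte count */
	int ret;

	/* Refill this slot first; if that fails the old skb is reused. */
	ret = xrx200_alloc_skb(ch);

	ch->dma.desc++;
	ch->dma.desc %= LTQ_DESC_NUM;

	if (ret) {
		priv->net_dev->stats.rx_dropped++;
		return ret;
	}

	skb_put(skb, len);
	skb->protocol = eth_type_trans(skb, priv->net_dev);
	netif_receive_skb(skb);
	priv->net_dev->stats.rx_packets++;
	priv->net_dev->stats.rx_bytes += len;

	return 0;
}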
240 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; in xrx200_poll_rx()
254 ltq_dma_enable_irq(&ch->dma); in xrx200_poll_rx()
270 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->tx_free]; in xrx200_tx_housekeeping()
279 memset(&ch->dma.desc_base[ch->tx_free], 0, in xrx200_tx_housekeeping()
298 ltq_dma_enable_irq(&ch->dma); in xrx200_tx_housekeeping()
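Both NAPI handlers, xrx200_poll_rx() and xrx200_tx_housekeeping(), follow the same pattern: process completed descriptors while the channel interrupt is masked and call ltq_dma_enable_irq() again only once the work is done. The TX side reclaims descriptors at ch->tx_free, freeing each skb and clearing the descriptor. A sketch of that housekeeping loop; the budget handling, napi_complete_done() and netif_wake_queue() calls are assumptions, while the tx_free walk, the memset and the IRQ re-enable come from the listing:

static int xrx200_tx_housekeeping(struct napi_struct *napi, int budget)
{
	struct xrx200_chan *ch = container_of(napi, struct xrx200_chan, napi);
	int pkts = 0;

	while (pkts < budget) {
		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->tx_free];

		/* Only descriptors the hardware has completed can be reclaimed. */
		if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) != LTQ_DMA_C)
			break;

		dev_kfree_skb_any(ch->skb[ch->tx_free]);
		ch->skb[ch->tx_free] = NULL;
		memset(&ch->dma.desc_base[ch->tx_free], 0,
		       sizeof(struct ltq_dma_desc));

		ch->tx_free++;
		ch->tx_free %= LTQ_DESC_NUM;
		pkts++;
	}

	netif_wake_queue(ch->priv->net_dev);

	if (pkts < budget) {
		if (napi_complete_done(napi, pkts))
			ltq_dma_enable_irq(&ch->dma);	/* re-arm only when idle */
	}

	return pkts;
}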
309 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; in xrx200_start_xmit()
322 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) { in xrx200_start_xmit()
328 ch->skb[ch->dma.desc] = skb; in xrx200_start_xmit()
342 ch->dma.desc++; in xrx200_start_xmit()
343 ch->dma.desc %= LTQ_DESC_NUM; in xrx200_start_xmit()
344 if (ch->dma.desc == ch->tx_free) in xrx200_start_xmit()
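xrx200_start_xmit() fills one descriptor per packet: the current slot is usable only if the hardware no longer owns it and no skb is still parked there; after advancing dma.desc, landing on tx_free means the ring is full and the queue is stopped until housekeeping catches up. A sketch; the ownership check, skb parking and ring-full test are from the listing, while the mapping-error path, the wmb() and the ctl encoding (which in the driver also carries SOP/EOP and a burst-alignment offset) are assumptions:

static netdev_tx_t xrx200_start_xmit(struct sk_buff *skb,
				     struct net_device *net_dev)
{
	struct xrx200_priv *priv = netdev_priv(net_dev);
	struct xrx200_chan *ch = &priv->chan_tx;
	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
	dma_addr_t mapping;
	int len = skb->len;

	/* Slot still owned by hardware or not yet reclaimed: ring is full. */
	if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) {
		netif_stop_queue(net_dev);
		return NETDEV_TX_BUSY;
	}

	ch->skb[ch->dma.desc] = skb;

	mapping = dma_map_single(priv->dev, skb->data, len, DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(priv->dev, mapping)))
		goto err_drop;

	desc->addr = mapping;
	wmb();				/* address visible before ownership flips */
	/* Assumed ctl layout: ownership flag plus frame length; SOP/EOP and
	 * byte-offset bits used by the driver are omitted here. */
	desc->ctl = LTQ_DMA_OWN | (len & 0xffff);

	ch->dma.desc++;
	ch->dma.desc %= LTQ_DESC_NUM;
	if (ch->dma.desc == ch->tx_free)	/* caught up with the reclaim point */
		netif_stop_queue(net_dev);

	return NETDEV_TX_OK;

err_drop:
	dev_kfree_skb(skb);
	ch->skb[ch->dma.desc] = NULL;
	net_dev->stats.tx_dropped++;
	return NETDEV_TX_OK;
}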
378 ltq_dma_close(&ch_rx->dma); in xrx200_change_mtu()
382 curr_desc = ch_rx->dma.desc; in xrx200_change_mtu()
384 for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM; in xrx200_change_mtu()
385 ch_rx->dma.desc++) { in xrx200_change_mtu()
386 skb = ch_rx->skb[ch_rx->dma.desc]; in xrx200_change_mtu()
396 ch_rx->dma.desc = curr_desc; in xrx200_change_mtu()
399 ltq_dma_open(&ch_rx->dma); in xrx200_change_mtu()
400 ltq_dma_enable_irq(&ch_rx->dma); in xrx200_change_mtu()
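xrx200_change_mtu() has to replace every RX buffer: it closes the RX channel, saves the current ring index, walks all LTQ_DESC_NUM slots re-allocating each buffer through xrx200_alloc_skb(), restores the index and reopens the channel. A sketch of that rewalk; the netif_running() check and the rx_buf_size sizing are assumptions, and the driver's NAPI handling around the rewalk is omitted:

static int xrx200_change_mtu(struct net_device *net_dev, int new_mtu)
{
	struct xrx200_priv *priv = netdev_priv(net_dev);
	struct xrx200_chan *ch_rx = &priv->chan_rx;
	int curr_desc;
	int ret = 0;

	net_dev->mtu = new_mtu;
	priv->rx_buf_size = new_mtu + ETH_HLEN + ETH_FCS_LEN;	/* assumed sizing */

	if (!netif_running(net_dev))
		return 0;

	/* Stop RX DMA while every buffer in the ring is replaced. */
	ltq_dma_close(&ch_rx->dma);

	curr_desc = ch_rx->dma.desc;	/* xrx200_alloc_skb() works on dma.desc */

	for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
	     ch_rx->dma.desc++) {
		struct sk_buff *skb = ch_rx->skb[ch_rx->dma.desc];

		ret = xrx200_alloc_skb(ch_rx);
		if (ret)
			break;
		dev_kfree_skb_any(skb);		/* old, wrongly sized buffer */
	}

	ch_rx->dma.desc = curr_desc;		/* restore the hardware's position */

	ltq_dma_open(&ch_rx->dma);
	ltq_dma_enable_irq(&ch_rx->dma);

	return ret;
}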
420 ltq_dma_disable_irq(&ch->dma); in xrx200_dma_irq()
424 ltq_dma_ack_irq(&ch->dma); in xrx200_dma_irq()
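The interrupt handler itself does almost nothing: it masks the channel interrupt, acknowledges it, and defers the real work to NAPI. A sketch; the napi_schedule_prep()/__napi_schedule() pairing is an assumption, the disable and ack calls are from the listing:

static irqreturn_t xrx200_dma_irq(int irq, void *ptr)
{
	struct xrx200_chan *ch = ptr;

	if (napi_schedule_prep(&ch->napi)) {
		ltq_dma_disable_irq(&ch->dma);	/* masked until the poll finishes */
		__napi_schedule(&ch->napi);
	}

	ltq_dma_ack_irq(&ch->dma);

	return IRQ_HANDLED;
}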
439 ch_rx->dma.nr = XRX200_DMA_RX; in xrx200_dma_init()
440 ch_rx->dma.dev = priv->dev; in xrx200_dma_init()
443 ltq_dma_alloc_rx(&ch_rx->dma); in xrx200_dma_init()
444 for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM; in xrx200_dma_init()
445 ch_rx->dma.desc++) { in xrx200_dma_init()
450 ch_rx->dma.desc = 0; in xrx200_dma_init()
451 ret = devm_request_irq(priv->dev, ch_rx->dma.irq, xrx200_dma_irq, 0, in xrx200_dma_init()
455 ch_rx->dma.irq); in xrx200_dma_init()
459 ch_tx->dma.nr = XRX200_DMA_TX; in xrx200_dma_init()
460 ch_tx->dma.dev = priv->dev; in xrx200_dma_init()
463 ltq_dma_alloc_tx(&ch_tx->dma); in xrx200_dma_init()
464 ret = devm_request_irq(priv->dev, ch_tx->dma.irq, xrx200_dma_irq, 0, in xrx200_dma_init()
468 ch_tx->dma.irq); in xrx200_dma_init()
475 ltq_dma_free(&ch_tx->dma); in xrx200_dma_init()
485 ltq_dma_free(&ch_rx->dma); in xrx200_dma_init()
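xrx200_dma_init() ties the channel setup together: assign the hardware channel numbers and the struct device used for DMA mappings, let the ltq_dma core allocate the descriptor rings, pre-fill the RX ring, and request both interrupts, unwinding through ltq_dma_free() on failure. A sketch; XRX200_DMA_RX/XRX200_DMA_TX, the field assignments, the alloc calls and the free-based unwind are from the listing, while the ch->priv assignments, the IRQ name strings and the exact error-path ordering are assumptions (priv is assumed to be zero-allocated):

static int xrx200_dma_init(struct xrx200_priv *priv)
{
	struct xrx200_chan *ch_rx = &priv->chan_rx;
	struct xrx200_chan *ch_tx = &priv->chan_tx;
	int ret = 0;
	int i;

	ch_rx->dma.nr = XRX200_DMA_RX;
	ch_rx->dma.dev = priv->dev;
	ch_rx->priv = priv;

	ltq_dma_alloc_rx(&ch_rx->dma);
	for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
	     ch_rx->dma.desc++) {
		ret = xrx200_alloc_skb(ch_rx);
		if (ret)
			goto rx_ring_free;
	}
	ch_rx->dma.desc = 0;

	ret = devm_request_irq(priv->dev, ch_rx->dma.irq, xrx200_dma_irq, 0,
			       "xrx200_net_rx", &priv->chan_rx);
	if (ret) {
		dev_err(priv->dev, "failed to request RX irq %d\n",
			ch_rx->dma.irq);
		goto rx_ring_free;
	}

	ch_tx->dma.nr = XRX200_DMA_TX;
	ch_tx->dma.dev = priv->dev;
	ch_tx->priv = priv;

	ltq_dma_alloc_tx(&ch_tx->dma);
	ret = devm_request_irq(priv->dev, ch_tx->dma.irq, xrx200_dma_irq, 0,
			       "xrx200_net_tx", &priv->chan_tx);
	if (ret) {
		dev_err(priv->dev, "failed to request TX irq %d\n",
			ch_tx->dma.irq);
		goto tx_free;
	}

	return ret;

tx_free:
	ltq_dma_free(&ch_tx->dma);

rx_ring_free:
	/* Return whatever RX buffers were already allocated. */
	for (i = 0; i < LTQ_DESC_NUM; i++) {
		if (ch_rx->skb[i])
			dev_kfree_skb_any(ch_rx->skb[i]);
	}
	ltq_dma_free(&ch_rx->dma);

	return ret;
}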
493 ltq_dma_free(&priv->chan_tx.dma); in xrx200_hw_cleanup()
494 ltq_dma_free(&priv->chan_rx.dma); in xrx200_hw_cleanup()
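Cleanup mirrors init: both descriptor rings go back through ltq_dma_free(). A sketch; the loop returning the pre-allocated RX buffers is an assumption based on the ring pre-fill shown in xrx200_dma_init():

static void xrx200_hw_cleanup(struct xrx200_priv *priv)
{
	int i;

	ltq_dma_free(&priv->chan_tx.dma);
	ltq_dma_free(&priv->chan_rx.dma);

	/* Free the RX buffers that were pre-allocated in xrx200_dma_init(). */
	for (i = 0; i < LTQ_DESC_NUM; i++)
		dev_kfree_skb_any(priv->chan_rx.skb[i]);
}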
529 priv->chan_rx.dma.irq = platform_get_irq_byname(pdev, "rx"); in xrx200_probe()
530 if (priv->chan_rx.dma.irq < 0) in xrx200_probe()
532 priv->chan_tx.dma.irq = platform_get_irq_byname(pdev, "tx"); in xrx200_probe()
533 if (priv->chan_tx.dma.irq < 0) in xrx200_probe()
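In probe, the two channel interrupts come from named platform resources; platform_get_irq_byname() returns a negative errno on failure, which the caller propagates. The fragment below wraps the listing's two lookups in a hypothetical helper, xrx200_get_irqs(), purely for illustration; in the driver this code sits inline in xrx200_probe():

/* Hypothetical helper: fetch the named DMA interrupts during probe. */
static int xrx200_get_irqs(struct platform_device *pdev,
			   struct xrx200_priv *priv)
{
	priv->chan_rx.dma.irq = platform_get_irq_byname(pdev, "rx");
	if (priv->chan_rx.dma.irq < 0)
		return priv->chan_rx.dma.irq;	/* e.g. -EPROBE_DEFER */

	priv->chan_tx.dma.irq = platform_get_irq_byname(pdev, "tx");
	if (priv->chan_tx.dma.irq < 0)
		return priv->chan_tx.dma.irq;

	return 0;
}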