
Searched refs:tx_ring (Results 1 – 25 of 287) sorted by relevance


/linux/drivers/net/ethernet/intel/ice/
ice_txrx.c
47 if (!tx_ring || !tx_ring->desc) in ice_prgm_fdir_fltr()
76 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in ice_prgm_fdir_fltr()
100 writel(tx_ring->next_to_use, tx_ring->tail); in ice_prgm_fdir_fltr()
152 if (ice_ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in ice_clean_tx_ring()
163 ice_unmap_and_free_tx_buf(tx_ring, &tx_ring->tx_buf[i]); in ice_clean_tx_ring()
166 memset(tx_ring->tx_buf, 0, sizeof(*tx_ring->tx_buf) * tx_ring->count); in ice_clean_tx_ring()
194 devm_kfree(tx_ring->dev, tx_ring->tx_buf); in ice_free_tx_ring()
201 tx_ring->desc, tx_ring->dma); in ice_free_tx_ring()
2275 offload.tx_ring = tx_ring; in ice_xmit_frame_ring()
2278 first = &tx_ring->tx_buf[tx_ring->next_to_use]; in ice_xmit_frame_ring()
[all …]
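
Note: the ice hits above show the producer-side idiom used throughout these Intel drivers: advance next_to_use with a wrap back to slot 0, then notify the NIC by writing the new index into the ring's tail register. A minimal sketch of that step, using a simplified stand-in for the ring structure (not the real struct ice_tx_ring):

/* Simplified ring bookkeeping, modelled on the hits above. */
struct tx_ring_sketch {
	u16 count;		/* number of descriptors in the ring */
	u16 next_to_use;	/* producer index */
	void __iomem *tail;	/* MMIO tail register of this queue */
};

static void tx_ring_bump_tail(struct tx_ring_sketch *ring, u16 i)
{
	/* wrap, as in "(i < tx_ring->count) ? i : 0" above */
	ring->next_to_use = (i < ring->count) ? i : 0;

	/* publish the new producer index to hardware */
	writel(ring->next_to_use, ring->tail);
}
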
/linux/drivers/net/ethernet/intel/iavf/
iavf_txrx.c
68 iavf_unmap_and_free_tx_resource(tx_ring, &tx_ring->tx_bi[i]); in iavf_clean_tx_ring()
74 memset(tx_ring->desc, 0, tx_ring->size); in iavf_clean_tx_ring()
99 dma_free_coherent(tx_ring->dev, tx_ring->size, in iavf_free_tx_resources()
100 tx_ring->desc, tx_ring->dma); in iavf_free_tx_resources()
156 if (tx_ring && tx_ring->desc) { in iavf_detect_recover_hung()
299 (IAVF_DESC_UNUSED(tx_ring) != tx_ring->count)) in iavf_clean_tx_irq()
630 tx_ring->size = tx_ring->count * sizeof(struct iavf_tx_desc); in iavf_setup_tx_descriptors()
631 tx_ring->size = ALIGN(tx_ring->size, 4096); in iavf_setup_tx_descriptors()
632 tx_ring->desc = dma_alloc_coherent(dev, tx_ring->size, in iavf_setup_tx_descriptors()
2129 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in iavf_create_tx_ctx()
[all …]
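
Note: lines 630–632 above are the allocation idiom that recurs in most drivers in this listing (fm10k, igbvf, ixgbevf, pch_gbe, ...): size the descriptor array, round it up to 4 KiB, and back it with coherent DMA memory; teardown hands the same size and DMA handle back to dma_free_coherent(). A rough sketch of both sides, using a hypothetical ring and descriptor size rather than any driver's real structures:

/* Hypothetical ring, modelled on the fields the hits reference. */
struct ring_sketch {
	unsigned int count;	/* number of descriptors */
	unsigned int size;	/* bytes backing the descriptor array */
	void *desc;		/* CPU address of the descriptors */
	dma_addr_t dma;		/* bus address programmed into the NIC */
};

static int ring_sketch_alloc(struct device *dev, struct ring_sketch *ring,
			     size_t desc_size)
{
	ring->size = ring->count * desc_size;
	ring->size = ALIGN(ring->size, 4096);	/* page-align the ring */

	ring->desc = dma_alloc_coherent(dev, ring->size, &ring->dma, GFP_KERNEL);
	return ring->desc ? 0 : -ENOMEM;
}

static void ring_sketch_free(struct device *dev, struct ring_sketch *ring)
{
	if (!ring->desc)
		return;
	dma_free_coherent(dev, ring->size, ring->desc, ring->dma);
	ring->desc = NULL;
}
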
/linux/drivers/net/ethernet/intel/fm10k/
fm10k_main.c
768 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tso()
862 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tx_csum()
903 netif_stop_subqueue(tx_ring->netdev, tx_ring->queue_index); in __fm10k_maybe_stop_tx()
913 netif_start_subqueue(tx_ring->netdev, tx_ring->queue_index); in __fm10k_maybe_stop_tx()
1037 i = tx_ring->count; in fm10k_tx_map()
1070 first = &tx_ring->tx_buffer[tx_ring->next_to_use]; in fm10k_xmit_frame_ring()
1186 i -= tx_ring->count; in fm10k_clean_tx_irq()
1228 i -= tx_ring->count; in fm10k_clean_tx_irq()
1248 i -= tx_ring->count; in fm10k_clean_tx_irq()
1260 i += tx_ring->count; in fm10k_clean_tx_irq()
[all …]
fm10k_netdev.c
23 if (!tx_ring->tx_buffer) in fm10k_setup_tx_resources()
29 tx_ring->size = tx_ring->count * sizeof(struct fm10k_tx_desc); in fm10k_setup_tx_resources()
30 tx_ring->size = ALIGN(tx_ring->size, 4096); in fm10k_setup_tx_resources()
32 tx_ring->desc = dma_alloc_coherent(dev, tx_ring->size, in fm10k_setup_tx_resources()
34 if (!tx_ring->desc) in fm10k_setup_tx_resources()
193 memset(tx_ring->desc, 0, tx_ring->size); in fm10k_clean_tx_ring()
210 if (!tx_ring->desc) in fm10k_free_tx_resources()
213 dma_free_coherent(tx_ring->dev, tx_ring->size, in fm10k_free_tx_resources()
214 tx_ring->desc, tx_ring->dma); in fm10k_free_tx_resources()
596 tx_ring = interface->tx_ring[txqueue]; in fm10k_tx_timeout()
[all …]
/linux/drivers/net/ethernet/netronome/nfp/
nfp_net_debugfs.c
76 struct nfp_net_tx_ring *tx_ring; in nfp_tx_q_show() local
85 tx_ring = r_vec->tx_ring; in nfp_tx_q_show()
87 tx_ring = r_vec->xdp_ring; in nfp_tx_q_show()
88 if (!r_vec->nfp_net || !tx_ring) in nfp_tx_q_show()
94 txd_cnt = tx_ring->cnt; in nfp_tx_q_show()
100 tx_ring->idx, tx_ring->qcidx, in nfp_tx_q_show()
101 tx_ring == r_vec->tx_ring ? "" : "xdp", in nfp_tx_q_show()
102 tx_ring->cnt, &tx_ring->dma, tx_ring->txds, in nfp_tx_q_show()
103 tx_ring->rd_p, tx_ring->wr_p, d_rd_p, d_wr_p); in nfp_tx_q_show()
106 txd = &tx_ring->txds[i]; in nfp_tx_q_show()
[all …]
nfp_net_common.c
687 return (tx_ring->wr_p - tx_ring->rd_p) >= (tx_ring->cnt - dcnt); in nfp_net_tx_full()
1154 if (tx_ring->wr_p == tx_ring->rd_p) in nfp_net_tx_complete()
1225 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_net_tx_complete()
1227 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_net_tx_complete()
1249 tx_ring->qcp_rd_p = D_IDX(tx_ring, tx_ring->qcp_rd_p + todo); in nfp_net_xdp_complete()
1264 WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt, in nfp_net_xdp_complete()
1266 tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt); in nfp_net_xdp_complete()
1284 while (!tx_ring->is_xdp && tx_ring->rd_p != tx_ring->wr_p) { in nfp_net_tx_ring_reset()
2119 tx_ring = r_vec->tx_ring; in nfp_ctrl_tx_one()
2418 tx_ring->size = array_size(tx_ring->cnt, sizeof(*tx_ring->txds)); in nfp_net_tx_ring_alloc()
[all …]
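
Note: unlike the next_to_use/next_to_clean style above, the nfp hits keep free-running 32-bit read/write pointers; occupancy is simply wr_p - rd_p (correct even when the unsigned counters wrap), and a slot index is obtained by masking with the power-of-two ring size, which appears to be what D_IDX() does. A small self-contained sketch of that bookkeeping (names are illustrative, not the nfp structures):

struct ring_ptrs {
	u32 cnt;	/* ring size, a power of two */
	u32 wr_p;	/* free-running producer pointer */
	u32 rd_p;	/* free-running consumer pointer */
};

/* Slot index for a free-running pointer. */
static inline u32 ring_idx(const struct ring_ptrs *r, u32 p)
{
	return p & (r->cnt - 1);
}

/* True when fewer than "needed" descriptors remain free, cf. nfp_net_tx_full(). */
static inline bool ring_full(const struct ring_ptrs *r, u32 needed)
{
	return (r->wr_p - r->rd_p) >= (r->cnt - needed);
}
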
/linux/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
34 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in i40e_fdir()
153 writel(tx_ring->next_to_use, tx_ring->tail); in i40e_program_fdir_filter()
793 if (ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in i40e_clean_tx_ring()
810 memset(tx_ring->desc, 0, tx_ring->size); in i40e_clean_tx_ring()
837 dma_free_coherent(tx_ring->dev, tx_ring->size, in i40e_free_tx_resources()
838 tx_ring->desc, tx_ring->dma); in i40e_free_tx_resources()
899 if (tx_ring && tx_ring->desc) { in i40e_detect_recover_hung()
1451 tx_ring->size = ALIGN(tx_ring->size, 4096); in i40e_setup_tx_descriptors()
2870 (tx_ring->atr_count < tx_ring->atr_sample_rate)) in i40e_atr()
2880 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in i40e_atr()
[all …]
i40e_txrx_common.h
47 u64_stats_update_begin(&tx_ring->syncp); in i40e_update_tx_stats()
48 tx_ring->stats.bytes += total_bytes; in i40e_update_tx_stats()
49 tx_ring->stats.packets += total_packets; in i40e_update_tx_stats()
50 u64_stats_update_end(&tx_ring->syncp); in i40e_update_tx_stats()
51 tx_ring->q_vector->tx.total_bytes += total_bytes; in i40e_update_tx_stats()
52 tx_ring->q_vector->tx.total_packets += total_packets; in i40e_update_tx_stats()
67 if (tx_ring->flags & I40E_TXR_FLAGS_WB_ON_ITR) { in i40e_arm_wb()
73 unsigned int j = i40e_get_tx_pending(tx_ring, false); in i40e_arm_wb()
78 (I40E_DESC_UNUSED(tx_ring) != tx_ring->count)) in i40e_arm_wb()
79 tx_ring->arm_wb = true; in i40e_arm_wb()
[all …]
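
Note: the i40e_update_tx_stats() hit is the usual u64_stats_sync idiom for per-ring counters: writer-side updates are bracketed by u64_stats_update_begin()/u64_stats_update_end() so that 64-bit reads on 32-bit machines see a consistent packets/bytes pair, and a reader retries with the fetch helpers until no writer raced with it. A hedged sketch of both sides (ring layout simplified, not the i40e structures):

struct ring_stats_sketch {
	u64 packets;
	u64 bytes;
	struct u64_stats_sync syncp;
};

static void ring_stats_add(struct ring_stats_sketch *s, u64 pkts, u64 bytes)
{
	u64_stats_update_begin(&s->syncp);
	s->packets += pkts;
	s->bytes += bytes;
	u64_stats_update_end(&s->syncp);
}

static void ring_stats_read(struct ring_stats_sketch *s, u64 *pkts, u64 *bytes)
{
	unsigned int start;

	do {
		start = u64_stats_fetch_begin(&s->syncp);
		*pkts = s->packets;
		*bytes = s->bytes;
	} while (u64_stats_fetch_retry(&s->syncp, start));
}
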
i40e_xsk.c
505 tx_ring->xdp_tx_active--; in i40e_clean_xdp_tx_buffer()
506 dma_unmap_single(tx_ring->dev, in i40e_clean_xdp_tx_buffer()
528 head_idx += tx_ring->count; in i40e_clean_xdp_tx_irq()
539 ntc = tx_ring->next_to_clean; in i40e_clean_xdp_tx_irq()
542 tx_bi = &tx_ring->tx_bi[ntc]; in i40e_clean_xdp_tx_irq()
551 if (++ntc >= tx_ring->count) in i40e_clean_xdp_tx_irq()
557 if (unlikely(tx_ring->next_to_clean >= tx_ring->count)) in i40e_clean_xdp_tx_irq()
558 tx_ring->next_to_clean -= tx_ring->count; in i40e_clean_xdp_tx_irq()
569 return i40e_xmit_zc(tx_ring, I40E_DESC_UNUSED(tx_ring)); in i40e_clean_xdp_tx_irq()
635 u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use; in i40e_xsk_clean_tx_ring()
[all …]
/linux/drivers/infiniband/hw/hfi1/
ipoib_tx.c
136 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_drain_tx_ring() local
147 tx_ring->head = 0; in hfi1_ipoib_drain_tx_ring()
148 tx_ring->tail = 0; in hfi1_ipoib_drain_tx_ring()
158 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_poll_tx_ring() local
354 struct hfi1_ipoib_circ_buf *tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_common() local
478 tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_single()
481 smp_store_release(&tx_ring->tail, CIRC_NEXT(tx_ring->tail, tx_ring->max_items)); in hfi1_ipoib_send_dma_single()
541 tx_ring = &txq->tx_ring; in hfi1_ipoib_send_dma_list()
544 smp_store_release(&tx_ring->tail, CIRC_NEXT(tx_ring->tail, tx_ring->max_items)); in hfi1_ipoib_send_dma_list()
841 txq->tx_ring.sent_txreqs, txq->tx_ring.complete_txreqs, in hfi1_ipoib_tx_timeout()
[all …]
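
Note: the hfi1 IPoIB hits publish the circular-buffer tail with smp_store_release() after the slot has been filled, so a consumer that reads the tail with smp_load_acquire() can never observe the new index before the slot contents; CIRC_NEXT() is presumably just an increment modulo the (power-of-two) ring size. An illustrative producer-side publish in the same spirit:

struct circ_ring_sketch {
	unsigned long head;		/* consumer index */
	unsigned long tail;		/* producer index, published to the consumer */
	unsigned long max_items;	/* ring size, a power of two */
};

static void circ_ring_publish_tail(struct circ_ring_sketch *r)
{
	/*
	 * Release ordering: everything written into the slot before this
	 * point is visible to a consumer that pairs its read of tail with
	 * smp_load_acquire().
	 */
	smp_store_release(&r->tail, (r->tail + 1) & (r->max_items - 1));
}
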
/linux/drivers/net/ethernet/freescale/enetc/
enetc.c
29 int index = &priv->tx_ring[tx_ring->index] - priv->xdp_tx_ring; in enetc_rx_ring_from_xdp_tx_ring()
90 enetc_wr_reg_hot(tx_ring->tpir, tx_ring->next_to_use); in enetc_update_tx_ring_tail()
143 i = tx_ring->next_to_use; in enetc_map_tx_buffs()
180 if (tx_ring->tsd_enable) in enetc_map_tx_buffs()
312 i = tx_ring->bd_count; in enetc_map_tx_buffs()
584 tx_ring = priv->tx_ring[skb->queue_mapping]; in enetc_start_xmit()
865 enetc_wr_reg_hot(tx_ring->idr, BIT(tx_ring->index) | in enetc_clean_tx_ring()
1351 prefetchw(ENETC_TXBD(*tx_ring, tx_ring->next_to_use)); in enetc_xdp_xmit()
1907 if (!tx_ring->tx_swbd) in enetc_free_tx_ring()
2448 tx_ring = priv->tx_ring[i]; in enetc_setup_tc_mqprio()
[all …]
/linux/drivers/net/ethernet/intel/igbvf/
netdev.c
430 tx_ring->size = ALIGN(tx_ring->size, 4096); in igbvf_setup_tx_resources()
432 tx_ring->desc = dma_alloc_coherent(&pdev->dev, tx_ring->size, in igbvf_setup_tx_resources()
517 memset(tx_ring->desc, 0, tx_ring->size); in igbvf_clean_tx_ring()
541 dma_free_coherent(&pdev->dev, tx_ring->size, tx_ring->desc, in igbvf_free_tx_resources()
875 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_intr_msix_tx() local
979 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_configure_msix() local
987 writel(tx_ring->itr_val, hw->hw_addr + tx_ring->itr_register); in igbvf_configure_msix()
1288 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_configure_tx() local
1916 struct igbvf_ring *tx_ring = adapter->tx_ring; in igbvf_watchdog_task() local
1986 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in igbvf_tx_ctxtdesc()
[all …]
/linux/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_io.c
968 tx_ring = sds_ring->tx_ring; in qlcnic_poll()
1609 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_add()
1634 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_del()
1662 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_enable()
1690 tx_ring = &adapter->tx_ring[ring]; in qlcnic_82xx_napi_disable()
1961 tx_ring = adapter->tx_ring; in qlcnic_83xx_msix_sriov_vf_poll()
1989 tx_ring = adapter->tx_ring; in qlcnic_83xx_poll()
2065 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_enable()
2093 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_disable()
2139 tx_ring = &adapter->tx_ring[ring]; in qlcnic_83xx_napi_add()
[all …]
/linux/drivers/net/ethernet/broadcom/
bcm4908_enet.c
187 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_dma_free() local
198 if (tx_ring->cpu_addr) in bcm4908_enet_dma_free()
199 dma_free_coherent(dev, size, tx_ring->cpu_addr, tx_ring->dma_addr); in bcm4908_enet_dma_free()
205 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_dma_alloc() local
211 tx_ring->is_tx = 1; in bcm4908_enet_dma_alloc()
441 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_open() local
491 struct bcm4908_enet_dma_ring *tx_ring = &enet->tx_ring; in bcm4908_enet_stop() local
641 while (handled < weight && tx_ring->read_idx != tx_ring->write_idx) { in bcm4908_enet_poll_tx()
642 buf_desc = &tx_ring->buf_desc[tx_ring->read_idx]; in bcm4908_enet_poll_tx()
645 slot = &tx_ring->slots[tx_ring->read_idx]; in bcm4908_enet_poll_tx()
[all …]
/linux/drivers/net/ethernet/amazon/ena/
ena_netdev.c
736 struct ena_ring *tx_ring = &adapter->tx_ring[qid]; in ena_setup_tx_resources() local
802 struct ena_ring *tx_ring = &adapter->tx_ring[qid]; in ena_free_tx_resources() local
1212 netif_dbg(tx_ring->adapter, ifdown, tx_ring->netdev, in ena_free_tx_bufs()
1231 tx_ring = &adapter->tx_ring[i]; in ena_free_all_tx_bufs()
1374 netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev, in ena_clean_tx_irq()
1961 tx_ring = ena_napi->tx_ring; in ena_io_poll()
2288 napi->tx_ring = &adapter->tx_ring[i]; in ena_init_napi_in_range()
2383 tx_ring = &adapter->tx_ring[qid]; in ena_create_io_tx_queue()
3059 tx_ring = &adapter->tx_ring[qid]; in ena_start_xmit()
3283 tx_ring = &adapter->tx_ring[i]; in ena_get_stats64()
[all …]
/linux/drivers/net/ethernet/intel/igc/
igc_xdp.c
42 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_enable_pool() local
71 tx_ring = adapter->tx_ring[queue_id]; in igc_xdp_enable_pool()
77 igc_disable_tx_ring(tx_ring); in igc_xdp_enable_pool()
82 set_bit(IGC_RING_FLAG_AF_XDP_ZC, &tx_ring->flags); in igc_xdp_enable_pool()
87 igc_enable_tx_ring(tx_ring); in igc_xdp_enable_pool()
101 struct igc_ring *rx_ring, *tx_ring; in igc_xdp_disable_pool() local
117 tx_ring = adapter->tx_ring[queue_id]; in igc_xdp_disable_pool()
123 igc_disable_tx_ring(tx_ring); in igc_xdp_disable_pool()
129 clear_bit(IGC_RING_FLAG_AF_XDP_ZC, &tx_ring->flags); in igc_xdp_disable_pool()
134 igc_enable_tx_ring(tx_ring); in igc_xdp_disable_pool()
igc_dump.c
118 struct igc_ring *tx_ring; in igc_rings_dump() local
138 tx_ring = adapter->tx_ring[n]; in igc_rings_dump()
139 buffer_info = &tx_ring->tx_buffer_info[tx_ring->next_to_clean]; in igc_rings_dump()
142 n, tx_ring->next_to_use, tx_ring->next_to_clean, in igc_rings_dump()
167 tx_ring = adapter->tx_ring[n]; in igc_rings_dump()
170 tx_ring->queue_index); in igc_rings_dump()
174 for (i = 0; tx_ring->desc && (i < tx_ring->count); i++) { in igc_rings_dump()
178 tx_desc = IGC_TX_DESC(tx_ring, i); in igc_rings_dump()
181 if (i == tx_ring->next_to_use && in igc_rings_dump()
182 i == tx_ring->next_to_clean) in igc_rings_dump()
[all …]
/linux/drivers/net/ethernet/intel/ixgbevf/
ixgbevf_main.c
279 i -= tx_ring->count; in ixgbevf_clean_tx_irq()
367 if (check_for_tx_hang(tx_ring) && ixgbevf_check_tx_hang(tx_ring)) { in ixgbevf_clean_tx_irq()
3360 dma_free_coherent(tx_ring->dev, tx_ring->size, tx_ring->desc, in ixgbevf_free_tx_resources()
3403 tx_ring->size = tx_ring->count * sizeof(union ixgbe_adv_tx_desc); in ixgbevf_setup_tx_resources()
3404 tx_ring->size = ALIGN(tx_ring->size, 4096); in ixgbevf_setup_tx_resources()
3406 tx_ring->desc = dma_alloc_coherent(tx_ring->dev, tx_ring->size, in ixgbevf_setup_tx_resources()
3735 tx_ring->next_to_use = (i < tx_ring->count) ? i : 0; in ixgbevf_tx_ctxtdesc()
4078 netif_stop_subqueue(tx_ring->netdev, tx_ring->queue_index); in __ixgbevf_maybe_stop_tx()
4092 netif_start_subqueue(tx_ring->netdev, tx_ring->queue_index); in __ixgbevf_maybe_stop_tx()
4145 first = &tx_ring->tx_buffer_info[tx_ring->next_to_use]; in ixgbevf_xmit_frame_ring()
[all …]
/linux/drivers/net/wireless/ath/ath11k/
dp_tx.c
90 struct dp_tx_ring *tx_ring; in ath11k_dp_tx() local
123 tx_ring = &dp->tx_ring[ti.ring_id]; in ath11k_dp_tx()
541 struct dp_tx_ring *tx_ring = &dp->tx_ring[ring_id]; in ath11k_dp_tx_completion_handler() local
551 tx_ring->tx_status_tail) && in ath11k_dp_tx_completion_handler()
553 memcpy(&tx_ring->tx_status[tx_ring->tx_status_head], in ath11k_dp_tx_completion_handler()
555 tx_ring->tx_status_head = in ath11k_dp_tx_completion_handler()
560 (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_head) == tx_ring->tx_status_tail)) { in ath11k_dp_tx_completion_handler()
569 while (ATH11K_TX_COMPL_NEXT(tx_ring->tx_status_tail) != tx_ring->tx_status_head) { in ath11k_dp_tx_completion_handler()
573 tx_ring->tx_status_tail = in ath11k_dp_tx_completion_handler()
575 tx_status = &tx_ring->tx_status[tx_ring->tx_status_tail]; in ath11k_dp_tx_completion_handler()
[all …]
/linux/drivers/net/ethernet/agere/
et131x.c
489 struct tx_ring tx_ring; member
1639 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_config_tx_dma_regs() local
1752 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_init_send() local
2357 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_tx_dma_memory_alloc() local
2391 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_tx_dma_memory_free() local
2428 struct tx_ring *tx_ring = &adapter->tx_ring; in nic_send_packet() local
2589 struct tx_ring *tx_ring = &adapter->tx_ring; in send_packet() local
2640 struct tx_ring *tx_ring = &adapter->tx_ring; in free_send_packet() local
2696 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_free_busy_send_packets() local
2741 struct tx_ring *tx_ring = &adapter->tx_ring; in et131x_handle_send_pkts() local
[all …]
/linux/drivers/net/ethernet/intel/ixgbe/
ixgbe_xsk.c
439 dma_unmap_single(tx_ring->dev, in ixgbe_clean_xdp_tx_buffer()
448 u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use; in ixgbe_clean_xdp_tx_irq()
455 tx_bi = &tx_ring->tx_buffer_info[ntc]; in ixgbe_clean_xdp_tx_irq()
456 tx_desc = IXGBE_TX_DESC(tx_ring, ntc); in ixgbe_clean_xdp_tx_irq()
477 tx_bi = tx_ring->tx_buffer_info; in ixgbe_clean_xdp_tx_irq()
478 tx_desc = IXGBE_TX_DESC(tx_ring, 0); in ixgbe_clean_xdp_tx_irq()
485 tx_ring->next_to_clean = ntc; in ixgbe_clean_xdp_tx_irq()
488 tx_ring->stats.bytes += total_bytes; in ixgbe_clean_xdp_tx_irq()
490 u64_stats_update_end(&tx_ring->syncp); in ixgbe_clean_xdp_tx_irq()
536 u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use; in ixgbe_xsk_clean_tx_ring()
[all …]
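
Note: the ixgbe_xsk_clean_tx_ring() hit (line 536), like i40e_xsk_clean_tx_ring() earlier, walks every outstanding slot between next_to_clean and next_to_use when the ring is torn down, wrapping the index back to 0 at the end of the ring. A simplified version of that loop (buffer release elided, structures hypothetical):

struct clean_ring_sketch {
	unsigned int count;		/* number of descriptors */
	unsigned int next_to_clean;	/* first slot not yet reclaimed */
	unsigned int next_to_use;	/* first slot not yet handed to hw */
};

static void clean_ring_sketch_drain(struct clean_ring_sketch *ring)
{
	unsigned int ntc = ring->next_to_clean;

	while (ntc != ring->next_to_use) {
		/* ... unmap and free whatever backs slot ntc ... */

		if (++ntc == ring->count)
			ntc = 0;	/* wrap to the start of the ring */
	}

	ring->next_to_clean = ntc;
}
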
/linux/drivers/net/ethernet/oki-semi/pch_gbe/
pch_gbe_main.c
926 memset(tx_ring->desc, 0, tx_ring->size); in pch_gbe_clean_tx_ring()
1482 tx_ring->next_to_clean, tx_ring->next_to_use, in pch_gbe_clean_tx()
1717 tx_ring->size = tx_ring->count * (int)sizeof(struct pch_gbe_tx_desc); in pch_gbe_setup_tx_resources()
1719 tx_ring->desc = dma_alloc_coherent(&pdev->dev, tx_ring->size, in pch_gbe_setup_tx_resources()
1735 tx_ring->desc, (unsigned long long)tx_ring->dma, in pch_gbe_setup_tx_resources()
1736 tx_ring->next_to_clean, tx_ring->next_to_use); in pch_gbe_setup_tx_resources()
1794 dma_free_coherent(&pdev->dev, tx_ring->size, tx_ring->desc, in pch_gbe_free_tx_resources()
1795 tx_ring->dma); in pch_gbe_free_tx_resources()
1855 struct pch_gbe_tx_ring *tx_ring = adapter->tx_ring; in pch_gbe_up() local
2047 struct pch_gbe_tx_ring *tx_ring = adapter->tx_ring; in pch_gbe_xmit_frame() local
[all …]
/linux/drivers/net/ethernet/intel/ixgb/
ixgb_main.c
884 adapter->tx_ring.desc, adapter->tx_ring.dma); in ixgb_free_tx_resources()
921 struct ixgb_desc_ring *tx_ring = &adapter->tx_ring; in ixgb_clean_tx_ring() local
938 memset(tx_ring->desc, 0, tx_ring->size); in ixgb_clean_tx_ring()
940 tx_ring->next_to_use = 0; in ixgb_clean_tx_ring()
1291 struct ixgb_desc_ring *tx_ring = &adapter->tx_ring; in ixgb_tx_map() local
1300 i = tx_ring->next_to_use; in ixgb_tx_map()
1378 i += tx_ring->count; in ixgb_tx_map()
1390 struct ixgb_desc_ring *tx_ring = &adapter->tx_ring; in ixgb_tx_queue() local
1409 i = tx_ring->next_to_use; in ixgb_tx_queue()
1440 struct ixgb_desc_ring *tx_ring = &adapter->tx_ring; in __ixgb_maybe_stop_tx() local
[all …]
/linux/drivers/net/ethernet/apm/xgene-v2/
main.c
176 struct xge_desc_ring *tx_ring; in xge_start_xmit() local
184 tx_ring = pdata->tx_ring; in xge_start_xmit()
185 tail = tx_ring->tail; in xge_start_xmit()
240 struct xge_desc_ring *tx_ring; in xge_txc_poll() local
248 tx_ring = pdata->tx_ring; in xge_txc_poll()
249 head = tx_ring->head; in xge_txc_poll()
282 tx_ring->head = head; in xge_txc_poll()
448 pdata->tx_ring = ring; in xge_create_desc_rings()
555 struct xge_desc_ring *tx_ring; in xge_free_pending_skb() local
562 tx_ring = pdata->tx_ring; in xge_free_pending_skb()
[all …]
/linux/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
503 tx_ring = &sdma->tx_ring; in prestera_sdma_tx_recycle_work_fn()
528 struct prestera_tx_ring *tx_ring = &sdma->tx_ring; in prestera_sdma_tx_init() local
536 if (!tx_ring->bufs) in prestera_sdma_tx_init()
545 tx_ring->burst = tx_ring->max_burst; in prestera_sdma_tx_init()
546 tx_ring->next_tx = 0; in prestera_sdma_tx_init()
727 tx_ring = &sdma->tx_ring; in prestera_sdma_xmit()
729 buf = &tx_ring->bufs[tx_ring->next_tx]; in prestera_sdma_xmit()
747 if (tx_ring->burst) { in prestera_sdma_xmit()
748 tx_ring->burst--; in prestera_sdma_xmit()
750 tx_ring->burst = tx_ring->max_burst; in prestera_sdma_xmit()
[all …]

Completed in 112 milliseconds
