Searched refs:xsk_pool (Results 1 – 25 of 26) sorted by relevance

/linux/drivers/net/ethernet/intel/i40e/
i40e_xsk.c
367 xsk_buff_dma_sync_for_cpu(bi, rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
387 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in i40e_clean_rx_irq_zc()
389 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
391 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
404 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc->addr); in i40e_xmit_pkt()
424 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc[i].addr); in i40e_xmit_pkt_batch()
521 struct xsk_buff_pool *bp = tx_ring->xsk_pool; in i40e_clean_xdp_tx_irq()
566 if (xsk_uses_need_wakeup(tx_ring->xsk_pool)) in i40e_clean_xdp_tx_irq()
567 xsk_set_tx_need_wakeup(tx_ring->xsk_pool); in i40e_clean_xdp_tx_irq()
599 if (!vsi->xdp_rings[queue_id]->xsk_pool) in i40e_xsk_wakeup()
[all …]
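
The hits at 387–391 above are the RX half of the AF_XDP need_wakeup protocol. A minimal sketch of that pattern against the include/net/xdp_sock_drv.h helpers (the function name and the alloc_failed flag are hypothetical, standing in for the driver's "fill ring ran dry" state):

    #include <net/xdp_sock_drv.h>

    /* End of a hypothetical zero-copy RX cleanup pass: tell user space
     * whether it must kick the kernel (poll()/sendto()) before more RX
     * buffers will be consumed.
     */
    static void my_update_rx_wakeup(struct xsk_buff_pool *pool, bool alloc_failed)
    {
            if (!xsk_uses_need_wakeup(pool))
                    return;

            if (alloc_failed)
                    xsk_set_rx_need_wakeup(pool);   /* fill ring empty: need a kick */
            else
                    xsk_clear_rx_need_wakeup(pool); /* making progress on our own */
    }
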
i40e_txrx.h
392 struct xsk_buff_pool *xsk_pool; member
i40e_txrx.c
793 if (ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in i40e_clean_tx_ring()
1503 if (rx_ring->xsk_pool) { in i40e_clean_rx_ring()
1537 if (rx_ring->xsk_pool) in i40e_clean_rx_ring()
2709 bool wd = ring->xsk_pool ? in i40e_napi_poll()
2737 int cleaned = ring->xsk_pool ? in i40e_napi_poll()
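
The i40e_napi_poll() hits at 2709 and 2737 show the usual dispatch shape: one NAPI handler picks the zero-copy or the regular cleanup routine per ring, keyed only on whether a pool is attached. A rough sketch (my_ring and the two cleanup routines are hypothetical names):

    #include <net/xdp_sock_drv.h>

    struct my_ring {
            struct xsk_buff_pool *xsk_pool; /* non-NULL iff AF_XDP ZC is bound */
            /* ... descriptors, stats, NAPI state ... */
    };

    int my_clean_rx_irq(struct my_ring *ring, int budget);    /* copy path */
    int my_clean_rx_irq_zc(struct my_ring *ring, int budget); /* ZC path */

    /* Per-ring dispatch inside the driver's NAPI poll. */
    static int my_poll_one_rx_ring(struct my_ring *ring, int budget)
    {
            return ring->xsk_pool ? my_clean_rx_irq_zc(ring, budget)
                                  : my_clean_rx_irq(ring, budget);
    }
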
i40e_main.c
3206 ring->xsk_pool = i40e_xsk_pool(ring); in i40e_configure_tx_ring()
3340 ring->xsk_pool = i40e_xsk_pool(ring); in i40e_configure_rx_ring()
3341 if (ring->xsk_pool) { in i40e_configure_rx_ring()
3346 xsk_pool_get_rx_frame_size(ring->xsk_pool); in i40e_configure_rx_ring()
3431 if (ring->xsk_pool) { in i40e_configure_rx_ring()
3432 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in i40e_configure_rx_ring()
3443 ring->xsk_pool ? "AF_XDP ZC enabled " : "", in i40e_configure_rx_ring()
13063 if (vsi->xdp_rings[i]->xsk_pool) in i40e_xdp_setup()
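
The i40e_configure_rx_ring() hits (3340–3346, 3431–3432) illustrate ring bring-up with a pool bound: the RX buffer length comes from the pool's frame size, and the pool is pointed at the ring's xdp_rxq_info. A sketch under those assumptions (my_rx_ring and the HW stub are hypothetical):

    #include <net/xdp.h>
    #include <net/xdp_sock_drv.h>

    struct my_rx_ring {
            struct xsk_buff_pool *xsk_pool;
            struct xdp_rxq_info xdp_rxq;
    };

    void my_hw_set_rx_buf_len(struct my_rx_ring *ring, u32 len); /* HW stub */

    static void my_cfg_rx_ring_zc(struct my_rx_ring *ring)
    {
            /* Frame size the pool hands out (chunk size minus headroom);
             * this is the most the HW may DMA into one descriptor. */
            my_hw_set_rx_buf_len(ring, xsk_pool_get_rx_frame_size(ring->xsk_pool));

            /* Bind the pool to this queue's rxq info so xdp_buff fields
             * are filled in correctly on RX. */
            xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
    }
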
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.h
25 dma_info->xsk = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_page_alloc_pool()
41 if (!xsk_uses_need_wakeup(rq->xsk_pool)) in mlx5e_xsk_update_rx_wakeup()
45 xsk_set_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()
47 xsk_clear_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()
tx.h
18 if (!xsk_uses_need_wakeup(sq->xsk_pool)) in mlx5e_xsk_update_tx_wakeup()
22 xsk_clear_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()
24 xsk_set_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()
rx.c
50 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
96 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_linear()
tx.c
69 struct xsk_buff_pool *pool = sq->xsk_pool; in mlx5e_xsk_tx()
setup.c
70 rq->xsk_pool = pool; in mlx5e_init_xsk_rq()
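
The tx.h hits at 18–24 are the TX mirror of the need_wakeup handshake: the flag is set only when the send queue has nothing in flight, so user space knows a syscall is needed to restart transmission. A hedged sketch (the sq_has_work flag stands in for mlx5's actual producer/consumer comparison):

    #include <net/xdp_sock_drv.h>

    static void my_update_tx_wakeup(struct xsk_buff_pool *pool, bool sq_has_work)
    {
            if (!xsk_uses_need_wakeup(pool))
                    return;

            if (sq_has_work)
                    xsk_clear_tx_need_wakeup(pool); /* completions will drive us */
            else
                    xsk_set_tx_need_wakeup(pool);   /* idle: user space must kick */
    }
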
/linux/drivers/net/ethernet/intel/ice/
ice_xsk.c
244 xdp_ring->xsk_pool = ice_tx_xsk_pool(xdp_ring); in ice_qp_ena()
602 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ice_clean_rx_irq_zc()
604 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
606 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
639 if (!xsk_tx_peek_desc(xdp_ring->xsk_pool, &desc)) in ice_xmit_zc()
660 xsk_tx_release(xdp_ring->xsk_pool); in ice_xmit_zc()
734 xsk_tx_completed(xdp_ring->xsk_pool, xsk_frames); in ice_clean_tx_irq_zc()
736 if (xsk_uses_need_wakeup(xdp_ring->xsk_pool)) in ice_clean_tx_irq_zc()
737 xsk_set_tx_need_wakeup(xdp_ring->xsk_pool); in ice_clean_tx_irq_zc()
771 if (!vsi->xdp_rings[queue_id]->xsk_pool) in ice_xsk_wakeup()
[all …]
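
The ice_xmit_zc()/ice_clean_tx_irq_zc() hits above (639, 660, 734–737) trace the full ZC transmit cycle: peek descriptors from the socket's TX ring, translate umem offsets to DMA addresses, and release the consumed slots in one batch. A condensed sketch of the transmit half (my_tx_ring and the HW-descriptor step are hypothetical):

    #include <net/xdp_sock_drv.h>

    struct my_tx_ring {
            struct xsk_buff_pool *xsk_pool;
            /* ... HW descriptor ring state ... */
    };

    static void my_xmit_zc(struct my_tx_ring *ring, unsigned int budget)
    {
            struct xsk_buff_pool *pool = ring->xsk_pool;
            struct xdp_desc desc;
            unsigned int sent = 0;

            while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
                    dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

                    xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
                    /* ... write dma/desc.len into the next HW TX descriptor ... */
                    sent++;
            }

            if (sent) {
                    xsk_tx_release(pool);   /* publish consumed TX-ring slots */
                    /* ... bump the HW tail/doorbell ... */
            }
    }

On the completion side, the driver counts finished frames and hands them back with xsk_tx_completed(pool, n), then sets the TX need_wakeup flag as in the 736–737 hits.
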
ice_base.c
438 if (ring->xsk_pool) in ice_setup_rx_ctx()
508 ring->xsk_pool = ice_xsk_pool(ring); in ice_vsi_cfg_rxq()
509 if (ring->xsk_pool) { in ice_vsi_cfg_rxq()
515 xsk_pool_get_rx_frame_size(ring->xsk_pool); in ice_vsi_cfg_rxq()
521 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in ice_vsi_cfg_rxq()
549 if (ring->xsk_pool) { in ice_vsi_cfg_rxq()
552 if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs)) { in ice_vsi_cfg_rxq()
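
The hit at 552 is a bring-up sanity check: xsk_buff_can_alloc() asks the pool whether the fill ring already holds enough buffers for the whole descriptor ring, without reserving anything. A small sketch of that guard (names hypothetical):

    #include <linux/printk.h>
    #include <net/xdp_sock_drv.h>

    struct my_cfg_ring {
            struct xsk_buff_pool *xsk_pool;
    };

    static void my_check_fill_ring(struct my_cfg_ring *ring, u32 num_bufs)
    {
            /* Best-effort only: user space may simply not have stocked the
             * fill ring yet, in which case RX starts empty and recovers
             * once buffers are produced. */
            if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs))
                    pr_warn("XSK pool short of %u buffers at ring bring-up\n",
                            num_bufs);
    }
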
ice_txrx.h
292 struct xsk_buff_pool *xsk_pool; member
314 struct xsk_buff_pool *xsk_pool; member
ice_txrx.c
152 if (ice_ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in ice_clean_tx_ring()
392 if (rx_ring->xsk_pool) { in ice_clean_rx_ring()
422 if (rx_ring->xsk_pool) in ice_clean_rx_ring()
452 if (rx_ring->xsk_pool) { in ice_free_rx_ring()
1454 if (tx_ring->xsk_pool) in ice_napi_poll()
1487 cleaned = rx_ring->xsk_pool ? in ice_napi_poll()
ice_main.c
2488 xdp_ring->xsk_pool = ice_tx_xsk_pool(xdp_ring); in ice_xdp_alloc_setup_rings()
2729 if (rx_ring->xsk_pool) in ice_vsi_rx_napi_schedule()
/linux/drivers/net/ethernet/intel/ixgbe/
ixgbe_xsk.c
164 bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ixgbe_alloc_rx_buffers_zc()
301 xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
355 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ixgbe_clean_rx_irq_zc()
357 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
359 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
384 struct xsk_buff_pool *pool = xdp_ring->xsk_pool; in ixgbe_xmit_zc()
450 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_clean_xdp_tx_irq()
522 if (!ring->xsk_pool) in ixgbe_xsk_wakeup()
537 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_xsk_clean_tx_ring()
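
The ixgbe_alloc_rx_buffers_zc() hit at 164 is the refill side: RX buffers are not page allocations but handles pulled out of the pool, whose DMA mappings were set up when the umem was registered. A sketch of such a refill loop (struct layout and ring indexing are hypothetical):

    #include <net/xdp_sock_drv.h>

    struct my_rx_slot {
            struct xdp_buff *xdp;
            dma_addr_t dma;
    };

    struct my_zc_rx_ring {
            struct xsk_buff_pool *xsk_pool;
            struct my_rx_slot *rx_bi;
            u16 next_to_use;
            u16 count;
    };

    static u16 my_alloc_rx_buffers_zc(struct my_zc_rx_ring *ring, u16 want)
    {
            u16 filled = 0;

            while (filled < want) {
                    struct xdp_buff *xdp = xsk_buff_alloc(ring->xsk_pool);

                    if (!xdp)
                            break;  /* fill ring empty; retry next NAPI pass */

                    ring->rx_bi[ring->next_to_use].xdp = xdp;
                    ring->rx_bi[ring->next_to_use].dma = xsk_buff_xdp_get_dma(xdp);
                    /* ... post that DMA address to the HW RX descriptor ... */
                    ring->next_to_use = (ring->next_to_use + 1) % ring->count;
                    filled++;
            }

            return filled;
    }
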
ixgbe_main.c
3162 bool wd = ring->xsk_pool ? in ixgbe_poll()
3182 int cleaned = ring->xsk_pool ? in ixgbe_poll()
3477 ring->xsk_pool = NULL; in ixgbe_configure_tx_ring()
3479 ring->xsk_pool = ixgbe_xsk_pool(adapter, ring); in ixgbe_configure_tx_ring()
3719 if (rx_ring->xsk_pool) { in ixgbe_configure_srrctl()
4065 ring->xsk_pool = ixgbe_xsk_pool(adapter, ring); in ixgbe_configure_rx_ring()
4066 if (ring->xsk_pool) { in ixgbe_configure_rx_ring()
4150 if (ring->xsk_pool) in ixgbe_configure_rx_ring()
5300 if (rx_ring->xsk_pool) { in ixgbe_clean_rx_ring()
6000 if (tx_ring->xsk_pool) { in ixgbe_clean_tx_ring()
[all …]
ixgbe.h
355 struct xsk_buff_pool *xsk_pool; member
/linux/drivers/net/ethernet/stmicro/stmmac/
stmmac_main.c
233 if (rx_q->xsk_pool) { in stmmac_disable_all_queues()
1682 if (rx_q->xsk_pool) { in __init_dma_rx_desc_rings()
1699 if (rx_q->xsk_pool) { in __init_dma_rx_desc_rings()
1751 if (rx_q->xsk_pool) in init_dma_rx_desc_rings()
1757 rx_q->xsk_pool = NULL; in init_dma_rx_desc_rings()
1886 tx_q->xsk_pool = NULL; in dma_free_tx_skbufs()
1913 if (rx_q->xsk_pool) in __free_dma_rx_desc_resources()
1919 rx_q->xsk_pool = NULL; in __free_dma_rx_desc_resources()
2366 if (rx_q->xsk_pool) { in stmmac_dma_operation_mode()
2619 if (tx_q->xsk_pool) { in stmmac_tx_clean()
[all …]
stmmac.h
73 struct xsk_buff_pool *xsk_pool; member
99 struct xsk_buff_pool *xsk_pool; member
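
The stmmac hits show the teardown discipline: buffers still held by the ring go back to the pool, and the ring's xsk_pool pointer is reset to NULL so the next init takes the page-based path (1757, 1886, 1919). A sketch under assumed struct names:

    #include <net/xdp_sock_drv.h>

    struct my_zc_slot {
            struct xdp_buff *xdp;
    };

    struct my_zc_ring {
            struct xsk_buff_pool *xsk_pool;
            struct my_zc_slot *bi;
            unsigned int count;
    };

    static void my_free_rx_ring_zc(struct my_zc_ring *ring)
    {
            unsigned int i;

            for (i = 0; i < ring->count; i++) {
                    if (!ring->bi[i].xdp)
                            continue;
                    xsk_buff_free(ring->bi[i].xdp); /* hand back to the pool */
                    ring->bi[i].xdp = NULL;
            }

            /* Forget the pool so re-init falls back to page buffers. */
            ring->xsk_pool = NULL;
    }
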
/linux/drivers/net/ethernet/intel/igc/
igc_main.c
252 if (tx_ring->xsk_pool && xsk_frames) in igc_clean_tx_ring()
431 if (ring->xsk_pool) in igc_clean_rx_ring()
601 if (ring->xsk_pool) { in igc_configure_rx_ring()
634 if (ring->xsk_pool) in igc_configure_rx_ring()
2078 bi->xdp = xsk_buff_alloc(ring->xsk_pool); in igc_alloc_rx_buffers_zc()
2568 xsk_set_rx_need_wakeup(ring->xsk_pool); in igc_clean_rx_irq_zc()
2754 if (tx_ring->xsk_pool) { in igc_clean_tx_irq()
3656 if (ring->xsk_pool) in igc_configure()
4282 int cleaned = rx_ring->xsk_pool ? in igc_poll()
6084 if (!ring->xsk_pool) in igc_xsk_wakeup()
[all …]
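
The wakeup hits across i40e (599), ice (771), ixgbe (522) and igc (6084) all guard the same entry point: .ndo_xsk_wakeup bails out unless the addressed queue actually has a pool bound, then schedules NAPI. A sketch of that ndo (adapter/ring layout is hypothetical; real drivers also validate queue_id, link state and the XDP_WAKEUP_RX/TX flags):

    #include <linux/netdevice.h>
    #include <net/xdp_sock_drv.h>

    struct my_wake_ring {
            struct xsk_buff_pool *xsk_pool;
            struct napi_struct napi;
    };

    struct my_adapter {
            struct my_wake_ring **rings;
    };

    static int my_xsk_wakeup(struct net_device *dev, u32 queue_id, u32 flags)
    {
            struct my_adapter *adapter = netdev_priv(dev);
            struct my_wake_ring *ring = adapter->rings[queue_id];

            if (!ring->xsk_pool)
                    return -EINVAL; /* no AF_XDP socket bound here */

            /* Running NAPI will see the missed mark; otherwise kick it so
             * the ZC clean/xmit paths get to run. */
            if (!napi_if_scheduled_mark_missed(&ring->napi))
                    napi_schedule(&ring->napi);

            return 0;
    }
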
igc.h
130 struct xsk_buff_pool *xsk_pool; member
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
457 xsk_tx_completed(sq->xsk_pool, xsk_frames); in mlx5e_poll_xdpsq_cq()
495 xsk_tx_completed(sq->xsk_pool, xsk_frames); in mlx5e_free_xdpsq_descs()
/linux/drivers/net/ethernet/mellanox/mlx5/core/
en_main.c
645 xsk_pool_set_rxq_info(rq->xsk_pool, &rq->xdp_rxq); in mlx5e_alloc_rq()
1143 struct xsk_buff_pool *xsk_pool, in mlx5e_alloc_xdpsq() argument
1159 sq->xsk_pool = xsk_pool; in mlx5e_alloc_xdpsq()
1161 sq->stats = sq->xsk_pool ? in mlx5e_alloc_xdpsq()
2199 struct xsk_buff_pool *xsk_pool, in mlx5e_open_channel() argument
2237 if (xsk_pool) { in mlx5e_open_channel()
2238 mlx5e_build_xsk_param(xsk_pool, &xsk); in mlx5e_open_channel()
2322 struct xsk_buff_pool *xsk_pool = NULL; in mlx5e_open_channels() local
3863 struct xsk_buff_pool *xsk_pool = in mlx5e_xsk_validate_mtu() local
3867 if (!xsk_pool) in mlx5e_xsk_validate_mtu()
[all …]
en.h
531 struct xsk_buff_pool *xsk_pool; member
724 struct xsk_buff_pool *xsk_pool; member
1020 struct mlx5e_sq_param *param, struct xsk_buff_pool *xsk_pool,
en_rx.c
295 if (rq->xsk_pool) in mlx5e_page_alloc()
327 if (rq->xsk_pool) in mlx5e_page_release()
414 if (rq->xsk_pool) { in mlx5e_alloc_rx_wqes()
421 if (unlikely(!xsk_buff_can_alloc(rq->xsk_pool, pages_desired))) in mlx5e_alloc_rx_wqes()
667 if (rq->xsk_pool && in mlx5e_alloc_rx_mpwqe()
668 unlikely(!xsk_buff_can_alloc(rq->xsk_pool, MLX5_MPWRQ_PAGES_PER_WQE))) { in mlx5e_alloc_rx_mpwqe()
993 if (unlikely(alloc_err == -ENOMEM && rq->xsk_pool)) in mlx5e_post_rx_mpwqes()
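
The last hit (993) deserves a note: with a pool bound, -ENOMEM from buffer allocation usually means the fill ring is empty, not that the host is out of memory. A hedged sketch of how a post-RX routine might report that (naming and exact return semantics are hypothetical; mlx5's real handling differs in detail):

    #include <linux/compiler.h>
    #include <linux/errno.h>
    #include <net/xdp_sock_drv.h>

    struct my_rq {
            struct xsk_buff_pool *xsk_pool;
    };

    static bool my_post_rx_done(struct my_rq *rq, int alloc_err)
    {
            /* Empty fill ring: claim "not done" so NAPI keeps polling and
             * the RX need_wakeup flag can ask user space for buffers,
             * instead of treating the ring as fully stocked. */
            if (unlikely(alloc_err == -ENOMEM && rq->xsk_pool))
                    return false;

            return !alloc_err;
    }
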

Completed in 128 milliseconds
