Lines matching refs:ring_idx (drivers/net/vmxnet3/vmxnet3_drv.c)

Each vmxnet3 RX queue owns two command rings, rx_ring[0] for head (skb) buffers and rx_ring[1] for body (page) buffers; ring_idx, always 0 or 1, selects one ring and its parallel buf_info bookkeeping array in every match below.

560 vmxnet3_rq_alloc_rx_buf(struct vmxnet3_rx_queue *rq, u32 ring_idx,  in vmxnet3_rq_alloc_rx_buf()  argument
564 struct vmxnet3_rx_buf_info *rbi_base = rq->buf_info[ring_idx]; in vmxnet3_rq_alloc_rx_buf()
565 struct vmxnet3_cmd_ring *ring = &rq->rx_ring[ring_idx]; in vmxnet3_rq_alloc_rx_buf()
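In vmxnet3_rq_alloc_rx_buf(), ring_idx arrives as an argument and picks both the command ring to refill and its shadow buf_info array before the refill loop runs. A minimal user-space model of that selection, with the struct layouts reduced to what the matches show (the driver's real definitions in vmxnet3_int.h carry DMA addresses, descriptor pointers, and more):

    #include <stdint.h>

    /* Simplified stand-ins for the driver's types. */
    struct vmxnet3_rx_buf_info {
            void *skb;   /* set for head (ring 0) buffers */
            void *page;  /* set for body (ring 1) buffers */
    };

    struct vmxnet3_cmd_ring {
            uint32_t size;       /* number of descriptors */
            uint32_t next2fill;  /* next slot handed to the device */
            uint32_t next2comp;  /* next slot reclaimed by the driver */
            uint8_t  gen;        /* generation bit, flipped on each wrap */
    };

    struct vmxnet3_rx_queue {
            struct vmxnet3_cmd_ring     rx_ring[2];  /* two rings per queue */
            struct vmxnet3_rx_buf_info *buf_info[2]; /* parallel shadow arrays */
    };

    /* The selection lines 564-565 perform: ring_idx (0 or 1) names one
     * command ring plus its bookkeeping array. */
    static void select_ring(struct vmxnet3_rx_queue *rq, uint32_t ring_idx,
                            struct vmxnet3_rx_buf_info **rbi_base,
                            struct vmxnet3_cmd_ring **ring)
    {
            *rbi_base = rq->buf_info[ring_idx];
            *ring = &rq->rx_ring[ring_idx];
    }
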
1367 u32 idx, ring_idx; in vmxnet3_rq_rx_complete() local
1384 ring_idx = VMXNET3_GET_RING_IDX(adapter, rcd->rqID); in vmxnet3_rq_rx_complete()
1385 ring = rq->rx_ring + ring_idx; in vmxnet3_rq_rx_complete()
1386 vmxnet3_getRxDesc(rxd, &rq->rx_ring[ring_idx].base[idx].rxd, in vmxnet3_rq_rx_complete()
1388 rbi = rq->buf_info[ring_idx] + idx; in vmxnet3_rq_rx_complete()
1414 ring_idx, idx); in vmxnet3_rq_rx_complete()
1628 ring = rq->rx_ring + ring_idx; in vmxnet3_rq_rx_complete()
1649 rxprod_reg[ring_idx] + rq->qid * 8, in vmxnet3_rq_rx_complete()
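Inside vmxnet3_rq_rx_complete(), ring_idx is not passed in; it is recovered per completion descriptor. VMXNET3_GET_RING_IDX() (line 1384) maps the descriptor's rqID back to 0 or 1, and the same index later selects which producer register to kick (line 1649). A sketch of both mappings, paraphrased from the driver as I recall it (the macro body and the 8-byte per-queue register stride should be treated as approximate, not verbatim):

    #include <stdint.h>

    /* Paraphrase of VMXNET3_GET_RING_IDX(): the device numbers ring-0
     * completions 0..num_rx_queues-1 and ring-1 completions
     * num_rx_queues..2*num_rx_queues-1. */
    static uint32_t get_ring_idx(uint32_t num_rx_queues, uint32_t rqID)
    {
            return (rqID >= num_rx_queues && rqID < 2 * num_rx_queues) ? 1 : 0;
    }

    /* Line 1649: each ring has its own producer register (VMXNET3_REG_RXPROD
     * for ring 0, VMXNET3_REG_RXPROD2 for ring 1), replicated per queue at
     * an 8-byte stride; the driver writes next2fill there to tell the
     * device new buffers are available. */
    static uint32_t rxprod_reg_offset(const uint32_t rxprod_reg[2],
                                      uint32_t ring_idx, uint32_t qid)
    {
            return rxprod_reg[ring_idx] + qid * 8;
    }
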
1666 u32 i, ring_idx; in vmxnet3_rq_cleanup() local
1669 for (ring_idx = 0; ring_idx < 2; ring_idx++) { in vmxnet3_rq_cleanup()
1670 for (i = 0; i < rq->rx_ring[ring_idx].size; i++) { in vmxnet3_rq_cleanup()
1675 &rq->rx_ring[ring_idx].base[i].rxd, &rxDesc); in vmxnet3_rq_cleanup()
1678 rq->buf_info[ring_idx][i].skb) { in vmxnet3_rq_cleanup()
1681 dev_kfree_skb(rq->buf_info[ring_idx][i].skb); in vmxnet3_rq_cleanup()
1682 rq->buf_info[ring_idx][i].skb = NULL; in vmxnet3_rq_cleanup()
1684 rq->buf_info[ring_idx][i].page) { in vmxnet3_rq_cleanup()
1687 put_page(rq->buf_info[ring_idx][i].page); in vmxnet3_rq_cleanup()
1688 rq->buf_info[ring_idx][i].page = NULL; in vmxnet3_rq_cleanup()
1692 rq->rx_ring[ring_idx].gen = VMXNET3_INIT_GEN; in vmxnet3_rq_cleanup()
1693 rq->rx_ring[ring_idx].next2fill = in vmxnet3_rq_cleanup()
1694 rq->rx_ring[ring_idx].next2comp = 0; in vmxnet3_rq_cleanup()
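vmxnet3_rq_cleanup() is the only matched function that walks both rings unconditionally: the hard-coded bound of 2 at line 1669 reflects the fixed two-rings-per-queue layout. Each slot frees whichever buffer it still holds, then the ring is rewound for reuse. A condensed model under the simplified structs above; the DMA unmapping, the descriptor btype check, and the big-endian descriptor copy are elided, and free_skb()/drop_page_ref() are hypothetical stand-ins for the kernel's dev_kfree_skb()/put_page():

    #define VMXNET3_INIT_GEN 1  /* initial generation bit, as in the driver */

    static void free_skb(void *skb) { (void)skb; }        /* stand-in */
    static void drop_page_ref(void *page) { (void)page; } /* stand-in */

    static void rq_cleanup_model(struct vmxnet3_rx_queue *rq)
    {
            for (uint32_t ring_idx = 0; ring_idx < 2; ring_idx++) {
                    for (uint32_t i = 0; i < rq->rx_ring[ring_idx].size; i++) {
                            struct vmxnet3_rx_buf_info *rbi =
                                    &rq->buf_info[ring_idx][i];

                            if (rbi->skb) {         /* head buf, lines 1678-1682 */
                                    free_skb(rbi->skb);
                                    rbi->skb = NULL;
                            } else if (rbi->page) { /* body buf, lines 1684-1688 */
                                    drop_page_ref(rbi->page);
                                    rbi->page = NULL;
                            }
                    }
                    /* Lines 1692-1694: reset the generation bit and both
                     * indices so a later queue activation starts clean. */
                    rq->rx_ring[ring_idx].gen = VMXNET3_INIT_GEN;
                    rq->rx_ring[ring_idx].next2fill = 0;
                    rq->rx_ring[ring_idx].next2comp = 0;
            }
    }
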