/linux/drivers/net/ethernet/altera/
  altera_sgdma.c
    13    struct sgdma_descrip __iomem *ndesc,
    173   struct sgdma_descrip __iomem *ndesc = &descbase[1];   in sgdma_tx_buffer() local
    180   ndesc, /* next descriptor */   in sgdma_tx_buffer()
    181   sgdma_txphysaddr(priv, ndesc),   in sgdma_tx_buffer()
    294   struct sgdma_descrip __iomem *ndesc,   in sgdma_setup_descrip() argument
    305   u32 ctrl = csrrd8(ndesc, sgdma_descroffs(control));   in sgdma_setup_descrip()
    307   csrwr8(ctrl, ndesc, sgdma_descroffs(control));   in sgdma_setup_descrip()
    342   struct sgdma_descrip __iomem *ndesc = &descbase[1];   in sgdma_async_read() local
    353   ndesc, /* next descriptor */   in sgdma_async_read()
    354   sgdma_rxphysaddr(priv, ndesc),   in sgdma_async_read()
/linux/drivers/net/ethernet/socionext/
  sni_ave.c
    689   ndesc = priv->tx.ndesc;   in ave_tx_complete()
    722   done_idx = (done_idx + 1) % ndesc;   in ave_tx_complete()
    749   u32 ndesc, cmdsts;   in ave_rx_receive() local
    753   ndesc = priv->rx.ndesc;   in ave_rx_receive()
    754   restpkt = ((proc_idx + ndesc - 1) - done_idx) % ndesc;   in ave_rx_receive()
    793   proc_idx = (proc_idx + 1) % ndesc;   in ave_rx_receive()
    808   done_idx = (done_idx + 1) % ndesc;   in ave_rx_receive()
    1406  ndesc = priv->tx.ndesc;   in ave_start_xmit()
    1407  freepkt = ((done_idx + ndesc - 1) - proc_idx) % ndesc;   in ave_start_xmit()
    1633  priv->tx.ndesc = AVE_NR_TXDESC;   in ave_probe()
    [all …]
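The sni_ave.c hits show ndesc used as a ring size: producer and consumer indices advance modulo ndesc, and expressions such as the freepkt computation in ave_start_xmit() derive slot counts from the index difference, keeping one slot unused so a full ring is distinguishable from an empty one. A minimal userspace sketch of that arithmetic (ring_free()/ring_used() and the example values are illustrative, not taken from the driver):

    #include <assert.h>
    #include <stdio.h>

    /*
     * "One slot kept empty" ring accounting, the same shape as the
     * freepkt expression in ave_start_xmit() above.  proc is the
     * producer index, done the consumer index, ndesc the ring size.
     */
    static unsigned int ring_free(unsigned int proc, unsigned int done,
                                  unsigned int ndesc)
    {
        return ((done + ndesc - 1) - proc) % ndesc;  /* slots the producer may still fill */
    }

    static unsigned int ring_used(unsigned int proc, unsigned int done,
                                  unsigned int ndesc)
    {
        return (proc + ndesc - done) % ndesc;        /* slots waiting on the consumer */
    }

    int main(void)
    {
        unsigned int ndesc = 8, proc = 0, done = 0;

        proc = (proc + 3) % ndesc;   /* queue three packets  */
        done = (done + 1) % ndesc;   /* complete one of them */

        printf("used=%u free=%u\n", ring_used(proc, done, ndesc),
               ring_free(proc, done, ndesc));
        /* one slot always stays empty, so full never looks like empty */
        assert(ring_used(proc, done, ndesc) +
               ring_free(proc, done, ndesc) == ndesc - 1);
        return 0;
    }

The (done_idx + 1) % ndesc and (proc_idx + 1) % ndesc advances quoted above are the corresponding consumer and producer steps.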
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
  chcr_ktls.c
    856   ndesc = DIV_ROUND_UP(len, 64);   in chcr_ktls_xmit_tcb_cpls()
    926   ndesc = DIV_ROUND_UP(len, 64);   in chcr_ktls_xmit_tcb_cpls()
    999   unsigned int ndesc;   in chcr_ktls_write_tcp_options() local
    1020  ndesc = DIV_ROUND_UP(len16, 4);   in chcr_ktls_write_tcp_options()
    1083  chcr_txq_advance(&q->q, ndesc);   in chcr_ktls_write_tcp_options()
    1250  chcr_txq_advance(&q->q, ndesc);   in chcr_ktls_xmit_wr_complete()
    1284  unsigned int flits = 0, ndesc;   in chcr_ktls_xmit_wr_short() local
    1440  chcr_txq_advance(&q->q, ndesc);   in chcr_ktls_xmit_wr_short()
    1469  unsigned int flits = 0, ndesc;   in chcr_ktls_tx_plaintxt() local
    1488  ndesc = DIV_ROUND_UP(flits, 8);   in chcr_ktls_tx_plaintxt()
    [all …]
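The three DIV_ROUND_UP() conversions above are the same rounding-up division expressed in different units: the divisors imply a 64-byte hardware descriptor, 16-byte "len16" units and 8-byte flits, so a work request of len bytes, len16 units or flits flits rounds up to the same descriptor count. A standalone sketch of that arithmetic (the helper names echo the flits_to_desc() seen in the ch_ipsec and cxgb4vf hits further down, but the bodies here are only the division):

    #include <stdio.h>

    /* Same definition as the kernel's DIV_ROUND_UP(). */
    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

    #define SGE_DESC_BYTES  64   /* one TX descriptor, per the divisors above */
    #define FLIT_BYTES       8   /* one flit; 8 flits per descriptor          */

    static unsigned int bytes_to_desc(unsigned int len)
    {
        return DIV_ROUND_UP(len, SGE_DESC_BYTES);
    }

    static unsigned int flits_to_desc(unsigned int flits)
    {
        return DIV_ROUND_UP(flits, SGE_DESC_BYTES / FLIT_BYTES);
    }

    int main(void)
    {
        /* a 200-byte work request is 25 flits and needs 4 descriptors */
        printf("%u bytes -> %u desc, %u flits -> %u desc\n",
               200u, bytes_to_desc(200), 25u, flits_to_desc(25));
        return 0;
    }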
/linux/drivers/net/ethernet/ti/
  netcp_core.c
    114   *ndesc = le32_to_cpu(desc->next_desc);   in get_pkt_info()
    156   desc->next_desc = cpu_to_le32(ndesc);   in set_pkt_info()
    581   struct knav_dma_desc *ndesc;   in netcp_free_rx_desc_chain() local
    591   if (unlikely(!ndesc)) {   in netcp_free_rx_desc_chain()
    644   struct knav_dma_desc *desc, *ndesc;   in netcp_process_one_rx_packet() local
    695   if (unlikely(!ndesc)) {   in netcp_process_one_rx_packet()
    981   while (ndesc) {   in netcp_free_tx_desc_chain()
    992   ndesc = NULL;   in netcp_free_tx_desc_chain()
    996   if (!ndesc)   in netcp_free_tx_desc_chain()
    1132  if (IS_ERR_OR_NULL(ndesc)) {   in netcp_tx_map_skb()
    [all …]
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ipsec/
  chcr_ipsec.c
    572   u16 ndesc;   in ch_ipsec_crypto_wreq() local
    581   ndesc = DIV_ROUND_UP(flits, 2);   in ch_ipsec_crypto_wreq()
    597   wr_mid = FW_CRYPTO_LOOKASIDE_WR_LEN16_V(ndesc);   in ch_ipsec_crypto_wreq()
    610   wr->req.ulptx.len = htonl(ndesc - 1);   in ch_ipsec_crypto_wreq()
    707   unsigned int last_desc, ndesc, flits = 0;   in ch_ipsec_xmit() local
    737   ndesc = flits_to_desc(flits);   in ch_ipsec_xmit()
    738   credits = txq_avail(&q->q) - ndesc;   in ch_ipsec_xmit()
    744   dev->name, qidx, credits, ndesc, txq_avail(&q->q),   in ch_ipsec_xmit()
    749   last_desc = q->q.pidx + ndesc - 1;   in ch_ipsec_xmit()
    787   txq_advance(&q->q, ndesc);   in ch_ipsec_xmit()
    [all …]
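ch_ipsec_xmit() shows the usual Chelsio TX admission sequence: convert the request size to descriptors, check the remaining queue credits, note the last descriptor the request will occupy, and advance the producer index once the work request is written. A condensed sketch of that flow with a simplified queue in place of struct sge_txq (the field names and helper bodies below are illustrative, not the driver's):

    #include <stdbool.h>
    #include <stdio.h>

    struct txq {
        unsigned int size;    /* descriptors in the ring        */
        unsigned int in_use;  /* descriptors currently occupied */
        unsigned int pidx;    /* producer index                 */
    };

    static unsigned int txq_avail(const struct txq *q)
    {
        return q->size - q->in_use;
    }

    static void txq_advance(struct txq *q, unsigned int ndesc)
    {
        q->in_use += ndesc;
        q->pidx += ndesc;
        if (q->pidx >= q->size)
            q->pidx -= q->size;
    }

    /* Returns false when the ring lacks room, mirroring the credits check. */
    static bool txq_try_xmit(struct txq *q, unsigned int ndesc)
    {
        int credits = (int)txq_avail(q) - (int)ndesc;

        if (credits < 0)
            return false;            /* the driver would stop the TX queue here */

        unsigned int last_desc = q->pidx + ndesc - 1;
        if (last_desc >= q->size)
            last_desc -= q->size;    /* the request wraps around the ring */

        /* ... write the work request into descriptors pidx..last_desc ... */
        (void)last_desc;
        txq_advance(q, ndesc);
        return true;
    }

    int main(void)
    {
        struct txq q = { .size = 1024, .in_use = 1020, .pidx = 1020 };

        printf("fits: %d\n", txq_try_xmit(&q, 3));  /* 3 <= 4 free slots: yes */
        printf("fits: %d\n", txq_try_xmit(&q, 2));  /* only 1 slot left: no   */
        return 0;
    }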
/linux/drivers/net/wireless/mediatek/mt76/
  dma.c
    86    writel(q->ndesc, &q->regs->ring_size);   in mt76_dma_sync_idx()
    100   for (i = 0; i < q->ndesc; i++)   in mt76_dma_queue_reset()
    119   q->ndesc = n_desc;   in mt76_dma_alloc_queue()
    123   size = q->ndesc * sizeof(struct mt76_desc);   in mt76_dma_alloc_queue()
    128   size = q->ndesc * sizeof(*q->entry);   in mt76_dma_alloc_queue()
    157   q->head = (q->head + 1) % q->ndesc;   in mt76_dma_add_buf()
    312   q->tail = (q->tail + 1) % q->ndesc;   in mt76_dma_dequeue()
    325   if (q->queued + 1 >= q->ndesc - 1)   in mt76_dma_tx_queue_skb_raw()
    400   if (q->queued + (tx_info.nbuf + 1) / 2 >= q->ndesc - 1) {   in mt76_dma_tx_queue_skb()
    453   while (q->queued < q->ndesc - 1) {   in mt76_dma_rx_fill()
    [all …]
  usb.c
    696   for (i = 0; i < q->ndesc; i++) {   in mt76u_submit_rx_buffers()
    721   q->ndesc = MT_NUM_RX_ENTRIES;   in mt76u_alloc_rx_queue()
    724   for (i = 0; i < q->ndesc; i++) {   in mt76u_alloc_rx_queue()
    745   for (i = 0; i < q->ndesc; i++) {   in mt76u_free_rx_queue()
    781   for (j = 0; j < q->ndesc; j++)   in mt76u_stop_rx()
    795   for (j = 0; j < q->ndesc; j++)   in mt76u_resume_rx()
    910   if (q->queued == q->ndesc)   in mt76u_tx_queue_skb()
    1003  q->ndesc = MT_NUM_TX_ENTRIES;   in mt76u_alloc_tx()
    1004  for (j = 0; j < q->ndesc; j++) {   in mt76u_alloc_tx()
    1028  for (j = 0; j < q->ndesc; j++) {   in mt76u_free_tx()
    [all …]
  sdio.c
    313   q->ndesc = MT_NUM_RX_ENTRIES;   in mt76s_alloc_rx_queue()
    336   q->ndesc = MT_NUM_TX_ENTRIES;   in mt76s_alloc_tx_queue()
    374   q->tail = (q->tail + 1) % q->ndesc;   in mt76s_get_next_rx_entry()
    526   if (q->queued == q->ndesc)   in mt76s_tx_queue_skb()
    540   q->head = (q->head + 1) % q->ndesc;   in mt76s_tx_queue_skb()
    552   if (q->queued == q->ndesc)   in mt76s_tx_queue_skb_raw()
    564   q->head = (q->head + 1) % q->ndesc;   in mt76s_tx_queue_skb_raw()
    612   for (j = 0; j < q->ndesc; j++) {   in mt76s_deinit()
  sdio_txrx.c
    113   int index = (q->head + i) % q->ndesc;   in mt76s_rx_run_queue()
    124   if (q->queued + i + 1 == q->ndesc)   in mt76s_rx_run_queue()
    130   q->head = (q->head + i) % q->ndesc;   in mt76s_rx_run_queue()
    276   q->first = (q->first + 1) % q->ndesc;   in mt76s_tx_run_queue()
  tx.c
    434   q->queued + MT_TXQ_FREE_THR >= q->ndesc;   in mt76_txq_stopped()
    515   q->queued + 2 * MT_TXQ_FREE_THR >= q->ndesc) {   in mt76_txq_schedule_list()
    692   q->tail = (q->tail + 1) % q->ndesc;   in mt76_queue_tx_complete()
  debugfs.c
    83    queued = mt76_is_usb(dev) ? q->ndesc - q->queued : q->queued;   in mt76_rx_queues_read()
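Unlike the index-difference scheme above, the mt76 queues carry an explicit q->queued counter alongside head and tail, and the tx.c hits gate transmission on a free-slot watermark (q->queued + MT_TXQ_FREE_THR >= q->ndesc) rather than on complete exhaustion. A compact sketch of that scheme (the threshold value and helper names are invented for illustration):

    #include <stdbool.h>
    #include <stdio.h>

    #define NDESC         256
    #define TXQ_FREE_THR   16   /* illustrative watermark, not mt76's value */

    struct queue {
        unsigned int head;      /* next slot the driver fills        */
        unsigned int tail;      /* next slot completed by the device */
        unsigned int queued;    /* explicit occupancy counter        */
        unsigned int ndesc;
    };

    /* Stop queueing while fewer than TXQ_FREE_THR descriptors remain free. */
    static bool queue_stopped(const struct queue *q)
    {
        return q->queued + TXQ_FREE_THR >= q->ndesc;
    }

    static bool queue_add(struct queue *q)
    {
        if (queue_stopped(q))
            return false;
        q->head = (q->head + 1) % q->ndesc;
        q->queued++;
        return true;
    }

    static void queue_complete(struct queue *q)
    {
        if (!q->queued)
            return;
        q->tail = (q->tail + 1) % q->ndesc;
        q->queued--;
    }

    int main(void)
    {
        struct queue q = { .ndesc = NDESC };
        unsigned int accepted = 0;

        while (queue_add(&q))
            accepted++;
        printf("accepted %u of %u slots before the watermark hit\n",
               accepted, q.ndesc);           /* 240 of 256 with THR = 16 */
        queue_complete(&q);
        printf("still stopped after one completion: %d\n", queue_stopped(&q));
        return 0;
    }

Stopping a little before the ring is full presumably leaves headroom for frames that need more than one descriptor, which is also why the schedule path above backs off at twice the threshold.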
/linux/drivers/usb/mon/
  mon_bin.c
    448   struct urb *urb, unsigned int ndesc)   in mon_bin_collate_isodesc() argument
    455   while (ndesc-- != 0) {   in mon_bin_collate_isodesc()
    472   while (ndesc-- != 0) {   in mon_bin_get_isodesc()
    495   unsigned int ndesc, lendesc;   in mon_bin_event() local
    513   ndesc = 0;   in mon_bin_event()
    515   ndesc = ISODESC_MAX;   in mon_bin_event()
    517   ndesc = urb->number_of_packets;   in mon_bin_event()
    522   ndesc = 0;   in mon_bin_event()
    524   lendesc = ndesc*sizeof(struct mon_bin_isodesc);   in mon_bin_event()
    595   if (ndesc != 0) {   in mon_bin_event()
    [all …]
  mon_text.c
    200   int i, ndesc;   in mon_text_event() local
    233   if ((ndesc = urb->number_of_packets) > ISODESC_MAX)   in mon_text_event()
    234   ndesc = ISODESC_MAX;   in mon_text_event()
    237   for (i = 0; i < ndesc; i++) {   in mon_text_event()
    591   int ndesc; /* Display this many */   in mon_text_read_isodesc() local
    597   ndesc = ep->numdesc;   in mon_text_read_isodesc()
    598   if (ndesc > ISODESC_MAX)   in mon_text_read_isodesc()
    599   ndesc = ISODESC_MAX;   in mon_text_read_isodesc()
    600   if (ndesc < 0)   in mon_text_read_isodesc()
    601   ndesc = 0;   in mon_text_read_isodesc()
    [all …]
/linux/drivers/net/ethernet/chelsio/cxgb4/
  sge.c
    1733  txq_advance(&q->q, ndesc);   in cxgb4_eth_xmit()
    2134  while (ndesc--) {   in cxgb4_eosw_txq_free_desc()
    2275  u8 flits, ndesc;   in ethofld_hard_xmit() local
    2512  pidx += eosw_txq->ndesc;   in eosw_txq_flush_pending_skbs()
    2671  u32 ctrl0, ndesc, flits;   in cxgb4_selftest_lb_pkt() local
    2723  txq_advance(&q->q, ndesc);   in cxgb4_selftest_lb_pkt()
    2749  unsigned int ndesc;   in ctrl_xmit() local
    2771  txq_advance(&q->q, ndesc);   in ctrl_xmit()
    2802  written += ndesc;   in restart_ctrlq()
    3035  written += ndesc;   in service_ofldq()
    [all …]
/linux/drivers/net/ethernet/chelsio/cxgb3/
  sge.c
    1107  if (likely(ndesc == 1)) {   in write_wr_hdr_sgl()
    1130  ndesc--;   in write_wr_hdr_sgl()
    1160  WARN_ON(ndesc != 0);   in write_wr_hdr_sgl()
    1308  q->in_use += ndesc;   in t3_eth_xmit()
    1320  q->unacked += ndesc;   in t3_eth_xmit()
    1324  q->pidx += ndesc;   in t3_eth_xmit()
    1722  q->in_use += ndesc;   in ofld_xmit()
    1724  q->pidx += ndesc;   in ofld_xmit()
    1775  q->in_use += ndesc;   in restart_offloadq()
    1777  q->pidx += ndesc;   in restart_offloadq()
    [all …]
/linux/drivers/dma/
  idma64.c
    187   static struct idma64_desc *idma64_alloc_desc(unsigned int ndesc)   in idma64_alloc_desc() argument
    195   desc->hw = kcalloc(ndesc, sizeof(*desc->hw), GFP_NOWAIT);   in idma64_alloc_desc()
    209   if (desc->ndesc) {   in idma64_desc_free()
    210   unsigned int i = desc->ndesc;   in idma64_desc_free()
    272   unsigned int i = desc->ndesc;   in idma64_desc_fill()
    312   desc->ndesc = i;   in idma64_prep_slave_sg()
    321   desc->ndesc = sg_len;   in idma64_prep_slave_sg()
    354   } while (++i < desc->ndesc);   in idma64_active_desc_size()
  idma64.h
    117   unsigned int ndesc;   member
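In idma64 the ndesc identifier is a member of the software descriptor (idma64.h line 117): idma64_alloc_desc() kcalloc()s an array of ndesc hardware descriptors, idma64_prep_slave_sg() records how many were actually filled, and idma64_desc_free() consults the stored count before releasing them. A userspace approximation of that ownership pattern, with calloc()/free() standing in for the kernel allocators and a trimmed-down struct (only the hw/ndesc members mirror the driver):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct hw_desc {            /* stand-in for the hardware descriptor layout */
        uint64_t sar, dar;
        uint32_t ctl, len;
    };

    struct sw_desc {
        struct hw_desc *hw;     /* kcalloc()'d array in the driver */
        unsigned int ndesc;     /* how many entries hw points to   */
    };

    static struct sw_desc *alloc_desc(unsigned int ndesc)
    {
        struct sw_desc *desc = calloc(1, sizeof(*desc));

        if (!desc)
            return NULL;

        desc->hw = calloc(ndesc, sizeof(*desc->hw));
        if (!desc->hw) {
            free(desc);
            return NULL;
        }
        /* recorded here for the sketch; the driver sets it after filling */
        desc->ndesc = ndesc;
        return desc;
    }

    static void free_desc(struct sw_desc *desc)
    {
        if (!desc)
            return;
        if (desc->ndesc)        /* nothing to release for an empty descriptor */
            free(desc->hw);
        free(desc);
    }

    int main(void)
    {
        struct sw_desc *d = alloc_desc(8);

        if (!d)
            return 1;
        printf("allocated %u hw descriptors\n", d->ndesc);
        free_desc(d);
        return 0;
    }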
/linux/drivers/net/wireless/ath/ath9k/
  init.c
    285   int nbuf, int ndesc, bool is_tx)   in ath_descdma_setup() argument
    292   name, nbuf, ndesc);   in ath_descdma_setup()
    308   dd->dd_desc_len = desc_len * nbuf * ndesc;   in ath_descdma_setup()
    348   for (i = 0; i < nbuf; i++, bf++, ds += (desc_len * ndesc)) {   in ath_descdma_setup()
    364   ds += (desc_len * ndesc);   in ath_descdma_setup()
    379   for (i = 0; i < nbuf; i++, bf++, ds += (desc_len * ndesc)) {   in ath_descdma_setup()
    395   ds += (desc_len * ndesc);   in ath_descdma_setup()
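ath_descdma_setup() sizes a single DMA block as desc_len * nbuf * ndesc bytes and then gives each of the nbuf software buffers its own run of ndesc descriptors, stepping the descriptor cursor by desc_len * ndesc per buffer. The layout arithmetic, reduced to a standalone sketch (malloc() stands in for the coherent DMA allocation; the sizes and struct are illustrative):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct buf {
        void *desc;             /* first of this buffer's ndesc descriptors */
    };

    int main(void)
    {
        const size_t desc_len = 64;     /* bytes per descriptor (illustrative) */
        const int nbuf = 4, ndesc = 8;  /* buffers, descriptors per buffer     */
        size_t total = desc_len * nbuf * ndesc;   /* as in dd->dd_desc_len     */

        unsigned char *base = malloc(total);      /* the one contiguous block  */
        struct buf *bf = calloc(nbuf, sizeof(*bf));

        if (!base || !bf)
            return 1;
        memset(base, 0, total);

        /* hand each buffer its slice, same stride as the setup loops above */
        unsigned char *ds = base;
        for (int i = 0; i < nbuf; i++, ds += desc_len * ndesc)
            bf[i].desc = ds;

        printf("stride between consecutive buffers: %td bytes\n",
               (unsigned char *)bf[1].desc - (unsigned char *)bf[0].desc);

        free(bf);
        free(base);
        return 0;
    }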
/linux/drivers/infiniband/hw/qib/
  qib_verbs.c
    143   u32 ndesc = 1; /* count the header */   in qib_count_sge() local
    150   ndesc = 0;   in qib_count_sge()
    153   ndesc++;   in qib_count_sge()
    173   return ndesc;   in qib_count_sge()
    782   u32 ndesc;   in qib_verbs_send_dma() local
    817   ndesc = qib_count_sge(ss, len);   in qib_verbs_send_dma()
    818   if (ndesc >= ppd->sdma_descq_cnt)   in qib_verbs_send_dma()
    819   ndesc = 0;   in qib_verbs_send_dma()
    821   ndesc = 1;   in qib_verbs_send_dma()
    822   if (ndesc) {   in qib_verbs_send_dma()
    [all …]
  qib_user_sdma.c
    808   int *maxpkts, int *ndesc)   in qib_user_sdma_queue_pkts() argument
    1028  *ndesc += pkt->naddr;   in qib_user_sdma_queue_pkts()
    1414  int ndesc = 0;   in qib_user_sdma_writev() local
    1417  iov, dim, &list, &mxp, &ndesc);   in qib_user_sdma_writev()
    1430  if (qib_sdma_descq_freecnt(ppd) < ndesc) {   in qib_user_sdma_writev()
/linux/drivers/net/ethernet/chelsio/cxgb4vf/
  sge.c
    1162  unsigned int flits, ndesc;   in t4vf_eth_xmit() local
    1216  ndesc = flits_to_desc(flits);   in t4vf_eth_xmit()
    1217  credits = txq_avail(&txq->q) - ndesc;   in t4vf_eth_xmit()
    1367  ndesc, credits, txq->q.pidx, skb->len, ssi->nr_frags);   in t4vf_eth_xmit()
    1438  last_desc = tq->pidx + ndesc - 1;   in t4vf_eth_xmit()
    1449  txq_advance(&txq->q, ndesc);   in t4vf_eth_xmit()
    1451  ring_tx_db(adapter, &txq->q, ndesc);   in t4vf_eth_xmit()
/linux/drivers/infiniband/ulp/srp/
  ib_srp.h
    337   unsigned int ndesc;   member
/linux/drivers/net/ethernet/atheros/
  ag71xx.c
    1518  int i, ring_mask, ndesc, split;   in ag71xx_fill_dma_desc() local
    1522  ndesc = 0;   in ag71xx_fill_dma_desc()
    1531  i = (ring->curr + ndesc) & ring_mask;   in ag71xx_fill_dma_desc()
    1555  if (!ndesc)   in ag71xx_fill_dma_desc()
    1559  ndesc++;   in ag71xx_fill_dma_desc()
    1562  return ndesc;   in ag71xx_fill_dma_desc()
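ag71xx_fill_dma_desc() counts in ndesc how many ring slots one frame consumes and indexes the ring with (ring->curr + ndesc) & ring_mask: the ring size is a power of two, so wrapping is a mask rather than a modulo. A small sketch of that fill loop, splitting a frame into fixed-size chunks (the chunk size is invented and the driver's real split handling is more involved; only the index and count arithmetic is the point here):

    #include <stdio.h>

    #define RING_SIZE     64                /* must be a power of two            */
    #define RING_MASK     (RING_SIZE - 1)
    #define DESC_MAX_LEN  1536              /* illustrative per-descriptor limit */

    struct dma_desc {
        unsigned int len;
    };

    /* Returns how many descriptors the frame used, starting at ring[curr]. */
    static int fill_dma_desc(struct dma_desc *ring, unsigned int curr,
                             unsigned int frame_len)
    {
        int ndesc = 0;

        while (frame_len) {
            unsigned int chunk = frame_len < DESC_MAX_LEN ?
                                 frame_len : DESC_MAX_LEN;
            unsigned int i = (curr + ndesc) & RING_MASK;  /* mask, not % */

            ring[i].len = chunk;
            frame_len -= chunk;
            ndesc++;
        }
        return ndesc;
    }

    int main(void)
    {
        struct dma_desc ring[RING_SIZE] = { 0 };

        /* a 4000-byte frame started near the end of the ring wraps to slot 0 */
        printf("used %d descriptors\n", fill_dma_desc(ring, 62, 4000));
        printf("slot 62=%u slot 63=%u slot 0=%u\n",
               ring[62].len, ring[63].len, ring[0].len);
        return 0;
    }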
/linux/drivers/crypto/inside-secure/
  safexcel.c
    1019  int ret, i, nreq, ndesc, tot_descs, handled = 0;   in safexcel_handle_result_descriptor() local
    1035  ndesc = ctx->handle_result(priv, ring, req,   in safexcel_handle_result_descriptor()
    1037  if (ndesc < 0) {   in safexcel_handle_result_descriptor()
    1039  ndesc);   in safexcel_handle_result_descriptor()
    1049  tot_descs += ndesc;   in safexcel_handle_result_descriptor()
/linux/drivers/net/ethernet/rdc/
  r6040.c
    166   __le32 ndesc; /* 8-B */   member
    302   desc->ndesc = cpu_to_le32(mapping);   in r6040_init_ring_desc()
    307   desc->ndesc = cpu_to_le32(desc_dma);   in r6040_init_ring_desc()
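In r6040.c, finally, ndesc is not a count but a descriptor field: each descriptor's ndesc member holds the little-endian DMA address of the next descriptor, and r6040_init_ring_desc() chains the descriptors in sequence (the sketch below also closes the last one back onto the first, which is assumed here rather than shown in the hits). A host-side illustration of building such a ring, with uintptr_t addresses standing in for the cpu_to_le32()'d bus addresses and a trimmed-down descriptor layout:

    #include <stdint.h>
    #include <stdio.h>

    struct ring_desc {
        uint32_t status;
        uint32_t buf;           /* would hold the data buffer's bus address        */
        uintptr_t ndesc;        /* "next descriptor": __le32 bus address in r6040  */
    };

    /* Chain ring_size descriptors into a circle the hardware can walk. */
    static void init_ring_desc(struct ring_desc *desc, size_t ring_size)
    {
        for (size_t i = 0; i < ring_size; i++) {
            size_t next = (i + 1 < ring_size) ? i + 1 : 0;

            desc[i].status = 0;
            desc[i].ndesc = (uintptr_t)&desc[next];
        }
    }

    int main(void)
    {
        struct ring_desc ring[4];

        init_ring_desc(ring, 4);
        /* the last descriptor points back at the first */
        printf("ring closed: %d\n", ring[3].ndesc == (uintptr_t)&ring[0]);
        return 0;
    }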