/linux/drivers/scsi/

stex.c
    343  u16 rq_size;    member
    357  u16 rq_size;    member
   1540  .rq_size = 1048,
   1553  .rq_size = 1048,
   1566  .rq_size = 1048,
   1579  .rq_size = 1048,
   1592  .rq_size = 512,
   1605  .rq_size = 512,
   1769  hba->rq_size = ci->rq_size;    in stex_probe()
   1869  memset(msg_h, 0, hba->rq_size);    in stex_hba_stop()
   [all …]
/linux/drivers/net/ethernet/qlogic/netxen/

netxen_nic_ctx.c
    270  size_t rq_size, rsp_size;    in nx_fw_cmd_create_rx_ctx() local
    280  rq_size =    in nx_fw_cmd_create_rx_ctx()
    285  addr = dma_alloc_coherent(&adapter->pdev->dev, rq_size,    in nx_fw_cmd_create_rx_ctx()
    350  cmd.req.arg3 = rq_size;    in nx_fw_cmd_create_rx_ctx()
    394  dma_free_coherent(&adapter->pdev->dev, rq_size, prq, hostrq_phys_addr);    in nx_fw_cmd_create_rx_ctx()
    424  size_t rq_size, rsp_size;    in nx_fw_cmd_create_tx_ctx() local
    433  rq_size = SIZEOF_HOSTRQ_TX(nx_hostrq_tx_ctx_t);    in nx_fw_cmd_create_tx_ctx()
    434  rq_addr = dma_alloc_coherent(&adapter->pdev->dev, rq_size,    in nx_fw_cmd_create_tx_ctx()
    476  cmd.req.arg3 = rq_size;    in nx_fw_cmd_create_tx_ctx()
    500  dma_free_coherent(&adapter->pdev->dev, rq_size, rq_addr, rq_phys_addr);    in nx_fw_cmd_create_tx_ctx()
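The netxen hits (and the near-identical qlcnic ones further down) all follow one pattern: compute the byte size of a host-request structure, grab a coherent DMA buffer of that size, hand the bus address plus rq_size to the firmware command, then free the buffer. The sketch below is a minimal, hypothetical version of that flow: the struct layout, field values and helper name are invented for illustration; only dma_alloc_coherent()/dma_free_coherent() and the overall sequence mirror the driver.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>
#include <asm/byteorder.h>

/* Hypothetical host-request layout, standing in for nx_hostrq_tx_ctx_t. */
struct demo_hostrq {
	__le64 rsp_dma_addr;
	__le32 capabilities;
	__le32 num_rings;
};

static int demo_send_create_ctx(struct device *dev)
{
	size_t rq_size = sizeof(struct demo_hostrq);
	dma_addr_t rq_phys_addr;
	struct demo_hostrq *prq;

	/* Coherent buffer shared between the CPU and the NIC firmware. */
	prq = dma_alloc_coherent(dev, rq_size, &rq_phys_addr, GFP_KERNEL);
	if (!prq)
		return -ENOMEM;

	prq->rsp_dma_addr = cpu_to_le64(0);	/* fill in the real fields here */
	prq->capabilities = cpu_to_le32(0);
	prq->num_rings = cpu_to_le32(1);

	/* ... issue the firmware command, passing rq_phys_addr and rq_size
	 * (the driver puts rq_size into cmd.req.arg3) ... */

	dma_free_coherent(dev, rq_size, prq, rq_phys_addr);
	return 0;
}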
/linux/drivers/infiniband/hw/irdma/

puda.c
    651  u32 sq_size, rq_size;    in irdma_puda_qp_create() local
    655  rq_size = rsrc->rq_size * IRDMA_QP_WQE_MIN_SIZE;    in irdma_puda_qp_create()
    677  ukqp->shadow_area = ukqp->rq_base[rsrc->rq_size].elem;    in irdma_puda_qp_create()
    679  qp->shadow_area_pa = qp->rq_pa + rq_size;    in irdma_puda_qp_create()
    687  ukqp->rq_size = rsrc->rq_size;    in irdma_puda_qp_create()
    691  IRDMA_RING_INIT(ukqp->rq_ring, ukqp->rq_size);    in irdma_puda_qp_create()
   1017  rqwridsize = info->rq_size * 8;    in irdma_puda_create_rsrc()
   1065  rsrc->rq_size = info->rq_size;    in irdma_puda_create_rsrc()
   1066  rsrc->cq_size = info->rq_size + info->sq_size;    in irdma_puda_create_rsrc()
   1069  rsrc->cq_size += info->rq_size;    in irdma_puda_create_rsrc()
   [all …]
user.h
    343  u32 rq_size;    member
    391  u32 rq_size;    member
    425  u32 rq_size, u8 shift, u32 *wqdepth);
puda.h
     91  u32 rq_size;    member
    114  u32 rq_size;    member
uk.c
   1327  u32 rq_size, u8 shift, u32 *rqdepth)    in irdma_get_rqdepth() argument
   1329  *rqdepth = irdma_qp_round_up((rq_size << shift) + IRDMA_RQ_RSVD);    in irdma_get_rqdepth()
   1431  qp->rq_size = info->rq_size;    in irdma_uk_qp_init()
   1435  IRDMA_RING_INIT(qp->rq_ring, qp->rq_size);    in irdma_uk_qp_init()
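irdma_get_rqdepth() converts the requested number of receive WRs into an actual ring depth: scale rq_size by the per-WQE shift, add the reserved slots, then round the result up. The stand-alone program below reproduces that arithmetic; it assumes irdma_qp_round_up() behaves as a round-up-to-the-next-power-of-two and uses a made-up value for IRDMA_RQ_RSVD, so treat it as a sketch of the formula rather than the driver's exact numbers.

#include <stdio.h>

#define DEMO_RQ_RSVD	2U	/* hypothetical stand-in for IRDMA_RQ_RSVD */

/* Round up to the next power of two (the assumed behaviour of
 * irdma_qp_round_up(), written portably here). */
static unsigned int round_up_pow2(unsigned int v)
{
	v--;
	v |= v >> 1;  v |= v >> 2;  v |= v >> 4;
	v |= v >> 8;  v |= v >> 16;
	return v + 1;
}

int main(void)
{
	unsigned int rq_size = 100;	/* requested receive WRs */
	unsigned int shift = 1;		/* log2 of WQE slots consumed per WR */

	/* Mirrors: *rqdepth = round_up((rq_size << shift) + reserved slots) */
	unsigned int rqdepth = round_up_pow2((rq_size << shift) + DEMO_RQ_RSVD);

	printf("rq_size=%u shift=%u -> ring depth %u\n", rq_size, shift, rqdepth);
	return 0;
}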
verbs.c
    613  status = irdma_get_rqdepth(uk_attrs, ukinfo->rq_size, rqshift,    in irdma_setup_kmode_qp()
    655  ukinfo->rq_size = rqdepth >> rqshift;    in irdma_setup_kmode_qp()
    811  int rq_size;    in irdma_create_qp() local
    824  rq_size = init_attr->cap.max_recv_wr;    in irdma_create_qp()
    829  init_info.qp_uk_init_info.rq_size = rq_size;    in irdma_create_qp()
    964  iwqp->max_recv_wr = rq_size;    in irdma_create_qp()
    986  uresp.actual_rq_size = rq_size;    in irdma_create_qp()
hw.c
   1454  info.rq_size = info.sq_size;    in irdma_initialize_ilq()
   1484  info.rq_size = info.sq_size;    in irdma_initialize_ieq()
/linux/drivers/scsi/bnx2i/

bnx2i_init.c
     65  unsigned int rq_size = BNX2I_RQ_WQES_DEFAULT;    variable
     66  module_param(rq_size, int, 0664);
     67  MODULE_PARM_DESC(rq_size, "Configure RQ size");

bnx2i.h
    796  extern unsigned int rq_size;

bnx2i_iscsi.c
    855  hba->max_rqes = rq_size;    in bnx2i_alloc_hba()
    856  hba->max_cqes = hba->max_sqes + rq_size;    in bnx2i_alloc_hba()
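Here rq_size is a writable module parameter: bnx2i_init.c declares it with module_param()/MODULE_PARM_DESC(), bnx2i.h exports it, and bnx2i_alloc_hba() sizes the per-HBA queues from it. The toy module below shows the same idiom; the module name, default value and init/exit bodies are invented for illustration.

#include <linux/module.h>
#include <linux/moduleparam.h>
#include <linux/init.h>

static unsigned int rq_size = 16;	/* hypothetical default */

/* Mode 0664 also exposes the value under /sys/module/<name>/parameters/. */
module_param(rq_size, uint, 0664);
MODULE_PARM_DESC(rq_size, "Configure RQ size");

static int __init rqdemo_init(void)
{
	pr_info("rqdemo: rq_size=%u\n", rq_size);
	return 0;
}

static void __exit rqdemo_exit(void)
{
}

module_init(rqdemo_init);
module_exit(rqdemo_exit);
MODULE_LICENSE("GPL");

With the real driver the value can be set at load time (for example, modprobe bnx2i rq_size=32) or inspected afterwards through /sys/module/bnx2i/parameters/rq_size.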
/linux/drivers/net/ethernet/qlogic/qlcnic/

qlcnic_ctx.c
    250  size_t rq_size, rsp_size;    in qlcnic_82xx_fw_cmd_create_rx_ctx() local
    260  rq_size = SIZEOF_HOSTRQ_RX(struct qlcnic_hostrq_rx_ctx, nrds_rings,    in qlcnic_82xx_fw_cmd_create_rx_ctx()
    265  addr = dma_alloc_coherent(&adapter->pdev->dev, rq_size,    in qlcnic_82xx_fw_cmd_create_rx_ctx()
    343  cmd.req.arg[3] = rq_size;    in qlcnic_82xx_fw_cmd_create_rx_ctx()
    387  dma_free_coherent(&adapter->pdev->dev, rq_size, prq, hostrq_phys_addr);    in qlcnic_82xx_fw_cmd_create_rx_ctx()
    426  size_t rq_size, rsp_size;    in qlcnic_82xx_fw_cmd_create_tx_ctx() local
    435  rq_size = SIZEOF_HOSTRQ_TX(struct qlcnic_hostrq_tx_ctx);    in qlcnic_82xx_fw_cmd_create_tx_ctx()
    436  rq_addr = dma_alloc_coherent(&adapter->pdev->dev, rq_size,    in qlcnic_82xx_fw_cmd_create_tx_ctx()
    489  cmd.req.arg[3] = rq_size;    in qlcnic_82xx_fw_cmd_create_tx_ctx()
    518  dma_free_coherent(&adapter->pdev->dev, rq_size, rq_addr, rq_phys_addr);    in qlcnic_82xx_fw_cmd_create_tx_ctx()
/linux/drivers/infiniband/ulp/srpt/

ib_srpt.c
   1791  WARN_ON(ch->rq_size < 1);    in srpt_create_ch_ib()
   1804  ch->rq_size + sq_size, ret);    in srpt_create_ch_ib()
   1807  ch->cq_size = ch->rq_size + sq_size;    in srpt_create_ch_ib()
   1869  for (i = 0; i < ch->rq_size; i++)    in srpt_create_ch_ib()
   2114  ch->sport->sdev, ch->rq_size,    in srpt_release_channel_work()
   2120  sdev, ch->rq_size,    in srpt_release_channel_work()
   2258  for (i = 0; i < ch->rq_size; i++)    in srpt_cm_req_recv()
   2295  for (i = 0; i < ch->rq_size; i++)    in srpt_cm_req_recv()
   2314  tag_num = ch->rq_size;    in srpt_cm_req_recv()
   2468  ch->sport->sdev, ch->rq_size,    in srpt_cm_req_recv()
   [all …]

ib_srpt.h
    309  int rq_size;    member
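Several of the ib_srpt.c hits are loops of the form for (i = 0; i < ch->rq_size; i++), which pre-post one receive buffer per RQ slot before the channel starts handling traffic. The fragment below is a minimal sketch of that pattern using the generic ib_post_recv() verb; the helper name, the pre-built SGE array and the simplified error handling are assumptions for illustration, not the driver's actual code path.

#include <rdma/ib_verbs.h>

/* Hypothetical helper: post one receive WR per RQ slot. */
static int demo_post_initial_recvs(struct ib_qp *qp, struct ib_sge *sge,
				   int rq_size)
{
	int i, ret;

	for (i = 0; i < rq_size; i++) {
		struct ib_recv_wr wr = {
			.wr_id   = i,		/* lets the completion identify the slot */
			.sg_list = &sge[i],	/* one pre-registered buffer per slot */
			.num_sge = 1,
		};

		ret = ib_post_recv(qp, &wr, NULL);
		if (ret)
			return ret;
	}
	return 0;
}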
/linux/include/uapi/rdma/

cxgb4-abi.h
     83  __u32 rq_size;    member
/linux/drivers/infiniband/hw/efa/

efa_verbs.c
    453  qp->rq_cpu_addr, qp->rq_size,    in efa_destroy_qp()
    456  qp->rq_size, DMA_TO_DEVICE);    in efa_destroy_qp()
    521  if (qp->rq_size) {    in qp_mmap_entries_setup()
    537  address, qp->rq_size,    in qp_mmap_entries_setup()
    543  resp->rq_mmap_size = qp->rq_size;    in qp_mmap_entries_setup()
    695  qp->rq_size = PAGE_ALIGN(create_qp_params.rq_ring_size_in_bytes);    in efa_create_qp()
    696  if (qp->rq_size) {    in efa_create_qp()
    698  qp->rq_size, DMA_TO_DEVICE);    in efa_create_qp()
    706  qp->rq_cpu_addr, qp->rq_size, &qp->rq_dma_addr);    in efa_create_qp()
    754  if (qp->rq_size)    in efa_create_qp()
   [all …]

efa.h
    105  size_t rq_size;    member
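In efa_create_qp() the RQ ring size is page-aligned and the ring is then DMA-mapped for device reads (DMA_TO_DEVICE), which is why rq_size appears next to PAGE_ALIGN() and the map/unmap calls above. A rough sketch of that allocate-and-map step follows; the helper name and the page-allocation strategy are assumptions for illustration, while PAGE_ALIGN(), dma_map_single() and dma_mapping_error() are the real kernel APIs being exercised.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Hypothetical helper: page-align the ring, allocate it zeroed, and map it
 * so the device can read work queue entries from it. */
static void *demo_alloc_mapped_ring(struct device *dev, size_t ring_bytes,
				    size_t *rq_size, dma_addr_t *rq_dma_addr)
{
	void *cpu_addr;

	*rq_size = PAGE_ALIGN(ring_bytes);
	cpu_addr = (void *)__get_free_pages(GFP_KERNEL | __GFP_ZERO,
					    get_order(*rq_size));
	if (!cpu_addr)
		return NULL;

	*rq_dma_addr = dma_map_single(dev, cpu_addr, *rq_size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *rq_dma_addr)) {
		free_pages((unsigned long)cpu_addr, get_order(*rq_size));
		return NULL;
	}
	return cpu_addr;
}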
/linux/drivers/infiniband/sw/siw/

siw_verbs.c
    406  qp->attrs.rq_size = 0;    in siw_create_qp()
    420  qp->attrs.rq_size = num_rqe;    in siw_create_qp()
    516  qp_attr->cap.max_recv_wr = qp->attrs.rq_size;    in siw_query_qp()
    966  if (qp->srq || qp->attrs.rq_size == 0) {    in siw_post_receive()
   1026  u32 idx = qp->rq_put % qp->attrs.rq_size;    in siw_post_receive()

siw_qp.c
   1296  while (qp->attrs.rq_size) {    in siw_rq_flush()
   1298  &qp->recvq[qp->rq_get % qp->attrs.rq_size];    in siw_rq_flush()

siw.h
    261  u32 rq_size;    member

siw_qp_rx.c
    349  rqe = &qp->recvq[qp->rq_get % qp->attrs.rq_size];    in siw_rqe_get()
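The siw hits show the receive queue driven by two free-running counters: siw_post_receive() writes the slot at rq_put % rq_size, while siw_rqe_get() and siw_rq_flush() read at rq_get % rq_size. The small self-contained program below illustrates that indexing scheme; the struct and helper names are made up, only the put/get arithmetic mirrors the driver.

#include <stdio.h>
#include <stdint.h>

/* Toy receive queue: free-running put/get counters, reduced modulo
 * rq_size on every slot access. */
struct demo_rq {
	uint32_t rq_size;	/* number of slots in recvq[] */
	uint32_t rq_put;	/* producer counter (post_recv side) */
	uint32_t rq_get;	/* consumer counter (rx/flush side) */
	int recvq[64];
};

static int demo_post(struct demo_rq *rq, int val)
{
	if (rq->rq_put - rq->rq_get >= rq->rq_size)
		return -1;				/* queue full */
	rq->recvq[rq->rq_put % rq->rq_size] = val;	/* rq_put % rq_size */
	rq->rq_put++;
	return 0;
}

static int demo_get(struct demo_rq *rq, int *val)
{
	if (rq->rq_get == rq->rq_put)
		return -1;				/* queue empty */
	*val = rq->recvq[rq->rq_get % rq->rq_size];	/* rq_get % rq_size */
	rq->rq_get++;
	return 0;
}

int main(void)
{
	struct demo_rq rq = { .rq_size = 8 };
	int v;

	demo_post(&rq, 42);
	if (!demo_get(&rq, &v))
		printf("got %d\n", v);
	return 0;
}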
/linux/drivers/net/ethernet/microsoft/mana/

mana_en.c
   1403  u32 cq_size, rq_size;    in mana_create_rxq() local
   1420  err = mana_alloc_rx_wqe(apc, rxq, &rq_size, &cq_size);    in mana_create_rxq()
   1424  rq_size = PAGE_ALIGN(rq_size);    in mana_create_rxq()
   1431  spec.queue_size = rq_size;    in mana_create_rxq()
/linux/block/

blk-mq.c
   2867  size_t rq_size, left;    in blk_mq_alloc_rqs() local
   2880  rq_size = round_up(sizeof(struct request) + set->cmd_size,    in blk_mq_alloc_rqs()
   2882  left = rq_size * depth;    in blk_mq_alloc_rqs()
   2901  if (order_to_size(this_order) < rq_size)    in blk_mq_alloc_rqs()
   2917  entries_per_page = order_to_size(this_order) / rq_size;    in blk_mq_alloc_rqs()
   2919  left -= to_do * rq_size;    in blk_mq_alloc_rqs()
   2929  p += rq_size;    in blk_mq_alloc_rqs()
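In blk_mq_alloc_rqs() the per-request footprint rq_size is sizeof(struct request) plus the driver's cmd_size, rounded up to an alignment boundary, and whole requests are then packed into page-order allocations (entries_per_page = order_to_size(order) / rq_size). The stand-alone program below redoes that packing arithmetic with hypothetical sizes, just to make the numbers concrete; it is not the kernel code itself.

#include <stdio.h>

#define DEMO_ALIGN	64	/* assumed alignment unit (power of two), e.g. a cache line */
#define DEMO_PAGE	4096	/* assumed page size */

static size_t round_up_to(size_t v, size_t align)
{
	return (v + align - 1) & ~(align - 1);
}

int main(void)
{
	size_t request_sz = 384;	/* hypothetical sizeof(struct request) */
	size_t cmd_size   = 72;		/* hypothetical per-driver payload */
	size_t depth      = 256;	/* tag set queue depth */

	/* Pad each request, then see how many fit in one page and how many
	 * bytes the whole depth needs. */
	size_t rq_size = round_up_to(request_sz + cmd_size, DEMO_ALIGN);
	size_t entries_per_page = DEMO_PAGE / rq_size;
	size_t left = rq_size * depth;

	printf("rq_size=%zu, %zu requests per page, %zu bytes for depth %zu\n",
	       rq_size, entries_per_page, left, depth);
	return 0;
}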
/linux/drivers/infiniband/hw/bnxt_re/

roce_hsi.h
   1171  __le32 rq_size;    member
   1303  __le32 rq_size;    member
   1747  __le32 rq_size;    member
   2328  __le32 rq_size;    member

qplib_fp.c
    870  req.rq_size = cpu_to_le32(rq->max_wqe);    in bnxt_qplib_create_qp1()
   1027  req.rq_size = cpu_to_le32(rq->max_wqe);    in bnxt_qplib_create_qp()
   1319  req.rq_size = cpu_to_le32(qp->rq.hwq.max_elements);    in bnxt_qplib_modify_qp()
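Here the firmware ABI in roce_hsi.h declares rq_size as __le32, so the qplib_fp.c call sites convert with cpu_to_le32() before filling the request, keeping the command correct on both little- and big-endian hosts. A tiny sketch of that fill-in step with a hypothetical request layout:

#include <linux/types.h>
#include <asm/byteorder.h>

/* Hypothetical firmware command layout; the real ones live in roce_hsi.h. */
struct demo_create_qp_req {
	__le32 sq_size;
	__le32 rq_size;
};

static void demo_fill_req(struct demo_create_qp_req *req,
			  u32 sq_entries, u32 rq_entries)
{
	/* Firmware expects little-endian fields regardless of host order. */
	req->sq_size = cpu_to_le32(sq_entries);
	req->rq_size = cpu_to_le32(rq_entries);
}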