Searched refs:wqe_size (Results 1 – 25 of 38) sorted by relevance

/linux/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.h
182 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
186 unsigned int wqe_size, u16 *prod_idx);
188 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size);
192 unsigned int wqe_size);
196 unsigned int wqe_size, u16 *cons_idx);
200 unsigned int *wqe_size, u16 *cons_idx);
202 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
208 unsigned int wqe_size, u16 *prod_idx);
214 unsigned int wqe_size,
218 unsigned int wqe_size,
[all …]
hinic_hw_qp.c
679 hinic_return_wqe(sq->wq, wqe_size); in hinic_sq_return_wqe()
699 hinic_cpu_to_be32(sq_wqe, wqe_size); in hinic_sq_write_wqe()
701 hinic_write_wqe(sq->wq, hw_wqe, wqe_size); in hinic_sq_write_wqe()
738 *wqe_size = ALIGN(*wqe_size, sq->wq->wqebb_size); in hinic_sq_read_wqebb()
771 hinic_put_wqe(sq->wq, wqe_size); in hinic_sq_put_wqe()
841 unsigned int wqe_size, in hinic_rq_read_wqe() argument
876 unsigned int wqe_size, in hinic_rq_read_next_wqe() argument
884 wqe_size = ALIGN(wqe_size, wq->wqebb_size); in hinic_rq_read_next_wqe()
885 num_wqebbs = wqe_size / wq->wqebb_size; in hinic_rq_read_next_wqe()
903 unsigned int wqe_size) in hinic_rq_put_wqe() argument
[all …]
hinic_tx.c
499 unsigned int wqe_size; in hinic_lb_xmit_frame() local
511 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_lb_xmit_frame()
529 wqe_size = 0; in hinic_lb_xmit_frame()
560 unsigned int wqe_size; in hinic_xmit_frame() local
592 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
613 wqe_size = 0; in hinic_xmit_frame()
671 unsigned int wqe_size; in free_all_tx_skbs() local
685 hinic_sq_put_wqe(sq, wqe_size); in free_all_tx_skbs()
707 unsigned int wqe_size; in free_tx_poll() local
727 if (wqe_size > wq->wqebb_size) { in free_tx_poll()
[all …]
hinic_hw_wq.h
96 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
99 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size);
101 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
103 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
109 unsigned int wqe_size);
hinic_hw_wq.c
743 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
751 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift; in hinic_get_wqe()
792 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe() argument
794 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_return_wqe()
806 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
808 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_put_wqe()
824 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_read_wqe() argument
827 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_read_wqe()
889 unsigned int wqe_size) in hinic_write_wqe() argument
899 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_write_wqe()
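
The hinic helpers above all convert a byte-sized WQE into whole work-queue
entry building blocks (WQEBBs) with the same ALIGN-then-divide step, or a
shift where wqebb_size is a power of two. A minimal user-space sketch of
that rounding, with an illustrative WQEBB size rather than the driver's:

#include <stdio.h>

/* Kernel-style ALIGN; 'a' must be a power of two. */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

static unsigned int num_wqebbs(unsigned int wqe_size, unsigned int wqebb_size)
{
	return ALIGN(wqe_size, wqebb_size) / wqebb_size;
}

int main(void)
{
	/* e.g. a 72-byte WQE on 32-byte building blocks occupies 3 WQEBBs */
	printf("%u\n", num_wqebbs(72, 32));
	return 0;
}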
hinic_hw_cmdq.c
160 unsigned int wqe_size = 0; in cmdq_wqe_size_from_bdlen() local
164 wqe_size = WQE_LCMD_SIZE; in cmdq_wqe_size_from_bdlen()
167 wqe_size = WQE_SCMD_SIZE; in cmdq_wqe_size_from_bdlen()
171 return wqe_size; in cmdq_wqe_size_from_bdlen()
537 unsigned int bufdesc_len, wqe_size; in clear_wqe_complete_bit() local
541 wqe_size = cmdq_wqe_size_from_bdlen(bufdesc_len); in clear_wqe_complete_bit()
542 if (wqe_size == WQE_LCMD_SIZE) { in clear_wqe_complete_bit()
/linux/drivers/infiniband/hw/qedr/
qedr_hsi_rdma.h
310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
[all …]
verbs.c
3311 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3374 if (wqe_size) in qedr_prepare_sq_sges()
3579 qp->wqe_wr_id[qp->sq.prod].wqe_size = swqe->wqe_size; in __qedr_post_send()
3592 qp->wqe_wr_id[qp->sq.prod].wqe_size = swqe->wqe_size; in __qedr_post_send()
3605 qp->wqe_wr_id[qp->sq.prod].wqe_size = swqe->wqe_size; in __qedr_post_send()
3625 qp->wqe_wr_id[qp->sq.prod].wqe_size = rwqe->wqe_size; in __qedr_post_send()
3638 qp->wqe_wr_id[qp->sq.prod].wqe_size = rwqe->wqe_size; in __qedr_post_send()
3655 qp->wqe_wr_id[qp->sq.prod].wqe_size = rwqe->wqe_size; in __qedr_post_send()
3685 qp->wqe_wr_id[qp->sq.prod].wqe_size = awqe1->wqe_size; in __qedr_post_send()
3695 qp->wqe_wr_id[qp->sq.prod].wqe_size = iwqe->wqe_size; in __qedr_post_send()
[all …]
qedr.h
433 u8 wqe_size; member
444 u8 wqe_size; member
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
83 static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe() argument
88 memset(wqe, 0, wqe_size); in mlx5e_fetch_wqe()
429 static inline u16 mlx5e_stop_room_for_wqe(u16 wqe_size) in mlx5e_stop_room_for_wqe() argument
442 if (__builtin_constant_p(wqe_size)) in mlx5e_stop_room_for_wqe()
443 BUILD_BUG_ON(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
445 WARN_ON_ONCE(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
447 return wqe_size * 2 - 1; in mlx5e_stop_room_for_wqe()
450 static inline bool mlx5e_icosq_can_post_wqe(struct mlx5e_icosq *sq, u16 wqe_size) in mlx5e_icosq_can_post_wqe() argument
452 u16 room = sq->reserved_room + mlx5e_stop_room_for_wqe(wqe_size); in mlx5e_icosq_can_post_wqe()
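
The '* 2 - 1' in mlx5e_stop_room_for_wqe() is worst-case padding math: a
WQE of n WQEBBs may not fit in the slots left before the end of the ring,
so up to n - 1 WQEBBs can be burned on NOP padding before the WQE is
posted contiguously. A standalone sketch of that rule, not the driver code:

/* Reserve room for worst-case NOP padding plus the WQE itself. */
static unsigned short stop_room_for_wqe(unsigned short n_wqebbs)
{
	return n_wqebbs * 2 - 1;
}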
params.c
476 int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_get_log_cq_size() local
481 return order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1)); in mlx5e_shampo_get_log_cq_size()
684 int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_hd_per_wqe() local
688 hd_per_wqe = (wqe_size / resv_size) * pkt_per_resv; in mlx5e_shampo_hd_per_wqe()
690 __func__, hd_per_wqe, resv_size, wqe_size, pkt_per_resv); in mlx5e_shampo_hd_per_wqe()
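
A worked example of the SHAMPO header-per-WQE arithmetic above; all of
the parameters below (stride size, strides per WQE, reservation size,
packets per reservation) are made-up illustrative values:

#include <stdio.h>

int main(void)
{
	int log_stride_sz = 6;    /* 64-byte strides (assumed) */
	int num_strides = 1024;   /* strides per WQE (assumed) */
	int rsrv_size = 4096;     /* bytes per reservation (assumed) */
	int pkt_per_rsrv = 2;     /* packets per reservation (assumed) */

	int wqe_size = (1 << log_stride_sz) * num_strides;      /* 65536 */
	int hd_per_wqe = (wqe_size / rsrv_size) * pkt_per_rsrv; /* 16 * 2 = 32 */

	printf("wqe_size=%d hd_per_wqe=%d\n", wqe_size, hd_per_wqe);
	return 0;
}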
/linux/drivers/infiniband/sw/rxe/
rxe_qp.c
205 int wqe_size; in rxe_qp_init_req() local
225 wqe_size = max_t(int, init->cap.max_send_sge * sizeof(struct ib_sge), in rxe_qp_init_req()
228 wqe_size / sizeof(struct ib_sge); in rxe_qp_init_req()
229 qp->sq.max_inline = init->cap.max_inline_data = wqe_size; in rxe_qp_init_req()
230 wqe_size += sizeof(struct rxe_send_wqe); in rxe_qp_init_req()
234 wqe_size, type); in rxe_qp_init_req()
278 int wqe_size; in rxe_qp_init_resp() local
285 wqe_size = rcv_wqe_size(qp->rq.max_sge); in rxe_qp_init_resp()
288 qp_num(qp), qp->rq.max_wr, qp->rq.max_sge, wqe_size); in rxe_qp_init_resp()
292 wqe_size, type); in rxe_qp_init_resp()
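
rxe sizes its send WQE as the larger of the SGE list and the inline-data
area, plus a fixed header; max_inline_data falls out of the same number.
A sketch of that derivation, with stand-in sizes where the driver uses
sizeof(struct ib_sge) and sizeof(struct rxe_send_wqe):

#include <stddef.h>

#define IB_SGE_SIZE	16	/* illustrative */
#define SEND_WQE_HDR	64	/* illustrative */

static size_t req_wqe_size(size_t max_send_sge, size_t max_inline)
{
	/* payload area must hold whichever representation is larger */
	size_t payload = max_send_sge * IB_SGE_SIZE;

	if (payload < max_inline)
		payload = max_inline;
	return payload + SEND_WQE_HDR;
}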
/linux/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
366 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
498 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
500 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
1946 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1953 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1965 u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
2038 wqe_size = roundup(wqe_size, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_reg()
2204 u32 wqe_size = 0; in ocrdma_build_rqe() local
2209 wqe_size = sizeof(*sge) + sizeof(*rqe); in ocrdma_build_rqe()
2211 rqe->cw = ((wqe_size / OCRDMA_WQE_STRIDE) << in ocrdma_build_rqe()
[all …]
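
ocrdma builds its WQE size incrementally: start at the header, add one
SGE per element, then round up to the WQE alignment. A sketch with
assumed sizes; the real values come from sizeof(struct ocrdma_sge) and
OCRDMA_WQE_ALIGN_BYTES:

#define SGE_SIZE	16	/* assumed */
#define WQE_ALIGN	16	/* assumed */
#define roundup(x, y)	((((x) + (y) - 1) / (y)) * (y))

static unsigned int build_wqe_size(unsigned int hdr_size, int num_sge)
{
	unsigned int wqe_size = hdr_size + num_sge * SGE_SIZE;

	return roundup(wqe_size, WQE_ALIGN);
}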
/linux/drivers/infiniband/hw/bnxt_re/
qplib_fp.h
95 u16 wqe_size; member
254 u16 wqe_size; member
569 return (que->wqe_size * que->max_wqe) / sizeof(struct sq_sge); in bnxt_qplib_get_depth()
584 static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot() argument
586 return (wqe_size / sizeof(struct sq_sge)); in bnxt_qplib_set_rq_max_slot()
ib_verbs.c
861 u16 wqe_size, calc_ils; in bnxt_re_get_wqe_size() local
863 wqe_size = bnxt_re_get_swqe_size(nsge); in bnxt_re_get_wqe_size()
866 wqe_size = max_t(u16, calc_ils, wqe_size); in bnxt_re_get_wqe_size()
867 wqe_size = ALIGN(wqe_size, sizeof(struct sq_send_hdr)); in bnxt_re_get_wqe_size()
869 return wqe_size; in bnxt_re_get_wqe_size()
900 qplqp->max_inline_data = sq->wqe_size - in bnxt_re_setup_swqe_size()
927 bytes = (qplib_qp->sq.max_wqe * qplib_qp->sq.wqe_size); in bnxt_re_init_user_qp()
935 ((qplib_qp->sq.max_wqe * qplib_qp->sq.wqe_size) / in bnxt_re_init_user_qp()
1049 qp->qplib_qp.sq.wqe_size = bnxt_re_get_wqe_size(0, 6); in bnxt_re_create_shadow_qp()
1060 qp->qplib_qp.rq.wqe_size = bnxt_re_get_rwqe_size(6); in bnxt_re_create_shadow_qp()
[all …]
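
bnxt_re_get_wqe_size() above picks the larger of the SGE-list layout and
the inline-data layout, then rounds to whole header-sized slots; the
qplib helpers then count the queue in sq_sge-sized slots. A sketch with
assumed 16-byte slot and header sizes:

#define SQ_SGE_SIZE	16	/* assumed sizeof(struct sq_sge) */
#define SQ_HDR_SIZE	16	/* assumed sizeof(struct sq_send_hdr) */
#define ALIGN(x, a)	(((x) + (a) - 1) & ~((a) - 1))

static unsigned short get_wqe_size(int ilsize, int nsge)
{
	unsigned short wqe_size = SQ_HDR_SIZE + nsge * SQ_SGE_SIZE;
	unsigned short calc_ils = SQ_HDR_SIZE + ilsize;

	if (calc_ils > wqe_size)
		wqe_size = calc_ils;
	return ALIGN(wqe_size, SQ_HDR_SIZE);
}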
roce_hsi.h
194 u8 wqe_size; member
217 u8 wqe_size; member
237 u8 wqe_size; member
286 u8 wqe_size; member
309 u8 wqe_size; member
324 u8 wqe_size; member
521 u8 wqe_size; member
537 u8 wqe_size; member
/linux/drivers/infiniband/hw/mlx5/
qp.c
261 size_t wqe_size = 1 << wq->wqe_shift; in mlx5_ib_read_wqe_rq() local
263 if (buflen < wqe_size) in mlx5_ib_read_wqe_rq()
295 if (buflen < wqe_size) in mlx5_ib_read_wqe_srq()
354 int wqe_size; in set_rq_size() local
385 wqe_size = in set_rq_size()
389 wqe_size = roundup_pow_of_two(wqe_size); in set_rq_size()
395 wqe_size, in set_rq_size()
512 int wqe_size; in calc_sq_size() local
518 wqe_size = calc_send_wqe(attr); in calc_sq_size()
520 if (wqe_size < 0) in calc_sq_size()
[all …]
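
mlx5 keeps wqe_size a power of two (note the roundup_pow_of_two() calls
above) and stores only wqe_shift, so a ring slot is located with a shift
rather than a multiply. A minimal sketch of that indexing convention:

/* Slot n of a ring whose stride is (1 << wqe_shift) bytes. */
static void *wqe_at(void *buf, int n, int wqe_shift)
{
	return (char *)buf + ((long)n << wqe_shift);
}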
odp.c
1115 int wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_mr_responder_pfault_handler_srq() local
1117 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_srq()
1122 *wqe_end = *wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_srq()
1134 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler_rq() local
1141 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_rq()
1146 *wqe_end = wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_rq()
/linux/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
147 qp->rq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_rq_wqe_hdr) + in pvrdma_set_rq_size()
150 qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_rq_size()
172 qp->sq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_sq_wqe_hdr) + in pvrdma_set_sq_size()
177 (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_sq_size()
637 qp->sq.offset + n * qp->sq.wqe_size); in get_sq_wqe()
643 qp->rq.offset + n * qp->rq.wqe_size); in get_rq_wqe()
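
The pvrdma sizing above is one idea end to end: round the per-WQE
footprint up to a power of two, size the ring in pages, and find slot n
at offset + n * wqe_size. A worked example with illustrative numbers
(the rounding helper is a user-space stand-in for the kernel's):

#include <stdio.h>

#define PAGE_SIZE 4096

static unsigned int roundup_pow_of_two(unsigned int x)
{
	unsigned int r = 1;

	while (r < x)
		r <<= 1;
	return r;
}

int main(void)
{
	unsigned int hdr = 40, sge = 16, max_sge = 4, wqe_cnt = 256;
	unsigned int wqe_size = roundup_pow_of_two(hdr + max_sge * sge);
	unsigned int npages = (wqe_cnt * wqe_size + PAGE_SIZE - 1) / PAGE_SIZE;

	printf("wqe_size=%u npages=%u\n", wqe_size, npages); /* 128, 8 */
	return 0;
}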
pvrdma.h
155 int wqe_size; member
170 int wqe_size; member
/linux/drivers/scsi/elx/libefc_sli/
sli4.c
750 size = sli4->wqe_size; in sli_get_queue_entry_size()
1377 memset(buf, 0, sli->wqe_size); in sli_abort_wqe()
1426 memset(buf, 0, sli->wqe_size); in sli_els_request64_wqe()
1571 memset(buf, 0, sli->wqe_size); in sli_fcp_icmnd64_wqe()
1638 memset(buf, 0, sli->wqe_size); in sli_fcp_iread64_wqe()
1741 memset(buf, 0, sli->wqe_size); in sli_fcp_iwrite64_wqe()
1832 memset(buf, 0, sli->wqe_size); in sli_fcp_treceive64_wqe()
1966 memset(buf, 0, sli4->wqe_size); in sli_fcp_trsp64_wqe()
2231 memset(buf, 0, sli->wqe_size); in sli_send_frame_wqe()
2291 memset(buf, 0, sli->wqe_size); in sli_xmit_bls_rsp64_wqe()
[all …]
/linux/include/uapi/rdma/
ocrdma-abi.h
55 __u32 wqe_size; member
ib_user_verbs.h
826 __u32 wqe_size; member
845 __u32 wqe_size; member
858 __u32 wqe_size; member
/linux/drivers/net/ethernet/microsoft/mana/
gdma_main.c
1025 u32 wqe_size; in mana_gd_post_work_request() local
1047 wqe_size = ALIGN(sizeof(struct gdma_wqe) + client_oob_size + in mana_gd_post_work_request()
1049 if (wqe_size > max_wqe_size) in mana_gd_post_work_request()
1052 if (wq->monitor_avl_buf && wqe_size > mana_gd_wq_avail_space(wq)) { in mana_gd_post_work_request()
1059 wqe_info->wqe_size_in_bu = wqe_size / GDMA_WQE_BU_SIZE; in mana_gd_post_work_request()
1069 wq->head += wqe_size / GDMA_WQE_BU_SIZE; in mana_gd_post_work_request()
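
mana_gd_post_work_request() above accounts for the WQE in basic units
(BUs): the aligned byte size is converted to BUs, checked against free
space, and the producer head advances by the same amount. A sketch with
an assumed BU size and a stand-in queue struct:

#define WQE_BU_SIZE 32	/* assumed basic-unit size */

struct wq_sketch {
	unsigned int head;	/* producer position, in BUs */
	unsigned int avail;	/* free space, in BUs */
};

/* wqe_size must already be aligned to WQE_BU_SIZE */
static int post_wqe(struct wq_sketch *wq, unsigned int wqe_size)
{
	unsigned int bu = wqe_size / WQE_BU_SIZE;

	if (bu > wq->avail)
		return -1;	/* queue full */
	wq->head += bu;
	wq->avail -= bu;
	return 0;
}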
/linux/drivers/net/ethernet/mellanox/mlx5/core/
en.h
170 #define MLX5E_KLM_MAX_ENTRIES_PER_WQE(wqe_size)\ argument
171 (((wqe_size) - sizeof(struct mlx5e_umr_wqe)) / sizeof(struct mlx5_klm))
173 #define MLX5E_KLM_ENTRIES_PER_WQE(wqe_size)\ argument
174 ALIGN_DOWN(MLX5E_KLM_MAX_ENTRIES_PER_WQE(wqe_size), MLX5_UMR_KLM_ALIGNMENT)
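
A worked example of the two KLM macros above, with assumed sizes:
sizeof(struct mlx5e_umr_wqe) = 48, sizeof(struct mlx5_klm) = 16 and
MLX5_UMR_KLM_ALIGNMENT = 4 are stand-ins for the real definitions.

#include <stdio.h>

#define UMR_WQE_SIZE	48
#define KLM_SIZE	16
#define KLM_ALIGNMENT	4	/* power of two */
#define ALIGN_DOWN(x, a)	((x) & ~((a) - 1))

int main(void)
{
	unsigned int wqe_size = 512;
	unsigned int max = (wqe_size - UMR_WQE_SIZE) / KLM_SIZE; /* 29 */
	unsigned int usable = ALIGN_DOWN(max, KLM_ALIGNMENT);    /* 28 */

	printf("max=%u usable=%u\n", max, usable);
	return 0;
}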
