Searched refs:wqe (Results 1 – 25 of 122) sorted by relevance


/linux/fs/
io-wq.c 183 struct io_wqe *wqe = worker->wqe; in io_worker_cancel_cb() local
207 struct io_wqe *wqe = worker->wqe; in io_worker_exit() local
301 return create_io_worker(wqe->wq, wqe, acct->index); in io_wqe_create_worker()
320 wqe = worker->wqe; in create_worker_cb()
343 struct io_wqe *wqe = worker->wqe; in io_queue_worker_create() local
390 struct io_wqe *wqe = worker->wqe; in io_wqe_dec_running() local
464 struct io_wqe *wqe = worker->wqe; in io_get_next_work() local
543 struct io_wqe *wqe = worker->wqe; in io_worker_handle_work() local
613 struct io_wqe *wqe = worker->wqe; in io_wqe_worker() local
756 wqe = worker->wqe; in create_worker_cont()
[all …]
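
Every io-wq.c hit above walks the same pointer chain: a worker caches a back-pointer to the per-node io_wqe it belongs to, and reaches the global io_wq through it. A minimal compilable sketch of that relationship; only worker->wqe and wqe->wq appear in the hits, everything else is illustrative:

struct io_wq;

struct io_wqe {
	struct io_wq *wq;	/* owning work queue, as in wqe->wq */
	int node;		/* hypothetical: the NUMA node this wqe serves */
};

struct io_worker {
	struct io_wqe *wqe;	/* the back-pointer the hits keep loading */
};

static struct io_wq *worker_to_wq(struct io_worker *worker)
{
	return worker->wqe->wq;	/* e.g. create_io_worker(wqe->wq, wqe, ...) */
}
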
/linux/drivers/infiniband/sw/rxe/
rxe_req.c 28 wqe->wr.opcode); in retry_first_write_send()
37 wqe->iova += qp->mtu; in retry_first_write_send()
77 wqe->dma.resid = wqe->dma.length; in req_retry()
78 wqe->dma.cur_sge = 0; in req_retry()
92 npsn = (wqe->dma.length - wqe->dma.resid) / in req_retry()
175 wqe->mask = wr_opcode_mask(wqe->wr.opcode, qp); in req_next_wqe()
176 return wqe; in req_next_wqe()
389 pkt->wqe = wqe; in init_req_packet()
468 u8 *tmp = &wqe->dma.inline_data[wqe->dma.sge_offset]; in finish_packet()
651 if (unlikely(!wqe)) in rxe_requester()
[all …]
rxe_comp.c 140 struct rxe_send_wqe *wqe; in get_wqe() local
146 *wqe_p = wqe; in get_wqe()
149 if (!wqe || wqe->state == wqe_state_posted) in get_wqe()
237 (wqe->first_psn == wqe->last_psn && in check_ack()
463 if (wqe->has_rd_atomic) { in complete_ack()
464 wqe->has_rd_atomic = 0; in complete_ack()
495 do_complete(qp, wqe); in complete_ack()
519 do_complete(qp, wqe); in complete_wqe()
539 do_complete(qp, wqe); in rxe_drain_resp_pkts()
693 if (!wqe || (wqe->state == wqe_state_posted)) { in rxe_completer()
[all …]
rxe_mw.c 74 u32 key = wqe->wr.wr.mw.rkey & 0xff; in rxe_check_bind_mw()
152 ((wqe->wr.wr.mw.addr + wqe->wr.wr.mw.length) > in rxe_check_bind_mw()
166 u32 key = wqe->wr.wr.mw.rkey & 0xff; in rxe_do_bind_mw()
169 mw->access = wqe->wr.wr.mw.access; in rxe_do_bind_mw()
171 mw->addr = wqe->wr.wr.mw.addr; in rxe_do_bind_mw()
172 mw->length = wqe->wr.wr.mw.length; in rxe_do_bind_mw()
198 u32 mw_rkey = wqe->wr.wr.mw.mw_rkey; in rxe_bind_mw()
199 u32 mr_lkey = wqe->wr.wr.mw.mr_lkey; in rxe_bind_mw()
213 if (likely(wqe->wr.wr.mw.length)) { in rxe_bind_mw()
230 ret = rxe_check_bind_mw(qp, wqe, mw, mr); in rxe_bind_mw()
[all …]
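
The req_retry() hits show how rxe rewinds a send WQE for retransmission: resid snaps back to the full length, the SGE cursor returns to zero, and npsn is recovered from how many bytes had already gone out. A compilable sketch of that bookkeeping, pared down to the fields the hits touch (the real struct rxe_dma_info has more members):

#include <stdint.h>

struct dma_state {
	uint32_t length;	/* total bytes described by the WQE */
	uint32_t resid;		/* bytes still to be transferred */
	uint32_t cur_sge;	/* scatter/gather element in progress */
};

/* req_retry() pattern: make the whole transfer outstanding again. */
static void wqe_rewind(struct dma_state *dma)
{
	dma->resid = dma->length;
	dma->cur_sge = 0;
}

/* npsn pattern: one PSN per MTU-sized chunk already sent. */
static uint32_t psns_consumed(const struct dma_state *dma, uint32_t mtu)
{
	return (dma->length - dma->resid) / mtu;
}
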
/linux/drivers/infiniband/sw/siw/
siw_qp_tx.c 562 (data_len != 0 || wqe->processed < wqe->bytes)) { in siw_tx_hdt()
619 if (data_len > 0 && wqe->processed < wqe->bytes) { in siw_tx_hdt()
720 cpu_to_be64(wqe->sqe.raddr + wqe->processed); in siw_prepare_fpdu()
722 data_len = wqe->bytes - wqe->processed; in siw_prepare_fpdu()
826 wqe->bytes = 0; in siw_qp_sq_proc_tx()
829 wqe->bytes = wqe->sqe.sge[0].length; in siw_qp_sq_proc_tx()
857 tx_type(wqe), wqe->wr_status, wqe->bytes, wqe->processed, in siw_qp_sq_proc_tx()
858 wqe->sqe.id); in siw_qp_sq_proc_tx()
882 wqe->processed = wqe->bytes; in siw_qp_sq_proc_tx()
1060 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes, in siw_qp_sq_process()
[all …]
siw_qp.c 272 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_qp_mpa_rts()
891 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_activate_tx_from_sq()
911 wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1]; in siw_activate_tx_from_sq()
998 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_activate_tx()
1213 tx_type(wqe), wqe->wr_status); in siw_sq_flush()
1215 siw_wqe_put_mem(wqe, tx_type(wqe)); in siw_sq_flush()
1225 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes, in siw_sq_flush()
1273 rx_type(wqe), wqe->wr_status); in siw_rq_flush()
1275 siw_wqe_put_mem(wqe, rx_type(wqe)); in siw_rq_flush()
1278 siw_rqe_complete(qp, &wqe->rqe, wqe->bytes, in siw_rq_flush()
[all …]
siw_qp_rx.c 204 (wqe->processed + srx->fpdu_part_rem != wqe->bytes))) { in siw_rresp_check_ntoh()
207 wqe->processed + srx->fpdu_part_rem, wqe->bytes); in siw_rresp_check_ntoh()
316 if (unlikely(wqe->bytes < wqe->processed + srx->fpdu_part_rem)) { in siw_send_check_ntoh()
318 wqe->bytes, wqe->processed, srx->fpdu_part_rem); in siw_send_check_ntoh()
360 wqe->bytes = 0; in siw_rqe_get()
370 wqe->bytes += wqe->rqe.sge[i].length; in siw_rqe_get()
405 return wqe; in siw_rqe_get()
798 qp_id(qp), wqe->wr_status, wqe->sqe.opcode); in siw_proc_rresp()
1254 rv = siw_rqe_complete(qp, &wqe->rqe, wqe->processed, in siw_rdmap_complete()
1258 rv = siw_rqe_complete(qp, &wqe->rqe, wqe->processed, in siw_rdmap_complete()
[all …]
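
The siw hits all orbit two counters: wqe->bytes, the total payload (siw_rqe_get() sums the SGE lengths into it), and wqe->processed, which advances as FPDUs are packed; the completion paths then report processed against bytes. A small sketch of that pair, with the struct reduced to just those two fields (an assumption about the real struct siw_wqe):

#include <stdint.h>

struct wqe_progress {
	uint32_t bytes;		/* total payload: sum of SGE lengths */
	uint32_t processed;	/* bytes already packed into FPDUs */
};

/* siw_prepare_fpdu() pattern: payload still to be transmitted. */
static uint32_t wqe_remaining(const struct wqe_progress *w)
{
	return w->bytes - w->processed;
}

/* siw_tx_hdt() loop condition: data pending and WQE unfinished. */
static int wqe_more_to_do(const struct wqe_progress *w, uint32_t data_len)
{
	return data_len > 0 && w->processed < w->bytes;
}
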
/linux/drivers/infiniband/sw/rdmavt/
trace_tx.h 50 TP_ARGS(qp, wqe, wr_num_sge),
73 __entry->wqe = wqe;
77 __entry->psn = wqe->psn;
78 __entry->lpsn = wqe->lpsn;
79 __entry->length = wqe->length;
86 __entry->ssn = wqe->ssn;
94 __entry->wqe,
117 TP_ARGS(qp, wqe, idx),
132 __entry->wqe = wqe;
138 __entry->ssn = wqe->ssn;
[all …]
qp.c 981 struct rvt_swqe *wqe; in free_ud_wq_attr() local
1001 struct rvt_swqe *wqe; in alloc_ud_wq_attr() local
1006 wqe->ud_wr.attr = kzalloc_node(sizeof(*wqe->ud_wr.attr), in alloc_ud_wq_attr()
1966 struct rvt_swqe *wqe; in rvt_post_one_wr() local
2045 wqe->length = 0; in rvt_post_one_wr()
2087 wqe->ssn = 0; in rvt_post_one_wr()
2088 wqe->psn = 0; in rvt_post_one_wr()
2089 wqe->lpsn = 0; in rvt_post_one_wr()
2093 wqe->lpsn = wqe->psn + in rvt_post_one_wr()
2094 (wqe->length ? in rvt_post_one_wr()
[all …]
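
rvt_post_one_wr() stamps each software WQE with a PSN range: lpsn is psn plus ceil(length / mtu) minus one, and a zero-length request still consumes one PSN. A sketch of that arithmetic; the 24-bit PSN wrap the real code applies is omitted for brevity (assumption):

#include <stdint.h>

static uint32_t wqe_last_psn(uint32_t psn, uint32_t length, uint32_t mtu)
{
	/* ceil(length / mtu) packets; a zero-length WR still takes one */
	uint32_t npkts = length ? (length + mtu - 1) / mtu : 1;

	return psn + npkts - 1;
}
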
/linux/drivers/infiniband/hw/irdma/
uda.c 23 __le64 *wqe; in irdma_sc_access_ah() local
27 if (!wqe) in irdma_sc_access_ah()
41 set_64bit_val(wqe, 40, in irdma_sc_access_ah()
44 set_64bit_val(wqe, 32, in irdma_sc_access_ah()
48 set_64bit_val(wqe, 56, in irdma_sc_access_ah()
51 set_64bit_val(wqe, 48, in irdma_sc_access_ah()
55 set_64bit_val(wqe, 32, in irdma_sc_access_ah()
68 wqe, 24, in irdma_sc_access_ah()
122 __le64 *wqe; in irdma_access_mcast_grp() local
131 if (!wqe) { in irdma_access_mcast_grp()
[all …]
uk.c 62 __le64 *wqe; in irdma_nop_1() local
97 __le64 *wqe; in irdma_clr_wqes() local
237 return wqe; in irdma_qp_get_next_send_wqe()
262 return wqe; in irdma_qp_get_next_recv_wqe()
307 if (!wqe) in irdma_uk_rdma_write()
402 if (!wqe) in irdma_uk_rdma_read()
492 if (!wqe) in irdma_uk_send()
701 if (!wqe) in irdma_uk_inline_rdma_write()
768 if (!wqe) in irdma_uk_inline_send()
837 if (!wqe) in irdma_uk_stag_local_invalidate()
[all …]
ctrl.c 160 __le64 *wqe; in irdma_sc_add_arp_cache_entry() local
164 if (!wqe) in irdma_sc_add_arp_cache_entry()
201 if (!wqe) in irdma_sc_del_arp_cache_entry()
236 if (!wqe) in irdma_sc_manage_apbvt_entry()
287 if (!wqe) in irdma_sc_manage_qhash_table_entry()
438 if (!wqe) in irdma_sc_qp_create()
488 if (!wqe) in irdma_sc_qp_modify()
557 if (!wqe) in irdma_sc_qp_destroy()
748 if (!wqe) in irdma_sc_alloc_local_mac_entry()
784 if (!wqe) in irdma_sc_add_local_mac_entry()
[all …]
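
Each irdma hit follows one recipe: grab the next raw WQE (a __le64 array), bail if the queue is full (!wqe), then fill it with set_64bit_val() at fixed byte offsets (40, 32, 56, 48 above). A sketch of what that helper plausibly does, assuming the second argument is a byte offset into the WQE; the real kernel helper also byte-swaps with cpu_to_le64(), skipped here so the sketch stands alone:

#include <stdint.h>

static void set_64bit_val(uint64_t *wqe, uint32_t byte_off, uint64_t val)
{
	wqe[byte_off >> 3] = val;	/* byte offset -> 64-bit word index */
}
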
/linux/drivers/infiniband/hw/hfi1/
rc.c 723 wqe->psn, wqe->lpsn, in hfi1_make_rc_req()
763 wqe->psn, wqe->lpsn, in hfi1_make_rc_req()
1034 wqe->psn, wqe->lpsn, req); in hfi1_make_rc_req()
1089 wqe->psn, wqe->lpsn, req); in hfi1_make_rc_req()
1132 wqe->psn, wqe->lpsn, req); in hfi1_make_rc_req()
1591 wqe = do_rc_completion(qp, wqe, ibp); in hfi1_restart_rc()
1796 wqe, in hfi1_rc_send_complete()
1842 wqe, in do_rc_completion()
1902 return wqe; in do_rc_completion()
2064 wqe = do_rc_completion(qp, wqe, ibp); in do_rc_ack()
[all …]
uc.c 26 struct rvt_swqe *wqe; in hfi1_make_uc_req() local
111 qp->s_psn = wqe->psn; in hfi1_make_uc_req()
112 qp->s_sge.sge = wqe->sg_list[0]; in hfi1_make_uc_req()
115 qp->s_sge.total_len = wqe->length; in hfi1_make_uc_req()
116 len = wqe->length; in hfi1_make_uc_req()
118 switch (wqe->wr.opcode) { in hfi1_make_uc_req()
137 qp->s_wqe = wqe; in hfi1_make_uc_req()
147 cpu_to_be32(wqe->rdma_wr.rkey); in hfi1_make_uc_req()
166 qp->s_wqe = wqe; in hfi1_make_uc_req()
196 qp->s_wqe = wqe; in hfi1_make_uc_req()
[all …]
tid_rdma.h 214 void __trdma_clean_swqe(struct rvt_qp *qp, struct rvt_swqe *wqe);
223 if (!wqe->priv) in trdma_clean_swqe()
225 __trdma_clean_swqe(qp, wqe); in trdma_clean_swqe()
244 u32 hfi1_build_tid_rdma_read_packet(struct rvt_swqe *wqe,
265 struct rvt_swqe *wqe) in hfi1_setup_tid_rdma_wqe() argument
267 if (wqe->priv && in hfi1_setup_tid_rdma_wqe()
268 (wqe->wr.opcode == IB_WR_RDMA_READ || in hfi1_setup_tid_rdma_wqe()
269 wqe->wr.opcode == IB_WR_RDMA_WRITE) && in hfi1_setup_tid_rdma_wqe()
270 wqe->length >= TID_RDMA_MIN_SEGMENT_SIZE) in hfi1_setup_tid_rdma_wqe()
271 setup_tid_rdma_wqe(qp, wqe); in hfi1_setup_tid_rdma_wqe()
[all …]
tid_rdma.c 2708 wqe); in handle_read_kdeth_eflags()
2728 wqe = do_rc_completion(qp, wqe, ibp); in handle_read_kdeth_eflags()
2808 wqe); in handle_read_kdeth_eflags()
3064 wqe->psn, wqe->lpsn, in hfi1_tid_rdma_restart_req()
3335 wqe->lpsn = wqe->psn; in setup_tid_rdma_wqe()
3339 wqe->lpsn += rvt_div_round_up_mtu(qp, wqe->length) - 1; in setup_tid_rdma_wqe()
3357 wqe->psn, wqe->lpsn, in setup_tid_rdma_wqe()
3386 cpu_to_be64(wqe->rdma_wr.remote_addr + (wqe->length - *len)); in hfi1_build_tid_rdma_write_req()
4564 wqe = do_rc_completion(qp, wqe, in hfi1_rc_rcv_tid_rdma_ack()
4824 qp, 0, wqe->wr.opcode, wqe->psn, wqe->lpsn, req); in hfi1_tid_retry_timeout()
[all …]
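
The tid_rdma.h hit spells out when hfi1 upgrades a WQE to TID RDMA: only RDMA READ/WRITE opcodes whose length reaches TID_RDMA_MIN_SEGMENT_SIZE. A sketch of that gate; the opcode enum values and the size constant below are stand-ins, not the kernel's (assumptions):

#include <stdint.h>

enum wr_opcode { WR_RDMA_READ, WR_RDMA_WRITE, WR_SEND };

#define TID_RDMA_MIN_SEGMENT_SIZE (256u * 1024u)	/* assumed value */

static int wqe_wants_tid_rdma(enum wr_opcode opcode, uint32_t length)
{
	return (opcode == WR_RDMA_READ || opcode == WR_RDMA_WRITE) &&
	       length >= TID_RDMA_MIN_SEGMENT_SIZE;
}
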
/linux/drivers/infiniband/hw/qib/
qib_rc.c 222 struct rvt_swqe *wqe; in qib_make_rc_req() local
310 len = wqe->length; in qib_make_rc_req()
859 struct rvt_swqe *wqe; in reset_sending_psn() local
885 struct rvt_swqe *wqe; in qib_rc_send_complete() local
924 wqe, in qib_rc_send_complete()
963 wqe, in do_rc_completion()
993 return wqe; in do_rc_completion()
1012 struct rvt_swqe *wqe; in do_rc_ack() local
1099 wqe = do_rc_completion(qp, wqe, ibp); in do_rc_ack()
1227 struct rvt_swqe *wqe; in rdma_seq_err() local
[all …]
qib_uc.c 53 struct rvt_swqe *wqe; in qib_make_uc_req() local
98 qp->s_psn = wqe->psn; in qib_make_uc_req()
99 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_uc_req()
103 len = wqe->length; in qib_make_uc_req()
105 switch (wqe->wr.opcode) { in qib_make_uc_req()
124 qp->s_wqe = wqe; in qib_make_uc_req()
134 cpu_to_be32(wqe->rdma_wr.rkey); in qib_make_uc_req()
153 qp->s_wqe = wqe; in qib_make_uc_req()
172 if (wqe->wr.opcode == IB_WR_SEND) in qib_make_uc_req()
182 qp->s_wqe = wqe; in qib_make_uc_req()
[all …]
qib_ud.c 238 struct rvt_swqe *wqe; in qib_make_ud_req() local
275 ah_attr = rvt_get_swqe_ah_attr(wqe); in qib_make_ud_req()
300 qib_ud_loopback(qp, wqe); in qib_make_ud_req()
309 extra_bytes = -wqe->length & 3; in qib_make_ud_req()
314 qp->s_cur_size = wqe->length; in qib_make_ud_req()
317 qp->s_wqe = wqe; in qib_make_ud_req()
318 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_ud_req()
319 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_ud_req()
320 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_ud_req()
321 qp->s_sge.total_len = wqe->length; in qib_make_ud_req()
[all …]
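
One qib_ud.c hit packs a neat idiom: extra_bytes = -wqe->length & 3, the padding that rounds the payload up to a 32-bit boundary. It works because, on unsigned arithmetic, -len mod 4 equals (4 - len mod 4) mod 4, so lengths of 0, 1, 2, 3 mod 4 yield pads of 0, 3, 2, 1:

#include <stdint.h>

static uint32_t dword_pad(uint32_t len)
{
	return -len & 3;	/* bytes needed to reach a multiple of 4 */
}
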
/linux/drivers/infiniband/hw/cxgb4/
A Dqp.c521 wqe->send.r3 = 0; in build_rdma_send()
522 wqe->send.r4 = 0; in build_rdma_send()
684 wqe->read.r2 = 0; in build_rdma_read()
685 wqe->read.r5 = 0; in build_rdma_read()
779 ret = build_isgl((__be64 *)wqe, (__be64 *)(wqe + 1), in build_srq_recv()
888 wqe->inv.r2 = 0; in build_inv_stag()
1353 memcpy(&pwr->wqe, wqe, len16 * 16); in defer_srq_wr()
1381 wqe = &lwqe; in c4iw_post_srq_recv()
1576 wqe = __skb_put_zero(skb, sizeof(*wqe)); in post_terminate()
1707 wqe = __skb_put_zero(skb, sizeof(*wqe)); in rdma_fini()
[all …]
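
The defer_srq_wr() hit copies a whole WQE aside with memcpy(&pwr->wqe, wqe, len16 * 16): cxgb4 hardware sizes work requests in len16 units of 16 bytes. A sketch of that parking step; the pending-WR container and its capacity are assumptions:

#include <stdint.h>
#include <string.h>

struct pending_wr {
	uint8_t wqe[255 * 16];	/* len16 is 8-bit, so 4080 bytes covers any WQE */
};

static void defer_wqe(struct pending_wr *pwr, const void *wqe, uint8_t len16)
{
	memcpy(pwr->wqe, wqe, (size_t)len16 * 16);	/* len16 = 16-byte units */
}
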
/linux/drivers/scsi/lpfc/
lpfc_nvmet.c 80 union lpfc_wqe128 *wqe; in lpfc_nvmet_cmd_template() local
1488 union lpfc_wqe128 *wqe; in lpfc_nvmet_setup_io_context() local
1586 wqe = &nvmewqe->wqe; in lpfc_nvmet_setup_io_context()
2582 union lpfc_wqe128 *wqe; in lpfc_nvmet_prep_ls_wqe() local
2620 wqe = &nvmewqe->wqe; in lpfc_nvmet_prep_ls_wqe()
2707 union lpfc_wqe128 *wqe; in lpfc_nvmet_prep_fcp_wqe() local
2765 wqe = &nvmewqe->wqe; in lpfc_nvmet_prep_fcp_wqe()
2778 memcpy(&wqe->words[7], in lpfc_nvmet_prep_fcp_wqe()
2864 memcpy(&wqe->words[3], in lpfc_nvmet_prep_fcp_wqe()
3343 union lpfc_wqe128 *wqe = &pwqeq->wqe; in lpfc_nvmet_prep_abort_wqe() local
[all …]
lpfc_nvme.c 388 union lpfc_wqe128 *wqe; in lpfc_nvme_gen_req() local
399 wqe = &genwqe->wqe; in lpfc_nvme_gen_req()
818 union lpfc_wqe128 *wqe; in lpfc_nvme_adj_fcp_sgls() local
827 wqe = &lpfc_ncmd->cur_iocbq.wqe; in lpfc_nvme_adj_fcp_sgls()
1188 union lpfc_wqe128 *wqe = &pwqeq->wqe; in lpfc_nvme_prep_io_cmd() local
1198 memcpy(&wqe->words[7], in lpfc_nvme_prep_io_cmd()
1221 memcpy(&wqe->words[7], in lpfc_nvme_prep_io_cmd()
1295 union lpfc_wqe128 *wqe = &lpfc_ncmd->cur_iocbq.wqe; in lpfc_nvme_prep_io_dma() local
1429 &wqe->words[13]; in lpfc_nvme_prep_io_dma()
1967 union lpfc_wqe128 *wqe; in lpfc_get_nvme_buf() local
[all …]
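
lpfc builds its 128-byte WQEs as arrays of 32-bit words (union lpfc_wqe128) and splices pre-built runs into them with memcpy at word offsets such as &wqe->words[7]. A pared-down sketch of that layout and splice; the word indices only mean something to the SLI-4 hardware:

#include <stdint.h>
#include <string.h>

union wqe128 {
	uint32_t words[32];	/* 128 bytes as 32 x 32-bit words */
};

/* Splice a pre-built template run into the WQE at a word offset. */
static void copy_words(union wqe128 *wqe, unsigned int first,
		       const uint32_t *tmpl, unsigned int nwords)
{
	memcpy(&wqe->words[first], tmpl, nwords * sizeof(uint32_t));
}
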
/linux/drivers/infiniband/hw/bnxt_re/
ib_verbs.c 395 memset(wqe, 0, sizeof(*wqe)); in bnxt_re_create_fence_wqe()
424 memcpy(&wqe, fence_wqe, sizeof(wqe)); in bnxt_re_bind_fence_mw()
1762 wqe.wr_id = wr->wr_id; in bnxt_re_post_srq_recv()
2241 wqe->sg_list[i].addr = wqe->sg_list[i - 1].addr; in bnxt_re_build_qp1_send_v2()
2242 wqe->sg_list[i].lkey = wqe->sg_list[i - 1].lkey; in bnxt_re_build_qp1_send_v2()
2243 wqe->sg_list[i].size = wqe->sg_list[i - 1].size; in bnxt_re_build_qp1_send_v2()
2270 wqe->num_sge++; in bnxt_re_build_qp1_send_v2()
2612 memset(&wqe, 0, sizeof(wqe)); in bnxt_re_post_send()
2706 memset(&wqe, 0, sizeof(wqe)); in bnxt_re_post_recv_shadow_qp()
2709 memset(&wqe, 0, sizeof(wqe)); in bnxt_re_post_recv_shadow_qp()
[all …]
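
The bnxt_re_build_qp1_send_v2() hits copy each scatter/gather entry from its left neighbour and then bump num_sge, which reads like the classic shift-right-to-free-slot-0 move (presumably to prepend a header segment for QP1 traffic; that purpose is an inference). A sketch with a pared-down SGE:

#include <stdint.h>

struct sge {
	uint64_t addr;
	uint32_t lkey;
	uint32_t size;
};

/* Shift entries right so index 0 becomes free; caller bumps num_sge. */
static void sg_list_shift_right(struct sge *sg, int num_sge)
{
	for (int i = num_sge; i > 0; --i)
		sg[i] = sg[i - 1];
}
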
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_txrx.c 55 mlx5e_ktls_build_static_params(struct mlx5e_set_tls_static_params_wqe *wqe, in mlx5e_ktls_build_static_params() argument
61 struct mlx5_wqe_umr_ctrl_seg *ucseg = &wqe->uctrl; in mlx5e_ktls_build_static_params()
62 struct mlx5_wqe_ctrl_seg *cseg = &wqe->ctrl; in mlx5e_ktls_build_static_params()
67 #define STATIC_PARAMS_DS_CNT DIV_ROUND_UP(sizeof(*wqe), MLX5_SEND_WQE_DS) in mlx5e_ktls_build_static_params()
78 fill_static_params(&wqe->params, info, key_id, resync_tcp_sn); in mlx5e_ktls_build_static_params()
98 mlx5e_ktls_build_progress_params(struct mlx5e_set_tls_progress_params_wqe *wqe, in mlx5e_ktls_build_progress_params() argument
104 struct mlx5_wqe_ctrl_seg *cseg = &wqe->ctrl; in mlx5e_ktls_build_progress_params()
109 #define PROGRESS_PARAMS_DS_CNT DIV_ROUND_UP(sizeof(*wqe), MLX5_SEND_WQE_DS) in mlx5e_ktls_build_progress_params()
117 fill_progress_params(&wqe->params, tis_tir_num, next_record_tcp_sn); in mlx5e_ktls_build_progress_params()
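
Both ktls builders size their WQE in data segments: DIV_ROUND_UP(sizeof(*wqe), MLX5_SEND_WQE_DS), a DS being 16 bytes in the mlx5 headers. The control segment's ds count then tells the device how many 16-byte units the WQE spans. The arithmetic, with the kernel macro re-spelled so the sketch stands alone:

#include <stddef.h>

#define SEND_WQE_DS 16u		/* mlx5 data-segment size in bytes */

static size_t wqe_ds_count(size_t wqe_size)
{
	return (wqe_size + SEND_WQE_DS - 1) / SEND_WQE_DS;	/* round up */
}
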
/linux/drivers/infiniband/hw/mthca/
mthca_srq.c 158 void *wqe; in mthca_alloc_srq_buf() local
185 next = wqe = get_wqe(srq, i); in mthca_alloc_srq_buf()
188 *wqe_to_link(wqe) = i + 1; in mthca_alloc_srq_buf()
191 *wqe_to_link(wqe) = -1; in mthca_alloc_srq_buf()
495 void *wqe; in mthca_tavor_post_srq_recv() local
504 wqe = get_wqe(srq, ind); in mthca_tavor_post_srq_recv()
505 next_ind = *wqe_to_link(wqe); in mthca_tavor_post_srq_recv()
515 srq->last = wqe; in mthca_tavor_post_srq_recv()
588 void *wqe; in mthca_arbel_post_srq_recv() local
594 wqe = get_wqe(srq, ind); in mthca_arbel_post_srq_recv()
[all …]
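
The mthca_srq.c hits thread the SRQ's free list through the WQE buffer itself: at init, WQE i links to i + 1 and the last links to -1; posting a receive pops the head by reading the link back out (next_ind = *wqe_to_link(wqe)). A sketch, assuming the link is an int reachable at a fixed spot in each WQE, as wqe_to_link() suggests:

#include <stddef.h>

static int *wqe_to_link(void *wqe)
{
	return (int *)wqe;	/* hypothetical placement of the link */
}

static void srq_init_free_list(void *buf, size_t wqe_size, int nwqe)
{
	for (int i = 0; i < nwqe; ++i) {
		void *wqe = (char *)buf + (size_t)i * wqe_size;

		*wqe_to_link(wqe) = (i < nwqe - 1) ? i + 1 : -1;	/* -1 ends the list */
	}
}
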

Completed in 885 milliseconds
