
Searched defs:wqe (Results 1 – 25 of 67) sorted by relevance

/drivers/infiniband/sw/rxe/
rxe_req.c
45 struct rxe_send_wqe *wqe, in retry_first_write_send()
70 struct rxe_send_wqe *wqe; in req_retry() local
134 struct rxe_send_wqe *wqe = queue_head(qp->sq.queue); in req_next_wqe() local
317 static int next_opcode(struct rxe_qp *qp, struct rxe_send_wqe *wqe, in next_opcode()
348 static inline int check_init_depth(struct rxe_qp *qp, struct rxe_send_wqe *wqe) in check_init_depth()
379 struct rxe_send_wqe *wqe, in init_req_packet()
473 static int fill_packet(struct rxe_qp *qp, struct rxe_send_wqe *wqe, in fill_packet()
518 struct rxe_send_wqe *wqe, in update_wqe_state()
530 struct rxe_send_wqe *wqe, in update_wqe_psn()
552 static void save_state(struct rxe_send_wqe *wqe, in save_state()
[all …]
rxe_comp.c
166 struct rxe_send_wqe *wqe; in get_wqe() local
198 struct rxe_send_wqe *wqe) in check_psn()
237 struct rxe_send_wqe *wqe) in check_ack()
368 struct rxe_send_wqe *wqe) in do_read()
387 struct rxe_send_wqe *wqe) in do_atomic()
403 static void make_send_cqe(struct rxe_qp *qp, struct rxe_send_wqe *wqe, in make_send_cqe()
441 static void do_complete(struct rxe_qp *qp, struct rxe_send_wqe *wqe) in do_complete()
467 struct rxe_send_wqe *wqe) in complete_ack()
513 struct rxe_send_wqe *wqe) in complete_wqe()
535 struct rxe_send_wqe *wqe; in rxe_drain_resp_pkts() local
[all …]
/drivers/infiniband/hw/cxgb3/
iwch_qp.c
42 static int build_rdma_send(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_send()
87 static int build_rdma_write(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_write()
128 static int build_rdma_read(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_read()
149 static int build_memreg(union t3_wr *wqe, struct ib_reg_wr *wr, in build_memreg()
192 static int build_inv_stag(union t3_wr *wqe, struct ib_send_wr *wr, in build_inv_stag()
248 static int build_rdma_recv(struct iwch_qp *qhp, union t3_wr *wqe, in build_rdma_recv()
288 static int build_zero_stag_recv(struct iwch_qp *qhp, union t3_wr *wqe, in build_zero_stag_recv()
360 union t3_wr *wqe; in iwch_post_send() local
472 union t3_wr *wqe; in iwch_post_receive() local
663 union t3_wr *wqe; in iwch_post_zb_read() local
[all …]
/drivers/infiniband/hw/i40iw/
i40iw_ctrl.c
51 void i40iw_insert_wqe_hdr(u64 *wqe, u64 header) in i40iw_insert_wqe_hdr()
600 u64 *wqe = NULL; in i40iw_sc_cqp_get_next_send_wqe_idx() local
828 u64 *wqe; in i40iw_sc_manage_push_page() local
870 u64 *wqe; in i40iw_sc_manage_hmc_pm_func_table() local
908 u64 *wqe; in i40iw_sc_set_hmc_resource_profile() local
981 u64 *wqe; in i40iw_sc_commit_fpm_values() local
1043 u64 *wqe; in i40iw_sc_query_fpm_values() local
1093 u64 *wqe; in i40iw_sc_add_arp_cache_entry() local
1139 u64 *wqe; in i40iw_sc_del_arp_cache_entry() local
1172 u64 *wqe; in i40iw_sc_query_arp_cache_entry() local
[all …]
i40iw_uk.c
49 u64 header, *wqe; in i40iw_nop_1() local
141 u64 *wqe = NULL; in i40iw_qp_get_next_send_wqe() local
205 static void i40iw_set_fragment(u64 *wqe, u32 offset, struct i40iw_sge *sge) in i40iw_set_fragment()
222 u64 *wqe = NULL; in i40iw_qp_get_next_recv_wqe() local
250 u64 *wqe; in i40iw_rdma_write() local
319 u64 *wqe; in i40iw_rdma_read() local
367 u64 *wqe; in i40iw_send() local
426 u64 *wqe; in i40iw_inline_rdma_write() local
502 u64 *wqe; in i40iw_inline_send() local
573 u64 *wqe; in i40iw_stag_local_invalidate() local
[all …]
i40iw_puda.c
112 u64 *wqe; in i40iw_puda_post_recvbuf() local
208 u64 *wqe = NULL; in i40iw_puda_get_next_send_wqe() local
374 u64 *wqe; in i40iw_puda_send() local
519 u64 *wqe; in i40iw_puda_qp_wqe() local
628 u64 *wqe; in i40iw_puda_cq_wqe() local
989 u64 *wqe; in i40iw_ilq_putback_rcvbuf() local
i40iw_vf.c
56 u64 *wqe; in i40iw_manage_vf_pble_bp() local
/drivers/infiniband/hw/qib/
qib_rc.c
42 static u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe, in restart_sge()
226 struct rvt_swqe *wqe; in qib_make_rc_req() local
751 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n); in reset_psn() local
835 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in qib_restart_rc() local
871 struct rvt_swqe *wqe; in reset_sending_psn() local
897 struct rvt_swqe *wqe; in qib_rc_send_complete() local
973 struct rvt_swqe *wqe, in do_rc_completion()
1042 struct rvt_swqe *wqe; in do_rc_ack() local
1257 struct rvt_swqe *wqe; in rdma_seq_err() local
1306 struct rvt_swqe *wqe; in qib_rc_rcv_resp() local
qib_ruc.c
44 static int qib_init_sge(struct rvt_qp *qp, struct rvt_rwqe *wqe) in qib_init_sge()
109 struct rvt_rwqe *wqe; in qib_get_rwqe() local
341 struct rvt_swqe *wqe; in qib_ruc_loopback() local
776 void qib_send_complete(struct rvt_qp *qp, struct rvt_swqe *wqe, in qib_send_complete()
qib_qp.c
390 struct rvt_swqe *wqe) in qib_check_send_wqe()
429 struct rvt_swqe *wqe; in qib_qp_iter_print() local
/drivers/infiniband/hw/cxgb4/
qp.c
458 static int build_rdma_send(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_send()
525 static int build_rdma_write(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_write()
568 static int build_rdma_read(union t4_wr *wqe, struct ib_send_wr *wr, u8 *len16) in build_rdma_read()
597 static int build_rdma_recv(struct c4iw_qp *qhp, union t4_recv_wr *wqe, in build_rdma_recv()
644 static int build_memreg(struct t4_sq *sq, union t4_wr *wqe, in build_memreg()
709 static int build_inv_stag(union t4_wr *wqe, struct ib_send_wr *wr, u8 *len16) in build_inv_stag()
929 union t4_wr *wqe = NULL; in c4iw_post_send() local
1078 union t4_recv_wr *wqe = NULL; in c4iw_post_receive() local
1293 struct fw_ri_wr *wqe; in post_terminate() local
1424 struct fw_ri_wr *wqe; in rdma_fini() local
[all …]
/drivers/infiniband/hw/hfi1/
rc.c
60 static u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe, in restart_sge()
264 struct rvt_swqe *wqe; in hfi1_make_rc_req() local
955 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n); in reset_psn() local
1041 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in hfi1_restart_rc() local
1081 struct rvt_swqe *wqe; in reset_sending_psn() local
1109 struct rvt_swqe *wqe; in hfi1_rc_send_complete() local
1204 struct rvt_swqe *wqe, in do_rc_completion()
1293 struct rvt_swqe *wqe; in do_rc_ack() local
1509 struct rvt_swqe *wqe; in rdma_seq_err() local
1551 struct rvt_swqe *wqe; in rc_rcv_resp() local
ruc.c
60 static int init_sge(struct rvt_qp *qp, struct rvt_rwqe *wqe) in init_sge()
125 struct rvt_rwqe *wqe; in hfi1_rvt_get_rwqe() local
334 struct rvt_swqe *wqe; in ruc_loopback() local
1102 void hfi1_send_complete(struct rvt_qp *qp, struct rvt_swqe *wqe, in hfi1_send_complete()
ud.c
286 static void hfi1_make_bth_deth(struct rvt_qp *qp, struct rvt_swqe *wqe, in hfi1_make_bth_deth()
323 struct rvt_swqe *wqe) in hfi1_make_ud_req_9B()
393 struct rvt_swqe *wqe) in hfi1_make_ud_req_16B()
477 struct rvt_swqe *wqe; in hfi1_make_ud_req() local
/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c
256 static int mlx5e_alloc_rx_wqe(struct mlx5e_rq *rq, struct mlx5e_rx_wqe *wqe, u16 ix) in mlx5e_alloc_rx_wqe()
354 struct mlx5e_umr_wqe *wqe; in mlx5e_post_umr_wqe() local
422 struct mlx5e_rx_wqe *wqe = mlx5_wq_ll_get_wqe(wq, wq->head); in mlx5e_post_rx_mpwqe() local
467 struct mlx5e_rx_wqe *wqe = mlx5_wq_ll_get_wqe(wq, wq->head); in mlx5e_post_rx_wqes() local
758 struct mlx5e_tx_wqe *wqe; in mlx5e_xmit_xdp_doorbell() local
773 struct mlx5e_tx_wqe *wqe = mlx5_wq_cyc_get_wqe(wq, pi); in mlx5e_xmit_xdp_frame() local
922 struct mlx5e_rx_wqe *wqe; in mlx5e_handle_rx_cqe() local
965 struct mlx5e_rx_wqe *wqe; in mlx5e_handle_rx_cqe_rep() local
1047 struct mlx5e_rx_wqe *wqe = mlx5_wq_ll_get_wqe(&rq->wq, wqe_id); in mlx5e_handle_rx_cqe_mpwrq() local
1268 struct mlx5e_rx_wqe *wqe; in mlx5i_handle_rx_cqe() local
[all …]
en_tx.c
312 struct mlx5e_tx_wqe *wqe, u16 pi) in mlx5e_sq_xmit()
382 struct mlx5e_tx_wqe *wqe = mlx5_wq_cyc_get_wqe(wq, pi); in mlx5e_xmit() local
552 struct mlx5i_tx_wqe *wqe = mlx5_wq_cyc_get_wqe(wq, pi); in mlx5i_sq_xmit() local
/drivers/infiniband/hw/bnxt_re/
ib_verbs.c
414 struct bnxt_qplib_swqe *wqe = &fence->bind_wqe; in bnxt_re_create_fence_wqe() local
442 struct bnxt_qplib_swqe wqe; in bnxt_re_bind_fence_mw() local
1634 struct bnxt_qplib_swqe *wqe, in bnxt_re_build_qp1_send_v2()
1819 struct bnxt_qplib_swqe *wqe, in bnxt_re_build_qp1_shadow_qp_recv()
1864 struct bnxt_qplib_swqe *wqe) in bnxt_re_build_send_wqe()
1902 struct bnxt_qplib_swqe *wqe) in bnxt_re_build_rdma_wqe()
1934 struct bnxt_qplib_swqe *wqe) in bnxt_re_build_atomic_wqe()
1961 struct bnxt_qplib_swqe *wqe) in bnxt_re_build_inv_wqe()
1980 struct bnxt_qplib_swqe *wqe) in bnxt_re_build_reg_wqe()
2022 struct bnxt_qplib_swqe *wqe) in bnxt_re_copy_inline_data()
[all …]
/drivers/net/ethernet/huawei/hinic/
hinic_hw_cmdq.c
63 #define CMDQ_WQE_HEADER(wqe) ((struct hinic_cmdq_header *)(wqe)) argument
191 static void cmdq_prepare_wqe_ctrl(struct hinic_cmdq_wqe *wqe, int wrapped, in cmdq_prepare_wqe_ctrl()
248 static void cmdq_set_direct_wqe_data(struct hinic_cmdq_direct_wqe *wqe, in cmdq_set_direct_wqe_data()
257 static void cmdq_set_lcmd_wqe(struct hinic_cmdq_wqe *wqe, in cmdq_set_lcmd_wqe()
285 static void cmdq_set_direct_wqe(struct hinic_cmdq_wqe *wqe, in cmdq_set_direct_wqe()
541 struct hinic_cmdq_wqe *wqe) in clear_wqe_complete_bit()
575 struct hinic_cmdq_wqe *wqe) in cmdq_arm_ceq_handler()
hinic_hw_wq.c
88 #define WQE_IN_RANGE(wqe, start, end) \ argument
92 #define WQE_SHADOW_PAGE(wq, wqe) \ argument
848 static inline bool wqe_shadow(struct hinic_wq *wq, struct hinic_hw_wqe *wqe) in wqe_shadow()
862 void hinic_write_wqe(struct hinic_wq *wq, struct hinic_hw_wqe *wqe, in hinic_write_wqe()
/drivers/infiniband/hw/mthca/
mthca_srq.c
90 static inline int *wqe_to_link(void *wqe) in wqe_to_link()
151 void *wqe; in mthca_alloc_srq_buf() local
487 void *wqe; in mthca_tavor_post_srq_recv() local
586 void *wqe; in mthca_arbel_post_srq_recv() local
mthca_cq.c
126 __be32 wqe; member
140 __be32 wqe; member
546 u32 wqe = be32_to_cpu(cqe->wqe); in mthca_poll_one() local
552 s32 wqe; in mthca_poll_one() local
/drivers/scsi/lpfc/
lpfc_nvme.c
261 union lpfc_wqe *wqe; in lpfc_nvme_gen_req() local
528 struct lpfc_iocbq *wqe, *next_wqe; in lpfc_nvme_ls_abort() local
585 union lpfc_wqe128 *wqe; in lpfc_nvme_adj_fcp_sgls() local
957 union lpfc_wqe128 *wqe = (union lpfc_wqe128 *)&pwqeq->wqe; in lpfc_nvme_prep_io_cmd() local
1096 union lpfc_wqe128 *wqe = (union lpfc_wqe128 *)&lpfc_ncmd->cur_iocbq.wqe; in lpfc_nvme_prep_io_dma() local
1921 union lpfc_wqe128 *wqe; in lpfc_new_nvme_buf() local
/drivers/infiniband/sw/rdmavt/
qp.c
443 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_last); in rvt_clear_mr_refs() local
479 static bool rvt_swqe_has_lkey(struct rvt_swqe *wqe, u32 lkey) in rvt_swqe_has_lkey()
502 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, s_last); in rvt_qp_sends_has_lkey() local
1557 struct rvt_rwqe *wqe; in rvt_post_recv() local
1724 struct rvt_swqe *wqe; in rvt_post_one_wr() local
1960 struct rvt_rwqe *wqe; in rvt_post_srq_recv() local
/drivers/infiniband/hw/mlx5/
odp.c
782 struct mlx5_ib_qp *qp, void *wqe, in pagefault_data_segments()
868 struct mlx5_ib_qp *qp, void **wqe, void **wqe_end, int wqe_length) in mlx5_ib_mr_initiator_pfault_handler()
971 struct mlx5_ib_qp *qp, void **wqe, void **wqe_end, int wqe_length) in mlx5_ib_mr_responder_pfault_handler()
1026 void *wqe, *wqe_end; in mlx5_ib_mr_wqe_pfault_handler() local
/drivers/scsi/bnx2fc/
bnx2fc_hwi.c
626 static void bnx2fc_process_unsol_compl(struct bnx2fc_rport *tgt, u16 wqe) in bnx2fc_process_unsol_compl()
873 void bnx2fc_process_cq_compl(struct bnx2fc_rport *tgt, u16 wqe) in bnx2fc_process_cq_compl()
998 static struct bnx2fc_work *bnx2fc_alloc_work(struct bnx2fc_rport *tgt, u16 wqe) in bnx2fc_alloc_work()
1012 static void bnx2fc_pending_work(struct bnx2fc_rport *tgt, unsigned int wqe) in bnx2fc_pending_work()
1040 u16 wqe; in bnx2fc_process_new_cqes() local
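
Most of the definitions above share one pattern: a work queue entry (WQE) is a fixed-size descriptor in a ring that the driver shares with the hardware. The driver fetches the next free slot, fills in the opcode, length and DMA address, advances a producer index, and rings a doorbell so the device starts processing. The fragment below is a minimal, self-contained sketch of that pattern only; every name in it (demo_wq, demo_wqe, demo_get_next_wqe, demo_post_send) is hypothetical and is not taken from any of the files listed here.

/*
 * Illustrative sketch only: demo_wq, demo_wqe, demo_get_next_wqe and
 * demo_post_send are hypothetical names, not kernel APIs. The ring, the
 * free-running producer/consumer indices and the "ring the doorbell" step
 * mirror the pattern the search hits above implement in driver-specific ways.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define DEMO_WQ_DEPTH 64                        /* ring size, power of two */

struct demo_wqe {                               /* one work queue entry */
        uint32_t opcode;
        uint32_t length;
        uint64_t addr;                          /* DMA address of the payload */
};

struct demo_wq {                                /* a send work queue */
        struct demo_wqe ring[DEMO_WQ_DEPTH];
        uint32_t prod;                          /* advanced when a WQE is posted */
        uint32_t cons;                          /* advanced when a completion arrives */
};

/* Return the next free WQE slot, or NULL if the ring is full. */
static struct demo_wqe *demo_get_next_wqe(struct demo_wq *wq)
{
        if (wq->prod - wq->cons >= DEMO_WQ_DEPTH)
                return NULL;
        return &wq->ring[wq->prod & (DEMO_WQ_DEPTH - 1)];
}

/* Fill the next WQE and advance the producer index ("ring the doorbell"). */
static int demo_post_send(struct demo_wq *wq, uint64_t addr, uint32_t len)
{
        struct demo_wqe *wqe = demo_get_next_wqe(wq);

        if (!wqe)
                return -1;                      /* queue full */
        wqe->opcode = 1;                        /* hypothetical SEND opcode */
        wqe->length = len;
        wqe->addr = addr;
        wq->prod++;                             /* a real driver writes prod to a doorbell register */
        return 0;
}

int main(void)
{
        struct demo_wq wq;

        memset(&wq, 0, sizeof(wq));
        if (demo_post_send(&wq, 0x1000, 512) == 0)
                printf("posted one WQE, prod = %u\n", wq.prod);
        return 0;
}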
