/drivers/infiniband/ulp/ipoib/

D  ipoib_ib.c
    108  priv->rx_wr.wr_id = id | IPOIB_OP_RECV;  in ipoib_ib_post_receive()
    178  unsigned int wr_id = wc->wr_id & ~IPOIB_OP_RECV;  in ipoib_ib_handle_rx_wc()  local
    184  wr_id, wc->status);  in ipoib_ib_handle_rx_wc()
    186  if (unlikely(wr_id >= ipoib_recvq_size)) {  in ipoib_ib_handle_rx_wc()
    188  wr_id, ipoib_recvq_size);  in ipoib_ib_handle_rx_wc()
    192  skb = priv->rx_ring[wr_id].skb;  in ipoib_ib_handle_rx_wc()
    198  wc->status, wr_id, wc->vendor_err);  in ipoib_ib_handle_rx_wc()
    199  ipoib_ud_dma_unmap_rx(priv, priv->rx_ring[wr_id].mapping);  in ipoib_ib_handle_rx_wc()
    201  priv->rx_ring[wr_id].skb = NULL;  in ipoib_ib_handle_rx_wc()
    212  memcpy(mapping, priv->rx_ring[wr_id].mapping,  in ipoib_ib_handle_rx_wc()
    [all …]

D  ipoib_cm.c
     75  .wr_id = IPOIB_CM_RX_DRAIN_WRID,
     99  priv->cm.rx_wr.wr_id = id | IPOIB_OP_CM | IPOIB_OP_RECV;  in ipoib_cm_post_receive_srq()
    125  wr->wr_id = id | IPOIB_OP_CM | IPOIB_OP_RECV;  in ipoib_cm_post_receive_nonsrq()
    564  unsigned int wr_id = wc->wr_id & ~(IPOIB_OP_CM | IPOIB_OP_RECV);  in ipoib_cm_handle_rx_wc()  local
    574  wr_id, wc->status);  in ipoib_cm_handle_rx_wc()
    576  if (unlikely(wr_id >= ipoib_recvq_size)) {  in ipoib_cm_handle_rx_wc()
    577  if (wr_id == (IPOIB_CM_RX_DRAIN_WRID & ~(IPOIB_OP_CM | IPOIB_OP_RECV))) {  in ipoib_cm_handle_rx_wc()
    585  wr_id, ipoib_recvq_size);  in ipoib_cm_handle_rx_wc()
    594  skb = rx_ring[wr_id].skb;  in ipoib_cm_handle_rx_wc()
    599  wc->status, wr_id, wc->vendor_err);  in ipoib_cm_handle_rx_wc()
    [all …]

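Both IPoIB files above use the same work-request-ID scheme: the receive ring index is OR'd with high flag bits (IPOIB_OP_RECV, plus IPOIB_OP_CM on the connected-mode path), and the completion handler strips the flags to recover the ring slot. A minimal userspace sketch of that encode/decode follows; the flag values are stand-ins chosen well above any legal ring index, not necessarily the driver's actual constants.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Flag bits OR'd into the work-request ID; values chosen for
     * illustration, bits far above any legal ring index. */
    #define OP_RECV (1UL << 31)
    #define OP_CM   (1UL << 30)

    static uint64_t encode_rx_wrid(unsigned int ring_index, int connected_mode)
    {
            uint64_t id = ring_index | OP_RECV;
            if (connected_mode)
                    id |= OP_CM;
            return id;
    }

    static unsigned int decode_rx_index(uint64_t wr_id)
    {
            return wr_id & ~(OP_RECV | OP_CM);   /* strip flags, keep index */
    }

    int main(void)
    {
            uint64_t id = encode_rx_wrid(42, 1);
            assert(id & OP_RECV);                 /* routed to the RX path */
            assert(decode_rx_index(id) == 42);    /* ring slot recovered */
            printf("wr_id=%#llx index=%u\n",
                   (unsigned long long)id, decode_rx_index(id));
            return 0;
    }
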
/drivers/infiniband/ulp/iser/

D  iser_verbs.c
    1005  ib_conn->beacon.wr_id = ISER_BEACON_WRID;  in iser_connect()
    1059  rx_wr.wr_id = (uintptr_t)iser_conn->login_resp_buf;  in iser_post_recvl()
    1083  rx_wr->wr_id = (uintptr_t)rx_desc;  in iser_post_recvm()
    1120  wr->wr_id = (uintptr_t)tx_desc;  in iser_post_send()
    1146  is_iser_tx_desc(struct iser_conn *iser_conn, void *wr_id)  in is_iser_tx_desc()  argument
    1151  if (wr_id >= start && wr_id < start + len)  in is_iser_tx_desc()
    1171  void *wr_id = (void *)(uintptr_t)wc->wr_id;  in iser_handle_comp_error()  local
    1180  if (wc->wr_id == ISER_FASTREG_LI_WRID)  in iser_handle_comp_error()
    1183  if (is_iser_tx_desc(iser_conn, wr_id)) {  in iser_handle_comp_error()
    1184  struct iser_tx_desc *desc = wr_id;  in iser_handle_comp_error()
    [all …]

D  iser_memory.c
    422  inv_wr->wr_id = ISER_FASTREG_LI_WRID;  in iser_inv_rkey()
    455  wr->wr.wr_id = ISER_FASTREG_LI_WRID;  in iser_reg_sig_mr()
    504  wr->wr.wr_id = ISER_FASTREG_LI_WRID;  in iser_fast_reg_mr()

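The iSER pattern differs from IPoIB's: wr_id usually carries the descriptor's kernel virtual address (cast through uintptr_t), with a few reserved sentinels (ISER_BEACON_WRID, ISER_FASTREG_LI_WRID) for requests that have no descriptor. is_iser_tx_desc() then classifies an errored completion by checking whether the encoded pointer falls inside the connection's TX descriptor array. A hedged sketch of that range check, with hypothetical struct names standing in for iser_tx_desc and iser_conn:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical descriptor and connection layout; only the
     * range check matters here. */
    struct tx_desc { char payload[64]; };

    struct conn {
            struct tx_desc descs[16];   /* contiguous TX descriptor array */
    };

    /* Mirrors the shape of is_iser_tx_desc(): a wr_id names a TX
     * descriptor iff the pointer it encodes lies inside the array. */
    static int is_tx_desc(struct conn *c, uint64_t wr_id)
    {
            uintptr_t p     = (uintptr_t)wr_id;
            uintptr_t start = (uintptr_t)c->descs;
            uintptr_t end   = start + sizeof(c->descs);

            return p >= start && p < end;
    }

    int main(void)
    {
            struct conn c;
            uint64_t id = (uintptr_t)&c.descs[3];   /* as posted in a send WR */

            printf("tx? %d\n", is_tx_desc(&c, id));          /* 1 */
            printf("tx? %d\n", is_tx_desc(&c, (uint64_t)0)); /* 0: sentinel/other */
            return 0;
    }
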
/drivers/infiniband/ulp/srpt/

D  ib_srpt.h
    144  static inline enum srpt_opcode opcode_from_wr_id(u64 wr_id)  in opcode_from_wr_id()  argument
    146  return wr_id >> 32;  in opcode_from_wr_id()
    148  static inline u32 idx_from_wr_id(u64 wr_id)  in idx_from_wr_id()  argument
    150  return (u32)wr_id;  in idx_from_wr_id()

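The two inline helpers above fix the layout: opcode in the upper 32 bits, buffer index in the lower 32. The matching encoder, encode_wr_id(), is used in ib_srpt.c below but its body is not shown here, so this sketch infers it from the decoders; the enum values are assumptions.

    #include <assert.h>
    #include <stdint.h>

    enum srpt_opcode { SRPT_RECV = 1, SRPT_SEND = 2 };   /* values assumed */

    /* Inverse of the two decoders shown in ib_srpt.h:
     * opcode_from_wr_id() is wr_id >> 32, idx_from_wr_id() is (u32)wr_id. */
    static uint64_t encode_wr_id(enum srpt_opcode op, uint32_t idx)
    {
            return ((uint64_t)op << 32) | idx;
    }

    int main(void)
    {
            uint64_t id = encode_wr_id(SRPT_SEND, 7);
            assert((id >> 32) == SRPT_SEND);   /* opcode_from_wr_id */
            assert((uint32_t)id == 7);         /* idx_from_wr_id */
            return 0;
    }
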
D  ib_srpt.c
     781  wr.wr_id = encode_wr_id(SRPT_RECV, ioctx->ioctx.index);  in srpt_post_recv()
     823  wr.wr_id = encode_wr_id(SRPT_SEND, ioctx->ioctx.index);  in srpt_post_send()
    1384  static void srpt_handle_send_err_comp(struct srpt_rdma_ch *ch, u64 wr_id)  in srpt_handle_send_err_comp()  argument
    1392  index = idx_from_wr_id(wr_id);  in srpt_handle_send_err_comp()
    1903  index = idx_from_wr_id(wc->wr_id);  in srpt_process_rcv_completion()
    1941  index = idx_from_wr_id(wc->wr_id);  in srpt_process_send_completion()
    1942  opcode = opcode_from_wr_id(wc->wr_id);  in srpt_process_send_completion()
    1956  srpt_handle_send_err_comp(ch, wc->wr_id);  in srpt_process_send_completion()
    1988  if (opcode_from_wr_id(wc[i].wr_id) == SRPT_RECV)  in srpt_process_completion()
    2819  wr.wr.wr_id = encode_wr_id(i == n_rdma - 1 ?  in srpt_perform_rdmas()
    [all …]

/drivers/staging/rdma/ehca/

D  ehca_reqs.c
     55  static u64 replace_wr_id(u64 wr_id, u16 idx)  in replace_wr_id()  argument
     59  ret = wr_id & ~QMAP_IDX_MASK;  in replace_wr_id()
     65  static u16 get_app_wr_id(u64 wr_id)  in get_app_wr_id()  argument
     67  return wr_id & QMAP_IDX_MASK;  in get_app_wr_id()
     87  wqe_p->work_request_id = replace_wr_id(recv_wr->wr_id, rq_map_idx);  in ehca_write_rwqe()
    121  "send_flags=%x opcode=%x", idx, ud_wr->wr.wr_id,  in trace_ud_wr()
    177  wqe_p->work_request_id = replace_wr_id(send_wr->wr_id, sq_map_idx);  in ehca_write_swqe()
    179  qmap_entry->app_wr_id = get_app_wr_id(send_wr->wr_id);  in ehca_write_swqe()
    561  qmap_entry->app_wr_id = get_app_wr_id(recv_wr->wr_id);  in internal_post_recv()
    744  wc->wr_id = replace_wr_id(cqe->work_request_id, qmap_entry->app_wr_id);  in ehca_poll_cq_one()
    [all …]

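ehca takes the opposite approach to srpt: instead of owning the whole wr_id, it steals only the low QMAP_IDX_MASK bits for its queue-map index, saving the displaced bits in qmap_entry->app_wr_id so that ehca_poll_cq_one() can splice them back. A small sketch of that round trip; the mask width is an assumption.

    #include <assert.h>
    #include <stdint.h>

    #define QMAP_IDX_MASK 0xFFFFULL   /* low-bit field; width assumed */

    /* As in replace_wr_id(): keep the app's high bits, overwrite the
     * low bits with the driver's queue-map index. */
    static uint64_t replace_wr_id(uint64_t wr_id, uint16_t idx)
    {
            return (wr_id & ~QMAP_IDX_MASK) | idx;
    }

    /* As in get_app_wr_id(): the app's original low bits, stashed at
     * post time so they can be restored at poll time. */
    static uint16_t get_app_wr_id(uint64_t wr_id)
    {
            return wr_id & QMAP_IDX_MASK;
    }

    int main(void)
    {
            uint64_t app_id = 0xABCD0123ULL;           /* consumer's wr_id */
            uint16_t stash  = get_app_wr_id(app_id);   /* saved in qmap entry */
            uint64_t hw_id  = replace_wr_id(app_id, 42);

            /* On poll, the driver reverses the substitution: */
            assert(replace_wr_id(hw_id, stash) == app_id);
            return 0;
    }
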
/drivers/infiniband/hw/mlx4/

D  mad.c
     591  wr.wr.wr_id = ((u64) tun_tx_ix) | MLX4_TUN_SET_WRID_QPN(dest_qpt);  in mlx4_ib_send_to_slave()
    1180  recv_wr.wr_id = (u64) index | MLX4_TUN_WRID_RECV |  in mlx4_ib_post_pv_qp_buf()
    1295  wr.wr.wr_id = ((u64) wire_tx_ix) | MLX4_TUN_SET_WRID_QPN(src_qpnum);  in mlx4_ib_send_to_wire()
    1339  struct mlx4_ib_demux_pv_qp *tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc->wr_id)];  in mlx4_ib_multiplex_mad()
    1340  int wr_ix = wc->wr_id & (MLX4_NUM_TUNNEL_BUFS - 1);  in mlx4_ib_multiplex_mad()
    1591  tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc.wr_id)];  in mlx4_ib_tunnel_comp_worker()
    1597  wc.wr_id &  in mlx4_ib_tunnel_comp_worker()
    1601  "buf:%lld\n", wc.wr_id);  in mlx4_ib_tunnel_comp_worker()
    1606  wc.wr_id, wc.status);  in mlx4_ib_tunnel_comp_worker()
    1607  ib_destroy_ah(tun_qp->tx_ring[wc.wr_id &  in mlx4_ib_tunnel_comp_worker()
    [all …]

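The mlx4 tunnel path packs two things into one wr_id: a proxy QP selector via MLX4_TUN_SET_WRID_QPN() in the high bits, and the ring slot in the low bits, recovered with & (MLX4_NUM_TUNNEL_BUFS - 1), which requires a power-of-two buffer count. A sketch with assumed field placement (the real macros define the actual shift):

    #include <assert.h>
    #include <stdint.h>

    #define NUM_TUNNEL_BUFS 256U   /* must be a power of two for the mask trick */

    /* Field placement assumed for illustration; the real
     * MLX4_TUN_SET_WRID_QPN/MLX4_TUN_WRID_QPN macros define the shift. */
    #define SET_WRID_QPN(qpn) ((uint64_t)(qpn) << 32)
    #define GET_WRID_QPN(id)  ((uint32_t)((id) >> 32))

    int main(void)
    {
            uint64_t id = 37 | SET_WRID_QPN(2);   /* slot 37, proxy QP 2 */

            assert(GET_WRID_QPN(id) == 2);
            assert((id & (NUM_TUNNEL_BUFS - 1)) == 37);   /* ring slot */
            return 0;
    }
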
D  cq.c
    624  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in mlx4_ib_qp_sw_comp()
    757  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in mlx4_ib_poll_one()
    762  wc->wr_id = srq->wrid[wqe_ctr];  in mlx4_ib_poll_one()
    767  wc->wr_id = srq->wrid[wqe_ctr];  in mlx4_ib_poll_one()
    772  wc->wr_id = wq->wrid[tail];  in mlx4_ib_poll_one()

/drivers/staging/rdma/ipath/

D  ipath_srq.c
     80  wqe->wr_id = wr->wr_id;  in ipath_post_srq_receive()
    290  p->wr_id = wqe->wr_id;  in ipath_modify_srq()

D  ipath_ruc.c
    145  wc.wr_id = wqe->wr_id;  in ipath_init_sge()
    211  qp->r_wr_id = wqe->wr_id;  in ipath_get_rwqe()
    435  wc.wr_id = qp->r_wr_id;  in ipath_ruc_loopback()
    713  wc.wr_id = wqe->wr.wr_id;  in ipath_send_complete()

D  ipath_ud.c
    148  wc.wr_id = wqe->wr_id;  in ipath_ud_loopback()
    556  wc.wr_id = qp->r_wr_id;  in ipath_ud_rcv()

/drivers/staging/rdma/hfi1/

D  srq.c
     97  wqe->wr_id = wr->wr_id;  in hfi1_post_srq_receive()
    303  p->wr_id = wqe->wr_id;  in hfi1_modify_srq()

D  ruc.c
    136  wc.wr_id = wqe->wr_id;  in init_sge()
    208  qp->r_wr_id = wqe->wr_id;  in hfi1_get_rwqe()
    577  wc.wr_id = qp->r_wr_id;  in ruc_loopback()
    924  wc.wr_id = wqe->wr.wr_id;  in hfi1_send_complete()

/drivers/infiniband/hw/qib/

D  qib_srq.c
     80  wqe->wr_id = wr->wr_id;  in qib_post_srq_receive()
    286  p->wr_id = wqe->wr_id;  in qib_modify_srq()

D  qib_ruc.c
    118  wc.wr_id = wqe->wr_id;  in qib_init_sge()
    190  qp->r_wr_id = wqe->wr_id;  in qib_get_rwqe()
    555  wc.wr_id = qp->r_wr_id;  in qib_ruc_loopback()
    797  wc.wr_id = wqe->wr.wr_id;  in qib_send_complete()

/drivers/infiniband/ulp/isert/

D  ib_isert.c
     987  rx_wr->wr_id = (uintptr_t)rx_desc;  in isert_post_recvm()
    1009  rx_wr.wr_id = (uintptr_t)rx_desc;  in isert_post_recv()
    1032  send_wr.wr_id = (uintptr_t)tx_desc;  in isert_post_send()
    1102  send_wr->wr_id = (uintptr_t)&isert_cmd->tx_desc;  in isert_init_send_wr()
    1125  rx_wr.wr_id = (uintptr_t)isert_conn->login_req_buf;  in isert_rdma_post_recvl()
    2035  is_isert_tx_desc(struct isert_conn *isert_conn, void *wr_id)  in is_isert_tx_desc()  argument
    2040  if ((wr_id >= start && wr_id < start + len) ||  in is_isert_tx_desc()
    2041  (wr_id == isert_conn->login_req_buf))  in is_isert_tx_desc()
    2050  if (wc->wr_id == ISER_BEACON_WRID) {  in isert_cq_comp_err()
    2054  } else if (is_isert_tx_desc(isert_conn, (void *)(uintptr_t)wc->wr_id)) {  in isert_cq_comp_err()
    [all …]

/drivers/net/ethernet/ibm/ehea/

D  ehea_qmr.h
    101  u64 wr_id;  member
    140  u64 wr_id;  /* work request ID */  member
    163  u64 wr_id;  /* work request ID from WQE */  member

D  ehea_main.c
     501  rwqe->wr_id = EHEA_BMASK_SET(EHEA_WR_ID_TYPE, wqe_type)  in ehea_refill_rq_def()
     578  int skb_index = EHEA_BMASK_GET(EHEA_WR_ID_INDEX, cqe->wr_id);  in get_skb_by_index()
     793  swqe->wr_id = SWQE_RESTART_CHECK;  in check_sqs()
     832  if (cqe->wr_id == SWQE_RESTART_CHECK) {  in ehea_proc_cqes()
     855  if (likely(EHEA_BMASK_GET(EHEA_WR_ID_TYPE, cqe->wr_id)  in ehea_proc_cqes()
     858  index = EHEA_BMASK_GET(EHEA_WR_ID_INDEX, cqe->wr_id);  in ehea_proc_cqes()
     864  swqe_av += EHEA_BMASK_GET(EHEA_WR_ID_REFILL, cqe->wr_id);  in ehea_proc_cqes()
    2078  swqe->wr_id = EHEA_BMASK_SET(EHEA_WR_ID_TYPE, EHEA_SWQE3_TYPE)  in ehea_start_xmit()
    2081  swqe->wr_id |= EHEA_BMASK_SET(EHEA_WR_ID_REFILL,  in ehea_start_xmit()
    2088  swqe->wr_id =  in ehea_start_xmit()
    [all …]

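ehea builds wr_id from named bit fields with EHEA_BMASK_SET/GET: a WQE type, an skb array index, and an RQ refill count, each recovered independently in ehea_proc_cqes(). The sketch below reimplements generic mask/shift helpers in that spirit; the field positions are assumptions, not the driver's layout.

    #include <assert.h>
    #include <stdint.h>

    /* Generic bitfield helpers in the spirit of EHEA_BMASK_SET/GET.
     * FIELD(hi, lo) names an inclusive bit range of a u64. */
    #define FIELD(hi, lo)    (((hi) << 8) | (lo))
    #define F_SHIFT(f)       ((f) & 0xff)
    #define F_MASK(f)        ((~0ULL >> (63 - ((f) >> 8) + F_SHIFT(f))) << F_SHIFT(f))
    #define BMASK_SET(f, v)  (((uint64_t)(v) << F_SHIFT(f)) & F_MASK(f))
    #define BMASK_GET(f, id) (((id) & F_MASK(f)) >> F_SHIFT(f))

    /* Assumed layout, standing in for EHEA_WR_ID_TYPE/INDEX/REFILL. */
    #define WR_ID_TYPE   FIELD(63, 60)   /* WQE type         */
    #define WR_ID_INDEX  FIELD(47, 32)   /* skb array index  */
    #define WR_ID_REFILL FIELD(15, 0)    /* RQ refill count  */

    int main(void)
    {
            uint64_t id = BMASK_SET(WR_ID_TYPE, 3)
                        | BMASK_SET(WR_ID_INDEX, 1234)
                        | BMASK_SET(WR_ID_REFILL, 17);

            assert(BMASK_GET(WR_ID_TYPE, id) == 3);
            assert(BMASK_GET(WR_ID_INDEX, id) == 1234);
            assert(BMASK_GET(WR_ID_REFILL, id) == 17);
            return 0;
    }
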
/drivers/net/ethernet/cisco/enic/

D  vnic_wq.h
     61  uint64_t wr_id;  /* Cookie */  member
    146  buf->wr_id = wrid;  in vnic_wq_post()

D  vnic_rq.h
     76  uint64_t wr_id;  member
    133  buf->wr_id = wrid;  in vnic_rq_post()

/drivers/infiniband/hw/cxgb3/

D  iwch_qp.c
    282  qhp->wq.rq_size_log2)].wr_id = wr->wr_id;  in build_rdma_recv()
    345  qhp->wq.rq_size_log2)].wr_id = wr->wr_id;  in build_zero_stag_recv()
    437  sqp->wr_id = wr->wr_id;  in iwch_post_send()
    448  __func__, (unsigned long long) wr->wr_id, idx,  in iwch_post_send()
    512  "wqe %p \n", __func__, (unsigned long long) wr->wr_id,  in iwch_post_receive()
    591  sqp->wr_id = mw_bind->wr_id;  in iwch_bind_mw()

/drivers/infiniband/hw/mlx5/

D  cq.c
    192  wc->wr_id = srq->wrid[wqe_ctr];  in handle_responder()
    199  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in handle_responder()
    479  wc->wr_id = wq->wrid[idx];  in mlx5_poll_one()
    505  wc->wr_id = wq->wrid[idx];  in mlx5_poll_one()
    513  wc->wr_id = srq->wrid[wqe_ctr];  in mlx5_poll_one()
    517  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in mlx5_poll_one()

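Like mlx4/cq.c above, the mlx5 poller never hands the consumer's wr_id to hardware: each work queue keeps a wrid[] shadow array written at post time, and completion code reads it back by WQE counter or by tail masked with wqe_cnt - 1 (a power-of-two ring). A sketch of that shadow-ring bookkeeping:

    #include <assert.h>
    #include <stdint.h>

    #define WQE_CNT 8U   /* power of two, so "& (WQE_CNT - 1)" wraps the index */

    struct wq {
            uint64_t wrid[WQE_CNT];   /* shadow array of consumer wr_ids */
            unsigned int head;        /* next slot to post     */
            unsigned int tail;        /* next slot to complete */
    };

    static void post(struct wq *wq, uint64_t wr_id)
    {
            wq->wrid[wq->head++ & (WQE_CNT - 1)] = wr_id;
    }

    /* Mirrors wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)] */
    static uint64_t complete(struct wq *wq)
    {
            return wq->wrid[wq->tail++ & (WQE_CNT - 1)];
    }

    int main(void)
    {
            struct wq wq = { .head = 0, .tail = 0 };

            post(&wq, 0x1111);
            post(&wq, 0x2222);
            assert(complete(&wq) == 0x1111);   /* completions in post order */
            assert(complete(&wq) == 0x2222);
            return 0;
    }
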
/drivers/infiniband/ulp/srp/

D  ib_srp.c
     460  static struct ib_recv_wr wr = { .wr_id = SRP_LAST_WR_ID };  in srp_destroy_qp()
    1062  .wr_id = LOCAL_INV_WR_ID_MASK,  in srp_inv_rkey()
    1367  wr.wr.wr_id = FAST_REG_WR_ID_MASK;  in srp_map_finish_fr()
    1766  wr.wr_id = (uintptr_t) iu;  in srp_post_send()
    1786  wr.wr_id = (uintptr_t) iu;  in srp_post_recv()
    1929  struct srp_iu *iu = (struct srp_iu *) (uintptr_t) wc->wr_id;  in srp_handle_recv()
    1995  static void srp_handle_qp_err(u64 wr_id, enum ib_wc_status wc_status,  in srp_handle_qp_err()  argument
    2000  if (wr_id == SRP_LAST_WR_ID) {  in srp_handle_qp_err()
    2006  if (wr_id & LOCAL_INV_WR_ID_MASK) {  in srp_handle_qp_err()
    2010  } else if (wr_id & FAST_REG_WR_ID_MASK) {  in srp_handle_qp_err()
    [all …]

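srp_handle_qp_err() classifies a failed completion purely from the wr_id: the exact SRP_LAST_WR_ID sentinel marks the QP-drain receive, high mask bits mark local-invalidate and fast-registration work requests, and anything else encodes an information-unit pointer. A sketch of that dispatch; the sentinel and mask values are assumptions chosen so they cannot collide with valid pointers:

    #include <stdint.h>
    #include <stdio.h>

    /* Sentinel and mask values assumed; the driver reserves bit
     * patterns that cannot collide with valid iu pointers. */
    #define SRP_LAST_WR_ID        ((uint64_t)-1)
    #define LOCAL_INV_WR_ID_MASK  (1ULL << 62)
    #define FAST_REG_WR_ID_MASK   (1ULL << 63)

    static void handle_qp_err(uint64_t wr_id)
    {
            if (wr_id == SRP_LAST_WR_ID) {           /* exact match first */
                    puts("QP drained (beacon recv completed)");
            } else if (wr_id & LOCAL_INV_WR_ID_MASK) {
                    puts("local invalidate failed");
            } else if (wr_id & FAST_REG_WR_ID_MASK) {
                    puts("fast memory registration failed");
            } else {
                    /* otherwise wr_id encodes an iu pointer, as posted
                     * in srp_post_send()/srp_post_recv() */
                    printf("iu %p failed\n", (void *)(uintptr_t)wr_id);
            }
    }

    int main(void)
    {
            handle_qp_err(SRP_LAST_WR_ID);
            handle_qp_err(LOCAL_INV_WR_ID_MASK);
            handle_qp_err(0x1000);   /* stand-in iu address */
            return 0;
    }
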
/drivers/infiniband/core/

D  mad.c
     693  u64 wr_id, u16 slid, u16 pkey_index, u8 port_num,  in build_smp_wc()  argument
     697  wc->wr_id = wr_id;  in build_smp_wc()
     835  send_wr->wr.wr_id, drslid,  in handle_outgoing_dr_smp()
    1042  mad_send_wr->send_wr.wr.wr_id = (unsigned long) mad_send_wr;  in ib_create_send_mad()
    1154  mad_send_wr->send_wr.wr.wr_id = (unsigned long)&mad_send_wr->mad_list;  in ib_send_mad()
    1988  mad_recv_wc->wc->wr_id = 0;  in ib_mad_complete_recv()
    2004  mad_recv_wc->wc->wr_id = (unsigned long) &mad_send_wr->send_buf;  in ib_mad_complete_recv()
    2192  mad_list = (struct ib_mad_list_head *)(unsigned long)wc->wr_id;  in ib_mad_recv_done_handler()
    2431  mad_list = (struct ib_mad_list_head *)(unsigned long)wc->wr_id;  in ib_mad_send_done_handler()
    2505  mad_list = (struct ib_mad_list_head *)(unsigned long)wc->wr_id;  in mad_error_handler()
    [all …]

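The core MAD layer stores a pointer to a list head embedded in its private tracking structure (ib_send_mad() at line 1154 above), and the done/error handlers cast wc->wr_id back to that list head and recover the owning structure, which the real code does with container_of(). A self-contained sketch of that round trip with simplified stand-in types:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Simplified stand-ins for struct ib_mad_list_head and the
     * private send structure that embeds it. */
    struct mad_list_head { struct mad_list_head *next, *prev; };

    struct mad_send_wr_private {
            int                  seq;        /* arbitrary payload */
            struct mad_list_head mad_list;   /* wr_id points here */
    };

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    int main(void)
    {
            struct mad_send_wr_private wr = { .seq = 99 };

            /* At post time: wr_id = (unsigned long)&mad_send_wr->mad_list */
            uint64_t wr_id = (uintptr_t)&wr.mad_list;

            /* At completion: cast back, then recover the owning structure */
            struct mad_list_head *ml = (struct mad_list_head *)(uintptr_t)wr_id;
            struct mad_send_wr_private *owner =
                    container_of(ml, struct mad_send_wr_private, mad_list);

            assert(owner->seq == 99);
            return 0;
    }
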