/drivers/net/ethernet/cisco/enic/

D | enic_res.h
     52  u64 wrid = 0;   in enic_queue_wq_desc_ex()  [local]
     66  (u8)cq_entry, compressed_send, wrid);   in enic_queue_wq_desc_ex()
    127  u64 wrid = 0;   in enic_queue_rq_desc()  [local]
    135  vnic_rq_post(rq, os_buf, os_buf_index, dma_addr, len, wrid);   in enic_queue_rq_desc()

D | vnic_rq.h
    122  uint64_t wrid)   in vnic_rq_post()  [argument]
    130  buf->wr_id = wrid;   in vnic_rq_post()

D | vnic_wq.h
    135  uint8_t compressed_send, uint64_t wrid)   in vnic_wq_post()  [argument]
    146  buf->wr_id = wrid;   in vnic_wq_post()

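The enic hits above show the simplest form of the pattern: the queueing helpers in enic_res.h build a 64-bit wrid and hand it to vnic_rq_post()/vnic_wq_post(), which stash it in the per-descriptor buf->wr_id field for the completion path to return later. Below is a minimal userspace sketch of that idea, not the driver code itself; all demo_* names are hypothetical.

/* Sketch only: a post helper records the caller's opaque wrid in the
 * per-descriptor bookkeeping slot so the completion path can hand it back. */
#include <stdint.h>

struct demo_buf {
    void     *os_buf;   /* caller's buffer cookie */
    uint64_t  dma_addr; /* stand-in for a DMA address */
    unsigned  len;
    uint64_t  wr_id;    /* opaque ID handed back on completion */
};

struct demo_ring {
    struct demo_buf *to_use;  /* next free bookkeeping slot */
    struct demo_buf *bufs;    /* array of count slots */
    unsigned         count;
};

static void demo_post(struct demo_ring *ring, void *os_buf,
                      uint64_t dma_addr, unsigned len, uint64_t wrid)
{
    struct demo_buf *buf = ring->to_use;

    buf->os_buf   = os_buf;
    buf->dma_addr = dma_addr;
    buf->len      = len;
    buf->wr_id    = wrid;   /* mirrors buf->wr_id = wrid in vnic_rq/wq_post() */

    /* advance to the next slot, wrapping around the ring */
    ring->to_use = ring->bufs + ((buf - ring->bufs + 1) % ring->count);
}
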
/drivers/infiniband/hw/cxgb4/

D | t4fw_ri_api.h
    548  __u16 wrid;   [member]
    576  __u16 wrid;   [member]
    601  __u16 wrid;   [member]
    632  __u16 wrid;   [member]
    649  __u16 wrid;   [member]
    658  __u16 wrid;   [member]
    696  __u16 wrid;   [member]
    733  __u16 wrid;   [member]
    745  __u16 wrid;   [member]

D | t4.h
    119  static inline void init_wr_hdr(union t4_wr *wqe, u16 wrid,   in init_wr_hdr()  [argument]
    124  wqe->send.wrid = wrid;   in init_wr_hdr()

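cxgb4 takes a different route: wrid is a 16-bit field embedded in the firmware work-request structures themselves (the t4fw_ri_api.h members above), and init_wr_hdr() copies the caller's value into the header so the matching completion can be identified later. A hedged sketch of carrying the ID inside the request header rather than in a host-side array; demo_wr_hdr and demo_init_wr_hdr are invented names and do not reproduce the real t4_wr layout.

/* Sketch only: a 16-bit request ID carried in the work-request header. */
#include <stdint.h>
#include <string.h>

struct demo_wr_hdr {
    uint8_t  opcode;
    uint8_t  flags;
    uint16_t wrid;    /* ID travelling with the request itself */
    uint32_t len;
};

static inline void demo_init_wr_hdr(struct demo_wr_hdr *hdr, uint16_t wrid,
                                    uint8_t opcode, uint8_t flags, uint32_t len)
{
    memset(hdr, 0, sizeof(*hdr));
    hdr->opcode = opcode;
    hdr->flags  = flags;
    hdr->wrid   = wrid;   /* mirrors wqe->send.wrid = wrid in init_wr_hdr() */
    hdr->len    = len;
}
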
/drivers/infiniband/hw/mlx4/

D | srq.c
    172  srq->wrid = kvmalloc_array(srq->msrq.max,   in mlx4_ib_create_srq()
    174  if (!srq->wrid) {   in mlx4_ib_create_srq()
    207  kvfree(srq->wrid);   in mlx4_ib_create_srq()
    284  kvfree(msrq->wrid);   in mlx4_ib_destroy_srq()
    340  srq->wrid[srq->head] = wr->wr_id;   in mlx4_ib_post_srq_recv()

D | cq.c
    621  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx4_ib_qp_sw_comp()
    737  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx4_ib_poll_one()
    742  wc->wr_id = srq->wrid[wqe_ctr];   in mlx4_ib_poll_one()
    747  wc->wr_id = srq->wrid[wqe_ctr];   in mlx4_ib_poll_one()
    752  wc->wr_id = wq->wrid[tail];   in mlx4_ib_poll_one()

D | qp.c
   1139  qp->sq.wrid = kvmalloc_array(qp->sq.wqe_cnt,   in create_qp_common()
   1141  qp->rq.wrid = kvmalloc_array(qp->rq.wqe_cnt,   in create_qp_common()
   1143  if (!qp->sq.wrid || !qp->rq.wrid) {   in create_qp_common()
   1232  kvfree(qp->sq.wrid);   in create_qp_common()
   1233  kvfree(qp->rq.wrid);   in create_qp_common()
   1450  kvfree(qp->sq.wrid);   in destroy_qp_common()
   1451  kvfree(qp->rq.wrid);   in destroy_qp_common()
   3554  qp->sq.wrid[(qp->sq.head + nreq) & (qp->sq.wqe_cnt - 1)] = wr->wr_id;   in _mlx4_ib_post_send()
   3873  qp->rq.wrid[ind] = wr->wr_id;   in _mlx4_ib_post_recv()

D | mlx4_ib.h
    163  u64 *wrid;   [member]
    378  u64 *wrid;   [member]

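mlx4 keeps one u64 wrid[] per work queue, sized to wqe_cnt (a power of two): the post path records wr->wr_id at head & (wqe_cnt - 1) and the poll path reads it back at tail & (wqe_cnt - 1) when filling in wc->wr_id. A small standalone sketch of that producer/consumer indexing; the demo_* names are hypothetical and calloc()/free() stand in for the kernel allocators seen above.

/* Sketch of the per-queue wrid ring: post stores at head, poll reads at tail. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_wq {
    uint64_t *wrid;     /* one slot per work queue entry */
    unsigned  wqe_cnt;  /* must be a power of two */
    unsigned  head;     /* producer index (post) */
    unsigned  tail;     /* consumer index (poll) */
};

static int demo_wq_init(struct demo_wq *wq, unsigned wqe_cnt)
{
    wq->wrid = calloc(wqe_cnt, sizeof(*wq->wrid));  /* stands in for kvmalloc_array() */
    if (!wq->wrid)
        return -1;
    wq->wqe_cnt = wqe_cnt;
    wq->head = wq->tail = 0;
    return 0;
}

/* post side: remember the caller's opaque ID for this slot */
static void demo_wq_post(struct demo_wq *wq, uint64_t wr_id)
{
    wq->wrid[wq->head & (wq->wqe_cnt - 1)] = wr_id;
    wq->head++;
}

/* poll side: hand the ID back in completion order */
static uint64_t demo_wq_poll(struct demo_wq *wq)
{
    uint64_t wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];

    wq->tail++;
    return wr_id;
}

int main(void)
{
    struct demo_wq wq;
    uint64_t a, b;

    if (demo_wq_init(&wq, 8))
        return 1;
    demo_wq_post(&wq, 0x1234);
    demo_wq_post(&wq, 0x5678);
    a = demo_wq_poll(&wq);
    b = demo_wq_poll(&wq);
    printf("%#llx %#llx\n", (unsigned long long)a, (unsigned long long)b);
    free(wq.wrid);
    return 0;
}
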
/drivers/infiniband/hw/mthca/

D | mthca_srq.c
    151  kfree(srq->wrid);   in mthca_free_srq_buf()
    165  srq->wrid = kmalloc_array(srq->max, sizeof(u64), GFP_KERNEL);   in mthca_alloc_srq_buf()
    166  if (!srq->wrid)   in mthca_alloc_srq_buf()
    173  kfree(srq->wrid);   in mthca_alloc_srq_buf()
    540  srq->wrid[ind] = wr->wr_id;   in mthca_tavor_post_srq_recv()
    623  srq->wrid[ind] = wr->wr_id;   in mthca_arbel_post_srq_recv()

D | mthca_provider.h
    220  u64 *wrid;   [member]
    273  u64 *wrid;   [member]

D | mthca_qp.c
   1066  qp->wrid = kmalloc_array(qp->rq.max + qp->sq.max, sizeof(u64),   in mthca_alloc_wqe_buf()
   1068  if (!qp->wrid)   in mthca_alloc_wqe_buf()
   1079  kfree(qp->wrid);   in mthca_alloc_wqe_buf()
   1089  kfree(qp->wrid);   in mthca_free_wqe_buf()
   1774  qp->wrid[ind + qp->rq.max] = wr->wr_id;   in mthca_tavor_post_send()
   1885  qp->wrid[ind] = wr->wr_id;   in mthca_tavor_post_receive()
   2104  qp->wrid[ind + qp->rq.max] = wr->wr_id;   in mthca_arbel_post_send()
   2211  qp->wrid[ind] = wr->wr_id;   in mthca_arbel_post_receive()

D | mthca_cq.c
    537  entry->wr_id = (*cur_qp)->wrid[wqe_index +   in mthca_poll_one()
    544  entry->wr_id = srq->wrid[wqe_index];   in mthca_poll_one()
    558  entry->wr_id = (*cur_qp)->wrid[wqe_index];   in mthca_poll_one()

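mthca uses a single shared array per QP instead of one per queue: qp->wrid has rq.max + sq.max entries, receive slots occupy [0, rq.max) and send slots are offset by rq.max (qp->wrid[ind + qp->rq.max] above). A sketch of that shared indexing under hypothetical demo_* names:

/* Sketch only: one allocation serves both queues; receives first, sends after. */
#include <stdint.h>
#include <stdlib.h>

struct demo_qp {
    uint64_t *wrid;    /* rq_max receive slots followed by sq_max send slots */
    unsigned  rq_max;
    unsigned  sq_max;
};

static int demo_qp_alloc_wrid(struct demo_qp *qp, unsigned rq_max, unsigned sq_max)
{
    qp->wrid = calloc((size_t)rq_max + sq_max, sizeof(*qp->wrid));
    if (!qp->wrid)
        return -1;
    qp->rq_max = rq_max;
    qp->sq_max = sq_max;
    return 0;
}

static void demo_record_recv(struct demo_qp *qp, unsigned ind, uint64_t wr_id)
{
    qp->wrid[ind] = wr_id;                 /* receive slots start at 0 */
}

static void demo_record_send(struct demo_qp *qp, unsigned ind, uint64_t wr_id)
{
    qp->wrid[ind + qp->rq_max] = wr_id;    /* send slots offset by rq_max */
}
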
/drivers/infiniband/hw/hns/

D | hns_roce_srq.c
    253  srq->wrid = kvmalloc_array(srq->wqe_cnt, sizeof(u64), GFP_KERNEL);   in alloc_srq_wrid()
    254  if (!srq->wrid)   in alloc_srq_wrid()
    262  kvfree(srq->wrid);   in free_srq_wrid()
    263  srq->wrid = NULL;   in free_srq_wrid()

D | hns_roce_qp.c
    978  hr_qp->sq.wrid = sq_wrid;   in alloc_kernel_wrid()
    979  hr_qp->rq.wrid = rq_wrid;   in alloc_kernel_wrid()
    989  kfree(hr_qp->rq.wrid);   in free_kernel_wrid()
    990  kfree(hr_qp->sq.wrid);   in free_kernel_wrid()

D | hns_roce_device.h
    360  u64 *wrid; /* Work request ID */   [member]
    481  u64 *wrid;   [member]

/drivers/infiniband/hw/mlx5/

D | srq.c
    147  srq->wrid = kvmalloc_array(srq->msrq.max, sizeof(u64), GFP_KERNEL);   in create_srq_kernel()
    148  if (!srq->wrid) {   in create_srq_kernel()
    187  kvfree(srq->wrid);   in destroy_srq_kernel()
    437  srq->wrid[srq->head] = wr->wr_id;   in mlx5_ib_post_srq_recv()

D | cq.c
    190  wc->wr_id = srq->wrid[wqe_ctr];   in handle_responder()
    197  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in handle_responder()
    408  wc->wr_id = wq->wrid[idx];   in sw_comp()
    501  wc->wr_id = wq->wrid[idx];   in mlx5_poll_one()
    527  wc->wr_id = wq->wrid[idx];   in mlx5_poll_one()
    535  wc->wr_id = srq->wrid[wqe_ctr];   in mlx5_poll_one()
    539  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx5_poll_one()

D | mem.c
    130  qp->sq.wrid[idx] = wr_id;   in post_send_nop()

D | qp.c
   1007  kvfree(qp->sq.wrid);   in destroy_qp()
   1009  kvfree(qp->rq.wrid);   in destroy_qp()
   1101  qp->sq.wrid = kvmalloc_array(qp->sq.wqe_cnt,   in _create_kernel_qp()
   1102  sizeof(*qp->sq.wrid), GFP_KERNEL);   in _create_kernel_qp()
   1105  qp->rq.wrid = kvmalloc_array(qp->rq.wqe_cnt,   in _create_kernel_qp()
   1106  sizeof(*qp->rq.wrid), GFP_KERNEL);   in _create_kernel_qp()
   1112  if (!qp->sq.wrid || !qp->sq.wr_data || !qp->rq.wrid ||   in _create_kernel_qp()
   1123  kvfree(qp->sq.wrid);   in _create_kernel_qp()
   1125  kvfree(qp->rq.wrid);   in _create_kernel_qp()

D | mlx5_ib.h
    335  u64 *wrid;   [member]
    591  u64 *wrid;   [member]

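The SRQ hits in both mlx4 and mlx5 show a third indexing scheme: the post path stores wr->wr_id at srq->head, but the poll path has no tail to advance, so it looks the ID up by the WQE counter reported in the CQE (wqe_ctr), since SRQ slots are consumed by whichever attached QP receives next and do not complete in post order. A minimal sketch of that lookup; demo_* names are made up and the free-list the real drivers use to recycle slots is reduced to a simple increment here.

/* Sketch only: store at the consumed slot, look up by the slot index the
 * completion reports. */
#include <stdint.h>

struct demo_srq {
    uint64_t *wrid;
    unsigned  max;
    unsigned  head;   /* next free slot; real drivers pull this from a free-list */
};

struct demo_cqe {
    uint16_t wqe_ctr; /* index of the SRQ slot this completion refers to */
};

static void demo_srq_post(struct demo_srq *srq, uint64_t wr_id)
{
    srq->wrid[srq->head] = wr_id;          /* mirrors srq->wrid[srq->head] = wr->wr_id */
    srq->head = (srq->head + 1) % srq->max;
}

static uint64_t demo_srq_complete(const struct demo_srq *srq,
                                  const struct demo_cqe *cqe)
{
    return srq->wrid[cqe->wqe_ctr];        /* mirrors wc->wr_id = srq->wrid[wqe_ctr] */
}
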
/drivers/infiniband/hw/bnxt_re/

D | bnxt_re.h
    110  u64 wrid;   [member]

/drivers/infiniband/hw/irdma/

D | user.h
    327  u64 wrid;   [member]

D | uk.c
    235  qp->sq_wrtrk_array[*wqe_idx].wrid = info->wr_id;   in irdma_qp_get_next_send_wqe()
   1242  info->wr_id = qp->sq_wrtrk_array[wqe_idx].wrid;   in irdma_uk_cq_poll_cmpl()
   1268  info->wr_id = qp->sq_wrtrk_array[tail].wrid;   in irdma_uk_cq_poll_cmpl()

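irdma tracks a small bookkeeping struct per send WQE rather than a bare u64: sq_wrtrk_array[wqe_idx].wrid takes the caller's wr_id when the WQE is carved out (uk.c line 235), and the poll path copies it back from the entry selected by wqe_idx or tail. A sketch of that per-WQE tracking entry; the demo_* names and extra fields are illustrative only, not the real irdma layout.

/* Sketch only: the ID is one field of a per-WQE tracking entry filled on
 * post and read back at completion time. */
#include <stdint.h>

struct demo_wrtrk {
    uint64_t wrid;      /* caller's opaque work request ID */
    uint32_t wr_len;    /* example of extra per-WQE bookkeeping */
    uint8_t  quanta;    /* how many queue slots the WQE occupies */
};

struct demo_sq {
    struct demo_wrtrk *wrtrk_array;  /* one entry per SQ slot */
    unsigned           size;         /* power of two */
    unsigned           head;
};

/* carve out the next WQE slot and record its ID */
static unsigned demo_sq_get_wqe(struct demo_sq *sq, uint64_t wr_id, uint32_t len)
{
    unsigned idx = sq->head & (sq->size - 1);

    sq->wrtrk_array[idx].wrid   = wr_id;
    sq->wrtrk_array[idx].wr_len = len;
    sq->wrtrk_array[idx].quanta = 1;
    sq->head++;
    return idx;
}

/* completion path: recover the ID from the tracked entry */
static uint64_t demo_sq_complete(const struct demo_sq *sq, unsigned wqe_idx)
{
    return sq->wrtrk_array[wqe_idx].wrid;
}
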
/drivers/infiniband/hw/ocrdma/

D | ocrdma.h
    394  uint64_t wrid;   [member]