Searched refs:wqe_cnt (Results 1 – 16 of 16) sorted by relevance
/drivers/infiniband/hw/hns/

hns_roce_qp.c
    280   hr_qp->rq.wqe_cnt = hr_qp->rq.max_gs = 0;   in hns_roce_set_rq_size()
    290   hr_qp->rq.wqe_cnt = roundup_pow_of_two(max_cnt);   in hns_roce_set_rq_size()
    292   if ((u32)hr_qp->rq.wqe_cnt > hr_dev->caps.max_wqes) {   in hns_roce_set_rq_size()
    303   cap->max_recv_wr = hr_qp->rq.max_post = hr_qp->rq.wqe_cnt;   in hns_roce_set_rq_size()
    324   hr_qp->sq.wqe_cnt = 1 << ucmd->log_sq_bb_count;   in hns_roce_set_user_sq_size()
    328   hr_qp->buff_size = HNS_ROCE_ALOGN_UP((hr_qp->rq.wqe_cnt <<   in hns_roce_set_user_sq_size()
    330   HNS_ROCE_ALOGN_UP((hr_qp->sq.wqe_cnt <<   in hns_roce_set_user_sq_size()
    334   hr_qp->rq.offset = HNS_ROCE_ALOGN_UP((hr_qp->sq.wqe_cnt <<   in hns_roce_set_user_sq_size()
    361   hr_qp->sq.wqe_cnt = roundup_pow_of_two(max_cnt);   in hns_roce_set_kernel_sq_size()
    362   if ((u32)hr_qp->sq.wqe_cnt > hr_dev->caps.max_wqes) {   in hns_roce_set_kernel_sq_size()
    [all …]
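
Most of these hits follow one sizing convention: the requested max_recv_wr/max_send_wr is rounded
up to a power of two, stored in wqe_cnt, checked against the device's max_wqes cap, and reported
back to the caller as max_post. A minimal sketch of that pattern, with hypothetical names (this is
not the hns_roce code itself, just the shape it shares with the mlx4/mlx5/pvrdma sizing hits below):

    /* Hypothetical sketch of the wqe_cnt sizing pattern; wq_sketch and
     * wq_set_size() do not exist in any of these drivers. */
    #include <linux/errno.h>
    #include <linux/log2.h>
    #include <linux/types.h>

    struct wq_sketch {
            u32 wqe_cnt;    /* ring size, kept a power of two */
            u32 max_post;   /* how many WRs the caller may keep outstanding */
    };

    static int wq_set_size(struct wq_sketch *wq, u32 requested_wr, u32 max_wqes)
    {
            if (!requested_wr) {
                    wq->wqe_cnt = wq->max_post = 0;         /* queue unused */
                    return 0;
            }

            wq->wqe_cnt = roundup_pow_of_two(requested_wr);
            if (wq->wqe_cnt > max_wqes)                     /* device cap exceeded */
                    return -EINVAL;

            wq->max_post = wq->wqe_cnt;                     /* reported back to caller */
            return 0;
    }

Keeping wqe_cnt a power of two is what makes the "index & (wqe_cnt - 1)" masking in the post and
poll paths below work.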

hns_roce_hw_v1.c
    104    wqe = get_send_wqe(qp, ind & (qp->sq.wqe_cnt - 1));   in hns_roce_v1_post_send()
    105    qp->sq.wrid[(qp->sq.head + nreq) & (qp->sq.wqe_cnt - 1)] =   in hns_roce_v1_post_send()
    298    (qp->sq.head & ((qp->sq.wqe_cnt << 1) - 1)));   in hns_roce_v1_post_send()
    337    ind = hr_qp->rq.head & (hr_qp->rq.wqe_cnt - 1);   in hns_roce_v1_post_recv()
    369    ind = (ind + 1) & (hr_qp->rq.wqe_cnt - 1);   in hns_roce_v1_post_recv()
    2164   ((*cur_qp)->sq.wqe_cnt-1));   in hns_roce_v1_poll_one()
    2200   (wq->wqe_cnt - 1);   in hns_roce_v1_poll_one()
    2202   wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in hns_roce_v1_poll_one()
    2236   wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in hns_roce_v1_poll_one()
    2467   ilog2((unsigned int)hr_qp->sq.wqe_cnt));   in hns_roce_v1_m_sqp()
    [all …]
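
The post paths show why the power-of-two size matters: the free-running head/tail counters are
wrapped onto ring slots with "index & (wqe_cnt - 1)". A hypothetical slot lookup in that style
(the driver's own get_send_wqe() resolves the masked index against its internal buffer layout):

    /* Hypothetical: map a monotonically increasing producer index to a WQE slot.
     * wqe_shift is log2 of the WQE stride, so the shift multiplies by the size. */
    static void *wq_slot(void *buf, u32 head, u32 wqe_cnt, u32 wqe_shift)
    {
            u32 idx = head & (wqe_cnt - 1);                 /* wrap to ring */

            return (char *)buf + ((size_t)idx << wqe_shift);
    }

The doorbell write at line 298 masks with (wqe_cnt << 1) - 1 instead, keeping one extra wrap bit,
presumably so that a completely full queue can be told apart from an empty one.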

hns_roce_device.h
    270    int wqe_cnt; /* WQE num */   (member)

/drivers/infiniband/hw/mlx4/

qp.c
    228    stamp = ind & qp->sq.wqe_cnt ? cpu_to_be32(0x7fffffff) :   in stamp_send_wqe()
    230    buf = get_send_wqe(qp, ind & (qp->sq.wqe_cnt - 1));   in stamp_send_wqe()
    235    ctrl = buf = get_send_wqe(qp, n & (qp->sq.wqe_cnt - 1));   in stamp_send_wqe()
    251    ctrl = wqe = get_send_wqe(qp, n & (qp->sq.wqe_cnt - 1));   in post_nop_wqe()
    276    (n & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0);   in post_nop_wqe()
    284    unsigned s = qp->sq.wqe_cnt - (ind & (qp->sq.wqe_cnt - 1));   in pad_wraparound()
    403    qp->rq.wqe_cnt = qp->rq.max_gs = 0;   in set_rq_size()
    414    qp->rq.wqe_cnt = roundup_pow_of_two(max(1U, cap->max_recv_wr));   in set_rq_size()
    422    cap->max_recv_wr = qp->rq.max_post = qp->rq.wqe_cnt;   in set_rq_size()
    426    min(dev->dev->caps.max_wqes - MLX4_IB_SQ_MAX_SPARE, qp->rq.wqe_cnt);   in set_rq_size()
    [all …]

cq.c
    625    wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx4_ib_qp_sw_comp()
    741    wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx4_ib_poll_one()
    755    tail = wq->tail & (wq->wqe_cnt - 1);   in mlx4_ib_poll_one()
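
These cq.c hits are the consumer side of the same ring: on a completion, the wr_id returned to the
user is read back from a wrid[] array that was filled at post time, again by masking the tail with
(wqe_cnt - 1). A hedged sketch of that lookup (names hypothetical):

    /* Hypothetical: wrid[] has wqe_cnt entries, written at post time at
     * (head & (wqe_cnt - 1)) and read back here when the completion arrives. */
    static u64 wq_pop_wrid(u64 *wrid, u32 *tail, u32 wqe_cnt)
    {
            u64 wr_id = wrid[*tail & (wqe_cnt - 1)];

            (*tail)++;                                      /* one slot consumed */
            return wr_id;
    }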

mlx4_ib.h
    174    int wqe_cnt;   (member)

/drivers/infiniband/hw/mlx5/

qp.c
    152    if (wq->wqe_cnt == 0) {   in mlx5_ib_read_user_wqe()
    158    offset = wq->offset + ((wqe_index % wq->wqe_cnt) << wq->wqe_shift);   in mlx5_ib_read_user_wqe()
    159    wq_end = wq->offset + (wq->wqe_cnt << wq->wqe_shift);   in mlx5_ib_read_user_wqe()
    252    qp->rq.wqe_cnt = 0;   in set_rq_size()
    258    qp->rq.wqe_cnt = ucmd->rq_wqe_count;   in set_rq_size()
    265    qp->rq.max_post = qp->rq.wqe_cnt;   in set_rq_size()
    272    qp->rq.wqe_cnt = wq_size / wqe_size;   in set_rq_size()
    282    qp->rq.max_post = qp->rq.wqe_cnt;   in set_rq_size()
    412    qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB;   in calc_sq_size()
    413    if (qp->sq.wqe_cnt > (1 << MLX5_CAP_GEN(dev->mdev, log_max_qp_sz))) {   in calc_sq_size()
    [all …]
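
Lines 152-159 show how an arbitrary WQE index is turned into a byte offset inside a user-mapped
queue: the index is wrapped modulo wqe_cnt and shifted by wqe_shift (log2 of the WQE stride), and
wqe_cnt << wqe_shift marks the end of the ring, used to bound the copy. A worked restatement with
made-up numbers:

    /* Hypothetical restatement of the offset arithmetic above. */
    static unsigned long wq_byte_offset(unsigned long wq_offset, int wqe_index,
                                        int wqe_cnt, int wqe_shift)
    {
            /* e.g. wqe_cnt = 256, wqe_shift = 6 (64-byte stride), wqe_index = 300:
             * 300 % 256 = 44, 44 << 6 = 2816 bytes past wq_offset. */
            return wq_offset + ((unsigned long)(wqe_index % wqe_cnt) << wqe_shift);
    }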

cq.c
    198    wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in handle_responder()
    392    idx = tail & (qp->sq.wqe_cnt - 1);   in handle_atomics()
    457    idx = wq->last_poll & (wq->wqe_cnt - 1);   in sw_send_comp()
    486    wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in sw_recv_comp()
    577    idx = wqe_ctr & (wq->wqe_cnt - 1);   in mlx5_poll_one()
    605    idx = wqe_ctr & (wq->wqe_cnt - 1);   in mlx5_poll_one()
    618    wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];   in mlx5_poll_one()

mlx5_ib.h
    247    int wqe_cnt;   (member)

/drivers/infiniband/hw/vmw_pvrdma/

pvrdma_qp.c
    137    qp->rq.wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_recv_wr));   in pvrdma_set_rq_size()
    141    req_cap->max_recv_wr = qp->rq.wqe_cnt;   in pvrdma_set_rq_size()
    147    qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) /   in pvrdma_set_rq_size()
    162    qp->sq.wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_send_wr));   in pvrdma_set_sq_size()
    166    req_cap->max_send_wr = qp->sq.wqe_cnt;   in pvrdma_set_sq_size()
    174    (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) /   in pvrdma_set_sq_size()
    626    qp->sq.ring, qp->sq.wqe_cnt, &tail))) {   in pvrdma_post_send()
    787    qp->sq.wqe_cnt);   in pvrdma_post_send()
    846    qp->rq.ring, qp->rq.wqe_cnt, &tail))) {   in pvrdma_post_recv()
    872    qp->rq.wqe_cnt);   in pvrdma_post_recv()
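
Besides the familiar roundup_pow_of_two() sizing, lines 147 and 174 size the queue buffer in whole
pages: wqe_cnt entries of wqe_size bytes, rounded up to the next page. The division is an
open-coded round-up; a small sketch with example numbers (hypothetical helper name):

    /* Hypothetical: pages needed for a ring of wqe_cnt WQEs of wqe_size bytes.
     * PAGE_SIZE is the kernel's page size constant. */
    static u32 wq_npages(u32 wqe_cnt, u32 wqe_size)
    {
            /* e.g. 128 WQEs of 112 bytes = 14336 bytes -> 4 pages of 4096 */
            return (wqe_cnt * wqe_size + PAGE_SIZE - 1) / PAGE_SIZE;
    }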

pvrdma.h
    154    int wqe_cnt;   (member)

/drivers/infiniband/hw/mthca/

mthca_srq.c
    54     __be16 wqe_cnt;   (member)
    66     __be16 wqe_cnt;   (member)

/drivers/infiniband/hw/bnxt_re/

qplib_fp.h
    254    u64 wqe_cnt;   (member)

ib_verbs.c
    2070   qp->qplib_qp.wqe_cnt == BNXT_RE_UD_QP_HW_STALL) {   in bnxt_ud_qp_hw_stall_workaround()
    2077   qp->qplib_qp.wqe_cnt = 0;   in bnxt_ud_qp_hw_stall_workaround()

qplib_fp.c
    1521   qp->wqe_cnt++;   in bnxt_qplib_post_send()
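
bnxt_re uses the same field name for something different: judging from these hits, qplib_qp.wqe_cnt
is a running count of posted send WQEs that bnxt_ud_qp_hw_stall_workaround() compares against
BNXT_RE_UD_QP_HW_STALL and resets, rather than a ring size. A hedged sketch of that counter pattern
(function name and threshold parameter are placeholders):

    /* Hypothetical: count posted WQEs and trigger a workaround once a
     * hardware-specific threshold is reached, then start counting again. */
    static void count_posted_wqe(u64 *wqe_cnt, u64 stall_threshold)
    {
            (*wqe_cnt)++;                                   /* one more WQE posted */
            if (*wqe_cnt == stall_threshold) {
                    /* driver-specific stall handling would run here */
                    *wqe_cnt = 0;
            }
    }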

/drivers/net/ethernet/mellanox/mlx4/

mlx4.h
    377    __be16 wqe_cnt;   (member)