Lines matching refs: recv_cq
764 qp_context->cqn_rcv = cpu_to_be32(to_mcq(ibqp->recv_cq)->cqn); in __mthca_modify_qp()
828 mthca_cq_clean(dev, to_mcq(qp->ibqp.recv_cq), qp->qpn, in __mthca_modify_qp()
830 if (qp->ibqp.send_cq != qp->ibqp.recv_cq) in __mthca_modify_qp()
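The __mthca_modify_qp() hits above cover two uses of recv_cq: the receive CQ's number is written big-endian into the QP context (cqn_rcv), and when the QP drops back toward reset its completions are scrubbed from the receive CQ, and from the send CQ only if that is a different CQ. Below is a minimal stand-alone sketch of that "clean once if shared" step; the cq/qp structs and the cq_clean() helper are hypothetical stand-ins, not the driver's types.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the driver's CQ and QP objects. */
struct cq { uint32_t cqn; };
struct qp { uint32_t qpn; struct cq *send_cq, *recv_cq; };

/* Stand-in for mthca_cq_clean(): drop completions queued for @qpn. */
static void cq_clean(struct cq *cq, uint32_t qpn)
{
	printf("clean CQ %u of completions for QP %u\n",
	       (unsigned) cq->cqn, (unsigned) qpn);
}

/* Scrub the receive CQ, and the send CQ only when it is a different CQ,
 * so a CQ shared by both directions is cleaned exactly once. */
static void qp_reset_clean(struct qp *qp)
{
	cq_clean(qp->recv_cq, qp->qpn);
	if (qp->send_cq != qp->recv_cq)
		cq_clean(qp->send_cq, qp->qpn);
}

int main(void)
{
	struct cq a = { .cqn = 1 }, b = { .cqn = 2 };
	struct qp shared = { .qpn = 7, .send_cq = &a, .recv_cq = &a };
	struct qp split  = { .qpn = 8, .send_cq = &b, .recv_cq = &a };

	qp_reset_clean(&shared);	/* shared CQ cleaned once */
	qp_reset_clean(&split);		/* both CQs cleaned       */
	return 0;
}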
1152 struct mthca_cq *recv_cq, in mthca_alloc_qp_common() argument
1280 struct mthca_cq *recv_cq, in mthca_alloc_qp() argument
1306 err = mthca_alloc_qp_common(dev, pd, send_cq, recv_cq, in mthca_alloc_qp()
1321 static void mthca_lock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq) in mthca_lock_cqs() argument
1323 if (send_cq == recv_cq) in mthca_lock_cqs()
1325 else if (send_cq->cqn < recv_cq->cqn) { in mthca_lock_cqs()
1327 spin_lock_nested(&recv_cq->lock, SINGLE_DEPTH_NESTING); in mthca_lock_cqs()
1329 spin_lock_irq(&recv_cq->lock); in mthca_lock_cqs()
1334 static void mthca_unlock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq) in mthca_unlock_cqs() argument
1336 if (send_cq == recv_cq) in mthca_unlock_cqs()
1338 else if (send_cq->cqn < recv_cq->cqn) { in mthca_unlock_cqs()
1339 spin_unlock(&recv_cq->lock); in mthca_unlock_cqs()
1343 spin_unlock_irq(&recv_cq->lock); in mthca_unlock_cqs()
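The mthca_lock_cqs()/mthca_unlock_cqs() hits show the usual two-lock ordering discipline: take a single lock when the QP's send and receive CQ are the same object, otherwise always lock the CQ with the lower CQN first (the second acquisition uses spin_lock_nested(..., SINGLE_DEPTH_NESTING) so lockdep accepts two locks of the same class), and release in the opposite order. Below is a self-contained sketch of the same ordering with pthread mutexes standing in for the driver's spinlocks; everything in it is illustrative, not the mthca code itself.

#include <pthread.h>
#include <stdint.h>

struct cq {
	uint32_t cqn;		/* stand-in for the hardware CQ number   */
	pthread_mutex_t lock;	/* stand-in for the driver's CQ spinlock */
};

/* Take both CQ locks without risking an AB/BA deadlock: one lock if the
 * QP uses a single CQ for both directions, otherwise lower CQN first. */
static void lock_cqs(struct cq *send_cq, struct cq *recv_cq)
{
	if (send_cq == recv_cq) {
		pthread_mutex_lock(&send_cq->lock);
	} else if (send_cq->cqn < recv_cq->cqn) {
		pthread_mutex_lock(&send_cq->lock);
		pthread_mutex_lock(&recv_cq->lock);
	} else {
		pthread_mutex_lock(&recv_cq->lock);
		pthread_mutex_lock(&send_cq->lock);
	}
}

/* Release in the reverse order of acquisition. */
static void unlock_cqs(struct cq *send_cq, struct cq *recv_cq)
{
	if (send_cq == recv_cq) {
		pthread_mutex_unlock(&send_cq->lock);
	} else if (send_cq->cqn < recv_cq->cqn) {
		pthread_mutex_unlock(&recv_cq->lock);
		pthread_mutex_unlock(&send_cq->lock);
	} else {
		pthread_mutex_unlock(&send_cq->lock);
		pthread_mutex_unlock(&recv_cq->lock);
	}
}

The later hits show the same lock/unlock pair bracketing the CQ bookkeeping in mthca_alloc_sqp() and mthca_free_qp(), after which mthca_free_qp() cleans recv_cq (and send_cq only if it is a different CQ), just like the modify-to-reset path above.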
1350 struct mthca_cq *recv_cq, in mthca_alloc_sqp() argument
1385 err = mthca_alloc_qp_common(dev, pd, send_cq, recv_cq, in mthca_alloc_sqp()
1399 mthca_lock_cqs(send_cq, recv_cq); in mthca_alloc_sqp()
1405 mthca_unlock_cqs(send_cq, recv_cq); in mthca_alloc_sqp()
1430 struct mthca_cq *recv_cq; in mthca_free_qp() local
1433 recv_cq = to_mcq(qp->ibqp.recv_cq); in mthca_free_qp()
1439 mthca_lock_cqs(send_cq, recv_cq); in mthca_free_qp()
1447 mthca_unlock_cqs(send_cq, recv_cq); in mthca_free_qp()
1461 mthca_cq_clean(dev, recv_cq, qp->qpn, in mthca_free_qp()
1463 if (send_cq != recv_cq) in mthca_free_qp()
1839 if (mthca_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq)) { in mthca_tavor_post_receive()
2180 if (mthca_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq)) { in mthca_arbel_post_receive()
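Both post-receive paths guard against ring overflow with mthca_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq), handing in the receive CQ so the full-queue decision can be synchronized with completion processing. Below is a self-contained sketch of that kind of two-stage check, with a hypothetical head/tail ring and a pthread mutex in place of the CQ lock; the struct and field names are assumptions for illustration, not the driver's.

#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>

struct wq {
	uint32_t head;		/* next slot the poster will use       */
	uint32_t tail;		/* last slot reclaimed by a completion */
	uint32_t max;		/* ring capacity                       */
};

struct cq {
	pthread_mutex_t lock;	/* held while completions retire WQEs */
};

/* Two-stage overflow test: a cheap unlocked check first, and only when
 * that says "full" re-read head/tail under the CQ lock, so completions
 * retiring entries concurrently are observed before the post is
 * rejected. Returns true if posting @nreq more entries would overflow. */
static bool wq_overflow(struct wq *wq, unsigned nreq, struct cq *cq)
{
	uint32_t cur = wq->head - wq->tail;

	if (cur + nreq < wq->max)
		return false;

	pthread_mutex_lock(&cq->lock);
	cur = wq->head - wq->tail;
	pthread_mutex_unlock(&cq->lock);

	return cur + nreq >= wq->max;
}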