Lines matching references to sq (the send queue) in mthca_qp.c

220 (n << qp->sq.wqe_shift); in get_send_wqe()
223 (n << qp->sq.wqe_shift)) >> in get_send_wqe()
225 ((qp->send_wqe_offset + (n << qp->sq.wqe_shift)) & in get_send_wqe()
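Lines 220-225 are the address computation for send WQE n in get_send_wqe(): the stride is 1 << qp->sq.wqe_shift, and the page-list case splits send_wqe_offset + (n << wqe_shift) into a page index and an in-page offset. Below is a minimal userspace C sketch of that arithmetic, using simplified stand-in types (not the driver's real structs) and the usual 4 KiB page split as an assumption.

    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* Simplified stand-in for the driver's queue bookkeeping (not the real struct). */
    struct sq_info {
        unsigned send_wqe_offset;   /* byte offset of the SQ inside the WQE buffer */
        unsigned wqe_shift;         /* log2 of the send WQE stride in bytes */
    };

    /* Split the byte offset of send WQE n into (page index, offset within page),
     * mirroring the >> / & split hinted at by lines 223-225. */
    static void locate_send_wqe(const struct sq_info *sq, int n,
                                unsigned *page, unsigned *offset)
    {
        unsigned byte_off = sq->send_wqe_offset + (n << sq->wqe_shift);

        *page   = byte_off >> PAGE_SHIFT;
        *offset = byte_off & (PAGE_SIZE - 1);
    }

    int main(void)
    {
        struct sq_info sq = { .send_wqe_offset = 8192, .wqe_shift = 6 };
        unsigned page, offset;

        locate_send_wqe(&sq, 100, &page, &offset);
        printf("WQE 100: page %u, offset %u\n", page, offset);
        return 0;
    }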
498 qp_attr->cap.max_send_wr = qp->sq.max; in mthca_query_qp()
500 qp_attr->cap.max_send_sge = qp->sq.max_gs; in mthca_query_qp()
602 if (qp->sq.max) in __mthca_modify_qp()
603 qp_context->sq_size_stride = ilog2(qp->sq.max) << 3; in __mthca_modify_qp()
604 qp_context->sq_size_stride |= qp->sq.wqe_shift - 4; in __mthca_modify_qp()
723 qp_context->snd_db_index = cpu_to_be32(qp->sq.db_index); in __mthca_modify_qp()
824 mthca_wq_reset(&qp->sq); in __mthca_modify_qp()
825 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1); in __mthca_modify_qp()
831 *qp->sq.db = 0; in __mthca_modify_qp()
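Lines 602-604 pack the send queue geometry into one byte of the QP context: log2 of the SQ depth goes in the upper bits (<< 3) and wqe_shift - 4 goes in the low bits, so the minimum 16-byte stride presumably encodes as 0. A small sketch of that packing, with a hypothetical log2 helper standing in for the kernel's ilog2():

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helper standing in for the kernel's ilog2(); assumes v > 0. */
    static unsigned log2_floor(unsigned v)
    {
        unsigned r = 0;
        while (v >>= 1)
            ++r;
        return r;
    }

    /* Pack SQ depth and stride the way lines 602-604 do: log2 of the number of
     * send WQEs in the upper bits (<< 3), wqe_shift - 4 in the low bits. */
    static uint8_t encode_sq_size_stride(unsigned sq_max, unsigned wqe_shift)
    {
        uint8_t v = 0;

        if (sq_max)
            v = log2_floor(sq_max) << 3;
        v |= wqe_shift - 4;
        return v;
    }

    int main(void)
    {
        /* e.g. 256 WQEs of 64 bytes each: log2(256) = 8, shift 6 -> 0x42 */
        printf("sq_size_stride = 0x%02x\n", encode_sq_size_stride(256, 6));
        return 0;
    }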
854 spin_lock_irq(&qp->sq.lock); in mthca_modify_qp()
858 spin_unlock_irq(&qp->sq.lock); in mthca_modify_qp()
951 1 << qp->sq.wqe_shift)); in mthca_adjust_qp_caps()
955 qp->sq.max_gs = min_t(int, dev->limits.max_sg, in mthca_adjust_qp_caps()
987 size = qp->sq.max_gs * sizeof (struct mthca_data_seg); in mthca_alloc_wqe_buf()
1027 for (qp->sq.wqe_shift = 6; 1 << qp->sq.wqe_shift < size; in mthca_alloc_wqe_buf()
1028 qp->sq.wqe_shift++) in mthca_alloc_wqe_buf()
1032 1 << qp->sq.wqe_shift); in mthca_alloc_wqe_buf()
1043 (qp->sq.max << qp->sq.wqe_shift)); in mthca_alloc_wqe_buf()
1045 qp->wrid = kmalloc((qp->rq.max + qp->sq.max) * sizeof (u64), in mthca_alloc_wqe_buf()
1066 (qp->sq.max << qp->sq.wqe_shift)), in mthca_free_wqe_buf()
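Lines 1027-1028 round the computed send WQE size up to a power of two by growing qp->sq.wqe_shift from 6 (a 64-byte stride) until 1 << wqe_shift covers the WQE. A minimal sketch of that loop:

    #include <stdio.h>

    /* Mirror the loop at lines 1027-1028: start from a 64-byte minimum stride
     * and grow the shift until one WQE of 'size' bytes fits in 1 << shift. */
    static unsigned send_wqe_shift(unsigned size)
    {
        unsigned shift;

        for (shift = 6; (1u << shift) < size; ++shift)
            ; /* nothing */
        return shift;
    }

    int main(void)
    {
        printf("size 80  -> shift %u (stride %u)\n",
               send_wqe_shift(80), 1u << send_wqe_shift(80));
        printf("size 200 -> shift %u (stride %u)\n",
               send_wqe_shift(200), 1u << send_wqe_shift(200));
        return 0;
    }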
1121 qp->sq.db_index = mthca_alloc_db(dev, MTHCA_DB_TYPE_SQ, in mthca_alloc_memfree()
1122 qp->qpn, &qp->sq.db); in mthca_alloc_memfree()
1123 if (qp->sq.db_index < 0) { in mthca_alloc_memfree()
1136 mthca_free_db(dev, MTHCA_DB_TYPE_SQ, qp->sq.db_index); in mthca_free_memfree()
1159 mthca_wq_reset(&qp->sq); in mthca_alloc_qp_common()
1162 spin_lock_init(&qp->sq.lock); in mthca_alloc_qp_common()
1209 for (i = 0; i < qp->sq.max; ++i) { in mthca_alloc_qp_common()
1211 next->nda_op = cpu_to_be32((((i + 1) & (qp->sq.max - 1)) << in mthca_alloc_qp_common()
1212 qp->sq.wqe_shift) + in mthca_alloc_qp_common()
1224 qp->sq.last = get_send_wqe(qp, qp->sq.max - 1); in mthca_alloc_qp_common()
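Lines 1209-1212 pre-link every send WQE to its successor by writing the next WQE's offset into nda_op; the ring wraps because the mask ((i + 1) & (qp->sq.max - 1)) relies on sq.max being a power of two. A small sketch of just that index arithmetic (offsets are relative to the start of the send queue; the driver adds its own base offset and byte-swaps before storing):

    #include <stdint.h>
    #include <stdio.h>

    /* Byte offset, relative to the start of the send queue, of the WQE that
     * follows WQE i.  sq_max is assumed to be a power of two, so the
     * ((i + 1) & (sq_max - 1)) mask from lines 1211-1212 wraps the ring. */
    static uint32_t next_wqe_offset(unsigned i, unsigned sq_max, unsigned wqe_shift)
    {
        return ((i + 1) & (sq_max - 1)) << wqe_shift;
    }

    int main(void)
    {
        unsigned sq_max = 8, wqe_shift = 6;

        for (unsigned i = 0; i < sq_max; ++i)
            printf("WQE %u -> next WQE at offset %u\n",
                   i, next_wqe_offset(i, sq_max, wqe_shift));
        return 0;
    }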
1253 qp->sq.max = cap->max_send_wr ? in mthca_set_qp_size()
1257 qp->sq.max = cap->max_send_wr; in mthca_set_qp_size()
1261 qp->sq.max_gs = max_t(int, cap->max_send_sge, in mthca_set_qp_size()
1361 sqp->header_buf_size = sqp->qp.sq.max * MTHCA_UD_HEADER_SIZE; in mthca_alloc_sqp()
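Lines 1253-1257 set the SQ depth: on mem-free HCAs the requested max_send_wr is rounded up to a power of two (so the mask-based wrap tricks above work), while the other branch keeps the caller's value as-is. A minimal sketch, using a hypothetical rounding helper rather than the kernel's own:

    #include <stdio.h>

    /* Hypothetical stand-in for the kernel's power-of-two round-up helper. */
    static unsigned round_up_pow2(unsigned v)
    {
        unsigned r = 1;
        while (r < v)
            r <<= 1;
        return r;
    }

    /* Mem-free (Arbel) HCAs get a power-of-two ring so indices can wrap with a
     * mask; otherwise the requested depth is used directly (lines 1253-1257). */
    static unsigned sq_depth(unsigned max_send_wr, int is_memfree)
    {
        if (!is_memfree)
            return max_send_wr;
        return max_send_wr ? round_up_pow2(max_send_wr) : 0;
    }

    int main(void)
    {
        printf("memfree: %u -> %u\n", 100u, sq_depth(100, 1));
        printf("other:   %u -> %u\n", 100u, sq_depth(100, 0));
        return 0;
    }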
1626 spin_lock_irqsave(&qp->sq.lock, flags); in mthca_tavor_post_send()
1630 ind = qp->sq.next_ind; in mthca_tavor_post_send()
1633 if (mthca_wq_overflow(&qp->sq, nreq, qp->ibqp.send_cq)) { in mthca_tavor_post_send()
1636 qp->sq.head, qp->sq.tail, in mthca_tavor_post_send()
1637 qp->sq.max, nreq); in mthca_tavor_post_send()
1644 prev_wqe = qp->sq.last; in mthca_tavor_post_send()
1645 qp->sq.last = wqe; in mthca_tavor_post_send()
1729 if (wr->num_sge > qp->sq.max_gs) { in mthca_tavor_post_send()
1761 cpu_to_be32(((ind << qp->sq.wqe_shift) + in mthca_tavor_post_send()
1778 if (unlikely(ind >= qp->sq.max)) in mthca_tavor_post_send()
1779 ind -= qp->sq.max; in mthca_tavor_post_send()
1786 mthca_write64(((qp->sq.next_ind << qp->sq.wqe_shift) + in mthca_tavor_post_send()
1798 qp->sq.next_ind = ind; in mthca_tavor_post_send()
1799 qp->sq.head += nreq; in mthca_tavor_post_send()
1801 spin_unlock_irqrestore(&qp->sq.lock, flags); in mthca_tavor_post_send()
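Lines 1626-1801 are the Tavor post-send path: posting starts at qp->sq.next_ind, each WQE advances the ring index with a wrap at sq.max (lines 1778-1779), the overflow check weighs head - tail + nreq against sq.max, and after the loop sq.head is bumped by nreq before the doorbell write. Below is a simplified userspace sketch of just that bookkeeping; it is not the driver code (no locking, no CQ polling, no hardware access), only the counter arithmetic.

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified work-queue counters, loosely modelled on the driver's SQ state. */
    struct wq {
        unsigned head;      /* WQEs posted so far */
        unsigned tail;      /* WQEs known completed (normally advanced via the CQ) */
        unsigned max;       /* ring size */
        unsigned next_ind;  /* ring slot for the next WQE */
    };

    /* Would posting nreq more WQEs overflow the ring?  Mirrors the head/tail/max
     * comparison behind mthca_wq_overflow(), minus the CQ poll the driver does. */
    static bool wq_overflow(const struct wq *wq, int nreq)
    {
        return wq->head - wq->tail + nreq >= wq->max;
    }

    /* Post nreq WQEs: advance the ring index with wrap-around, then bump head,
     * as lines 1778-1779 and 1798-1799 do. */
    static void post(struct wq *wq, int nreq)
    {
        unsigned ind = wq->next_ind;

        for (int i = 0; i < nreq; ++i) {
            /* ... build the WQE in slot 'ind' here ... */
            if (++ind >= wq->max)
                ind -= wq->max;
        }
        wq->next_ind = ind;
        wq->head += nreq;
    }

    int main(void)
    {
        struct wq sq = { .head = 0, .tail = 0, .max = 8, .next_ind = 0 };

        if (!wq_overflow(&sq, 6))
            post(&sq, 6);
        printf("head=%u next_ind=%u\n", sq.head, sq.next_ind);
        return 0;
    }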
1941 spin_lock_irqsave(&qp->sq.lock, flags); in mthca_arbel_post_send()
1945 ind = qp->sq.head & (qp->sq.max - 1); in mthca_arbel_post_send()
1952 ((qp->sq.head & 0xffff) << 8) | f0 | op0; in mthca_arbel_post_send()
1954 qp->sq.head += MTHCA_ARBEL_MAX_WQES_PER_SEND_DB; in mthca_arbel_post_send()
1961 *qp->sq.db = cpu_to_be32(qp->sq.head & 0xffff); in mthca_arbel_post_send()
1974 if (mthca_wq_overflow(&qp->sq, nreq, qp->ibqp.send_cq)) { in mthca_arbel_post_send()
1977 qp->sq.head, qp->sq.tail, in mthca_arbel_post_send()
1978 qp->sq.max, nreq); in mthca_arbel_post_send()
1985 prev_wqe = qp->sq.last; in mthca_arbel_post_send()
1986 qp->sq.last = wqe; in mthca_arbel_post_send()
2070 if (wr->num_sge > qp->sq.max_gs) { in mthca_arbel_post_send()
2102 cpu_to_be32(((ind << qp->sq.wqe_shift) + in mthca_arbel_post_send()
2119 if (unlikely(ind >= qp->sq.max)) in mthca_arbel_post_send()
2120 ind -= qp->sq.max; in mthca_arbel_post_send()
2125 dbhi = (nreq << 24) | ((qp->sq.head & 0xffff) << 8) | f0 | op0; in mthca_arbel_post_send()
2127 qp->sq.head += nreq; in mthca_arbel_post_send()
2134 *qp->sq.db = cpu_to_be32(qp->sq.head & 0xffff); in mthca_arbel_post_send()
2152 spin_unlock_irqrestore(&qp->sq.lock, flags); in mthca_arbel_post_send()
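Lines 1941-2152 are the Arbel (mem-free) post-send path: the slot comes from sq.head masked by sq.max - 1 (line 1945), the loop rings the doorbell mid-batch when it hits the per-doorbell WQE limit (lines 1952-1963), and at the end the low 16 bits of sq.head are folded into the doorbell value and written to the software doorbell record *qp->sq.db (lines 2125-2134). A simplified sketch of the counter handling only, with hypothetical types and the driver's byte-swapping and memory barriers omitted:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the driver's Arbel send-queue state. */
    struct arbel_sq {
        unsigned head;       /* free-running count of posted send WQEs */
        unsigned max;        /* ring size, power of two */
        uint32_t db_record;  /* software doorbell record (big-endian in the driver) */
    };

    /* Ring slot for the next WQE: lines 1945 and 2119-2120 reduce head modulo max. */
    static unsigned sq_slot(const struct arbel_sq *sq)
    {
        return sq->head & (sq->max - 1);
    }

    /* After building nreq WQEs, compute the doorbell value from the pre-increment
     * head (line 2125), advance head (line 2127), and expose its low 16 bits in
     * the doorbell record (line 2134). */
    static uint32_t sq_ring_doorbell(struct arbel_sq *sq, unsigned nreq,
                                     uint32_t f0, uint32_t op0)
    {
        uint32_t dbhi = (nreq << 24) | ((sq->head & 0xffff) << 8) | f0 | op0;

        sq->head += nreq;
        sq->db_record = sq->head & 0xffff;
        return dbhi;
    }

    int main(void)
    {
        struct arbel_sq sq = { .head = 0xfffe, .max = 256, .db_record = 0 };
        uint32_t dbhi = sq_ring_doorbell(&sq, 3, 0, 0);

        printf("slot=%u dbhi=0x%08x db_record=0x%04x\n",
               sq_slot(&sq), dbhi, sq.db_record);
        return 0;
    }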