Lines Matching refs:wqe

42 struct siw_wqe *wqe = &c_tx->wqe_active; in siw_try_1seg() local
43 struct siw_sge *sge = &wqe->sqe.sge[0]; in siw_try_1seg()
46 if (bytes > MAX_HDR_INLINE || wqe->sqe.num_sge != 1) in siw_try_1seg()
52 if (tx_flags(wqe) & SIW_WQE_INLINE) { in siw_try_1seg()
53 memcpy(paddr, &wqe->sqe.sge[1], bytes); in siw_try_1seg()
55 struct siw_mem *mem = wqe->mem[0]; in siw_try_1seg()
121 struct siw_wqe *wqe = &c_tx->wqe_active; in siw_qp_prepare_tx() local
125 switch (tx_type(wqe)) { in siw_qp_prepare_tx()
137 c_tx->pkt.rreq.sink_stag = htonl(wqe->sqe.sge[0].lkey); in siw_qp_prepare_tx()
139 cpu_to_be64(wqe->sqe.sge[0].laddr); in siw_qp_prepare_tx()
140 c_tx->pkt.rreq.source_stag = htonl(wqe->sqe.rkey); in siw_qp_prepare_tx()
141 c_tx->pkt.rreq.source_to = cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
142 c_tx->pkt.rreq.read_size = htonl(wqe->sqe.sge[0].length); in siw_qp_prepare_tx()
149 if (tx_flags(wqe) & SIW_WQE_SOLICITED) in siw_qp_prepare_tx()
171 if (tx_flags(wqe) & SIW_WQE_SOLICITED) in siw_qp_prepare_tx()
185 c_tx->pkt.send_inv.inval_stag = cpu_to_be32(wqe->sqe.rkey); in siw_qp_prepare_tx()
197 c_tx->pkt.rwrite.sink_stag = htonl(wqe->sqe.rkey); in siw_qp_prepare_tx()
198 c_tx->pkt.rwrite.sink_to = cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
211 c_tx->pkt.rresp.sink_stag = cpu_to_be32(wqe->sqe.rkey); in siw_qp_prepare_tx()
212 c_tx->pkt.rresp.sink_to = cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
221 siw_dbg_qp(tx_qp(c_tx), "stale wqe type %d\n", tx_type(wqe)); in siw_qp_prepare_tx()
231 wqe->processed = data; in siw_qp_prepare_tx()
246 cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
279 if (c_tx->zcopy_tx && wqe->bytes >= SENDPAGE_THRESH && in siw_qp_prepare_tx()
280 !(tx_flags(wqe) & SIW_WQE_SIGNALLED)) in siw_qp_prepare_tx()
433 struct siw_wqe *wqe = &c_tx->wqe_active; in siw_tx_hdt() local
434 struct siw_sge *sge = &wqe->sqe.sge[c_tx->sge_idx]; in siw_tx_hdt()
461 wqe->processed += data_len; in siw_tx_hdt()
468 if (!(tx_flags(wqe) & SIW_WQE_INLINE)) { in siw_tx_hdt()
469 mem = wqe->mem[sge_idx]; in siw_tx_hdt()
509 wqe->processed -= c_tx->bytes_unsent; in siw_tx_hdt()
564 wqe->processed -= c_tx->bytes_unsent; in siw_tx_hdt()
572 (data_len != 0 || wqe->processed < wqe->bytes)) { in siw_tx_hdt()
602 rv = siw_0copy_tx(s, page_array, &wqe->sqe.sge[c_tx->sge_idx], in siw_tx_hdt()
618 wqe->processed -= data_len; in siw_tx_hdt()
629 if (data_len > 0 && wqe->processed < wqe->bytes) { in siw_tx_hdt()
650 wqe->processed -= data_len - rv; in siw_tx_hdt()
660 sge = &wqe->sqe.sge[c_tx->sge_idx]; in siw_tx_hdt()
713 static void siw_prepare_fpdu(struct siw_qp *qp, struct siw_wqe *wqe) in siw_prepare_fpdu() argument
727 c_tx->pkt.c_untagged.ddp_mo = cpu_to_be32(wqe->processed); in siw_prepare_fpdu()
730 cpu_to_be64(wqe->sqe.raddr + wqe->processed); in siw_prepare_fpdu()
732 data_len = wqe->bytes - wqe->processed; in siw_prepare_fpdu()
771 static int siw_check_sgl_tx(struct ib_pd *pd, struct siw_wqe *wqe, in siw_check_sgl_tx() argument
774 struct siw_sge *sge = &wqe->sqe.sge[0]; in siw_check_sgl_tx()
775 int i, len, num_sge = wqe->sqe.num_sge; in siw_check_sgl_tx()
785 int rv = siw_check_sge(pd, sge, &wqe->mem[i], perms, 0, in siw_check_sgl_tx()
801 static int siw_qp_sq_proc_tx(struct siw_qp *qp, struct siw_wqe *wqe) in siw_qp_sq_proc_tx() argument
808 if (unlikely(wqe->wr_status == SIW_WR_IDLE)) in siw_qp_sq_proc_tx()
814 if (wqe->wr_status == SIW_WR_QUEUED) { in siw_qp_sq_proc_tx()
815 if (!(wqe->sqe.flags & SIW_WQE_INLINE)) { in siw_qp_sq_proc_tx()
816 if (tx_type(wqe) == SIW_OP_READ_RESPONSE) in siw_qp_sq_proc_tx()
817 wqe->sqe.num_sge = 1; in siw_qp_sq_proc_tx()
819 if (tx_type(wqe) != SIW_OP_READ && in siw_qp_sq_proc_tx()
820 tx_type(wqe) != SIW_OP_READ_LOCAL_INV) { in siw_qp_sq_proc_tx()
826 rv = siw_check_sgl_tx(qp->pd, wqe, 0); in siw_qp_sq_proc_tx()
828 if (tx_type(wqe) == in siw_qp_sq_proc_tx()
834 wqe->bytes = rv; in siw_qp_sq_proc_tx()
836 wqe->bytes = 0; in siw_qp_sq_proc_tx()
839 wqe->bytes = wqe->sqe.sge[0].length; in siw_qp_sq_proc_tx()
841 if (wqe->bytes > SIW_MAX_INLINE) { in siw_qp_sq_proc_tx()
845 wqe->sqe.sge[0].laddr = in siw_qp_sq_proc_tx()
846 (u64)(uintptr_t)&wqe->sqe.sge[1]; in siw_qp_sq_proc_tx()
849 wqe->wr_status = SIW_WR_INPROGRESS; in siw_qp_sq_proc_tx()
850 wqe->processed = 0; in siw_qp_sq_proc_tx()
857 siw_prepare_fpdu(qp, wqe); in siw_qp_sq_proc_tx()
867 tx_type(wqe), wqe->wr_status, wqe->bytes, wqe->processed, in siw_qp_sq_proc_tx()
868 wqe->sqe.id); in siw_qp_sq_proc_tx()
875 enum siw_opcode tx_type = tx_type(wqe); in siw_qp_sq_proc_tx()
892 wqe->processed = wqe->bytes; in siw_qp_sq_proc_tx()
921 siw_prepare_fpdu(qp, wqe); in siw_qp_sq_proc_tx()
986 static int siw_qp_sq_proc_local(struct siw_qp *qp, struct siw_wqe *wqe) in siw_qp_sq_proc_local() argument
990 switch (tx_type(wqe)) { in siw_qp_sq_proc_local()
992 rv = siw_fastreg_mr(qp->pd, &wqe->sqe); in siw_qp_sq_proc_local()
996 rv = siw_invalidate_stag(qp->pd, wqe->sqe.rkey); in siw_qp_sq_proc_local()
1034 struct siw_wqe *wqe = tx_wqe(qp); in siw_qp_sq_process() local
1039 siw_dbg_qp(qp, "enter for type %d\n", tx_type(wqe)); in siw_qp_sq_process()
1049 tx_type = tx_type(wqe); in siw_qp_sq_process()
1052 rv = siw_qp_sq_proc_tx(qp, wqe); in siw_qp_sq_process()
1054 rv = siw_qp_sq_proc_local(qp, wqe); in siw_qp_sq_process()
1064 siw_wqe_put_mem(wqe, tx_type); in siw_qp_sq_process()
1069 if (tx_flags(wqe) & SIW_WQE_SIGNALLED) in siw_qp_sq_process()
1070 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes, in siw_qp_sq_process()
1082 siw_wqe_put_mem(wqe, tx_type); in siw_qp_sq_process()
1092 wqe->wr_status = SIW_WR_IDLE; in siw_qp_sq_process()
1124 tx_type(wqe), rv); in siw_qp_sq_process()
1150 siw_wqe_put_mem(wqe, tx_type); in siw_qp_sq_process()
1155 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes, in siw_qp_sq_process()
1167 siw_wqe_put_mem(wqe, SIW_OP_READ_RESPONSE); in siw_qp_sq_process()
1175 wqe->wr_status = SIW_WR_IDLE; in siw_qp_sq_process()
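
For orientation, the sketch below reconstructs the work queue element layout implied by the matches above. Only the member names that actually appear in the listing (sqe with id/flags/num_sge/rkey/raddr/sge[], mem[], wr_status, bytes, processed, and the SIW_WR_* states) are taken from it; all types, array bounds, enum ordering, and the *_sketch identifiers are assumptions for illustration, not the driver's actual definitions.

/*
 * Orientation sketch only: a reduced reconstruction of the siw_wqe shape
 * referenced by the matches above. Field names come from the listing;
 * types, the SGE bound, and enum ordering are assumptions.
 */
#include <stdint.h>

#define SIW_MAX_SGE_SKETCH 6              /* assumed bound, for the sketch only */

struct siw_mem;                           /* resolved memory object, opaque here */

enum siw_wr_state_sketch {                /* states seen in the listing; order assumed */
	SIW_WR_IDLE,
	SIW_WR_QUEUED,
	SIW_WR_INPROGRESS,
};

struct siw_sge_sketch {
	uint64_t laddr;                   /* wqe->sqe.sge[i].laddr  */
	uint32_t length;                  /* wqe->sqe.sge[i].length */
	uint32_t lkey;                    /* wqe->sqe.sge[i].lkey   */
};

struct siw_sqe_sketch {
	uint64_t id;                      /* wqe->sqe.id                           */
	uint16_t flags;                   /* SIW_WQE_INLINE, SIW_WQE_SIGNALLED, ... */
	uint8_t  num_sge;                 /* wqe->sqe.num_sge                      */
	uint8_t  opcode;                  /* consumed via tx_type(wqe)             */
	uint32_t rkey;                    /* wqe->sqe.rkey                         */
	uint64_t raddr;                   /* wqe->sqe.raddr                        */
	struct siw_sge_sketch sge[SIW_MAX_SGE_SKETCH];
};

struct siw_wqe_sketch {
	struct siw_sqe_sketch sqe;                 /* copy of the posted send queue element */
	struct siw_mem *mem[SIW_MAX_SGE_SKETCH];   /* wqe->mem[i], per-SGE resolved memory  */
	enum siw_wr_state_sketch wr_status;        /* wqe->wr_status                        */
	uint32_t bytes;                            /* total payload to transmit             */
	uint32_t processed;                        /* bytes already transmitted             */
};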