Lines matching refs:qe: every reference to the struct nvme_rdma_qe queue element in the NVMe over RDMA host driver (drivers/nvme/host/rdma.c), grouped by function below. The number leading each fragment is its line in that source file.
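Every fragment below touches the same three fields of the queue element: an embedded ib_cqe that routes a work-request completion back to its owner, the capsule buffer itself, and the DMA handle for that buffer. As a reference point, here is a minimal sketch of the structure consistent with how the fragments use it (field order and any extra members in the real driver may differ):

    /* One queue element: completion hook + capsule buffer + its DMA handle. */
    struct nvme_rdma_qe {
            struct ib_cqe   cqe;    /* set as wr.wr_cqe; .done is the handler */
            void            *data;  /* kzalloc'ed capsule (command or completion) */
            u64             dma;    /* handle from ib_dma_map_single() */
    };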
nvme_rdma_free_qe(): unmap the capsule buffer, then free it.

 185  static void nvme_rdma_free_qe(struct ib_device *ibdev, struct nvme_rdma_qe *qe,
 188          ib_dma_unmap_single(ibdev, qe->dma, capsule_size, dir);
 189          kfree(qe->data);
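Those two body lines are the entire teardown path. Completed into a full function for readability; the parameter list past the first fragment line is inferred from the matching alloc side, so treat it as an assumption:

    static void nvme_rdma_free_qe(struct ib_device *ibdev, struct nvme_rdma_qe *qe,
                    size_t capsule_size, enum dma_data_direction dir)
    {
            /* Tear down in reverse order of setup: release the streaming
             * DMA mapping first, then free the buffer behind it. */
            ib_dma_unmap_single(ibdev, qe->dma, capsule_size, dir);
            kfree(qe->data);
    }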
nvme_rdma_alloc_qe(): allocate the capsule buffer, map it for DMA, and unwind if the mapping fails.

 192  static int nvme_rdma_alloc_qe(struct ib_device *ibdev, struct nvme_rdma_qe *qe,
 195          qe->data = kzalloc(capsule_size, GFP_KERNEL);
 196          if (!qe->data)
 199          qe->dma = ib_dma_map_single(ibdev, qe->data, capsule_size, dir);
 200          if (ib_dma_mapping_error(ibdev, qe->dma)) {
 201                  kfree(qe->data);
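Filled out into a complete function. The error returns and the final success return are not visible in the fragments, so the -ENOMEM choices below are assumptions consistent with the visible control flow:

    static int nvme_rdma_alloc_qe(struct ib_device *ibdev, struct nvme_rdma_qe *qe,
                    size_t capsule_size, enum dma_data_direction dir)
    {
            /* Zeroed buffer large enough for a full capsule. */
            qe->data = kzalloc(capsule_size, GFP_KERNEL);
            if (!qe->data)
                    return -ENOMEM;         /* assumed error code */

            /* Streaming mapping; dir is DMA_TO_DEVICE for send capsules,
             * DMA_FROM_DEVICE for receive buffers. */
            qe->dma = ib_dma_map_single(ibdev, qe->data, capsule_size, dir);
            if (ib_dma_mapping_error(ibdev, qe->dma)) {
                    /* Unwind the allocation so the qe is left clean. */
                    kfree(qe->data);
                    return -ENOMEM;         /* assumed error code */
            }

            return 0;
    }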
nvme_rdma_post_send(): point the first scatter/gather entry at the mapped capsule and route the send completion through the qe's embedded ib_cqe.

1040                  struct nvme_rdma_qe *qe, struct ib_sge *sge, u32 num_sge,
1046          sge->addr = qe->dma;
1050          qe->cqe.done = nvme_rdma_send_done;
1053          wr.wr_cqe = &qe->cqe;
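A sketch of the posting pattern these four lines belong to. Everything beyond the fragments (the SGE length and lkey, the WR opcode and flags, the chaining of a first WR, the ib_post_send() call) is an assumption about how such a helper is conventionally completed, not a quote of the driver:

    static int nvme_rdma_post_send(struct nvme_rdma_queue *queue,
                    struct nvme_rdma_qe *qe, struct ib_sge *sge, u32 num_sge,
                    struct ib_send_wr *first)
    {
            struct ib_send_wr wr;

            /* First SGE covers the DMA-mapped command capsule. */
            sge->addr   = qe->dma;
            sge->length = sizeof(struct nvme_command);              /* assumed */
            sge->lkey   = queue->device->pd->local_dma_lkey;        /* assumed */

            /* The completion handler hangs off the ib_cqe embedded in
             * the qe, so the handler can get the qe back later with
             * container_of() (see line 1189 below). */
            qe->cqe.done = nvme_rdma_send_done;

            wr.next       = NULL;
            wr.wr_cqe     = &qe->cqe;
            wr.sg_list    = sge;
            wr.num_sge    = num_sge;
            wr.opcode     = IB_WR_SEND;                             /* assumed */
            wr.send_flags = IB_SEND_SIGNALED;                       /* assumed */

            /* Optionally chain another WR (e.g. an MR registration)
             * in front of the send. */
            if (first)
                    first->next = &wr;
            else
                    first = &wr;

            return ib_post_send(queue->qp, first, NULL);
    }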
nvme_rdma_post_recv(): the same pattern on the receive side, with a single SGE over the mapped completion buffer.

1090                  struct nvme_rdma_qe *qe)
1096          list.addr = qe->dma;
1100          qe->cqe.done = nvme_rdma_recv_done;
1103          wr.wr_cqe = &qe->cqe;
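The receive side completed the same way; the SGE length and lkey and the ib_post_recv() call are assumptions that follow the shape of the fragments:

    static int nvme_rdma_post_recv(struct nvme_rdma_queue *queue,
                    struct nvme_rdma_qe *qe)
    {
            struct ib_recv_wr wr;
            struct ib_sge list;

            /* The incoming NVMe completion lands in the qe's mapped buffer. */
            list.addr   = qe->dma;
            list.length = sizeof(struct nvme_completion);           /* assumed */
            list.lkey   = queue->device->pd->local_dma_lkey;        /* assumed */

            qe->cqe.done = nvme_rdma_recv_done;

            wr.next    = NULL;
            wr.wr_cqe  = &qe->cqe;
            wr.sg_list = &list;
            wr.num_sge = 1;

            return ib_post_recv(queue->qp, &wr, NULL);
    }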
__nvme_rdma_recv_done(): recover the qe from the completed work request, read the NVMe completion under matching DMA syncs, then repost the receive to recycle the buffer.

1189          struct nvme_rdma_qe *qe =
1193          struct nvme_completion *cqe = qe->data;
1202          ib_dma_sync_single_for_cpu(ibdev, qe->dma, len, DMA_FROM_DEVICE);
1214          ib_dma_sync_single_for_device(ibdev, qe->dma, len, DMA_FROM_DEVICE);
1216          nvme_rdma_post_recv(queue, qe);
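A sketch tying the five fragments together. The signature, the queue/ibdev lookups, and the elided completion handling are assumptions; the qe recovery, the sync bracket, and the repost come from the fragments. Note that the second sync deliberately passes DMA_FROM_DEVICE again: the DMA API takes the direction the buffer was mapped with, not the direction of the sync at hand, and a receive buffer stays mapped DMA_FROM_DEVICE for its whole lifetime:

    static int __nvme_rdma_recv_done(struct ib_cq *cq, struct ib_wc *wc, int tag)
    {
            /* wr_cqe points at the ib_cqe embedded in the qe, so
             * container_of() walks back to the owning queue element. */
            struct nvme_rdma_qe *qe =
                    container_of(wc->wr_cqe, struct nvme_rdma_qe, cqe);
            struct nvme_rdma_queue *queue = cq->cq_context;         /* assumed */
            struct ib_device *ibdev = queue->device->dev;           /* assumed */
            struct nvme_completion *cqe = qe->data;
            const size_t len = sizeof(struct nvme_completion);      /* assumed */
            int ret = 0;

            /* Hand the buffer to the CPU before reading the completion. */
            ib_dma_sync_single_for_cpu(ibdev, qe->dma, len, DMA_FROM_DEVICE);

            /* ... match *cqe to its request and complete it (elided) ... */

            /* Give the buffer back to the device before reusing it. */
            ib_dma_sync_single_for_device(ibdev, qe->dma, len, DMA_FROM_DEVICE);

            /* Recycle the qe: repost it to catch the next completion. */
            nvme_rdma_post_recv(queue, qe);

            return ret;     /* assumed: tag/poll plumbing elided */
    }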