Searched refs:sq_wqe (Results 1 – 4 of 4) sorted by relevance
/drivers/net/ethernet/huawei/hinic/
hinic_tx.c
    495   struct hinic_sq_wqe *sq_wqe;                                          in hinic_lb_xmit_frame() local
    510   sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);              in hinic_lb_xmit_frame()
    511   if (!sq_wqe) {                                                        in hinic_lb_xmit_frame()
    514   sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);              in hinic_lb_xmit_frame()
    515   if (sq_wqe) {                                                         in hinic_lb_xmit_frame()
    531   hinic_sq_prepare_wqe(txq->sq, prod_idx, sq_wqe, txq->sges, nr_sges);  in hinic_lb_xmit_frame()
    532   hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size);         in hinic_lb_xmit_frame()
    556   struct hinic_sq_wqe *sq_wqe;                                          in hinic_xmit_frame() local
    591   sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);              in hinic_xmit_frame()
    592   if (!sq_wqe) {                                                        in hinic_xmit_frame()
    [all …]
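The hinic_tx.c hits outline the transmit-side use of sq_wqe: reserve a WQE with hinic_sq_get_wqe(), retry once if the send queue looked full, then fill and submit it with hinic_sq_prepare_wqe() and hinic_sq_write_wqe(). A condensed sketch of that flow follows; it assumes the driver's internal headers, and the local types, error handling and queue stop/wake logic are assumptions rather than lines from the file.

/* Condensed sketch of the WQE handling in hinic_lb_xmit_frame() as seen in
 * the hits above; wqe_size/nr_sges setup, DMA mapping and queue stop/wake
 * handling are omitted, and the exact types are assumptions.
 */
static netdev_tx_t hinic_tx_sketch(struct hinic_txq *txq, struct sk_buff *skb,
				   unsigned int wqe_size, int nr_sges)
{
	struct hinic_sq_wqe *sq_wqe;
	u16 prod_idx;

	sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);	/* line 510 */
	if (!sq_wqe) {
		/* queue looked full; the driver retries once (lines 511-515) */
		sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);
		if (!sq_wqe)
			return NETDEV_TX_BUSY;
	}

	/* fill ctrl/task segments and buffer descriptors, then post the WQE */
	hinic_sq_prepare_wqe(txq->sq, prod_idx, sq_wqe, txq->sges, nr_sges);	/* line 531 */
	hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size);		/* line 532 */

	return NETDEV_TX_OK;
}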
hinic_hw_qp.c
    598   struct hinic_sq_wqe *sq_wqe, struct hinic_sge *sges,            in hinic_sq_prepare_wqe() argument
    603   sq_prepare_ctrl(&sq_wqe->ctrl, prod_idx, nr_sges);              in hinic_sq_prepare_wqe()
    605   sq_prepare_task(&sq_wqe->task);                                 in hinic_sq_prepare_wqe()
    608   sq_wqe->buf_descs[i].sge = sges[i];                             in hinic_sq_prepare_wqe()
    670   return &hw_wqe->sq_wqe;                                         in hinic_sq_get_wqe()
    692   struct hinic_sq_wqe *sq_wqe,                                    in hinic_sq_write_wqe() argument
    695   struct hinic_hw_wqe *hw_wqe = (struct hinic_hw_wqe *)sq_wqe;    in hinic_sq_write_wqe()
    700   hinic_cpu_to_be32(sq_wqe, wqe_size);                            in hinic_sq_write_wqe()
    720   struct hinic_sq_wqe *sq_wqe;                                    in hinic_sq_read_wqebb() local
    732   sq_wqe = &hw_wqe->sq_wqe;                                       in hinic_sq_read_wqebb()
    [all …]
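The hinic_hw_qp.c hits show what those helpers do with the WQE: hinic_sq_prepare_wqe() fills the control segment, the task segment and one buffer descriptor per SGE, while hinic_sq_write_wqe() casts the WQE back to the generic struct hinic_hw_wqe and byte-swaps it with hinic_cpu_to_be32() before it reaches hardware. Below is a sketch of the prepare step reconstructed from lines 598-608; the parameter ordering matches the call site at hinic_tx.c line 531, but the u16 prod_idx type and the internals of sq_prepare_ctrl()/sq_prepare_task() are assumptions.

/* Reconstructed from the hinic_sq_prepare_wqe() hits above; the bodies of
 * sq_prepare_ctrl() and sq_prepare_task() are not shown by the search.
 */
void hinic_sq_prepare_wqe(struct hinic_sq *sq, u16 prod_idx,
			  struct hinic_sq_wqe *sq_wqe, struct hinic_sge *sges,
			  int nr_sges)
{
	int i;

	sq_prepare_ctrl(&sq_wqe->ctrl, prod_idx, nr_sges);	/* line 603 */
	sq_prepare_task(&sq_wqe->task);				/* line 605 */

	/* one buffer descriptor per scatter-gather element */
	for (i = 0; i < nr_sges; i++)
		sq_wqe->buf_descs[i].sge = sges[i];		/* line 608 */
}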
hinic_hw_wqe.h
    450   struct hinic_sq_wqe sq_wqe;    member
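Line 450 is the declaration the hinic_hw_qp.c hits resolve against: struct hinic_sq_wqe is one view of the generic hardware WQE, which is why hinic_sq_get_wqe() returns &hw_wqe->sq_wqe (line 670) and hinic_sq_write_wqe() casts back to struct hinic_hw_wqe (line 695). A rough guess at the container's shape; only the sq_wqe member is confirmed by these results, the other members are assumptions.

/* Guessed layout of the container in hinic_hw_wqe.h; only the sq_wqe
 * member (line 450) is confirmed by the search results.
 */
struct hinic_hw_wqe {
	/* HW format: one WQE buffer, interpreted per queue type */
	union {
		struct hinic_cmdq_wqe	cmdq_wqe;	/* assumption */
		struct hinic_sq_wqe	sq_wqe;		/* line 450 */
		struct hinic_rq_wqe	rq_wqe;		/* assumption */
	};
};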
/drivers/infiniband/hw/hns/ |
hns_roce_hw_v1.c
    2181  struct hns_roce_wqe_ctrl_seg *sq_wqe;                             in hns_roce_v1_poll_one() local
    2278  sq_wqe = hns_roce_get_send_wqe(*cur_qp,                           in hns_roce_v1_poll_one()
    2283  switch (le32_to_cpu(sq_wqe->flag) & HNS_ROCE_WQE_OPCODE_MASK) {   in hns_roce_v1_poll_one()
    2304  wc->wc_flags = (le32_to_cpu(sq_wqe->flag) & HNS_ROCE_WQE_IMM ?    in hns_roce_v1_poll_one()
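In the hns RoCE driver, sq_wqe is a different type entirely: the completion path in hns_roce_v1_poll_one() fetches the posted send WQE's control segment and decodes its little-endian flag word, masking out the opcode and testing HNS_ROCE_WQE_IMM for immediate data. A sketch of that decoding follows; only the mask, the IMM test and the wc_flags assignment come from the hits, while the opcode case label and the other wc assignments are illustrative assumptions.

/* Sketch of the flag decoding around lines 2283-2304; case values and
 * wc fields beyond what the hits show are assumptions.
 */
static void decode_send_wqe_sketch(struct hns_roce_wqe_ctrl_seg *sq_wqe,
				   struct ib_wc *wc)
{
	u32 flag = le32_to_cpu(sq_wqe->flag);

	switch (flag & HNS_ROCE_WQE_OPCODE_MASK) {
	case HNS_ROCE_WQE_OPCODE_SEND:		/* assumed case label */
		wc->opcode = IB_WC_SEND;
		break;
	/* ... RDMA read/write and other opcodes elided ... */
	default:
		wc->status = IB_WC_GENERAL_ERR;
		break;
	}

	wc->wc_flags = (flag & HNS_ROCE_WQE_IMM) ? IB_WC_WITH_IMM : 0;
}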