/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en_accel/ |
D | tls_rxtx.h |
      58  struct mlx5_cqe64 *cqe, u32 *cqe_bcnt) in mlx5e_tls_handle_rx_skb()
      70  mlx5e_accel_is_tls(struct mlx5_cqe64 *cqe, struct sk_buff *skb) { return false; } in mlx5e_accel_is_tls()
      73  struct mlx5_cqe64 *cqe, u32 *cqe_bcnt) {} in mlx5e_tls_handle_rx_skb()
|
D | ipsec_rxtx.h |
      80  static inline bool mlx5_ipsec_is_rx_flow(struct mlx5_cqe64 *cqe) in mlx5_ipsec_is_rx_flow()
      96  struct mlx5_cqe64 *cqe) in mlx5e_ipsec_offload_handle_rx_skb()
      99  static inline bool mlx5_ipsec_is_rx_flow(struct mlx5_cqe64 *cqe) { return false; } in mlx5_ipsec_is_rx_flow()
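The tls_rxtx.h and ipsec_rxtx.h hits above show the usual offload-header pattern: when the feature is compiled out, the header supplies empty or false-returning inline stubs so the RX path compiles without #ifdefs at the call sites. A minimal sketch of that idiom, with a hypothetical CONFIG symbol and helper name (not taken from these files):

    /* Sketch of the config-gated stub idiom; the MY_OFFLOAD names are illustrative. */
    struct mlx5_cqe64;

    #ifdef CONFIG_MY_OFFLOAD
    bool my_offload_is_rx_flow(struct mlx5_cqe64 *cqe);	/* real check lives in a .c file */
    #else
    /* Offload disabled: the stub lets callers keep an unconditional branch that folds away. */
    static inline bool my_offload_is_rx_flow(struct mlx5_cqe64 *cqe) { return false; }
    #endif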
|
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/ |
D | rxe_cq.c |
      12  int cqe, int comp_vector) in rxe_cq_chk_attr()
      57  int rxe_cq_from_init(struct rxe_dev *rxe, struct rxe_cq *cq, int cqe, in rxe_cq_from_init()
      90  int rxe_cq_resize_queue(struct rxe_cq *cq, int cqe, in rxe_cq_resize_queue()
      105  int rxe_cq_post(struct rxe_cq *cq, struct rxe_cqe *cqe, int solicited) in rxe_cq_post()
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/ |
D | cq.c |
      81  struct mlx4_cqe *cqe = get_cqe(cq, n & cq->ibcq.cqe); in get_sw_cqe() local
      133  static void mlx4_ib_free_cq_buf(struct mlx4_ib_dev *dev, struct mlx4_ib_cq_buf *buf, int cqe) in mlx4_ib_free_cq_buf()
      140  struct ib_umem **umem, u64 buf_addr, int cqe) in mlx4_ib_get_cq_umem()
      356  struct mlx4_cqe *cqe, *new_cqe; in mlx4_ib_cq_resize_copy_cqes() local
      500  static void dump_cqe(void *cqe) in dump_cqe()
      510  static void mlx4_ib_handle_error_cqe(struct mlx4_err_cqe *cqe, in mlx4_ib_handle_error_cqe()
      581  unsigned tail, struct mlx4_cqe *cqe, int is_eth) in use_tunnel_data()
      660  struct mlx4_cqe *cqe; in mlx4_ib_poll_one() local
      919  struct mlx4_cqe *cqe, *dest; in __mlx4_ib_cq_clean() local
|
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/ |
D | en_rx.c |
      114  struct mlx5_cqe64 *cqe = mlx5_cqwq_get_wqe(wq, ci); in mlx5e_cqes_update_owner() local
      122  struct mlx5_cqe64 *cqe = mlx5_cqwq_get_wqe(wq, ci); in mlx5e_cqes_update_owner() local
      638  struct mlx5_cqe64 *cqe; in mlx5e_poll_ico_cq() local
      773  static void mlx5e_lro_update_tcp_hdr(struct mlx5_cqe64 *cqe, struct tcphdr *tcp) in mlx5e_lro_update_tcp_hdr()
      789  static void mlx5e_lro_update_hdr(struct sk_buff *skb, struct mlx5_cqe64 *cqe, in mlx5e_lro_update_hdr()
      843  static inline void mlx5e_skb_set_hash(struct mlx5_cqe64 *cqe, in mlx5e_skb_set_hash()
      969  struct mlx5_cqe64 *cqe, in mlx5e_handle_csum()
      1039  static inline void mlx5e_build_rx_skb(struct mlx5_cqe64 *cqe, in mlx5e_build_rx_skb()
      1095  struct mlx5_cqe64 *cqe, in mlx5e_complete_rx_cqe()
      1136  mlx5e_skb_from_cqe_linear(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe, in mlx5e_skb_from_cqe_linear()
      [all …]
|
D | en_tc.h |
      279  static inline bool mlx5e_cqe_regb_chain(struct mlx5_cqe64 *cqe) in mlx5e_cqe_regb_chain()
      299  static inline bool mlx5e_cqe_regb_chain(struct mlx5_cqe64 *cqe) in mlx5e_cqe_regb_chain()
      302  mlx5e_tc_update_skb(struct mlx5_cqe64 *cqe, struct sk_buff *skb) in mlx5e_tc_update_skb()
|
/kernel/linux/linux-5.10/drivers/scsi/qedi/ |
D | qedi_fw.c |
      31  union iscsi_cqe *cqe, in qedi_process_logout_resp()
      82  union iscsi_cqe *cqe, in qedi_process_text_resp()
      176  union iscsi_cqe *cqe, in qedi_process_tmf_resp()
      247  union iscsi_cqe *cqe, in qedi_process_login_resp()
      309  struct iscsi_cqe_unsolicited *cqe, in qedi_get_rq_bdq_buf()
      348  struct iscsi_cqe_unsolicited *cqe, in qedi_put_rq_bdq_buf()
      386  struct iscsi_cqe_unsolicited *cqe, in qedi_unsol_pdu_adjust_bdq()
      398  union iscsi_cqe *cqe, in qedi_process_nopin_mesg()
      469  union iscsi_cqe *cqe, in qedi_process_async_mesg()
      523  union iscsi_cqe *cqe, in qedi_process_reject_mesg()
      [all …]
|
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/octeontx2/nic/ |
D | otx2_txrx.c |
      81  struct nix_cqe_tx_s *cqe, in otx2_snd_pkt_handler()
      167  struct nix_cqe_rx_s *cqe, struct sk_buff *skb) in otx2_set_rxhash()
      188  static void otx2_free_rcv_seg(struct otx2_nic *pfvf, struct nix_cqe_rx_s *cqe, in otx2_free_rcv_seg()
      208  struct nix_cqe_rx_s *cqe, int qidx) in otx2_check_rcv_errors()
      275  struct nix_cqe_rx_s *cqe) in otx2_rcv_pkt_handler()
      305  struct nix_cqe_rx_s *cqe; in otx2_rx_napi_handler() local
      362  struct nix_cqe_tx_s *cqe; in otx2_tx_napi_handler() local
      910  struct nix_cqe_rx_s *cqe; in otx2_cleanup_rx_cqes() local
      937  struct nix_cqe_tx_s *cqe; in otx2_cleanup_tx_cqes() local
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/cxgb4/ |
D | cq.c |
      186  struct t4_cqe cqe; in insert_recv_cqe() local
      220  struct t4_cqe cqe; in insert_sq_cqe() local
      422  static int cqe_completes_wr(struct t4_cqe *cqe, struct t4_wq *wq) in cqe_completes_wr()
      445  struct t4_cqe *cqe; in c4iw_count_rcqes() local
      544  static int poll_cq(struct t4_wq *wq, struct t4_cq *cq, struct t4_cqe *cqe, in poll_cq()
      757  struct t4_cqe cqe; in __c4iw_poll_cq_one() local
|
/kernel/linux/linux-5.10/net/sunrpc/xprtrdma/ |
D | frwr_ops.c |
      362  struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_fastreg() local
      446  struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv() local
      467  struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_wake() local
      570  struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_done() local
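Every frwr_ops.c hit recovers the ib_cqe that was attached to the work request from wc->wr_cqe. For context, a sketch of that RDMA completion-callback idiom under assumed names (my_req and my_done are illustrative, not part of frwr_ops.c):

    #include <linux/kernel.h>
    #include <rdma/ib_verbs.h>

    /* Hypothetical request tracker that embeds the completion cookie. */
    struct my_req {
    	struct ib_cqe cqe;	/* its .done callback runs when the work request completes */
    	/* ... request state ... */
    };

    /* Completion handler: map the ib_wc back to the tracker. */
    static void my_done(struct ib_cq *cq, struct ib_wc *wc)
    {
    	struct ib_cqe *cqe = wc->wr_cqe;
    	struct my_req *req = container_of(cqe, struct my_req, cqe);

    	if (wc->status != IB_WC_SUCCESS)
    		pr_err("request %p failed: %d\n", req, wc->status);
    	/* ... complete req ... */
    }

Before posting, the sender sets req->cqe.done = my_done and points the work request's wr_cqe at &req->cqe, which is how the handler can use container_of() instead of a lookup table.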
|
/kernel/linux/linux-5.10/drivers/scsi/bnx2i/ |
D | bnx2i_hwi.c |
      1338  struct cqe *cqe) in bnx2i_process_scsi_cmd_resp()
      1435  struct cqe *cqe) in bnx2i_process_login_resp()
      1503  struct cqe *cqe) in bnx2i_process_text_resp()
      1564  struct cqe *cqe) in bnx2i_process_tmf_resp()
      1603  struct cqe *cqe) in bnx2i_process_logout_resp()
      1649  struct cqe *cqe) in bnx2i_process_nopin_local_cmpl()
      1690  struct cqe *cqe) in bnx2i_process_nopin_mesg()
      1742  struct cqe *cqe) in bnx2i_process_async_mesg()
      1792  struct cqe *cqe) in bnx2i_process_reject_mesg()
      1829  struct cqe *cqe) in bnx2i_process_cmd_cleanup_resp()
      [all …]
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/ |
D | cq.c |
      81  void *cqe = get_cqe(cq, n & cq->ibcq.cqe); in get_sw_cqe() local
      117  static void handle_good_req(struct ib_wc *wc, struct mlx5_cqe64 *cqe, in handle_good_req()
      166  static void handle_responder(struct ib_wc *wc, struct mlx5_cqe64 *cqe, in handle_responder()
      270  static void dump_cqe(struct mlx5_ib_dev *dev, struct mlx5_err_cqe *cqe) in dump_cqe()
      277  struct mlx5_err_cqe *cqe, in mlx5_handle_error_cqe()
      356  static void get_sig_err_item(struct mlx5_sig_err_cqe *cqe, in get_sig_err_item()
      453  void *cqe; in mlx5_poll_one() local
      844  void *cqe; in init_cq_frag_buf() local
      1049  void *cqe, *dest; in __mlx5_ib_cq_clean() local
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/ |
D | mthca_cq.c |
      174  static inline struct mthca_cqe *cqe_sw(struct mthca_cqe *cqe) in cqe_sw()
      184  static inline void set_cqe_hw(struct mthca_cqe *cqe) in set_cqe_hw()
      191  __be32 *cqe = cqe_ptr; in dump_cqe() local
      264  static inline int is_recv_cqe(struct mthca_cqe *cqe) in is_recv_cqe()
      276  struct mthca_cqe *cqe; in mthca_cq_clean() local
      366  void mthca_free_cq_buf(struct mthca_dev *dev, struct mthca_cq_buf *buf, int cqe) in mthca_free_cq_buf()
      374  struct mthca_err_cqe *cqe, in handle_error_cqe()
      485  struct mthca_cqe *cqe; in mthca_poll_one() local
|
/kernel/linux/linux-5.10/drivers/net/ethernet/qlogic/qede/ |
D | qede_fp.c |
      656  struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_set_gro_params()
      839  struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_tpa_start()
      965  struct eth_fast_path_rx_tpa_cont_cqe *cqe) in qede_tpa_cont()
      980  struct eth_fast_path_rx_tpa_end_cqe *cqe) in qede_tpa_end()
      1070  static bool qede_pkt_is_ip_fragmented(struct eth_fast_path_rx_reg_cqe *cqe, in qede_pkt_is_ip_fragmented()
      1090  struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_xdp()
      1187  struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_build_jumbo()
      1244  union eth_rx_cqe *cqe, in qede_rx_process_tpa_cqe()
      1269  union eth_rx_cqe *cqe; in qede_rx_process_cqe() local
|
D | qede_ptp.h | 23 union eth_rx_cqe *cqe, in qede_ptp_record_rx_ts()
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/ocrdma/ |
D | ocrdma_verbs.c |
      1032  struct ocrdma_cqe *cqe = NULL; in ocrdma_flush_cq() local
      1592  struct ocrdma_cqe *cqe; in ocrdma_discard_cqes() local
      2440  struct ocrdma_cqe *cqe) in ocrdma_set_cqe_status_flushed()
      2471  static bool ocrdma_update_err_cqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_cqe()
      2493  static int ocrdma_update_err_rcqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_rcqe()
      2503  static int ocrdma_update_err_scqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_scqe()
      2514  struct ocrdma_cqe *cqe, struct ib_wc *ibwc, in ocrdma_poll_err_scqe()
      2558  struct ocrdma_cqe *cqe, in ocrdma_poll_success_scqe()
      2583  static bool ocrdma_poll_scqe(struct ocrdma_qp *qp, struct ocrdma_cqe *cqe, in ocrdma_poll_scqe()
      2600  struct ocrdma_cqe *cqe) in ocrdma_update_ud_rcqe()
      [all …]
|
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/fpga/ |
D | conn.c |
      251  struct mlx5_cqe64 *cqe, u8 status) in mlx5_fpga_conn_rq_cqe()
      291  struct mlx5_cqe64 *cqe, u8 status) in mlx5_fpga_conn_sq_cqe()
      333  struct mlx5_cqe64 *cqe) in mlx5_fpga_conn_handle_cqe()
      368  struct mlx5_cqe64 *cqe; in mlx5_fpga_conn_cqes() local
      418  struct mlx5_cqe64 *cqe; in mlx5_fpga_conn_create_cq() local
|
/kernel/linux/linux-5.10/drivers/infiniband/ulp/iser/ |
D | iser_memory.c |
      221  struct ib_cqe *cqe, in iser_inv_rkey()
      240  struct ib_cqe *cqe = &iser_task->iser_conn->ib_conn.reg_cqe; in iser_reg_sig_mr() local
      297  struct ib_cqe *cqe = &iser_task->iser_conn->ib_conn.reg_cqe; in iser_fast_reg_mr() local
|
D | iscsi_iser.h |
      248  struct ib_cqe cqe; member
      275  struct ib_cqe cqe; member
      295  struct ib_cqe cqe; member
      578  iser_rx(struct ib_cqe *cqe) in iser_rx()
      584  iser_tx(struct ib_cqe *cqe) in iser_tx()
      590  iser_login(struct ib_cqe *cqe) in iser_login()
|
/kernel/linux/linux-5.10/drivers/scsi/qedf/ |
D | qedf_io.c |
      1124  void qedf_scsi_completion(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_scsi_completion()
      1448  void qedf_process_warning_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_warning_compl()
      1518  void qedf_process_error_detect(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_error_detect()
      1973  void qedf_process_abts_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_abts_compl()
      2292  void qedf_process_cleanup_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_cleanup_compl()
      2524  void qedf_process_tmf_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_tmf_compl()
      2539  struct fcoe_cqe *cqe) in qedf_process_unsol_compl()
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/bnxt_re/ |
D | qplib_fp.c |
      1435  struct cq_req *cqe = (struct cq_req *)hw_cqe; in __clean_cq() local
      1445  struct cq_res_rc *cqe = (struct cq_res_rc *)hw_cqe; in __clean_cq() local
      2126  struct bnxt_qplib_cqe *cqe; in __flush_sq() local
      2166  struct bnxt_qplib_cqe *cqe; in __flush_rq() local
      2329  struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_req() local
      2436  struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_rc() local
      2517  struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_ud() local
      2620  struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_raweth_qp1() local
      2713  struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_terminal() local
      2827  struct bnxt_qplib_cqe *cqe, in bnxt_qplib_process_flush_list()
      [all …]
|
/kernel/linux/linux-5.10/drivers/infiniband/hw/vmw_pvrdma/ |
D | pvrdma_cq.c |
      291  struct pvrdma_cqe *cqe; in _pvrdma_flush_cqe() local
      326  struct pvrdma_cqe *cqe; in pvrdma_poll_one() local
|
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/ |
D | health.h | 10 #define MLX5E_RX_ERR_CQE(cqe) (get_cqe_opcode(cqe) != MLX5_CQE_RESP_SEND) argument
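The single health.h hit is the macro the mlx5 Ethernet driver uses to classify an RX CQE as an error purely by its opcode. A rough sketch of how such a check typically gates an RX handler, assuming the mlx5e-internal types; the hypothetical my_handle_rx_cqe stands in for the real handlers listed under en_rx.c above:

    /* Illustrative handler skeleton; assumes the mlx5e driver-internal headers
     * ("en.h", "en/health.h") for struct mlx5e_rq and MLX5E_RX_ERR_CQE(). */
    static void my_handle_rx_cqe(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe)
    {
    	if (unlikely(MLX5E_RX_ERR_CQE(cqe))) {
    		/* Unexpected opcode: count the error and drop instead of building an skb. */
    		rq->stats->wqe_err++;
    		return;
    	}
    	/* ... normal path: build and deliver the skb for the completed WQE ... */
    }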
|
/kernel/linux/linux-5.10/drivers/infiniband/sw/siw/ |
D | siw_cq.c | 50 struct siw_cqe *cqe; in siw_reap_cqe() local
|
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/ |
D | rx.c | 81 struct mlx5_cqe64 *cqe, in mlx5e_xsk_skb_from_cqe_linear()
|