/drivers/net/ethernet/huawei/hinic/
  hinic_hw_qp.c
    635: void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db()
    658: unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe()
    674: void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_return_wqe()
    689: struct sk_buff *skb, unsigned int wqe_size) in hinic_sq_write_wqe()
    713: unsigned int *wqe_size, u16 *cons_idx) in hinic_sq_read_wqebb()
    751: unsigned int wqe_size, u16 *cons_idx) in hinic_sq_read_wqe()
    766: void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_put_wqe()
    797: unsigned int wqe_size, u16 *prod_idx) in hinic_rq_get_wqe()
    838: unsigned int wqe_size, in hinic_rq_read_wqe()
    873: unsigned int wqe_size, in hinic_rq_read_next_wqe()
    [all …]
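The hinic helpers above share one pattern: a WQE of wqe_size bytes occupies a whole number of fixed-size basic blocks (WQEBBs), and the get/put helpers advance the producer and consumer indices by that block count. The standalone sketch below models only that pattern; the WQEBB size, queue depth, and every toy_* name are assumptions, not the driver's definitions.

#include <stdint.h>
#include <stdio.h>

#define WQEBB_SIZE 64    /* basic-block granularity, assumed */
#define Q_DEPTH    256   /* queue depth in WQEBBs, must be a power of two */

struct toy_wq {
    uint16_t prod_idx;   /* next free basic block */
    uint16_t cons_idx;   /* oldest in-flight basic block */
};

static unsigned int wqe_size_to_wqebbs(unsigned int wqe_size)
{
    return (wqe_size + WQEBB_SIZE - 1) / WQEBB_SIZE;   /* round up */
}

/* Reserve room for one WQE; returns 0 and its start index, or -1 if full. */
static int toy_get_wqe(struct toy_wq *wq, unsigned int wqe_size, uint16_t *pi)
{
    unsigned int wqebbs = wqe_size_to_wqebbs(wqe_size);
    uint16_t used = (uint16_t)(wq->prod_idx - wq->cons_idx);  /* wraps naturally */

    if (used + wqebbs > Q_DEPTH)
        return -1;
    *pi = wq->prod_idx % Q_DEPTH;
    wq->prod_idx += (uint16_t)wqebbs;
    return 0;
}

/* Release a completed WQE (consumer side). */
static void toy_put_wqe(struct toy_wq *wq, unsigned int wqe_size)
{
    wq->cons_idx += (uint16_t)wqe_size_to_wqebbs(wqe_size);
}

int main(void)
{
    struct toy_wq wq = { 0, 0 };
    uint16_t pi;

    if (toy_get_wqe(&wq, 200, &pi) == 0)   /* 200 bytes -> 4 WQEBBs */
        printf("wqe at index %u, prod now %u\n", (unsigned)pi, (unsigned)wq.prod_idx);
    toy_put_wqe(&wq, 200);
    return 0;
}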
  hinic_hw_wq.c
    740: struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe()
    789: void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe()
    803: void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe()
    821: struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_read_wqe()
    889: unsigned int wqe_size) in hinic_write_wqe()
  hinic_tx.c
    498: unsigned int wqe_size; in hinic_lb_xmit_frame() local
    559: unsigned int wqe_size; in hinic_xmit_frame() local
    670: unsigned int wqe_size; in free_all_tx_skbs() local
    706: unsigned int wqe_size; in free_tx_poll() local
  hinic_hw_cmdq.c
    155: unsigned int wqe_size = 0; in cmdq_wqe_size_from_bdlen() local
    532: unsigned int bufdesc_len, wqe_size; in clear_wqe_complete_bit() local
/drivers/infiniband/hw/qedr/
  qedr_hsi_rdma.h
    310: u8 wqe_size; member
    338: u8 wqe_size; member
    374: u8 wqe_size; member
    420: u8 wqe_size; member
    475: u8 wqe_size; member
    498: u8 wqe_size; member
    548: u8 wqe_size; member
    602: u8 wqe_size; member
    628: u8 wqe_size; member
    663: u8 wqe_size; member
    [all …]
  qedr.h
    434: u8 wqe_size; member
    445: u8 wqe_size; member
  verbs.c
    3290: struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data()
    3375: static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges()
/drivers/infiniband/sw/rxe/
  rxe_qp.c
    195: int wqe_size; in rxe_qp_init_req() local
    263: int wqe_size; in rxe_qp_init_resp() local
/drivers/net/ethernet/mellanox/mlx5/core/en/
  txrx.h
    93: static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe()
    450: static inline u16 mlx5e_stop_room_for_wqe(struct mlx5_core_dev *mdev, u16 wqe_size) in mlx5e_stop_room_for_wqe()
    481: static inline bool mlx5e_icosq_can_post_wqe(struct mlx5e_icosq *sq, u16 wqe_size) in mlx5e_icosq_can_post_wqe()
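mlx5e_stop_room_for_wqe deals with a cyclic-buffer subtlety: a WQE must occupy contiguous WQEBBs and may not wrap, so before the producer index reaches the end of the ring the driver may have to pad the tail with no-op WQEBBs. A commonly used bound for the room to reserve is 2 * wqe_size - 1 (up to wqe_size - 1 wasted tail blocks plus the WQE itself); the sketch below assumes that bound rather than quoting the mlx5e implementation.

#include <stdio.h>

static unsigned int toy_stop_room_for_wqe(unsigned int wqe_size_in_wqebbs)
{
    /* worst case: wqe_size - 1 tail WQEBBs padded with no-ops, plus the WQE */
    return 2 * wqe_size_in_wqebbs - 1;
}

int main(void)
{
    /* a 4-WQEBB WQE needs 7 WQEBBs of guaranteed free room */
    printf("stop room = %u WQEBBs\n", toy_stop_room_for_wqe(4));
    return 0;
}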
  params.c
    821: int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_get_log_cq_size() local
    1029: int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_hd_per_wqe() local
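Both params.c hits size a multi-packet WQE as stride size times stride count, with the stride size kept in log2 form. A standalone illustration of the arithmetic; the concrete values are made up, not mlx5 defaults.

#include <stdio.h>

#define BIT(n) (1u << (n))

int main(void)
{
    unsigned int log_stride_sz = 6;    /* 64-byte strides (assumed value) */
    unsigned int num_strides = BIT(9); /* 512 strides (assumed value) */
    unsigned int wqe_size = BIT(log_stride_sz) * num_strides;

    printf("wqe_size = %u bytes\n", wqe_size);   /* 64 * 512 = 32768 */
    return 0;
}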
/drivers/infiniband/hw/ocrdma/
  ocrdma_verbs.c
    1923: const struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges()
    1964: u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
    1983: u32 wqe_size = sizeof(*hdr) + sizeof(*ext_rw); in ocrdma_build_write() local
    2000: u32 wqe_size = ((wr->num_sge + 1) * sizeof(struct ocrdma_sge)) + in ocrdma_build_read() local
    2033: u32 wqe_size = sizeof(*fast_reg) + sizeof(*hdr); in ocrdma_build_reg() local
    2202: u32 wqe_size = 0; in ocrdma_build_rqe() local
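The ocrdma_build_* hits expose the sizing arithmetic directly: each verb's WQE is a fixed header plus per-verb segments, and the READ path at line 2000 reserves (num_sge + 1) SGE-sized slots on top of the header, the extra slot presumably carrying the remote address segment. A standalone model with stand-in struct layouts; the 16-byte header and SGE sizes are assumptions, not the real ocrdma definitions.

#include <stdint.h>
#include <stdio.h>

struct toy_hdr { uint32_t opcode, total_len, lkey, flags; }; /* 16 bytes, assumed */
struct toy_sge { uint64_t addr; uint32_t len, lkey; };       /* 16 bytes, assumed */

static uint32_t toy_read_wqe_size(uint32_t num_sge)
{
    /* (num_sge + 1) SGE-sized slots plus the header, as in the hit above */
    return (uint32_t)((num_sge + 1) * sizeof(struct toy_sge) +
                      sizeof(struct toy_hdr));
}

int main(void)
{
    /* 2 SGEs -> (2 + 1) * 16 + 16 = 64 bytes before any rounding */
    printf("read wqe_size for 2 SGEs: %u bytes\n", toy_read_wqe_size(2));
    return 0;
}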
  ocrdma.h
    110: u32 wqe_size; member
/drivers/infiniband/hw/erdma/
  erdma_qp.c
    285: u32 wqe_size, wqebb_cnt, hw_op, flags, sgl_offset; in erdma_push_one_sqe() local
/drivers/infiniband/hw/bnxt_re/
  qplib_fp.h
    95: u16 wqe_size; member
    254: u16 wqe_size; member
    585: static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot()
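bnxt_qplib_set_rq_max_slot converts a configured RQ wqe_size into a slot count, which suggests the queue is managed in fixed-size slots. A sketch of that conversion; the 16-byte slot granularity is an assumption, not a quoted bnxt_re constant.

#include <stdio.h>

#define TOY_SLOT_SIZE 16u   /* assumed hardware slot granularity */

static unsigned int toy_set_rq_max_slot(unsigned int wqe_size)
{
    return wqe_size / TOY_SLOT_SIZE;   /* slots consumed by one RQ WQE */
}

int main(void)
{
    printf("a 128-byte RQ WQE spans %u slots\n", toy_set_rq_max_slot(128));
    return 0;
}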
  roce_hsi.h
    194: u8 wqe_size; member
    217: u8 wqe_size; member
    237: u8 wqe_size; member
    286: u8 wqe_size; member
    309: u8 wqe_size; member
    324: u8 wqe_size; member
    521: u8 wqe_size; member
    537: u8 wqe_size; member
/drivers/infiniband/hw/vmw_pvrdma/
  pvrdma.h
    155: int wqe_size; member
    170: int wqe_size; member
/drivers/infiniband/hw/mlx5/
  qp.c
    262: size_t wqe_size = 1 << wq->wqe_shift; in mlx5_ib_read_wqe_rq() local
    294: size_t wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_read_wqe_srq() local
    355: int wqe_size; in set_rq_size() local
    487: static int get_send_sge(struct ib_qp_init_attr *attr, int wqe_size) in get_send_sge()
    513: int wqe_size; in calc_sq_size() local
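The two mlx5_ib_read_wqe_* hits derive wqe_size from a log2 shift (1 << wqe_shift) and use it as a fixed stride into the queue buffer. A standalone model of that lookup, including the power-of-two index wrap; the field names are illustrative, not the mlx5 structures.

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

struct toy_wq {
    void *buf;        /* queue buffer */
    int   wqe_shift;  /* log2 of the per-entry stride */
    int   wqe_cnt;    /* number of entries, a power of two */
};

static void *toy_get_wqe(struct toy_wq *wq, int n)
{
    size_t wqe_size = (size_t)1 << wq->wqe_shift;

    /* n & (wqe_cnt - 1) wraps the index inside the cyclic buffer */
    return (char *)wq->buf + (size_t)(n & (wq->wqe_cnt - 1)) * wqe_size;
}

int main(void)
{
    struct toy_wq wq = { .wqe_shift = 6, .wqe_cnt = 8 };  /* 8 x 64-byte WQEs */

    wq.buf = calloc((size_t)wq.wqe_cnt, (size_t)1 << wq.wqe_shift);
    printf("wqe 9 sits at offset %td\n",
           (char *)toy_get_wqe(&wq, 9) - (char *)wq.buf);  /* wraps to 64 */
    free(wq.buf);
    return 0;
}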
  odp.c
    1118: int wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_mr_responder_pfault_handler_srq() local
    1137: int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler_rq() local
  umr.c
    230: unsigned int wqe_size = in mlx5r_umr_post_send() local
  wr.c
    431: int wqe_size; in set_sig_data_segment() local
/drivers/net/ethernet/ibm/ehea/
  ehea_qmr.c
    358: int nr_pages, int wqe_size, int act_nr_sges, in ehea_qp_alloc_register()
/drivers/infiniband/hw/irdma/
  uk.c
    1635: int irdma_fragcnt_to_wqesize_rq(u32 frag_cnt, u16 *wqe_size) in irdma_fragcnt_to_wqesize_rq()
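irdma_fragcnt_to_wqesize_rq maps a fragment (SGE) count to the smallest WQE size the hardware supports. The sketch below copies only the shape of such a mapping; the tier thresholds and sizes are invented, the real table lives in the driver.

#include <stdint.h>
#include <stdio.h>

static int toy_fragcnt_to_wqesize_rq(uint32_t frag_cnt, uint16_t *wqe_size)
{
    /* hypothetical tiers: each doubling of WQE size admits more fragments */
    if (frag_cnt <= 1)
        *wqe_size = 32;
    else if (frag_cnt <= 3)
        *wqe_size = 64;
    else if (frag_cnt <= 7)
        *wqe_size = 128;
    else if (frag_cnt <= 14)
        *wqe_size = 256;
    else
        return -1;   /* more fragments than the queue format supports */
    return 0;
}

int main(void)
{
    uint16_t sz;

    if (!toy_fragcnt_to_wqesize_rq(5, &sz))
        printf("5 fragments -> %u-byte WQE\n", (unsigned)sz);
    return 0;
}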
/drivers/net/ethernet/microsoft/mana/
  gdma_main.c
    1057: u32 wqe_size; in mana_gd_post_work_request() local
/drivers/net/ethernet/mellanox/mlx5/core/
  en.h
    159: #define MLX5E_KLM_MAX_ENTRIES_PER_WQE(wqe_size)\ argument
    162: #define MLX5E_KLM_ENTRIES_PER_WQE(wqe_size)\ argument
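The two en.h macros bound how many KLM scatter entries fit in a UMR WQE of a given size: subtract the fixed control-segment overhead, divide by the per-entry size, and, for the non-MAX variant, round down to the hardware's entry alignment. The sketch below assumes that structure; every TOY_* constant is invented, not an mlx5 value.

#include <stdio.h>

#define TOY_UMR_OVERHEAD   48u  /* fixed WQE header/control bytes, assumed */
#define TOY_KLM_ENTRY_SZ   16u  /* bytes per scatter entry, assumed */
#define TOY_KLM_ALIGNMENT   4u  /* entries come in groups of 4, assumed */

#define TOY_KLM_MAX_ENTRIES_PER_WQE(wqe_size) \
    (((wqe_size) - TOY_UMR_OVERHEAD) / TOY_KLM_ENTRY_SZ)

/* align down to the entry-group granularity */
#define TOY_KLM_ENTRIES_PER_WQE(wqe_size) \
    (TOY_KLM_MAX_ENTRIES_PER_WQE(wqe_size) / TOY_KLM_ALIGNMENT * TOY_KLM_ALIGNMENT)

int main(void)
{
    /* 256-byte WQE: (256 - 48) / 16 = 13 max, aligned down to 12 usable */
    printf("max %u, usable %u\n",
           TOY_KLM_MAX_ENTRIES_PER_WQE(256u),
           TOY_KLM_ENTRIES_PER_WQE(256u));
    return 0;
}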
/drivers/infiniband/core/
  uverbs_cmd.c
    2189: u32 wqe_size, u32 sge_count) in ib_uverbs_unmarshall_recv()