/drivers/net/ethernet/mellanox/mlx5/core/

D | en_rx.c |
      58  mlx5e_skb_from_cqe_mpwrq_linear(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi,
      61  mlx5e_skb_from_cqe_mpwrq_nonlinear(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi,
     386  struct mlx5e_wqe_frag_info *wi,  in mlx5e_free_rx_wqe() argument
     391  for (i = 0; i < rq->wqe.info.num_frags; i++, wi++)  in mlx5e_free_rx_wqe()
     392  mlx5e_put_rx_frag(rq, wi, recycle);  in mlx5e_free_rx_wqe()
     397  struct mlx5e_wqe_frag_info *wi = get_frag(rq, ix);  in mlx5e_dealloc_rx_wqe() local
     399  mlx5e_free_rx_wqe(rq, wi, false);  in mlx5e_dealloc_rx_wqe()
     464  mlx5e_free_rx_mpwqe(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi, bool recycle)  in mlx5e_free_rx_mpwqe() argument
     467  struct mlx5e_dma_info *dma_info = wi->umr.dma_info;  in mlx5e_free_rx_mpwqe()
     471  if (bitmap_full(wi->xdp_xmit_bitmap, MLX5_MPWRQ_PAGES_PER_WQE))  in mlx5e_free_rx_mpwqe()
     [all …]

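In the en_rx.c lines above, `wi->xdp_xmit_bitmap` decides which pages of a multi-packet WQE may be recycled when the WQE is freed: bits set for pages handed to the XDP transmit path are skipped (and the kernel short-circuits via bitmap_full() when all of them were). The following is a minimal user-space sketch of that bitmap-gated recycle pattern; the struct, field and function names are illustrative, not the mlx5 ones.

#include <stdint.h>
#include <stdio.h>

#define PAGES_PER_WQE 8  /* illustrative; mlx5 uses MLX5_MPWRQ_PAGES_PER_WQE */

struct page_slot { int page_id; };           /* stand-in for a DMA-mapped page  */

struct rx_wqe_info {
        struct page_slot pages[PAGES_PER_WQE];
        uint8_t xdp_xmit_bitmap;             /* bit set => page given to XDP_TX */
};

static void recycle_page(struct page_slot *p)
{
        printf("recycle page %d\n", p->page_id);
}

/* Free a multi-packet WQE: pages flagged in the bitmap were handed to the XDP
 * transmit path and will be released on TX completion, so skip them here. */
static void free_rx_mpwqe(struct rx_wqe_info *wi)
{
        for (int i = 0; i < PAGES_PER_WQE; i++) {
                if (wi->xdp_xmit_bitmap & (1u << i))
                        continue;            /* owned by the XDP path */
                recycle_page(&wi->pages[i]);
        }
        wi->xdp_xmit_bitmap = 0;
}

int main(void)
{
        struct rx_wqe_info wi = { .xdp_xmit_bitmap = 0x05 };

        for (int i = 0; i < PAGES_PER_WQE; i++)
                wi.pages[i].page_id = i;
        free_rx_mpwqe(&wi);                  /* pages 0 and 2 are skipped */
        return 0;
}
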
D | en_tx.c |
     434  struct mlx5e_tx_wqe_info *wi;  in mlx5e_tx_flush() local
     442  wi = &sq->db.wqe_info[pi];  in mlx5e_tx_flush()
     444  *wi = (struct mlx5e_tx_wqe_info) {  in mlx5e_tx_flush()
     456  struct mlx5e_tx_wqe_info *wi, struct mlx5_wqe_ctrl_seg *cseg,  in mlx5e_txwqe_complete() argument
     462  *wi = (struct mlx5e_tx_wqe_info) {  in mlx5e_txwqe_complete()
     475  sq->pc += wi->num_wqebbs;  in mlx5e_txwqe_complete()
     503  struct mlx5e_tx_wqe_info *wi;  in mlx5e_sq_xmit_wqe() local
     511  wi = &sq->db.wqe_info[pi];  in mlx5e_sq_xmit_wqe()
     542  mlx5e_txwqe_complete(sq, skb, attr, wqe_attr, num_dma, wi, cseg, xmit_more);  in mlx5e_sq_xmit_wqe()
     618  struct mlx5e_tx_wqe_info *wi;  in mlx5e_tx_mpwqe_session_complete() local
     [all …]

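The recurring pattern in the en_tx.c hits is: look up the wqe_info slot at the current producer index, fill it with the posted descriptor's size and byte count, then advance the producer counter by `num_wqebbs` so the completion path can later walk the same slots. A simplified sketch of that bookkeeping follows, with invented names and a plain array standing in for the send queue.

#include <stdint.h>
#include <stdio.h>

#define SQ_SIZE 64                       /* ring size, power of two (illustrative) */

struct tx_wqe_info {                     /* per-descriptor bookkeeping, cf. mlx5e_tx_wqe_info */
        uint8_t  num_wqebbs;             /* how many basic blocks this WQE occupies */
        uint32_t num_bytes;              /* bytes reported on completion */
};

struct tx_sq {
        struct tx_wqe_info wqe_info[SQ_SIZE];
        uint16_t pc;                     /* producer counter, free-running */
};

/* Record metadata for the WQE being posted at the current producer position,
 * then advance the producer counter so the next post lands after it. */
static void txwqe_complete(struct tx_sq *sq, uint8_t num_wqebbs, uint32_t num_bytes)
{
        uint16_t pi = sq->pc & (SQ_SIZE - 1);        /* producer index within the ring */

        sq->wqe_info[pi] = (struct tx_wqe_info) {
                .num_wqebbs = num_wqebbs,
                .num_bytes  = num_bytes,
        };
        sq->pc += num_wqebbs;            /* the completion path consumes the same slots */
}

int main(void)
{
        struct tx_sq sq = { .pc = 0 };

        txwqe_complete(&sq, 2, 1514);
        txwqe_complete(&sq, 1, 60);
        printf("pc=%u\n", (unsigned)sq.pc);          /* 3 */
        return 0;
}
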
D | en.h |
     583  (*mlx5e_fp_skb_from_cqe_mpwrq)(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi,
     587  struct mlx5e_wqe_frag_info *wi, u32 cqe_bcnt);

/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

D | ktls_txrx.h |
      25  void mlx5e_ktls_handle_ctx_completion(struct mlx5e_icosq_wqe_info *wi);
      26  void mlx5e_ktls_handle_get_psv_completion(struct mlx5e_icosq_wqe_info *wi,
      30  struct mlx5e_tx_wqe_info *wi,
      34  struct mlx5e_tx_wqe_info *wi,  in mlx5e_ktls_tx_try_handle_resync_dump_comp() argument
      37  if (unlikely(wi->resync_dump_frag_page)) {  in mlx5e_ktls_tx_try_handle_resync_dump_comp()
      38  mlx5e_ktls_tx_handle_resync_dump_comp(sq, wi, dma_fifo_cc);  in mlx5e_ktls_tx_try_handle_resync_dump_comp()
      54  struct mlx5e_tx_wqe_info *wi,  in mlx5e_ktls_tx_try_handle_resync_dump_comp() argument

D | ktls_rx.c |
     130  struct mlx5e_icosq_wqe_info *wi)  in icosq_fill_wi() argument
     132  sq->db.wqe_info[pi] = *wi;  in icosq_fill_wi()
     140  struct mlx5e_icosq_wqe_info wi;  in post_static_params() local
     153  wi = (struct mlx5e_icosq_wqe_info) {  in post_static_params()
     158  icosq_fill_wi(sq, pi, &wi);  in post_static_params()
     170  struct mlx5e_icosq_wqe_info wi;  in post_progress_params() local
     183  wi = (struct mlx5e_icosq_wqe_info) {  in post_progress_params()
     189  icosq_fill_wi(sq, pi, &wi);  in post_progress_params()
     256  struct mlx5e_icosq_wqe_info wi;  in resync_post_get_progress_params() local
     305  wi = (struct mlx5e_icosq_wqe_info) {  in resync_post_get_progress_params()
     [all …]

D | ktls_tx.c |
     150  struct mlx5e_tx_wqe_info *wi = &sq->db.wqe_info[pi];  in tx_fill_wi() local
     152  *wi = (struct mlx5e_tx_wqe_info) {  in tx_fill_wi()
     349  struct mlx5e_tx_wqe_info *wi,  in mlx5e_ktls_tx_handle_resync_dump_comp() argument
     359  put_page(wi->resync_dump_frag_page);  in mlx5e_ktls_tx_handle_resync_dump_comp()
     361  stats->tls_dump_bytes += wi->num_bytes;  in mlx5e_ktls_tx_handle_resync_dump_comp()

/drivers/net/ethernet/mellanox/mlx5/core/en/

D | xdp.c |
     174  struct mlx5e_xdp_wqe_info *wi, *edge_wi;  in mlx5e_xdpsq_get_next_pi() local
     176  wi = &sq->db.wqe_info[pi];  in mlx5e_xdpsq_get_next_pi()
     177  edge_wi = wi + contig_wqebbs;  in mlx5e_xdpsq_get_next_pi()
     180  for (; wi < edge_wi; wi++) {  in mlx5e_xdpsq_get_next_pi()
     181  *wi = (struct mlx5e_xdp_wqe_info) {  in mlx5e_xdpsq_get_next_pi()
     224  struct mlx5e_xdp_wqe_info *wi = &sq->db.wqe_info[pi];  in mlx5e_xdp_mpwqe_complete() local
     230  wi->num_wqebbs = DIV_ROUND_UP(ds_count, MLX5_SEND_WQEBB_NUM_DS);  in mlx5e_xdp_mpwqe_complete()
     231  wi->num_pkts = session->pkt_count;  in mlx5e_xdp_mpwqe_complete()
     233  sq->pc += wi->num_wqebbs;  in mlx5e_xdp_mpwqe_complete()
     367  struct mlx5e_xdp_wqe_info *wi,  in mlx5e_free_xdpsq_desc() argument
     [all …]

D | txrx.h |
     155  struct mlx5e_tx_wqe_info *wi, *edge_wi;  in mlx5e_txqsq_get_next_pi() local
     157  wi = &sq->db.wqe_info[pi];  in mlx5e_txqsq_get_next_pi()
     158  edge_wi = wi + contig_wqebbs;  in mlx5e_txqsq_get_next_pi()
     161  for (; wi < edge_wi; wi++) {  in mlx5e_txqsq_get_next_pi()
     162  *wi = (struct mlx5e_tx_wqe_info) {  in mlx5e_txqsq_get_next_pi()
     207  struct mlx5e_icosq_wqe_info *wi, *edge_wi;  in mlx5e_icosq_get_next_pi() local
     209  wi = &sq->db.wqe_info[pi];  in mlx5e_icosq_get_next_pi()
     210  edge_wi = wi + contig_wqebbs;  in mlx5e_icosq_get_next_pi()
     213  for (; wi < edge_wi; wi++) {  in mlx5e_icosq_get_next_pi()
     214  *wi = (struct mlx5e_icosq_wqe_info) {  in mlx5e_icosq_get_next_pi()

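The txrx.h hits (mlx5e_txqsq_get_next_pi() and mlx5e_icosq_get_next_pi()), like mlx5e_xdpsq_get_next_pi() above, walk `wi` up to `edge_wi` to pad the tail of the ring when a multi-WQEBB descriptor would not fit contiguously before the wrap. Below is a self-contained sketch of that wrap handling; the names, sizes and the NOP marker are made up for illustration, and the actual kernel code also posts real NOP WQEs for the padded slots.

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8                      /* WQEBBs in the ring (illustrative) */

struct wqe_info {
        uint8_t num_wqebbs;
        int is_nop;
};

struct sq {
        struct wqe_info db[RING_SIZE];
        uint16_t pc;                     /* free-running producer counter */
};

/* Return a producer index with at least `size` contiguous WQEBBs before the
 * ring edge, padding the remainder of the ring with NOP entries if needed. */
static uint16_t get_next_pi(struct sq *sq, uint8_t size)
{
        uint16_t pi = sq->pc & (RING_SIZE - 1);
        uint16_t contig = RING_SIZE - pi;        /* WQEBBs left before the wrap */

        if (contig < size) {
                struct wqe_info *wi = &sq->db[pi];
                struct wqe_info *edge_wi = wi + contig;

                /* Fill the tail with one-WQEBB NOPs so the real descriptor
                 * starts at index 0 and stays contiguous. */
                for (; wi < edge_wi; wi++)
                        *wi = (struct wqe_info) { .num_wqebbs = 1, .is_nop = 1 };
                sq->pc += contig;
                pi = 0;
        }
        return pi;
}

int main(void)
{
        struct sq sq = { .pc = 6 };              /* only 2 WQEBBs left before the edge */
        uint16_t pi = get_next_pi(&sq, 4);

        printf("pi=%u pc=%u\n", (unsigned)pi, (unsigned)sq.pc);  /* pi=0, pc=8 */
        return 0;
}
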
/drivers/video/fbdev/

D | leo.c |
     452  struct fb_wid_item *wi;  in leo_wid_put() local
     461  for (i = 0, wi = wl->wl_list; i < wl->wl_count; i++, wi++) {  in leo_wid_put()
     462  switch (wi->wi_type) {  in leo_wid_put()
     464  j = (wi->wi_index & 0xf) + 0x40;  in leo_wid_put()
     468  j = wi->wi_index & 0x3f;  in leo_wid_put()
     475  sbus_writel(wi->wi_values[0], &lx_krn->krn_value);  in leo_wid_put()
     488  struct fb_wid_item wi;  in leo_init_wids() local
     492  wl.wl_list = &wi;  in leo_init_wids()
     493  wi.wi_type = FB_WID_DBL_8;  in leo_init_wids()
     494  wi.wi_index = 0;  in leo_init_wids()
     [all …]

/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

D | rx.c |
      27  struct mlx5e_mpw_info *wi,  in mlx5e_xsk_skb_from_cqe_mpwrq_linear() argument
      32  struct xdp_buff *xdp = wi->umr.dma_info[page_idx].xsk;  in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
      70  __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */  in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
      82  struct mlx5e_wqe_frag_info *wi,  in mlx5e_xsk_skb_from_cqe_linear() argument
      85  struct xdp_buff *xdp = wi->di->xsk;  in mlx5e_xsk_skb_from_cqe_linear()
      92  WARN_ON_ONCE(wi->offset);  in mlx5e_xsk_skb_from_cqe_linear()

D | tx.c |
      56  struct mlx5e_xdp_wqe_info *wi = &sq->db.wqe_info[pi];  in mlx5e_xsk_tx_post_err() local
      59  wi->num_wqebbs = 1;  in mlx5e_xsk_tx_post_err()
      60  wi->num_pkts = 1;  in mlx5e_xsk_tx_post_err()

D | rx.h |
      13  struct mlx5e_mpw_info *wi,
      19  struct mlx5e_wqe_frag_info *wi,

/drivers/input/serio/

D | hp_sdc.c |
     140  hp_sdc.wi = 0xff;  in hp_sdc_status_out8()
     491  if (hp_sdc.wi > 0x73 || hp_sdc.wi < 0x70 ||  in hp_sdc_put()
     492  w7[hp_sdc.wi - 0x70] == hp_sdc.r7[hp_sdc.wi - 0x70]) {  in hp_sdc_put()
     501  hp_sdc.wi = 0x70 + i;  in hp_sdc_put()
     514  hp_sdc_data_out8(w7[hp_sdc.wi - 0x70]);  in hp_sdc_put()
     515  hp_sdc.r7[hp_sdc.wi - 0x70] = w7[hp_sdc.wi - 0x70];  in hp_sdc_put()
     516  hp_sdc.wi++; /* write index register autoincrements */  in hp_sdc_put()
     846  hp_sdc.wi = 0xff;  in hp_sdc_init()

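In the hp_sdc.c lines, `hp_sdc.wi` tracks a hardware write-index register alongside a shadow copy of registers 0x70–0x73 (`r7`): a register is only written when the cached value differs, and the driver relies on the index auto-incrementing after each data write. Below is a rough user-space model of that logic; the register map, state struct and helpers are stand-ins, not the HP SDC interface, and the real routine updates one register per invocation of a larger state machine rather than looping.

#include <stdint.h>
#include <stdio.h>

#define R7_BASE 0x70                       /* four data registers: 0x70..0x73 */

static uint8_t hw_index;                   /* models the controller's index register */
static uint8_t hw_reg[4];                  /* models the registers behind it */

static void hw_select(uint8_t idx)
{
        hw_index = idx;
}

/* A data write goes to the selected register, then the index auto-increments. */
static void hw_write_data(uint8_t val)
{
        hw_reg[hw_index - R7_BASE] = val;
        hw_index++;
        printf("wrote 0x%02x to reg 0x%02x\n", val, (unsigned)(hw_index - 1));
}

struct sdc_state {
        uint8_t wi;                        /* last index we believe is selected */
        uint8_t r7[4];                     /* shadow of the hardware registers */
};

/* Push one desired register image w7[0..3] out to the hardware, skipping
 * registers whose shadow already matches and reusing the auto-increment
 * when consecutive registers need updating. */
static void put_r7(struct sdc_state *s, const uint8_t w7[4])
{
        for (int i = 0; i < 4; i++) {
                if (w7[i] == s->r7[i])
                        continue;                  /* already up to date */
                if (s->wi != R7_BASE + i) {        /* index not where we need it */
                        s->wi = R7_BASE + i;
                        hw_select(s->wi);
                }
                hw_write_data(w7[i]);
                s->r7[i] = w7[i];
                s->wi++;                           /* mirrors the auto-increment */
        }
}

int main(void)
{
        struct sdc_state s = { .wi = 0xff };       /* 0xff: index state unknown */
        const uint8_t want[4] = { 0x10, 0x10, 0x20, 0x30 };

        put_r7(&s, want);
        return 0;
}
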
/drivers/md/persistent-data/

D | dm-array.c |
     861  struct walk_info *wi = context;  in walk_ablock() local
     871  r = get_ablock(wi->info, le64_to_cpu(block_le), &block, &ab);  in walk_ablock()
     878  r = wi->fn(wi->context, keys[0] * max_entries + i,  in walk_ablock()
     879  element_at(wi->info, ab, i));  in walk_ablock()
     885  unlock_ablock(wi->info, block);  in walk_ablock()
     893  struct walk_info wi;  in dm_array_walk() local
     895  wi.info = info;  in dm_array_walk()
     896  wi.fn = fn;  in dm_array_walk()
     897  wi.context = context;  in dm_array_walk()
     899  return dm_btree_walk(&info->btree_info, root, walk_ablock, &wi);  in dm_array_walk()

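dm_array_walk() in this listing bundles the array info, the caller's callback and its context into a `struct walk_info`, and passes that as the single opaque pointer that dm_btree_walk() later hands back to walk_ablock(). The sketch below shows the same context-struct-plus-adapter shape with generic names; it is not the device-mapper API itself, just the pattern.

#include <stdio.h>

/* Caller-visible callback: invoked once per element. */
typedef int (*walk_fn)(void *context, unsigned index, int value);

/* Private bundle threaded through the generic walker as one pointer. */
struct walk_info {
        walk_fn fn;
        void *context;
};

/* Generic walker that only knows about an opaque context pointer
 * (stands in for dm_btree_walk() handing blocks to walk_ablock()). */
static int generic_walk(const int *values, unsigned n,
                        int (*visit)(void *ctx, unsigned i, int v), void *ctx)
{
        for (unsigned i = 0; i < n; i++) {
                int r = visit(ctx, i, values[i]);

                if (r)
                        return r;
        }
        return 0;
}

/* Adapter: unpack the bundle and forward to the caller's callback. */
static int visit_element(void *context, unsigned i, int v)
{
        struct walk_info *wi = context;

        return wi->fn(wi->context, i, v);
}

static int print_element(void *context, unsigned i, int v)
{
        printf("%s[%u] = %d\n", (const char *)context, i, v);
        return 0;
}

int main(void)
{
        int data[] = { 3, 1, 4, 1, 5 };
        char tag[] = "arr";
        struct walk_info wi = { .fn = print_element, .context = tag };

        return generic_walk(data, 5, visit_element, &wi);
}
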
/drivers/staging/greybus/

D | audio_topology.c |
     431  int ret, wi, max, connect;  in gbcodec_mixer_dapm_ctl_put() local
     473  for (wi = 0; wi < wlist->num_widgets; wi++) {  in gbcodec_mixer_dapm_ctl_put()
     474  widget = wlist->widgets[wi];  in gbcodec_mixer_dapm_ctl_put()
     763  int ret, wi, ctl_id;  in gbcodec_enum_dapm_ctl_put() local
     843  for (wi = 0; wi < wlist->num_widgets; wi++) {  in gbcodec_enum_dapm_ctl_put()
     844  widget = wlist->widgets[wi];  in gbcodec_enum_dapm_ctl_put()

/drivers/net/ethernet/brocade/bna/

D | bfi_enet.h |
     100  } __packed wi;  member
     104  #define wi_hdr wi.base
     105  #define wi_ext_hdr wi.ext

D | bnad.c |
    2818  txqent->hdr.wi.vlan_tag = htons(vlan_tag);  in bnad_txq_wi_prepare()
    2828  txqent->hdr.wi.opcode = htons(BNA_TXQ_WI_SEND);  in bnad_txq_wi_prepare()
    2829  txqent->hdr.wi.lso_mss = 0;  in bnad_txq_wi_prepare()
    2832  txqent->hdr.wi.opcode = htons(BNA_TXQ_WI_SEND_LSO);  in bnad_txq_wi_prepare()
    2833  txqent->hdr.wi.lso_mss = htons(gso_size);  in bnad_txq_wi_prepare()
    2842  txqent->hdr.wi.l4_hdr_size_n_offset =  in bnad_txq_wi_prepare()
    2846  txqent->hdr.wi.opcode = htons(BNA_TXQ_WI_SEND);  in bnad_txq_wi_prepare()
    2847  txqent->hdr.wi.lso_mss = 0;  in bnad_txq_wi_prepare()
    2868  txqent->hdr.wi.l4_hdr_size_n_offset =  in bnad_txq_wi_prepare()
    2882  txqent->hdr.wi.l4_hdr_size_n_offset =  in bnad_txq_wi_prepare()
    [all …]

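In bnad_txq_wi_prepare() the `wi` header of each TX descriptor is filled differently depending on whether the frame is LSO: the opcode, lso_mss and the checksum/L4-offset field all change. A condensed stand-alone version of that branch is sketched below; the struct layout, field names and opcode constants are invented for illustration (the real ones live in bfi_enet.h and bna_hw_defs.h), and the real driver encodes l4_hdr_size_n_offset through a packing macro.

#include <arpa/inet.h>   /* htons(), ntohs() */
#include <stdint.h>
#include <stdio.h>

/* Illustrative opcodes; not the BNA_TXQ_WI_* values. */
#define TXQ_WI_SEND      0x1
#define TXQ_WI_SEND_LSO  0x2

struct txq_wi_hdr {
        uint16_t opcode;
        uint16_t vlan_tag;
        uint16_t lso_mss;
        uint16_t l4_hdr_size_n_offset;
};

/* Fill the work-item header: LSO frames carry the MSS and need the L4 header
 * size/offset encoded; plain sends clear the MSS field. */
static void txq_wi_prepare(struct txq_wi_hdr *wi, uint16_t vlan_tag,
                           uint16_t gso_size, uint16_t l4_off)
{
        wi->vlan_tag = htons(vlan_tag);

        if (gso_size) {
                wi->opcode  = htons(TXQ_WI_SEND_LSO);
                wi->lso_mss = htons(gso_size);
        } else {
                wi->opcode  = htons(TXQ_WI_SEND);
                wi->lso_mss = 0;
        }
        wi->l4_hdr_size_n_offset = htons(l4_off);
}

int main(void)
{
        struct txq_wi_hdr wi;

        txq_wi_prepare(&wi, 100, 1448, 34);   /* an LSO frame */
        printf("opcode=%#x mss=%u\n",
               (unsigned)ntohs(wi.opcode), (unsigned)ntohs(wi.lso_mss));
        return 0;
}
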
D | bna_hw_defs.h |
     373  } wi;  member

/drivers/gpu/drm/omapdrm/dss/

D | dispc.c |
    2810  const struct omap_dss_writeback_info *wi,  in dispc_wb_setup() argument
    2830  "rot %d\n", wi->paddr, wi->p_uv_addr, in_width,  in dispc_wb_setup()
    2831  in_height, wi->width, wi->height, wi->fourcc, wi->rotation);  in dispc_wb_setup()
    2833  r = dispc_ovl_setup_common(dispc, plane, caps, wi->paddr, wi->p_uv_addr,  in dispc_wb_setup()
    2834  wi->buf_width, pos_x, pos_y, in_width, in_height, wi->width,  in dispc_wb_setup()
    2835  wi->height, wi->fourcc, wi->rotation, zorder,  in dispc_wb_setup()
    2836  wi->pre_mult_alpha, global_alpha, wi->rotation_type,  in dispc_wb_setup()
    2842  switch (wi->fourcc) {  in dispc_wb_setup()

D | dss.h |
     458  const struct omap_dss_writeback_info *wi,

/drivers/acpi/

D | ec.c |
     158  u8 wi;  member
     671  if (t->wlen > t->wi) {  in advance_transaction()
     673  acpi_ec_write_data(ec, t->wdata[t->wi++]);  in advance_transaction()
     689  } else if (t->wlen == t->wi && !(status & ACPI_EC_FLAG_IBF)) {  in advance_transaction()
     708  ec->curr->irq_count = ec->curr->wi = ec->curr->ri = 0;  in start_transaction()

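The ec.c hits show `t->wi` acting as a cursor into the transaction's write buffer: while `wlen > wi` another data byte is pushed to the embedded controller, and once `wlen == wi` and the controller's input buffer is empty the write phase is finished. A small stand-alone model of that state machine is given below; the names are illustrative and the hardware handshake is reduced to a flag, so this is a sketch of the pattern, not the ACPI EC driver.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct ec_transaction {
        const uint8_t *wdata;
        uint8_t wlen;      /* bytes to write */
        uint8_t wi;        /* write index: how many have been written so far */
        bool done;
};

/* One step of the write phase, called whenever the (modelled) controller has
 * consumed the previous byte, i.e. its input buffer is empty. */
static void advance_transaction(struct ec_transaction *t)
{
        if (t->wlen > t->wi) {
                printf("write data[%u] = 0x%02x\n", (unsigned)t->wi, t->wdata[t->wi]);
                t->wi++;
        } else if (t->wlen == t->wi) {
                t->done = true;        /* everything written, move on to the read phase */
        }
}

int main(void)
{
        const uint8_t payload[] = { 0x80, 0x12 };   /* e.g. a "read register" command */
        struct ec_transaction t = { .wdata = payload, .wlen = 2, .wi = 0 };

        while (!t.done)
                advance_transaction(&t);
        return 0;
}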