
Searched refs:ex (Results 1 – 25 of 33) sorted by relevance

/drivers/net/team/
team_mode_loadbalance.c
64 struct lb_priv_ex *ex; /* priv extension */ member
83 (lb_priv)->ex->tx_hash_to_port_mapping[hash].port
86 (lb_priv)->ex->tx_hash_to_port_mapping[hash].opt_inst_info
98 pm = &lb_priv->ex->tx_hash_to_port_mapping[i]; in lb_tx_hash_to_port_mapping_null_port()
234 if (!lb_priv->ex->orig_fprog) { in lb_bpf_func_get()
239 ctx->data.bin_val.len = lb_priv->ex->orig_fprog->len * in lb_bpf_func_get()
241 ctx->data.bin_val.ptr = lb_priv->ex->orig_fprog->filter; in lb_bpf_func_get()
292 if (lb_priv->ex->orig_fprog) { in lb_bpf_func_set()
294 __fprog_destroy(lb_priv->ex->orig_fprog); in lb_bpf_func_set()
301 lb_priv->ex->orig_fprog = fprog; in lb_bpf_func_set()
[all …]
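
The hits above show the driver's private-data extension pattern: struct lb_priv keeps only a pointer to a separately allocated struct lb_priv_ex, and accessor macros hide the extra ->ex indirection when looking up the hash-to-port table. A minimal userspace sketch of that shape, assuming simplified stand-in types (the real extension also carries the BPF state, orig_fprog, seen above):

    #include <stdlib.h>

    /* Simplified stand-ins for the driver types; the real struct lb_priv_ex
     * also holds the classifier program (orig_fprog) from the hits above. */
    struct lb_port_mapping {
        int port;                      /* stands in for struct team_port * */
    };

    struct lb_priv_ex {
        struct lb_port_mapping tx_hash_to_port_mapping[256];
    };

    struct lb_priv {
        struct lb_priv_ex *ex;         /* priv extension, allocated separately */
    };

    /* Mirrors the accessor style of line 83 above: one indirection through
     * ->ex, then an index into the 256-entry hash table. */
    static int lb_htpm_port_by_hash(struct lb_priv *lb_priv, unsigned char hash)
    {
        return lb_priv->ex->tx_hash_to_port_mapping[hash].port;
    }

    int main(void)
    {
        struct lb_priv priv;

        priv.ex = calloc(1, sizeof(*priv.ex));
        if (!priv.ex)
            return 1;
        priv.ex->tx_hash_to_port_mapping[42].port = 3;
        int port = lb_htpm_port_by_hash(&priv, 42);
        free(priv.ex);
        return port == 3 ? 0 : 1;
    }

Keeping the rarely touched table behind one pointer keeps the hot struct small and lets the extension be allocated, replaced, or freed independently.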
/drivers/scsi/libsas/
sas_expander.c
206 struct expander_device *ex = &dev->ex_dev; in sas_set_ex_phy() local
207 struct ex_phy *phy = &ex->ex_phy[phy_id]; in sas_set_ex_phy()
394 struct expander_device *ex = &dev->ex_dev; in sas_ex_phy_discover() local
411 if (0 <= single && single < ex->num_phys) { in sas_ex_phy_discover()
416 for (i = 0; i < ex->num_phys; i++) { in sas_ex_phy_discover()
431 struct expander_device *ex = &dev->ex_dev; in sas_expander_discover() local
434 ex->ex_phy = kzalloc(sizeof(*ex->ex_phy)*ex->num_phys, GFP_KERNEL); in sas_expander_discover()
435 if (!ex->ex_phy) in sas_expander_discover()
444 kfree(ex->ex_phy); in sas_expander_discover()
445 ex->ex_phy = NULL; in sas_expander_discover()
[all …]
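
sas_expander_discover() above follows the standard allocate-or-bail shape: the per-expander phy array is zero-allocated up front, and the error path frees it and resets the pointer to NULL. A compilable sketch of the same flow, with calloc() standing in for kzalloc() and discover_phy() as an illustrative stand-in for the per-phy SMP discovery:

    #include <stdlib.h>

    struct ex_phy { int attached; };

    struct expander_device {
        int num_phys;
        struct ex_phy *ex_phy;
    };

    /* Illustrative stand-in for the per-phy discovery the driver performs. */
    static int discover_phy(struct ex_phy *phy)
    {
        phy->attached = 1;
        return 0;
    }

    static int expander_discover(struct expander_device *ex)
    {
        int i;

        /* kzalloc(sizeof(*ex->ex_phy) * ex->num_phys, GFP_KERNEL) in the
         * driver; calloc() gives the same zero-filled array here. */
        ex->ex_phy = calloc(ex->num_phys, sizeof(*ex->ex_phy));
        if (!ex->ex_phy)
            return -1;                  /* -ENOMEM in the kernel */

        for (i = 0; i < ex->num_phys; i++) {
            if (discover_phy(&ex->ex_phy[i]) < 0)
                goto out_err;
        }
        return 0;

    out_err:
        free(ex->ex_phy);
        ex->ex_phy = NULL;   /* reset so later teardown cannot double-free */
        return -1;
    }

    int main(void)
    {
        struct expander_device ex = { .num_phys = 8 };
        int ret = expander_discover(&ex);
        free(ex.ex_phy);
        return ret;
    }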
sas_internal.h
170 struct expander_device *ex = &dev->ex_dev; in sas_add_parent_port() local
171 struct ex_phy *ex_phy = &ex->ex_phy[phy_id]; in sas_add_parent_port()
173 if (!ex->parent_port) { in sas_add_parent_port()
174 ex->parent_port = sas_port_alloc(&dev->rphy->dev, phy_id); in sas_add_parent_port()
176 BUG_ON(!ex->parent_port); in sas_add_parent_port()
177 BUG_ON(sas_port_add(ex->parent_port)); in sas_add_parent_port()
178 sas_port_mark_backlink(ex->parent_port); in sas_add_parent_port()
180 sas_port_add_phy(ex->parent_port, ex_phy->phy); in sas_add_parent_port()
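
sas_add_parent_port() creates the expander's parent port lazily: the first phy to arrive allocates and registers it (the driver BUG()s on allocation failure), and every subsequent phy simply attaches to the existing port. A small sketch of that lazy-singleton shape, with alloc_port() as an illustrative stand-in for sas_port_alloc():

    #include <stdio.h>
    #include <stdlib.h>

    struct port { int nr_phys; };

    struct expander_device {
        struct port *parent_port;   /* NULL until the first phy arrives */
    };

    static struct port *alloc_port(void)
    {
        return calloc(1, sizeof(struct port));
    }

    static void add_parent_port(struct expander_device *ex, int phy_id)
    {
        if (!ex->parent_port) {
            /* first caller creates the port; the driver BUG()s on failure */
            ex->parent_port = alloc_port();
            if (!ex->parent_port)
                abort();
        }
        /* every caller, first or not, attaches its phy */
        ex->parent_port->nr_phys++;
        printf("phy %d attached, port now has %d phys\n",
               phy_id, ex->parent_port->nr_phys);
    }

    int main(void)
    {
        struct expander_device ex = { 0 };
        add_parent_port(&ex, 0);
        add_parent_port(&ex, 1);   /* reuses the existing parent_port */
        free(ex.parent_port);
        return 0;
    }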
/drivers/net/ethernet/nvidia/
forcedeth.c
370 struct ring_desc_ex *ex; member
1032 if (np->rx_ring.ex) in free_rings()
1034 np->rx_ring.ex, np->ring_addr); in free_rings()
1853 less_rx = np->get_rx.ex; in nv_alloc_rx_optimized()
1854 if (less_rx-- == np->first_rx.ex) in nv_alloc_rx_optimized()
1855 less_rx = np->last_rx.ex; in nv_alloc_rx_optimized()
1857 while (np->put_rx.ex != less_rx) { in nv_alloc_rx_optimized()
1871 np->put_rx.ex->bufhigh = cpu_to_le32(dma_high(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1872 np->put_rx.ex->buflow = cpu_to_le32(dma_low(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1874 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
[all …]
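
The nv_alloc_rx_optimized() hits walk the descriptor ring with raw pointers: the refill stops one slot short of the consumer cursor, and both the stop calculation and the producer advance wrap manually between first_rx and last_rx. A runnable sketch of that wrap-around arithmetic, with a plain flag word standing in for the DMA fields:

    #include <stdio.h>

    #define RING_SIZE 8

    struct ring_desc_ex { unsigned int flaglen; };

    int main(void)
    {
        struct ring_desc_ex ring[RING_SIZE];
        /* first/last bound the ring; get/put are consumer/producer cursors */
        struct ring_desc_ex *first = &ring[0];
        struct ring_desc_ex *last  = &ring[RING_SIZE - 1];
        struct ring_desc_ex *get   = &ring[3];
        struct ring_desc_ex *put   = &ring[5];

        /* Stop one slot short of get, wrapping from first back to last,
         * the same shape as lines 1853-1855 above. */
        struct ring_desc_ex *stop = get;
        if (stop-- == first)
            stop = last;

        while (put != stop) {
            put->flaglen = 0x80000000u;  /* NV_RX2_AVAIL-style "ready" flag */
            if (++put > last)            /* advance producer, wrap at the end */
                put = first;
        }
        printf("refilled up to slot %ld\n", (long)(stop - first));
        return 0;
    }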
/drivers/infiniband/hw/qib/
qib_uc.c
119 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_uc_req()
148 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; in qib_make_uc_req()
177 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_uc_req()
202 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_uc_req()
381 wc.ex.imm_data = ohdr->u.imm_data; in qib_uc_rcv()
387 wc.ex.imm_data = 0; in qib_uc_rcv()
456 wc.ex.imm_data = ohdr->u.rc.imm_data; in qib_uc_rcv()
471 wc.ex.imm_data = ohdr->u.imm_data; in qib_uc_rcv()
qib_ud.c
128 wc.ex.imm_data = swqe->wr.ex.imm_data; in qib_ud_loopback()
336 ohdr->u.ud.imm_data = wqe->wr.ex.imm_data; in qib_make_ud_req()
509 wc.ex.imm_data = ohdr->u.ud.imm_data; in qib_ud_rcv()
513 wc.ex.imm_data = 0; in qib_ud_rcv()
qib_ruc.c
434 wc.ex.imm_data = wqe->wr.ex.imm_data; in qib_ruc_loopback()
448 wc.ex.imm_data = wqe->wr.ex.imm_data; in qib_ruc_loopback()
qib_cq.c
86 wc->uqueue[head].ex.imm_data = in qib_cq_enter()
87 (__u32 __force)entry->ex.imm_data; in qib_cq_enter()
qib_rc.c
355 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
395 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
536 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
577 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
2005 wc.ex.imm_data = ohdr->u.imm_data; in qib_rc_rcv()
2013 wc.ex.imm_data = 0; in qib_rc_rcv()
2091 wc.ex.imm_data = ohdr->u.rc.imm_data; in qib_rc_rcv()
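
Every qib hit above, and the ipath, cxgb3, ehca, mlx4, ocrdma and mthca hits that follow, touches the same verbs-layer field: struct ib_send_wr and struct ib_wc keep a union named ex whose live member depends on the opcode: imm_data (big-endian, carried on the wire) for the *_WITH_IMM operations, invalidate_rkey (host order) for send-with-invalidate. A simplified, compilable analog of that union:

    #include <stdint.h>
    #include <stdio.h>
    #include <arpa/inet.h>

    /* Simplified analog of the ib_send_wr/ib_wc "ex" union from
     * include/rdma/ib_verbs.h: which member is valid depends on the opcode. */
    enum wr_opcode { OP_SEND_WITH_IMM, OP_SEND_WITH_INV };

    struct send_wr {
        enum wr_opcode opcode;
        union {
            uint32_t imm_data;        /* __be32 in the kernel: wire format */
            uint32_t invalidate_rkey; /* host-endian key to invalidate */
        } ex;
    };

    int main(void)
    {
        struct send_wr wr = { .opcode = OP_SEND_WITH_IMM };
        wr.ex.imm_data = htonl(0xcafe);   /* immediate data travels big-endian */

        switch (wr.opcode) {
        case OP_SEND_WITH_IMM:
            printf("imm_data = 0x%x\n", ntohl(wr.ex.imm_data));
            break;
        case OP_SEND_WITH_INV:
            printf("invalidate_rkey = 0x%x\n", wr.ex.invalidate_rkey);
            break;
        }
        return 0;
    }

Overlaying the two members is safe because no operation carries both an immediate and an invalidate key at once, which is why every driver below can reuse one slot for either meaning.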
/drivers/infiniband/hw/ipath/
ipath_uc.c
116 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_uc_req()
145 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; in ipath_make_uc_req()
174 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_uc_req()
199 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_uc_req()
382 wc.ex.imm_data = *(__be32 *) data; in ipath_uc_rcv()
386 wc.ex.imm_data = ohdr->u.imm_data; in ipath_uc_rcv()
486 wc.ex.imm_data = *(__be32 *) data; in ipath_uc_rcv()
490 wc.ex.imm_data = ohdr->u.imm_data; in ipath_uc_rcv()
ipath_ud.c
98 wc.ex.imm_data = swqe->wr.ex.imm_data; in ipath_ud_loopback()
345 ohdr->u.ud.imm_data = wqe->wr.ex.imm_data; in ipath_make_ud_req()
483 wc.ex.imm_data = *(__be32 *) data; in ipath_ud_rcv()
486 wc.ex.imm_data = ohdr->u.ud.imm_data; in ipath_ud_rcv()
490 wc.ex.imm_data = 0; in ipath_ud_rcv()
ipath_ruc.c
335 wc.ex.imm_data = wqe->wr.ex.imm_data; in ipath_ruc_loopback()
346 wc.ex.imm_data = wqe->wr.ex.imm_data; in ipath_ruc_loopback()
ipath_cq.c
86 wc->uqueue[head].ex.imm_data = (__u32 __force) entry->ex.imm_data; in ipath_cq_enter()
ipath_rc.c
331 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_rc_req()
371 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; in ipath_make_rc_req()
515 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_rc_req()
551 ohdr->u.imm_data = wqe->wr.ex.imm_data; in ipath_make_rc_req()
1706 wc.ex.imm_data = *(__be32 *) data; in ipath_rc_rcv()
1710 wc.ex.imm_data = ohdr->u.imm_data; in ipath_rc_rcv()
/drivers/infiniband/hw/cxgb3/
iwch_cq.c
100 wc->ex.invalidate_rkey = CQE_WRID_STAG(cqe); in iwch_poll_cq_one()
iwch_qp.c
61 wqe->send.rem_stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_rdma_send()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; in build_rdma_write()
194 wqe->local_inv.stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_inv_stag()
/drivers/infiniband/hw/ehca/
ehca_reqs.c
214 wqe_p->immediate_data = be32_to_cpu(send_wr->ex.imm_data); in ehca_write_swqe()
788 wc->ex.imm_data = cpu_to_be32(cqe->immediate_data); in ehca_poll_cq_one()
851 wc->ex.imm_data = wqe->immediate_data; in generate_flush_cqes()
/drivers/infiniband/hw/mlx4/
cq.c
758 wc->ex.imm_data = cqe->immed_rss_invalid; in mlx4_ib_poll_one()
763 wc->ex.invalidate_rkey = be32_to_cpu(cqe->immed_rss_invalid); in mlx4_ib_poll_one()
772 wc->ex.imm_data = cqe->immed_rss_invalid; in mlx4_ib_poll_one()
/drivers/staging/media/solo6x10/
solo6x10-v4l2.c
75 int sx, int sy, int ex, int ey, int scale) in solo_win_setup() argument
84 SOLO_VI_WIN_EX(ex) | in solo_win_setup()
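
Here ex is simply the window's end-x coordinate, packed into a hardware register by the SOLO_VI_WIN_EX() macro. A sketch of that shift-and-mask register-field style; note the bit positions below are illustrative, not the actual solo6x10 layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative field layout: 12-bit start-x and end-x lanes in one
     * 32-bit window register. The real SOLO_VI_WIN_* shifts may differ. */
    #define VI_WIN_SX(sx)  (((uint32_t)(sx) & 0xfff) << 12)
    #define VI_WIN_EX(ex)  ((uint32_t)(ex) & 0xfff)

    int main(void)
    {
        int sx = 64, ex = 704;
        uint32_t reg = VI_WIN_SX(sx) | VI_WIN_EX(ex);
        printf("window reg = 0x%08x\n", reg);
        return 0;
    }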
/drivers/infiniband/core/
uverbs_cmd.c
1356 tmp.ex.imm_data = (__u32 __force) wc->ex.imm_data; in copy_wc_to_user()
2120 next->ex.imm_data = in ib_uverbs_post_send()
2121 (__be32 __force) user_wr->ex.imm_data; in ib_uverbs_post_send()
2130 next->ex.imm_data = in ib_uverbs_post_send()
2131 (__be32 __force) user_wr->ex.imm_data; in ib_uverbs_post_send()
2134 next->ex.invalidate_rkey = in ib_uverbs_post_send()
2135 user_wr->ex.invalidate_rkey; in ib_uverbs_post_send()
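
The uverbs hits copy immediate data between the kernel's __be32 field and the __u32 field of the user-visible structure without byte-swapping: the (__u32 __force) and (__be32 __force) casts only re-type the raw bytes and tell sparse the endianness conversion is intentional. A compilable sketch of that annotation trick (sparse defines __CHECKER__ when it runs; under a normal compiler the attributes vanish):

    #include <stdint.h>

    /* sparse-style annotations, following the kernel-header pattern */
    #ifdef __CHECKER__
    #define __force   __attribute__((force))
    #define __bitwise __attribute__((bitwise))
    #else
    #define __force
    #define __bitwise
    #endif

    typedef uint32_t __bitwise be32;  /* __be32 analog: raw big-endian bytes */

    struct kern_wc { be32 imm_data; };
    struct user_wc { uint32_t imm_data; };

    int main(void)
    {
        struct kern_wc kwc = { .imm_data = (__force be32)0x01020304u };
        struct user_wc uwc;

        /* Re-type, do not swap: user space gets the wire bytes untouched,
         * matching the copy_wc_to_user() hit above. */
        uwc.imm_data = (__force uint32_t)kwc.imm_data;
        return uwc.imm_data == 0x01020304u ? 0 : 1;
    }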
/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
1799 hdr->immdt = ntohl(wr->ex.imm_data); in ocrdma_post_send()
1807 hdr->lkey = wr->ex.invalidate_rkey; in ocrdma_post_send()
1812 hdr->immdt = ntohl(wr->ex.imm_data); in ocrdma_post_send()
1827 hdr->lkey = wr->ex.invalidate_rkey; in ocrdma_post_send()
2326 ibwc->ex.imm_data = htonl(le32_to_cpu(cqe->rq.lkey_immdt)); in ocrdma_poll_success_rcqe()
2330 ibwc->ex.imm_data = htonl(le32_to_cpu(cqe->rq.lkey_immdt)); in ocrdma_poll_success_rcqe()
2333 ibwc->ex.invalidate_rkey = le32_to_cpu(cqe->rq.lkey_immdt); in ocrdma_poll_success_rcqe()
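
ocrdma stacks two byte-order conversions: the hardware CQE keeps lkey_immdt little-endian, while the verbs ex.imm_data field is big-endian wire format, hence htonl(le32_to_cpu(...)) on receive and ntohl(...) on send. A portable sketch of that round trip, with le32_read() standing in for le32_to_cpu():

    #include <stdint.h>
    #include <stdio.h>
    #include <arpa/inet.h>

    /* Portable stand-in for le32_to_cpu(): assemble from explicit bytes. */
    static uint32_t le32_read(const unsigned char *p)
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }

    int main(void)
    {
        /* 0x11223344 as the hardware writes it: little-endian */
        unsigned char cqe_lkey_immdt[4] = { 0x44, 0x33, 0x22, 0x11 };

        /* LE hardware field -> CPU order -> BE wire order, matching
         * ibwc->ex.imm_data = htonl(le32_to_cpu(cqe->rq.lkey_immdt)). */
        uint32_t imm_data = htonl(le32_read(cqe_lkey_immdt));

        printf("imm_data (cpu order) = 0x%08x\n", ntohl(imm_data));
        return 0;
    }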
/drivers/net/ethernet/freescale/
fec_main.c
225 struct bufdesc_ex *ex = (struct bufdesc_ex *)bdp; in fec_enet_get_nextdesc() local
227 return (struct bufdesc *)(ex + 1); in fec_enet_get_nextdesc()
234 struct bufdesc_ex *ex = (struct bufdesc_ex *)bdp; in fec_enet_get_prevdesc() local
236 return (struct bufdesc *)(ex - 1); in fec_enet_get_prevdesc()
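
The fec_main.c helpers show why the cast to struct bufdesc_ex matters: the ring may hold plain or extended descriptors, and stepping to the neighbor must advance the pointer by the size of the element actually in use. A runnable sketch (field layouts are illustrative):

    #include <stdio.h>

    struct bufdesc    { unsigned short len, status; };
    struct bufdesc_ex { struct bufdesc desc; unsigned int esc, ts; };

    /* Advance one slot: cast to the real element type first so that the
     * +1 moves by sizeof(struct bufdesc_ex), not sizeof(struct bufdesc). */
    static struct bufdesc *next_desc(struct bufdesc *bdp, int is_ex)
    {
        if (is_ex) {
            struct bufdesc_ex *ex = (struct bufdesc_ex *)bdp;
            return (struct bufdesc *)(ex + 1);
        }
        return bdp + 1;
    }

    int main(void)
    {
        struct bufdesc_ex ring[4];
        struct bufdesc *bdp = (struct bufdesc *)&ring[0];

        bdp = next_desc(bdp, 1);
        printf("stepped %ld bytes\n",            /* sizeof(struct bufdesc_ex) */
               (long)((char *)bdp - (char *)&ring[0]));
        return 0;
    }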
/drivers/infiniband/hw/mthca/
mthca_cq.c
624 entry->ex.imm_data = cqe->imm_etype_pkey_eec; in mthca_poll_one()
630 entry->ex.imm_data = cqe->imm_etype_pkey_eec; in mthca_poll_one()
mthca_qp.c
1509 sqp->ud_header.immediate_data = wr->ex.imm_data; in build_mlx_header()
1656 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_tavor_post_send()
1997 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_arbel_post_send()
/drivers/md/
dm-snap.c
550 struct dm_exception *ex, *next; in dm_exception_table_exit() local
557 list_for_each_entry_safe (ex, next, slot, hash_list) in dm_exception_table_exit()
558 kmem_cache_free(mem, ex); in dm_exception_table_exit()
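
The dm-snap hit is the canonical free-while-iterating shape: list_for_each_entry_safe() caches the next node before the loop body hands the current one to kmem_cache_free(). A minimal userspace sketch of why the _safe variant is required, using a plain singly-linked list and free():

    #include <stdio.h>
    #include <stdlib.h>

    struct dm_exception {
        struct dm_exception *next;
        long chunk;
    };

    int main(void)
    {
        /* build a three-entry hash slot */
        struct dm_exception *head = NULL;
        for (long i = 0; i < 3; i++) {
            struct dm_exception *e = malloc(sizeof(*e));
            if (!e)
                return 1;
            e->chunk = i;
            e->next = head;
            head = e;
        }

        /* Safe teardown: grab ->next BEFORE freeing, which is exactly what
         * list_for_each_entry_safe(ex, next, slot, hash_list) does. */
        struct dm_exception *ex, *next;
        for (ex = head; ex; ex = next) {
            next = ex->next;   /* cached: ex is about to become invalid */
            free(ex);
        }
        printf("slot drained\n");
        return 0;
    }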
