Searched refs:to_mdev (Results 1 – 25 of 32) sorted by relevance


/drivers/infiniband/hw/mthca/
mthca_provider.c
66 struct mthca_dev *mdev = to_mdev(ibdev); in mthca_query_device()
155 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_port()
167 props->gid_tbl_len = to_mdev(ibdev)->limits.gid_table_len; in mthca_query_port()
169 props->pkey_tbl_len = to_mdev(ibdev)->limits.pkey_table_len; in mthca_query_port()
194 if (mutex_lock_interruptible(&to_mdev(ibdev)->cap_mask_mutex)) in mthca_modify_device()
198 mutex_unlock(&to_mdev(ibdev)->cap_mask_mutex); in mthca_modify_device()
212 if (mutex_lock_interruptible(&to_mdev(ibdev)->cap_mask_mutex)) in mthca_modify_port()
225 err = mthca_SET_IB(to_mdev(ibdev), &set_ib, port); in mthca_modify_port()
229 mutex_unlock(&to_mdev(ibdev)->cap_mask_mutex); in mthca_modify_port()
249 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_pkey()
[all …]
mthca_mad.c
120 mthca_update_rate(to_mdev(ibdev), port_num); in smp_snoop()
121 update_sm_ah(to_mdev(ibdev), port_num, in smp_snoop()
155 mutex_lock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
158 mutex_unlock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
220 forward_trap(to_mdev(ibdev), port_num, in_mad); in mthca_process_mad()
260 err = mthca_MAD_IFC(to_mdev(ibdev), in mthca_process_mad()
267 mthca_err(to_mdev(ibdev), "MAD_IFC returned %d\n", err); in mthca_process_mad()
mthca_cq.c
340 if (!mthca_is_memfree(to_mdev(cq->ibcq.device)) && in mthca_cq_resize_copy_cqes()
662 struct mthca_dev *dev = to_mdev(ibcq->device); in mthca_poll_cq()
735 mthca_write64(dbhi, 0xffffffff, to_mdev(cq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_tavor_arm_cq()
736 MTHCA_GET_DOORBELL_LOCK(&to_mdev(cq->device)->doorbell_lock)); in mthca_tavor_arm_cq()
767 to_mdev(ibcq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_arbel_arm_cq()
768 MTHCA_GET_DOORBELL_LOCK(&to_mdev(ibcq->device)->doorbell_lock)); in mthca_arbel_arm_cq()
mthca_srq.c
367 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_modify_srq()
390 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_query_srq()
478 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_tavor_post_srq_recv()
578 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_arbel_post_srq_recv()
mthca_mcg.c
122 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_multicast_attach()
216 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_multicast_detach()
mthca_qp.c
427 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_query_qp()
547 struct mthca_dev *dev = to_mdev(ibqp->device); in __mthca_modify_qp()
845 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_modify_qp()
1605 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_tavor_post_send()
1808 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_tavor_post_receive()
1919 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_arbel_post_send()
2159 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_arbel_post_receive()
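Note: in the mthca hits above, to_mdev() is the driver's private downcast from the core struct ib_device to the HCA-specific struct mthca_dev. The helper itself is not part of this result set; a minimal sketch of what it presumably looks like in mthca_provider.h (member name assumed):

static inline struct mthca_dev *to_mdev(struct ib_device *ibdev)
{
	/* mthca_dev embeds the ib_device it registers, so container_of() recovers it */
	return container_of(ibdev, struct mthca_dev, ib_dev);
}

This is why the call sites above can go straight from ibdev (or cq->device, ibqp->device, ...) to driver state such as limits, cap_mask_mutex and the doorbell registers.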
/drivers/infiniband/hw/mlx4/
mr.c
67 err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0, in mlx4_ib_get_dma_mr()
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
82 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
138 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_reg_user_mr()
178 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_reg_user_mr()
194 struct mlx4_ib_dev *dev = to_mdev(mr->device); in mlx4_ib_rereg_user_mr()
330 ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr); in mlx4_ib_dereg_mr()
343 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_alloc_mw()
377 mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw); in mlx4_ib_dealloc_mw()
387 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_alloc_mr()
[all …]
main.c
131 struct mlx4_ib_dev *ibdev = to_mdev(device); in mlx4_ib_get_netdev()
255 struct mlx4_ib_dev *ibdev = to_mdev(device); in mlx4_ib_add_gid()
333 struct mlx4_ib_dev *ibdev = to_mdev(device); in mlx4_ib_del_gid()
430 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_query_device()
464 err = mlx4_MAD_IFC(to_mdev(ibdev), MLX4_MAD_IFC_IGNORE_KEYS, in mlx4_ib_query_device()
577 struct mlx4_dev *dev = to_mdev(device)->dev; in mlx4_ib_port_link_layer()
601 if (mlx4_is_mfunc(to_mdev(ibdev)->dev) && netw_view) in ib_link_query_port()
604 err = mlx4_MAD_IFC(to_mdev(ibdev), mad_ifc_flags, port, NULL, NULL, in ib_link_query_port()
620 props->gid_tbl_len = to_mdev(ibdev)->dev->caps.gid_table_len[port]; in ib_link_query_port()
621 props->max_msg_sz = to_mdev(ibdev)->dev->caps.max_msg_sz; in ib_link_query_port()
[all …]
cm.c
146 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in id_map_find_by_sl_id()
194 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_find_del()
212 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in sl_id_map_add()
247 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_alloc()
258 ent->dev = to_mdev(ibdev); in id_map_alloc()
262 spin_lock(&to_mdev(ibdev)->sriov.id_map_lock); in id_map_alloc()
287 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_get()
303 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in schedule_delayed()
cq.c
95 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_modify_cq()
177 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_create_cq()
377 struct mlx4_ib_dev *dev = to_mdev(ibcq->device); in mlx4_ib_resize_cq()
478 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_destroy_cq()
696 struct mlx4_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx4_ib_poll_one()
716 mqp = __mlx4_qp_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
728 msrq = mlx4_srq_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
836 if (mlx4_is_mfunc(to_mdev(cq->ibcq.device)->dev)) { in mlx4_ib_poll_one()
880 struct mlx4_ib_dev *mdev = to_mdev(cq->ibcq.device); in mlx4_ib_poll_cq()
906 to_mdev(ibcq->device)->uar_map, in mlx4_ib_arm_cq()
[all …]
srq.c
76 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_create_srq()
237 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_modify_srq()
262 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_query_srq()
280 struct mlx4_ib_dev *dev = to_mdev(srq->device); in mlx4_ib_destroy_srq()
325 struct mlx4_ib_dev *mdev = to_mdev(ibsrq->device); in mlx4_ib_post_srq_recv()
mad.c
227 struct mlx4_ib_dev *dev = to_mdev(ibdev); in smp_snoop()
373 spin_lock_irqsave(&to_mdev(dev)->sm_lock, flags); in node_desc_override()
376 spin_unlock_irqrestore(&to_mdev(dev)->sm_lock, flags); in node_desc_override()
431 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_find_real_gid()
642 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_demux_mad()
801 forward_trap(to_mdev(ibdev), port_num, in_mad); in ib_process_mad()
834 err = mlx4_MAD_IFC(to_mdev(ibdev), in ib_process_mad()
845 if (!mlx4_is_slave(to_mdev(ibdev)->dev)) in ib_process_mad()
909 struct mlx4_ib_dev *dev = to_mdev(ibdev); in iboe_process_mad()
958 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_process_mad()
[all …]
qp.c
1177 if (!(to_mdev(pd->device)->dev->caps.flags & MLX4_DEV_CAP_FLAG_XRC)) in _mlx4_ib_create_qp()
1192 err = create_qp_common(to_mdev(pd->device), pd, init_attr, in _mlx4_ib_create_qp()
1213 int res = mlx4_qp_reserve_range(to_mdev(pd->device)->dev, 1, 1, &sqpn, 0); in _mlx4_ib_create_qp()
1218 sqpn = get_sqp_num(to_mdev(pd->device), init_attr); in _mlx4_ib_create_qp()
1221 err = create_qp_common(to_mdev(pd->device), pd, init_attr, udata, in _mlx4_ib_create_qp()
1245 struct mlx4_ib_dev *dev = to_mdev(device); in mlx4_ib_create_qp()
1276 struct mlx4_ib_dev *dev = to_mdev(qp->device); in _mlx4_ib_destroy_qp()
1622 struct mlx4_ib_dev *dev = to_mdev(ibqp->device); in __mlx4_ib_modify_qp()
1765 u8 port_num = mlx4_is_bonded(to_mdev(ibqp->device)->dev) ? 1 : in __mlx4_ib_modify_qp()
2161 struct mlx4_ib_dev *dev = to_mdev(ibqp->device); in _mlx4_ib_modify_qp()
[all …]
ah.c
46 struct mlx4_dev *dev = to_mdev(pd->device)->dev; in create_ib_ah()
75 struct mlx4_ib_dev *ibdev = to_mdev(pd->device); in create_iboe_ah()
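The mlx4 matches follow the same pattern, but two levels deep: to_mdev() presumably yields the struct mlx4_ib_dev that embeds the ib_device, and many call sites then chase ->dev to reach the low-level struct mlx4_dev (for example to_mdev(pd->device)->dev in mr.c). A hedged sketch, assuming the helper lives in mlx4_ib.h with an ib_dev member:

static inline struct mlx4_ib_dev *to_mdev(struct ib_device *ibdev)
{
	/* unwrap the embedded ib_device back into the mlx4 IB driver context */
	return container_of(ibdev, struct mlx4_ib_dev, ib_dev);
}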
/drivers/infiniband/hw/mlx5/
ib_virt.c
54 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_get_vf_config()
96 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_set_vf_link_state()
127 dev = to_mdev(device); in mlx5_ib_get_vf_stats()
151 struct mlx5_ib_dev *dev = to_mdev(device); in set_vf_node_guid()
169 struct mlx5_ib_dev *dev = to_mdev(device); in set_vf_port_guid()
mad.c
96 err = mlx5_MAD_IFC(to_mdev(ibdev), in process_mad()
189 struct mlx5_ib_dev *dev = to_mdev(ibdev); in process_pma_cmd()
243 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_process_mad()
306 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, 1, NULL, NULL, in_mad, in mlx5_query_mad_ifc_smp_attr_node_info()
445 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_pkey()
474 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_gids()
485 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_gids()
501 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_mad_ifc_port()
gsi.c
118 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_gsi_create_qp()
215 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_gsi_destroy_qp()
275 struct mlx5_ib_dev *dev = to_mdev(qp->device); in modify_to_rts()
315 struct mlx5_ib_dev *dev = to_mdev(device); in setup_qp()
376 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_gsi_modify_qp()
417 struct mlx5_ib_dev *dev = to_mdev(gsi->rx_qp->device); in mlx5_ib_add_outstanding_wr()
468 struct mlx5_ib_dev *dev = to_mdev(gsi->rx_qp->device); in get_tx_qp()
srq.c
79 struct mlx5_ib_dev *dev = to_mdev(pd->device); in create_srq_user()
244 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_create_srq()
351 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_modify_srq()
376 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_query_srq()
400 struct mlx5_ib_dev *dev = to_mdev(srq->device); in mlx5_ib_destroy_srq()
436 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_post_srq_recv()
main.c
95 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_port_link_layer()
151 struct mlx5_ib_dev *ibdev = to_mdev(device); in mlx5_ib_get_netdev()
172 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_query_port_roce()
278 struct mlx5_ib_dev *dev = to_mdev(device); in set_roce_addr()
364 if (mlx5_use_mad_ifc(to_mdev(ibdev))) in mlx5_get_vport_access_method()
399 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_system_image_guid()
431 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_max_pkeys()
452 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_vendor_id()
517 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_query_device()
716 struct mlx5_ib_dev *dev = to_mdev(ibdev); in translate_active_width()
[all …]
cq.c
49 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_ib_cq_event()
171 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.device); in handle_responder()
520 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_poll_one()
650 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in poll_soft_wc()
673 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_ib_poll_cq()
703 struct mlx5_core_dev *mdev = to_mdev(ibcq->device)->mdev; in mlx5_ib_arm_cq()
907 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_create_cq()
1015 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_destroy_cq()
1101 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_modify_cq()
1189 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in copy_resize_cqes()
[all …]
odp.c
158 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.pd->device); in mlx5_ib_page_fault_resume()
185 struct mlx5_ib_dev *mib_dev = to_mdev(qp->ibqp.pd->device); in pagefault_single_data_segment()
389 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.pd->device); in mlx5_ib_mr_initiator_pfault_handler()
495 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.pd->device); in mlx5_ib_mr_responder_pfault_handler()
535 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.pd->device); in mlx5_ib_mr_wqe_pfault_handler()
mr.c
698 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_get_dma_mr()
800 struct mlx5_ib_dev *dev = to_mdev(pd->device); in prep_umr_wqe_common()
852 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mr_umem_get()
893 struct mlx5_ib_dev *dev = to_mdev(pd->device); in reg_umr()
1099 struct mlx5_ib_dev *dev = to_mdev(pd->device); in reg_create()
1185 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_reg_user_mr()
1285 struct mlx5_ib_dev *dev = to_mdev(pd->device); in rereg_umr()
1353 struct mlx5_ib_dev *dev = to_mdev(ib_mr->device); in mlx5_ib_rereg_user_mr()
1490 struct mlx5_ib_dev *dev = to_mdev(mr->ibmr.device); in clean_mr()
1533 struct mlx5_ib_dev *dev = to_mdev(ibmr->device); in mlx5_ib_dereg_mr()
[all …]
qp.c
140 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_read_user_wqe()
2028 dev = to_mdev(pd->device); in mlx5_ib_create_qp()
2047 dev = to_mdev(to_mxrcd(init_attr->xrcd)->ibxrcd.device); in mlx5_ib_create_qp()
2116 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_destroy_qp()
2615 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in __mlx5_ib_modify_qp()
2876 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_modify_qp()
3693 mlx5_ib_warn(to_mdev(qp->ibqp.device), in set_reg_wr()
3820 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_post_send()
4164 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_post_recv()
4501 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_query_qp()
[all …]
ah.c
70 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_create_ah()
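mlx5 repeats the idiom once more: to_mdev() presumably returns the struct mlx5_ib_dev wrapping the ib_device, with the core device reachable as ->mdev (see the cq.c line 703 hit above). Sketch only, member names assumed, helper assumed to sit in mlx5_ib.h:

static inline struct mlx5_ib_dev *to_mdev(struct ib_device *ibdev)
{
	/* mlx5_ib_dev embeds ib_dev; its ->mdev then points at the mlx5_core_dev */
	return container_of(ibdev, struct mlx5_ib_dev, ib_dev);
}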
/drivers/staging/most/hdm-usb/
hdm_usb.c
135 #define to_mdev(d) container_of(d, struct most_dev, iface) macro
260 struct most_dev *mdev = to_mdev(iface); in hdm_poison_channel()
373 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_write_completion()
527 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_read_completion()
608 mdev = to_mdev(iface); in hdm_enqueue()
679 struct most_dev *mdev = to_mdev(iface); in hdm_configure_channel()
804 mdev = to_mdev(iface); in hdm_request_netinfo()
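The staging hdm-usb driver defines an unrelated to_mdev() of its own (hdm_usb.c line 135 above): a plain container_of() over the iface member of struct most_dev. A hypothetical usage sketch of how the call sites recover the driver state; everything beyond the iface member is assumed:

static void example_channel_op(struct most_interface *iface)
{
	/* to_mdev(iface) expands to container_of(iface, struct most_dev, iface) */
	struct most_dev *mdev = to_mdev(iface);

	/* mdev is the USB HDM's private state that embeds *iface */
	(void)mdev;
}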
