
Searched refs: to_mdev (Results 1 – 25 of 29) sorted by relevance

/drivers/infiniband/hw/mthca/
mthca_provider.c
66 struct mthca_dev *mdev = to_mdev(ibdev); in mthca_query_device()
151 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_port()
163 props->gid_tbl_len = to_mdev(ibdev)->limits.gid_table_len; in mthca_query_port()
165 props->pkey_tbl_len = to_mdev(ibdev)->limits.pkey_table_len; in mthca_query_port()
190 if (mutex_lock_interruptible(&to_mdev(ibdev)->cap_mask_mutex)) in mthca_modify_device()
193 mutex_unlock(&to_mdev(ibdev)->cap_mask_mutex); in mthca_modify_device()
207 if (mutex_lock_interruptible(&to_mdev(ibdev)->cap_mask_mutex)) in mthca_modify_port()
220 err = mthca_SET_IB(to_mdev(ibdev), &set_ib, port); in mthca_modify_port()
224 mutex_unlock(&to_mdev(ibdev)->cap_mask_mutex); in mthca_modify_port()
244 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_pkey()
[all …]
mthca_mad.c
120 mthca_update_rate(to_mdev(ibdev), port_num); in smp_snoop()
121 update_sm_ah(to_mdev(ibdev), port_num, in smp_snoop()
155 mutex_lock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
157 mutex_unlock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
211 forward_trap(to_mdev(ibdev), port_num, in_mad); in mthca_process_mad()
251 err = mthca_MAD_IFC(to_mdev(ibdev), in mthca_process_mad()
258 mthca_err(to_mdev(ibdev), "MAD_IFC returned %d\n", err); in mthca_process_mad()
mthca_cq.c
340 if (!mthca_is_memfree(to_mdev(cq->ibcq.device)) && in mthca_cq_resize_copy_cqes()
665 struct mthca_dev *dev = to_mdev(ibcq->device); in mthca_poll_cq()
738 mthca_write64(dbhi, 0xffffffff, to_mdev(cq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_tavor_arm_cq()
739 MTHCA_GET_DOORBELL_LOCK(&to_mdev(cq->device)->doorbell_lock)); in mthca_tavor_arm_cq()
770 to_mdev(ibcq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_arbel_arm_cq()
771 MTHCA_GET_DOORBELL_LOCK(&to_mdev(ibcq->device)->doorbell_lock)); in mthca_arbel_arm_cq()
mthca_srq.c
367 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_modify_srq()
390 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_query_srq()
478 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_tavor_post_srq_recv()
578 struct mthca_dev *dev = to_mdev(ibsrq->device); in mthca_arbel_post_srq_recv()
mthca_mcg.c
122 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_multicast_attach()
216 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_multicast_detach()
mthca_qp.c
427 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_query_qp()
547 struct mthca_dev *dev = to_mdev(ibqp->device); in __mthca_modify_qp()
845 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_modify_qp()
1605 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_tavor_post_send()
1808 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_tavor_post_receive()
1919 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_arbel_post_send()
2159 struct mthca_dev *dev = to_mdev(ibqp->device); in mthca_arbel_post_receive()
mthca_av.c
295 struct mthca_dev *dev = to_mdev(ibah->device); in mthca_ah_query()
mthca_mr.c
721 struct mthca_dev *dev = to_mdev(ibfmr->device); in mthca_tavor_map_phys_fmr()
762 struct mthca_dev *dev = to_mdev(ibfmr->device); in mthca_arbel_map_phys_fmr()
/drivers/infiniband/hw/mlx4/
mr.c
66 err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0, in mlx4_ib_get_dma_mr()
71 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
81 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
137 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_reg_user_mr()
177 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_reg_user_mr()
193 struct mlx4_ib_dev *dev = to_mdev(mr->device); in mlx4_ib_rereg_user_mr()
280 ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr); in mlx4_ib_dereg_mr()
292 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_alloc_mw()
348 mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw); in mlx4_ib_dealloc_mw()
357 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_alloc_fast_reg_mr()
[all …]
main.c
137 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_query_device()
151 err = mlx4_MAD_IFC(to_mdev(ibdev), MLX4_MAD_IFC_IGNORE_KEYS, in mlx4_ib_query_device()
243 struct mlx4_dev *dev = to_mdev(device)->dev; in mlx4_ib_port_link_layer()
267 if (mlx4_is_mfunc(to_mdev(ibdev)->dev) && netw_view) in ib_link_query_port()
270 err = mlx4_MAD_IFC(to_mdev(ibdev), mad_ifc_flags, port, NULL, NULL, in ib_link_query_port()
286 props->gid_tbl_len = to_mdev(ibdev)->dev->caps.gid_table_len[port]; in ib_link_query_port()
287 props->max_msg_sz = to_mdev(ibdev)->dev->caps.max_msg_sz; in ib_link_query_port()
288 props->pkey_tbl_len = to_mdev(ibdev)->dev->caps.pkey_table_len[port]; in ib_link_query_port()
319 err = mlx4_MAD_IFC(to_mdev(ibdev), mad_ifc_flags, port, in ib_link_query_port()
348 struct mlx4_ib_dev *mdev = to_mdev(ibdev); in eth_link_query_port()
[all …]
cm.c
146 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in id_map_find_by_sl_id()
194 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_find_del()
212 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in sl_id_map_add()
247 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_alloc()
258 ent->dev = to_mdev(ibdev); in id_map_alloc()
262 spin_lock(&to_mdev(ibdev)->sriov.id_map_lock); in id_map_alloc()
287 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_get()
303 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in schedule_delayed()
cq.c
95 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_modify_cq()
173 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_create_cq()
360 struct mlx4_ib_dev *dev = to_mdev(ibcq->device); in mlx4_ib_resize_cq()
462 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_destroy_cq()
639 struct mlx4_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx4_ib_poll_one()
659 mqp = __mlx4_qp_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
677 msrq = mlx4_srq_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
793 if (mlx4_is_mfunc(to_mdev(cq->ibcq.device)->dev)) { in mlx4_ib_poll_one()
860 to_mdev(ibcq->device)->uar_map, in mlx4_ib_arm_cq()
861 MLX4_GET_DOORBELL_LOCK(&to_mdev(ibcq->device)->uar_lock)); in mlx4_ib_arm_cq()
srq.c
75 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_create_srq()
231 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_modify_srq()
256 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_query_srq()
274 struct mlx4_ib_dev *dev = to_mdev(srq->device); in mlx4_ib_destroy_srq()
mad.c
233 struct mlx4_ib_dev *dev = to_mdev(ibdev); in smp_snoop()
353 spin_lock_irqsave(&to_mdev(dev)->sm_lock, flags); in node_desc_override()
355 spin_unlock_irqrestore(&to_mdev(dev)->sm_lock, flags); in node_desc_override()
409 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_find_real_gid()
613 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_demux_mad()
754 forward_trap(to_mdev(ibdev), port_num, in_mad); in ib_process_mad()
787 err = mlx4_MAD_IFC(to_mdev(ibdev), in ib_process_mad()
796 if (!(to_mdev(ibdev)->dev->caps.flags & MLX4_DEV_CAP_FLAG_PORT_MNG_CHG_EV)) in ib_process_mad()
799 if (!mlx4_is_slave(to_mdev(ibdev)->dev)) in ib_process_mad()
832 struct mlx4_ib_dev *dev = to_mdev(ibdev); in iboe_process_mad()
[all …]
ah.c
45 struct mlx4_dev *dev = to_mdev(pd->device)->dev; in create_ib_ah()
74 struct mlx4_ib_dev *ibdev = to_mdev(pd->device); in create_iboe_ah()
qp.c
1095 if (!(to_mdev(pd->device)->dev->caps.flags & MLX4_DEV_CAP_FLAG_XRC)) in mlx4_ib_create_qp()
1110 err = create_qp_common(to_mdev(pd->device), pd, init_attr, in mlx4_ib_create_qp()
1127 err = create_qp_common(to_mdev(pd->device), pd, init_attr, udata, in mlx4_ib_create_qp()
1128 get_sqp_num(to_mdev(pd->device), init_attr), in mlx4_ib_create_qp()
1148 struct mlx4_ib_dev *dev = to_mdev(qp->device); in mlx4_ib_destroy_qp()
1419 struct mlx4_ib_dev *dev = to_mdev(ibqp->device); in __mlx4_ib_modify_qp()
1888 struct mlx4_ib_dev *dev = to_mdev(ibqp->device); in mlx4_ib_modify_qp()
1987 struct mlx4_ib_dev *mdev = to_mdev(sqp->qp.ibqp.device); in build_sriov_qp0_header()
2133 if (mlx4_is_mfunc(to_mdev(ib_dev)->dev)) { in build_mlx_header()
2137 err = mlx4_get_roce_gid_from_slave(to_mdev(ib_dev)->dev, in build_mlx_header()
[all …]
/drivers/infiniband/hw/mlx5/
main.c
157 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_query_device()
175 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, 1, NULL, NULL, in_mad, out_mad); in mlx5_ib_query_device()
257 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_query_port()
364 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, out_mad); in mlx5_ib_query_gid()
374 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, out_mad); in mlx5_ib_query_gid()
402 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, out_mad); in mlx5_ib_query_pkey()
421 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_modify_device()
450 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_modify_port()
474 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_alloc_ucontext()
607 struct mlx5_ib_dev *dev = to_mdev(ibcontext->device); in mlx5_ib_dealloc_ucontext()
[all …]
cq.c
49 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_ib_cq_event()
173 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.device); in handle_responder()
406 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_poll_one()
578 to_mdev(ibcq->device)->mdev->priv.uuari.uars[0].map, in mlx5_ib_arm_cq()
579 MLX5_GET_DOORBELL_LOCK(&to_mdev(ibcq->device)->mdev->priv.cq_uar_lock)); in mlx5_ib_arm_cq()
742 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_create_cq()
830 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_destroy_cq()
917 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_modify_cq()
1011 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in copy_resize_cqes()
1067 struct mlx5_ib_dev *dev = to_mdev(ibcq->device); in mlx5_ib_resize_cq()
srq.c
80 struct mlx5_ib_dev *dev = to_mdev(pd->device); in create_srq_user()
240 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_create_srq()
349 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_modify_srq()
374 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_query_srq()
398 struct mlx5_ib_dev *dev = to_mdev(srq->device); in mlx5_ib_destroy_srq()
mr.c
612 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_get_dma_mr()
676 struct mlx5_ib_dev *dev = to_mdev(pd->device); in prep_umr_reg_wqe()
735 struct mlx5_ib_dev *dev = to_mdev(pd->device); in reg_umr()
820 struct mlx5_ib_dev *dev = to_mdev(pd->device); in reg_create()
875 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_reg_user_mr()
973 struct mlx5_ib_dev *dev = to_mdev(ibmr->device); in mlx5_ib_dereg_mr()
1012 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_create_mr()
1097 struct mlx5_ib_dev *dev = to_mdev(ibmr->device); in mlx5_ib_destroy_mr()
1128 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_alloc_fast_reg_mr()
1202 struct mlx5_ib_dev *dev = to_mdev(page_list->device); in mlx5_ib_free_fast_reg_page_list()
mad.c
94 err = mlx5_MAD_IFC(to_mdev(ibdev), in mlx5_ib_process_mad()
qp.c
1162 dev = to_mdev(pd->device); in mlx5_ib_create_qp()
1171 dev = to_mdev(to_mxrcd(init_attr->xrcd)->ibxrcd.device); in mlx5_ib_create_qp()
1237 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_destroy_qp()
1495 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in __mlx5_ib_modify_qp()
1691 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_modify_qp()
2469 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_post_send()
2909 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in mlx5_ib_query_qp()
3002 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_alloc_xrcd()
3026 struct mlx5_ib_dev *dev = to_mdev(xrcd->device); in mlx5_ib_dealloc_xrcd()
mlx5_ib.h
392 static inline struct mlx5_ib_dev *to_mdev(struct ib_device *ibdev) in to_mdev() function
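
For the InfiniBand drivers above, to_mdev() is the usual container_of() accessor that recovers the driver-private device structure from the embedded struct ib_device. A minimal sketch of the mlx5 variant referenced at mlx5_ib.h:392, assuming (not shown in this listing) that struct mlx5_ib_dev embeds its struct ib_device as a member named ib_dev:

    /* Sketch only, as it would appear in the driver's private header.
     * Assumes struct mlx5_ib_dev embeds its struct ib_device as ->ib_dev. */
    static inline struct mlx5_ib_dev *to_mdev(struct ib_device *ibdev)
    {
            return container_of(ibdev, struct mlx5_ib_dev, ib_dev);
    }

The mthca and mlx4 helpers presumably follow the same pattern with their own device types, which is why every call site above can pass an ib_device (or a member's ->device pointer) straight to to_mdev().
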
/drivers/media/platform/s5p-tv/
mixer_drv.c
348 struct mxr_device *mdev = to_mdev(dev); in mxr_runtime_resume()
389 struct mxr_device *mdev = to_mdev(dev); in mxr_runtime_suspend()
471 struct mxr_device *mdev = to_mdev(dev); in mxr_remove()
mixer.h
271 static inline struct mxr_device *to_mdev(struct device *dev) in to_mdev() function
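
The s5p-tv mixer defines an unrelated helper with the same name but a different signature, converting a plain struct device back into the driver's mxr_device. A hedged sketch of what such a helper typically looks like, assuming the driver registers its embedded struct v4l2_device (member name v4l2_dev, illustrative only) as the device's drvdata:

    /* Sketch only: map a struct device back to its mxr_device.
     * Assumes the v4l2_device embedded in mxr_device (->v4l2_dev) was
     * set as this device's drvdata at probe time. */
    static inline struct mxr_device *to_mdev(struct device *dev)
    {
            struct v4l2_device *vdev = dev_get_drvdata(dev);

            return container_of(vdev, struct mxr_device, v4l2_dev);
    }
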
