
Searched refs: ibmw (results 1 – 20 of 20), sorted by relevance

/drivers/infiniband/sw/rxe/
rxe_mw.c
      16  int rxe_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
      18  struct rxe_mw *mw = to_rmw(ibmw);
      19  struct rxe_pd *pd = to_rpd(ibmw->pd);
      20  struct rxe_dev *rxe = to_rdev(ibmw->device);
      31  mw->rkey = ibmw->rkey = (mw->elem.index << 8) | rxe_get_next_key(-1);
      32  mw->state = (mw->ibmw.type == IB_MW_TYPE_2) ?
      41  int rxe_dealloc_mw(struct ib_mw *ibmw)
      43  struct rxe_mw *mw = to_rmw(ibmw);
      53  if (mw->ibmw.type == IB_MW_TYPE_1) {    (in rxe_check_bind_mw())
      67  if (mw->ibmw.type == IB_MW_TYPE_2) {    (in rxe_check_bind_mw())
      [all …]
rxe_verbs.h
     334  struct ib_mw ibmw;
     455  return mw ? container_of(mw, struct rxe_mw, ibmw) : NULL;    (in to_rmw())
     470  return to_rpd(mw->ibmw.pd);    (in rxe_mw_pd())
rxe_loc.h
      86  int rxe_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata);
      87  int rxe_dealloc_mw(struct ib_mw *ibmw);
rxe.h
      57  #define rxe_dbg_mw(mw, fmt, ...) ibdev_dbg((mw)->ibmw.device, \
rxe_verbs.c
    1072  INIT_RDMA_OBJ_SIZE(ib_mw, rxe_mw, ibmw),
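Every driver in these results uses the same wrapper pattern: the driver's memory-window struct embeds the core struct ib_mw as a member named ibmw, and a small inline helper converts the core pointer back to the driver struct with container_of(). Below is a minimal sketch of that pattern using the rxe names from the hits above; only the embedded ibmw member and the to_rmw() conversion come from the results, and the rest of struct rxe_mw is elided.

    /* Embed-and-convert pattern as shown in rxe_verbs.h: the core ib_mw
     * object lives inside the driver's rxe_mw, so either pointer can be
     * recovered from the other.  Driver-private fields are elided. */
    #include <linux/kernel.h>
    #include <rdma/ib_verbs.h>

    struct rxe_mw {
            struct ib_mw ibmw;      /* core object embedded in the driver object */
            /* ... driver-private state (rkey, state, ...) elided ... */
    };

    /* Convert a core ib_mw pointer back to the enclosing rxe_mw. */
    static inline struct rxe_mw *to_rmw(struct ib_mw *mw)
    {
            return mw ? container_of(mw, struct rxe_mw, ibmw) : NULL;
    }

The NULL check mirrors the rxe helper at rxe_verbs.h:455; the mlx4, mlx5, hns and cxgb4 helpers shown in the other hits perform the same container_of() conversion without the guard.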
/drivers/infiniband/hw/mlx4/
mr.c
     613  int mlx4_ib_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
     615  struct mlx4_ib_dev *dev = to_mdev(ibmw->device);
     616  struct mlx4_ib_mw *mw = to_mmw(ibmw);
     619  err = mlx4_mw_alloc(dev->dev, to_mpd(ibmw->pd)->pdn,
     620  to_mlx4_type(ibmw->type), &mw->mmw);
     628  ibmw->rkey = mw->mmw.key;
     636  int mlx4_ib_dealloc_mw(struct ib_mw *ibmw)
     638  struct mlx4_ib_mw *mw = to_mmw(ibmw);
     640  mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw);
mlx4_ib.h
     145  struct ib_mw ibmw;
     703  static inline struct mlx4_ib_mw *to_mmw(struct ib_mw *ibmw)
     705  return container_of(ibmw, struct mlx4_ib_mw, ibmw);
main.c
    2602  INIT_RDMA_OBJ_SIZE(ib_mw, mlx4_ib_mw, ibmw),
/drivers/infiniband/hw/hns/
hns_roce_mr.c
     528  int hns_roce_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
     530  struct hns_roce_dev *hr_dev = to_hr_dev(ibmw->device);
     533  struct hns_roce_mw *mw = to_hr_mw(ibmw);
     547  ibmw->rkey = mw->rkey;
     548  mw->pdn = to_hr_pd(ibmw->pd)->pdn;
     564  int hns_roce_dealloc_mw(struct ib_mw *ibmw)
     566  struct hns_roce_dev *hr_dev = to_hr_dev(ibmw->device);
     567  struct hns_roce_mw *mw = to_hr_mw(ibmw);
hns_roce_device.h
     299  struct ib_mw ibmw;
     999  static inline struct hns_roce_mw *to_hr_mw(struct ib_mw *ibmw)
    1001  return container_of(ibmw, struct hns_roce_mw, ibmw);
    1164  int hns_roce_dealloc_mw(struct ib_mw *ibmw);
hns_roce_main.c
     564  INIT_RDMA_OBJ_SIZE(ib_mw, hns_roce_mw, ibmw),
hns_roce_hw_v2.c
    3474  mw->ibmw.type == IB_MW_TYPE_1 ? 0 : 1);    (in hns_roce_v2_mw_write_mtpt())
/drivers/infiniband/hw/irdma/
main.h
     388  static inline struct irdma_mr *to_iwmw(struct ib_mw *ibmw)
     390  return container_of(ibmw, struct irdma_mr, ibmw);
verbs.h
      99  struct ib_mw ibmw;
verbs.c
    2486  if (iwmr->ibmw.type == IB_MW_TYPE_1)    (in irdma_hw_alloc_mw())
    2508  static int irdma_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
    2510  struct irdma_device *iwdev = to_iwdev(ibmw->device);
    2511  struct irdma_mr *iwmr = to_iwmw(ibmw);
    2520  ibmw->rkey = stag;
    2535  static int irdma_dealloc_mw(struct ib_mw *ibmw)
    2537  struct ib_pd *ibpd = ibmw->pd;
    2539  struct irdma_mr *iwmr = to_iwmr((struct ib_mr *)ibmw);
    2540  struct irdma_device *iwdev = to_iwdev(ibmw->device);
    2553  info->stag_idx = ibmw->rkey >> IRDMA_CQPSQ_STAG_IDX_S;
    [all …]
/drivers/infiniband/hw/cxgb4/
iw_cxgb4.h
     409  struct ib_mw ibmw;
     417  static inline struct c4iw_mw *to_c4iw_mw(struct ib_mw *ibmw)
     419  return container_of(ibmw, struct c4iw_mw, ibmw);
provider.c
     505  INIT_RDMA_OBJ_SIZE(ib_mw, c4iw_mw, ibmw),
/drivers/infiniband/hw/mlx5/
mlx5_ib.h
     711  struct ib_mw ibmw;
    1200  static inline struct mlx5_ib_mw *to_mmw(struct ib_mw *ibmw)
    1202  return container_of(ibmw, struct mlx5_ib_mw, ibmw);
mr.c
    1902  int mlx5_ib_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
    1904  struct mlx5_ib_dev *dev = to_mdev(ibmw->device);
    1906  struct mlx5_ib_mw *mw = to_mmw(ibmw);
    1941  MLX5_SET(mkc, mkc, pd, to_mpd(ibmw->pd)->pdn);
    1945  MLX5_SET(mkc, mkc, en_rinval, !!((ibmw->type == IB_MW_TYPE_2)));
    1953  ibmw->rkey = mw->mmkey.key;
main.c
    3772  INIT_RDMA_OBJ_SIZE(ib_mw, mlx5_ib_mw, ibmw),
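Each driver's ib_device_ops table also carries an INIT_RDMA_OBJ_SIZE(ib_mw, <driver>_mw, ibmw) entry (rxe_verbs.c:1072, mlx4 main.c:2602, hns_roce_main.c:564, cxgb4 provider.c:505, mlx5 main.c:3772), which records the size of the driver struct so the RDMA core can allocate it and pass the embedded ib_mw into the driver's alloc_mw callback. The sketch below shows roughly how the pieces fit together for a hypothetical foo driver; foo_mw, foo_alloc_mw, foo_dealloc_mw and the priv_key field are invented for illustration, while the callback signatures match the hits above.

    /* Hypothetical "foo" driver, for illustration only; the real drivers
     * in the results (rxe, mlx4, mlx5, hns, irdma, cxgb4) follow this shape. */
    #include <rdma/ib_verbs.h>

    struct foo_mw {
            struct ib_mw ibmw;      /* embedded core object, as in the drivers above */
            u32 priv_key;           /* invented driver-private field */
    };

    static int foo_alloc_mw(struct ib_mw *ibmw, struct ib_udata *udata)
    {
            struct foo_mw *mw = container_of(ibmw, struct foo_mw, ibmw);

            /* The core allocated sizeof(struct foo_mw), as declared by
             * INIT_RDMA_OBJ_SIZE, and set ibmw->device, ibmw->pd and
             * ibmw->type before calling in; the driver supplies the rkey. */
            mw->priv_key = 0;
            ibmw->rkey = mw->priv_key;
            return 0;
    }

    static int foo_dealloc_mw(struct ib_mw *ibmw)
    {
            struct foo_mw *mw = container_of(ibmw, struct foo_mw, ibmw);

            /* Release whatever hardware state mw tracks here. */
            (void)mw;
            return 0;
    }

    static const struct ib_device_ops foo_dev_ops = {
            .alloc_mw = foo_alloc_mw,
            .dealloc_mw = foo_dealloc_mw,
            INIT_RDMA_OBJ_SIZE(ib_mw, foo_mw, ibmw),
    };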