Searched refs:rhp (Results 1 – 20 of 20) sorted by relevance

/drivers/infiniband/hw/cxgb4/
mem.c:224 return insert_handle(mhp->rhp, &mhp->rhp->mmidr, mhp, mmid); in finish_mem_reg()
227 static int register_mem(struct c4iw_dev *rhp, struct c4iw_pd *php, in register_mem() argument
233 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, mhp->attr.pdid, in register_mem()
243 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, in register_mem()
248 static int reregister_mem(struct c4iw_dev *rhp, struct c4iw_pd *php, in reregister_mem() argument
258 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, mhp->attr.pdid, in reregister_mem()
268 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, in reregister_mem()
276 mhp->attr.pbl_addr = c4iw_pblpool_alloc(&mhp->rhp->rdev, in alloc_pbl()
362 struct c4iw_dev *rhp; in c4iw_reregister_phys_mem() local
376 rhp = mhp->rhp; in c4iw_reregister_phys_mem()
[all …]
provider.c:92 struct c4iw_dev *rhp = to_c4iw_dev(context->device); in c4iw_dealloc_ucontext() local
99 c4iw_release_dev_ucontext(&rhp->rdev, &ucontext->uctx); in c4iw_dealloc_ucontext()
108 struct c4iw_dev *rhp = to_c4iw_dev(ibdev); in c4iw_alloc_ucontext() local
114 c4iw_init_dev_ucontext(&rhp->rdev, &context->uctx); in c4iw_alloc_ucontext()
185 struct c4iw_dev *rhp; in c4iw_deallocate_pd() local
189 rhp = php->rhp; in c4iw_deallocate_pd()
191 c4iw_put_resource(&rhp->rdev.resource.pdid_fifo, php->pdid, in c4iw_deallocate_pd()
192 &rhp->rdev.resource.pdid_fifo_lock); in c4iw_deallocate_pd()
203 struct c4iw_dev *rhp; in c4iw_allocate_pd() local
206 rhp = (struct c4iw_dev *) ibdev; in c4iw_allocate_pd()
[all …]
iw_cxgb4.h:199 static inline struct c4iw_cq *get_chp(struct c4iw_dev *rhp, u32 cqid) in get_chp() argument
201 return idr_find(&rhp->cqidr, cqid); in get_chp()
204 static inline struct c4iw_qp *get_qhp(struct c4iw_dev *rhp, u32 qpid) in get_qhp() argument
206 return idr_find(&rhp->qpidr, qpid); in get_qhp()
209 static inline struct c4iw_mr *get_mhp(struct c4iw_dev *rhp, u32 mmid) in get_mhp() argument
211 return idr_find(&rhp->mmidr, mmid); in get_mhp()
214 static inline int insert_handle(struct c4iw_dev *rhp, struct idr *idr, in insert_handle() argument
223 spin_lock_irq(&rhp->lock); in insert_handle()
226 spin_unlock_irq(&rhp->lock); in insert_handle()
232 static inline void remove_handle(struct c4iw_dev *rhp, struct idr *idr, u32 id) in remove_handle() argument
[all …]
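
The iw_cxgb4.h hits above are the core of the rhp pattern in this driver: every resource object (CQ, QP, MR) carries an rhp back-pointer to its owning c4iw_dev, and the device keeps per-type idr tables (cqidr, qpidr, mmidr) under a spinlock so a hardware ID from a completion or event can be mapped back to the kernel object. Below is a minimal sketch of that scheme; struct my_dev, struct my_res and the function names are illustrative stand-ins, and the idr_preload()/idr_alloc() calls are the current kernel idr API rather than whatever insert_handle() used at this tree's vintage.

#include <linux/idr.h>
#include <linux/spinlock.h>

struct my_dev {
        struct idr residr;      /* hw id -> resource, like rhp->cqidr/qpidr/mmidr */
        spinlock_t lock;
};

struct my_res {
        struct my_dev *rhp;     /* back-pointer to the owning device */
        u32 id;
};

/* Look a resource up by hardware ID, as get_chp()/get_qhp()/get_mhp() do. */
static inline struct my_res *get_res(struct my_dev *rhp, u32 id)
{
        return idr_find(&rhp->residr, id);
}

/* Publish a resource under its hardware ID, as insert_handle() does. */
static inline int insert_res(struct my_dev *rhp, struct my_res *res, u32 id)
{
        int ret;

        idr_preload(GFP_KERNEL);
        spin_lock_irq(&rhp->lock);
        ret = idr_alloc(&rhp->residr, res, id, id + 1, GFP_NOWAIT);
        spin_unlock_irq(&rhp->lock);
        idr_preload_end();

        return ret < 0 ? ret : 0;
}
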
qp.c:928 c4iw_ofld_send(&qhp->rhp->rdev, skb); in post_terminate()
977 rchp = get_chp(qhp->rhp, qhp->attr.rcq); in flush_qp()
978 schp = get_chp(qhp->rhp, qhp->attr.scq); in flush_qp()
998 static int rdma_fini(struct c4iw_dev *rhp, struct c4iw_qp *qhp, in rdma_fini() argument
1024 ret = c4iw_ofld_send(&rhp->rdev, skb); in rdma_fini()
1028 ret = c4iw_wait_for_reply(&rhp->rdev, &ep->com.wr_wait, qhp->ep->hwtid, in rdma_fini()
1060 static int rdma_init(struct c4iw_dev *rhp, struct c4iw_qp *qhp) in rdma_init() argument
1116 rhp->rdev.lldi.vr->rq.start); in rdma_init()
1120 ret = c4iw_ofld_send(&rhp->rdev, skb); in rdma_init()
1124 ret = c4iw_wait_for_reply(&rhp->rdev, &qhp->ep->com.wr_wait, in rdma_init()
[all …]
cq.c:580 qhp = get_qhp(chp->rhp, CQE_QPID(rd_cqe)); in c4iw_poll_cq_one()
738 remove_handle(chp->rhp, &chp->rhp->cqidr, chp->cq.cqid); in c4iw_destroy_cq()
744 destroy_cq(&chp->rhp->rdev, &chp->cq, in c4iw_destroy_cq()
754 struct c4iw_dev *rhp; in c4iw_create_cq() local
764 rhp = to_c4iw_dev(ibdev); in c4iw_create_cq()
812 ret = create_cq(&rhp->rdev, &chp->cq, in c4iw_create_cq()
813 ucontext ? &ucontext->uctx : &rhp->rdev.uctx); in c4iw_create_cq()
817 chp->rhp = rhp; in c4iw_create_cq()
824 ret = insert_handle(rhp, &rhp->cqidr, chp, chp->cq.cqid); in c4iw_create_cq()
836 uresp.qid_mask = rhp->rdev.cqmask; in c4iw_create_cq()
[all …]
cm.c:1134 err = c4iw_modify_qp(ep->com.qp->rhp, in process_mpa_reply()
1148 err = c4iw_modify_qp(ep->com.qp->rhp, ep->com.qp, in process_mpa_reply()
1166 err = c4iw_modify_qp(ep->com.qp->rhp, ep->com.qp, in process_mpa_reply()
1768 ret = c4iw_modify_qp(ep->com.qp->rhp, ep->com.qp, in peer_close()
1786 c4iw_modify_qp(ep->com.qp->rhp, ep->com.qp, in peer_close()
1950 ret = c4iw_modify_qp(ep->com.qp->rhp, in peer_abort()
2031 c4iw_modify_qp(ep->com.qp->rhp, in close_con_rpl()
2068 c4iw_modify_qp(ep->com.qp->rhp, ep->com.qp, in terminate()
2203 err = c4iw_modify_qp(ep->com.qp->rhp, in c4iw_accept_cr()
2522 c4iw_modify_qp(ep->com.qp->rhp, in process_timeout()
ev.c:63 c4iw_modify_qp(qhp->rhp, qhp, C4IW_QP_ATTR_NEXT_STATE, in post_qp_event()
/drivers/infiniband/hw/cxgb3/
iwch.h:127 static inline int t3b_device(const struct iwch_dev *rhp) in t3b_device() argument
129 return rhp->rdev.t3cdev_p->type == T3B; in t3b_device()
132 static inline int t3a_device(const struct iwch_dev *rhp) in t3a_device() argument
134 return rhp->rdev.t3cdev_p->type == T3A; in t3a_device()
137 static inline struct iwch_cq *get_chp(struct iwch_dev *rhp, u32 cqid) in get_chp() argument
139 return idr_find(&rhp->cqidr, cqid); in get_chp()
142 static inline struct iwch_qp *get_qhp(struct iwch_dev *rhp, u32 qpid) in get_qhp() argument
144 return idr_find(&rhp->qpidr, qpid); in get_qhp()
147 static inline struct iwch_mr *get_mhp(struct iwch_dev *rhp, u32 mmid) in get_mhp() argument
149 return idr_find(&rhp->mmidr, mmid); in get_mhp()
[all …]
iwch_provider.c:97 struct iwch_dev *rhp = to_iwch_dev(context->device); in iwch_dealloc_ucontext() local
104 cxio_release_ucontext(&rhp->rdev, &ucontext->uctx); in iwch_dealloc_ucontext()
113 struct iwch_dev *rhp = to_iwch_dev(ibdev); in iwch_alloc_ucontext() local
119 cxio_init_ucontext(&rhp->rdev, &context->uctx); in iwch_alloc_ucontext()
132 remove_handle(chp->rhp, &chp->rhp->cqidr, chp->cq.cqid); in iwch_destroy_cq()
136 cxio_destroy_cq(&chp->rhp->rdev, &chp->cq); in iwch_destroy_cq()
145 struct iwch_dev *rhp; in iwch_create_cq() local
154 rhp = to_iwch_dev(ibdev); in iwch_create_cq()
161 if (!t3a_device(rhp)) { in iwch_create_cq()
170 if (t3a_device(rhp)) { in iwch_create_cq()
[all …]
iwch_mem.c:52 return insert_handle(mhp->rhp, &mhp->rhp->mmidr, mhp, mmid); in iwch_finish_mem_reg()
55 int iwch_register_mem(struct iwch_dev *rhp, struct iwch_pd *php, in iwch_register_mem() argument
61 if (cxio_register_phys_mem(&rhp->rdev, in iwch_register_mem()
73 cxio_dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, in iwch_register_mem()
78 int iwch_reregister_mem(struct iwch_dev *rhp, struct iwch_pd *php, in iwch_reregister_mem() argument
91 if (cxio_reregister_phys_mem(&rhp->rdev, in iwch_reregister_mem()
103 cxio_dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, in iwch_reregister_mem()
111 mhp->attr.pbl_addr = cxio_hal_pblpool_alloc(&mhp->rhp->rdev, in iwch_alloc_pbl()
124 cxio_hal_pblpool_free(&mhp->rhp->rdev, mhp->attr.pbl_addr, in iwch_free_pbl()
130 return cxio_write_pbl(&mhp->rhp->rdev, pages, in iwch_write_pbl()
iwch_cq.c:44 static int iwch_poll_cq_one(struct iwch_dev *rhp, struct iwch_cq *chp, in iwch_poll_cq_one() argument
60 qhp = get_qhp(rhp, CQE_QPID(*rd_cqe)); in iwch_poll_cq_one()
69 if (t3a_device(chp->rhp) && credit) { in iwch_poll_cq_one()
72 cxio_hal_cq_op(&rhp->rdev, &chp->cq, CQ_CREDIT_UPDATE, credit); in iwch_poll_cq_one()
197 struct iwch_dev *rhp; in iwch_poll_cq() local
204 rhp = chp->rhp; in iwch_poll_cq()
218 err = iwch_poll_cq_one(rhp, chp, wc + npolled); in iwch_poll_cq()
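
One detail worth noting from the iwch_cq.c hits: on T3A-revision hardware the poll path hands CQ credits back explicitly, and it reaches the hardware through the rdev embedded in rhp. The fragment below only isolates that branch with a comment; it reuses the identifiers shown in the snippets and is not the driver's actual function.

/* Not the real iwch_poll_cq_one(); only the revision check and the
 * credit-update call come from the search hits above. */
static void maybe_return_cq_credit(struct iwch_dev *rhp, struct iwch_cq *chp,
                                   u32 credit)
{
        /* Seen at iwch_cq.c:69-72: only T3A-revision devices take this path. */
        if (t3a_device(rhp) && credit)
                cxio_hal_cq_op(&rhp->rdev, &chp->cq, CQ_CREDIT_UPDATE, credit);
}
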
iwch_provider.h:47 struct iwch_dev *rhp; member
77 struct iwch_dev *rhp; member
91 struct iwch_dev *rhp; member
103 struct iwch_dev *rhp; member
161 struct iwch_dev *rhp; member
258 int iwch_modify_qp(struct iwch_dev *rhp,
340 int iwch_register_mem(struct iwch_dev *rhp, struct iwch_pd *php,
342 int iwch_reregister_mem(struct iwch_dev *rhp, struct iwch_pd *php,
iwch_qp.c:200 static int iwch_sgl2pbl_map(struct iwch_dev *rhp, struct ib_sge *sg_list, in iwch_sgl2pbl_map() argument
208 mhp = get_mhp(rhp, (sg_list[i].lkey) >> 8); in iwch_sgl2pbl_map()
240 rhp->rdev.rnic_info.pbl_base) >> 3) + in iwch_sgl2pbl_map()
254 err = iwch_sgl2pbl_map(qhp->rhp, wr->sg_list, wr->num_sge, pbl_addr, in build_rdma_recv()
301 pbl_addr = cxio_hal_pblpool_alloc(&qhp->rhp->rdev, T3_STAG0_PBL_SIZE); in build_zero_stag_recv()
308 pbl_offset = (pbl_addr - qhp->rhp->rdev.rnic_info.pbl_base) >> 3; in build_zero_stag_recv()
532 struct iwch_dev *rhp; in iwch_bind_mw() local
548 rhp = qhp->rhp; in iwch_bind_mw()
582 err = iwch_sgl2pbl_map(rhp, &sgl, 1, &pbl_addr, &page_size); in iwch_bind_mw()
767 return iwch_cxgb3_ofld_send(ep->com.qp->rhp->rdev.t3cdev_p, skb); in iwch_post_zb_read()
[all …]
iwch_cm.c:921 err = iwch_modify_qp(ep->com.qp->rhp, in process_mpa_reply()
1480 iwch_modify_qp(ep->com.qp->rhp, ep->com.qp, in peer_close()
1495 iwch_modify_qp(ep->com.qp->rhp, ep->com.qp, in peer_close()
1590 ret = iwch_modify_qp(ep->com.qp->rhp, in peer_abort()
1657 iwch_modify_qp(ep->com.qp->rhp, in close_con_rpl()
1722 iwch_modify_qp(ep->com.qp->rhp, in ec_status()
1752 iwch_modify_qp(ep->com.qp->rhp, in ep_timeout()
1809 if ((conn_param->ord > qp->rhp->attr.max_rdma_read_qp_depth) || in iwch_accept_cr()
1810 (conn_param->ird > qp->rhp->attr.max_rdma_reads_per_qp)) { in iwch_accept_cr()
1842 err = iwch_modify_qp(ep->com.qp->rhp, in iwch_accept_cr()
iwch_ev.c:82 iwch_modify_qp(qhp->rhp, qhp, IWCH_QP_ATTR_NEXT_STATE, in post_qp_event()
iwch.c:79 ring_doorbell(qhp->rhp->rdev.ctrl_qp.doorbell, qhp->wq.qpid); in enable_qp_db()
/drivers/media/video/pvrusb2/
pvrusb2-v4l2.c:55 struct pvr2_ioread *rhp; member
976 if (fhp->rhp) { in pvr2_v4l2_release()
979 sp = pvr2_ioread_get_stream(fhp->rhp); in pvr2_v4l2_release()
981 pvr2_ioread_destroy(fhp->rhp); in pvr2_v4l2_release()
982 fhp->rhp = NULL; in pvr2_v4l2_release()
1124 if (fh->rhp) return 0; in pvr2_v4l2_iosetup()
1140 fh->rhp = pvr2_channel_create_mpeg_stream(fh->pdi->stream); in pvr2_v4l2_iosetup()
1141 if (!fh->rhp) { in pvr2_v4l2_iosetup()
1151 return pvr2_ioread_set_enabled(fh->rhp,!0); in pvr2_v4l2_iosetup()
1194 if (!fh->rhp) { in pvr2_v4l2_read()
[all …]
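
In the pvrusb2 driver, rhp is unrelated to the RDMA code above: it is a per-open pvr2_ioread stream handle on the V4L2 file handle, created lazily when I/O is set up, required before read() proceeds, and torn down at release. The sketch below mirrors that lifecycle; struct example_fh and struct example_pdi are hypothetical containers standing in for the driver's own file-handle structures, the type of the ->stream field is assumed, and only the pvr2_* calls themselves come from the hits above.

#include <linux/errno.h>

/* Hypothetical scaffolding so the fragment is self-contained; only the
 * pvr2_* calls and the ->rhp / ->pdi->stream accesses mirror the snippets. */
struct example_pdi {
        struct pvr2_context_stream *stream;     /* assumed type */
};

struct example_fh {
        struct pvr2_ioread *rhp;                /* per-open read handle */
        struct example_pdi *pdi;
};

/* I/O setup: create and enable the read handle once (cf. pvr2_v4l2_iosetup). */
static int example_iosetup(struct example_fh *fh)
{
        if (fh->rhp)
                return 0;
        fh->rhp = pvr2_channel_create_mpeg_stream(fh->pdi->stream);
        if (!fh->rhp)
                return -ENOMEM;
        return pvr2_ioread_set_enabled(fh->rhp, !0);
}

/* Release: destroy the read handle and forget it (cf. pvr2_v4l2_release). */
static void example_release(struct example_fh *fh)
{
        if (!fh->rhp)
                return;
        pvr2_ioread_destroy(fh->rhp);
        fh->rhp = NULL;
}
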
/drivers/staging/cxt1e1/
musycc.h:55 VINT32 rhp[32]; /* Receive Head Pointer [5-29] */ member
pmcc4_drv.c:1424 pi->regram->rhp[gchan] = tmp; in c4_chan_up()
musycc.c:1689 pi->regram->rhp[gchan] = 0; in musycc_chan_down()
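
The cxt1e1 hits use rhp differently again: musycc.h declares it as a 32-entry array of per-channel Receive Head Pointer registers inside the MUSYCC group register image, written during channel bring-up (pmcc4_drv.c:1424) and zeroed on channel teardown (musycc.c:1689). A tiny sketch of that shape follows; the struct is a hypothetical stand-in for the real register-image layout, and the value written on bring-up ("tmp" in the hit) is not shown in these results.

#include <linux/types.h>

/* Hypothetical mirror of the group register image; the real layout in
 * musycc.h uses VINT32 (volatile) members and many more registers. */
struct example_group_regs {
        volatile u32 rhp[32];   /* Receive Head Pointer, one per group channel */
};

/* Channel up: store whatever value the bring-up path computed; the search
 * hit only shows the assignment itself (cf. c4_chan_up()). */
static void example_chan_rx_up(struct example_group_regs *regram, int gchan,
                               u32 value)
{
        regram->rhp[gchan] = value;
}

/* Channel down: clear the head pointer (cf. musycc_chan_down()). */
static void example_chan_rx_down(struct example_group_regs *regram, int gchan)
{
        regram->rhp[gchan] = 0;
}
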