Home
last modified time | relevance | path

Searched refs:mhp (Results 1 – 6 of 6) sorted by relevance

/drivers/infiniband/hw/cxgb4/
Dmem.c 353 static int finish_mem_reg(struct c4iw_mr *mhp, u32 stag) in finish_mem_reg() argument
357 mhp->attr.state = 1; in finish_mem_reg()
358 mhp->attr.stag = stag; in finish_mem_reg()
360 mhp->ibmr.rkey = mhp->ibmr.lkey = stag; in finish_mem_reg()
361 PDBG("%s mmid 0x%x mhp %p\n", __func__, mmid, mhp); in finish_mem_reg()
362 return insert_handle(mhp->rhp, &mhp->rhp->mmidr, mhp, mmid); in finish_mem_reg()
366 struct c4iw_mr *mhp, int shift) in register_mem() argument
371 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, mhp->attr.pdid, in register_mem()
372 FW_RI_STAG_NSMR, mhp->attr.perms, in register_mem()
373 mhp->attr.mw_bind_enable, mhp->attr.zbva, in register_mem()
[all …]
/drivers/infiniband/hw/cxgb3/
Diwch_mem.c 43 static int iwch_finish_mem_reg(struct iwch_mr *mhp, u32 stag) in iwch_finish_mem_reg() argument
47 mhp->attr.state = 1; in iwch_finish_mem_reg()
48 mhp->attr.stag = stag; in iwch_finish_mem_reg()
50 mhp->ibmr.rkey = mhp->ibmr.lkey = stag; in iwch_finish_mem_reg()
51 PDBG("%s mmid 0x%x mhp %p\n", __func__, mmid, mhp); in iwch_finish_mem_reg()
52 return insert_handle(mhp->rhp, &mhp->rhp->mmidr, mhp, mmid); in iwch_finish_mem_reg()
56 struct iwch_mr *mhp, int shift) in iwch_register_mem() argument
62 &stag, mhp->attr.pdid, in iwch_register_mem()
63 mhp->attr.perms, in iwch_register_mem()
64 mhp->attr.zbva, in iwch_register_mem()
[all …]
Diwch_provider.c 448 struct iwch_mr *mhp; in iwch_dereg_mr() local
456 mhp = to_iwch_mr(ib_mr); in iwch_dereg_mr()
457 rhp = mhp->rhp; in iwch_dereg_mr()
458 mmid = mhp->attr.stag >> 8; in iwch_dereg_mr()
459 cxio_dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, in iwch_dereg_mr()
460 mhp->attr.pbl_addr); in iwch_dereg_mr()
461 iwch_free_pbl(mhp); in iwch_dereg_mr()
463 if (mhp->kva) in iwch_dereg_mr()
464 kfree((void *) (unsigned long) mhp->kva); in iwch_dereg_mr()
465 if (mhp->umem) in iwch_dereg_mr()
[all …]
Diwch_provider.h 341 struct iwch_mr *mhp, int shift);
343 struct iwch_mr *mhp,
346 int iwch_alloc_pbl(struct iwch_mr *mhp, int npages);
347 void iwch_free_pbl(struct iwch_mr *mhp);
348 int iwch_write_pbl(struct iwch_mr *mhp, __be64 *pages, int npages, int offset);
Diwch_qp.c 204 struct iwch_mr *mhp; in iwch_sgl2pbl_map() local
208 mhp = get_mhp(rhp, (sg_list[i].lkey) >> 8); in iwch_sgl2pbl_map()
209 if (!mhp) { in iwch_sgl2pbl_map()
213 if (!mhp->attr.state) { in iwch_sgl2pbl_map()
217 if (mhp->attr.zbva) { in iwch_sgl2pbl_map()
222 if (sg_list[i].addr < mhp->attr.va_fbo) { in iwch_sgl2pbl_map()
232 mhp->attr.va_fbo + ((u64) mhp->attr.len)) { in iwch_sgl2pbl_map()
236 offset = sg_list[i].addr - mhp->attr.va_fbo; in iwch_sgl2pbl_map()
237 offset += mhp->attr.va_fbo & in iwch_sgl2pbl_map()
238 ((1UL << (12 + mhp->attr.page_size)) - 1); in iwch_sgl2pbl_map()
[all …]
/drivers/iommu/
Dintel-iommu.c 3916 struct memory_notify *mhp = v; in intel_iommu_memory_notifier() local
3922 start = mhp->start_pfn << PAGE_SHIFT; in intel_iommu_memory_notifier()
3923 end = ((mhp->start_pfn + mhp->nr_pages) << PAGE_SHIFT) - 1; in intel_iommu_memory_notifier()
3933 start_vpfn = mm_to_dma_pfn(mhp->start_pfn); in intel_iommu_memory_notifier()
3934 last_vpfn = mm_to_dma_pfn(mhp->start_pfn + mhp->nr_pages - 1); in intel_iommu_memory_notifier()