Lines matching refs: odp
163 struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem); in populate_mtt() local
171 pa = odp->dma_list[idx + i]; in populate_mtt()
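The two matches in populate_mtt() read the per-page DMA addresses the ODP umem keeps in odp->dma_list and turn them into translation entries for the device. A minimal sketch of that loop follows, assuming the mlx5 driver's usual shape; everything outside the two matched lines (the signature, umem_dma_to_mtt(), cpu_to_be64()) is reconstructed context, not taken from this listing.

    static void populate_mtt(__be64 *pas, size_t idx, size_t nentries,
                             struct mlx5_ib_mr *mr, int flags)
    {
            struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem);
            dma_addr_t pa;
            size_t i;

            for (i = 0; i < nentries; i++) {
                    pa = odp->dma_list[idx + i];
                    /* encode the DMA address as a big-endian MTT entry */
                    pas[i] = cpu_to_be64(umem_dma_to_mtt(pa));
            }
    }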
188 struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem); in dma_fence_odp_mr() local
191 mutex_lock(&odp->umem_mutex); in dma_fence_odp_mr()
192 if (odp->npages) { in dma_fence_odp_mr()
194 ib_umem_odp_unmap_dma_pages(odp, ib_umem_start(odp), in dma_fence_odp_mr()
195 ib_umem_end(odp)); in dma_fence_odp_mr()
196 WARN_ON(odp->npages); in dma_fence_odp_mr()
198 odp->private = NULL; in dma_fence_odp_mr()
199 mutex_unlock(&odp->umem_mutex); in dma_fence_odp_mr()
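dma_fence_odp_mr() takes odp->umem_mutex so the invalidation callback cannot race with it, drops any pages that are still DMA-mapped, and clears odp->private so the umem no longer points back at the MR. A hedged reconstruction of that locking pattern, with the driver-specific mkey teardown elided:

    static void dma_fence_odp_mr(struct mlx5_ib_mr *mr)
    {
            struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem);

            /* serialize against the MMU-notifier invalidation path */
            mutex_lock(&odp->umem_mutex);
            if (odp->npages) {
                    /* ... revoke the mkey so the HCA stops using the pages (elided) ... */
                    ib_umem_odp_unmap_dma_pages(odp, ib_umem_start(odp),
                                                ib_umem_end(odp));
                    WARN_ON(odp->npages);   /* everything must be unmapped now */
            }
            odp->private = NULL;            /* umem no longer refers to this MR */
            mutex_unlock(&odp->umem_mutex);
    }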
217 struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem); in free_implicit_child_mr() local
218 unsigned long idx = ib_umem_start(odp) >> MLX5_IMR_MTT_SHIFT; in free_implicit_child_mr()
238 ib_umem_odp_release(odp); in free_implicit_child_mr()
263 struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem); in destroy_unused_implicit_child_mr() local
264 unsigned long idx = ib_umem_start(odp) >> MLX5_IMR_MTT_SHIFT; in destroy_unused_implicit_child_mr()
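Both free_implicit_child_mr() and destroy_unused_implicit_child_mr() recover the child's slot index inside the implicit (parent) MR by shifting the umem's start address by MLX5_IMR_MTT_SHIFT, and the free path ends by dropping the child umem. A hedged sketch of the shared pattern; the detachment from the parent and the mkey destruction are elided:

    static void free_implicit_child_mr(struct mlx5_ib_mr *mr)
    {
            struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem);
            /* which MLX5_IMR_MTT_SIZE-sized slot of the parent this child covers */
            unsigned long idx = ib_umem_start(odp) >> MLX5_IMR_MTT_SHIFT;

            /* ... remove the child from the parent at 'idx' and destroy its
             *     mkey (elided) ... */

            ib_umem_odp_release(odp);       /* drop the child's ODP umem */
    }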
468 struct ib_umem_odp *odp; in implicit_get_child_mr() local
473 odp = ib_umem_odp_alloc_child(to_ib_umem_odp(imr->umem), in implicit_get_child_mr()
476 if (IS_ERR(odp)) in implicit_get_child_mr()
477 return ERR_CAST(odp); in implicit_get_child_mr()
485 mr->umem = &odp->umem; in implicit_get_child_mr()
490 odp->private = mr; in implicit_get_child_mr()
526 ib_umem_odp_release(odp); in implicit_get_child_mr()
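implicit_get_child_mr() is the converse: it allocates a child ODP umem covering one MLX5_IMR_MTT_SIZE slice of the parent's address space, wires the new MR and the umem to each other through mr->umem and odp->private, and releases the umem again if a later step fails. A hedged sketch; the mkey setup and the publication of the child in the parent are elided, and mlx5_mn_ops / MLX5_IMR_MTT_SIZE are assumptions based on the surrounding driver:

    static struct mlx5_ib_mr *implicit_get_child_mr(struct mlx5_ib_mr *imr,
                                                    unsigned long idx)
    {
            struct ib_umem_odp *odp;
            struct mlx5_ib_mr *mr;

            /* child umem covers slot 'idx' of the implicit parent */
            odp = ib_umem_odp_alloc_child(to_ib_umem_odp(imr->umem),
                                          idx * MLX5_IMR_MTT_SIZE,
                                          MLX5_IMR_MTT_SIZE, &mlx5_mn_ops);
            if (IS_ERR(odp))
                    return ERR_CAST(odp);

            /* ... allocate and initialize the child mlx5_ib_mr and its mkey
             *     (elided) ... */

            mr->umem = &odp->umem;          /* MR and ODP umem reference each other */
            odp->private = mr;              /* lets the invalidation path find the MR */

            /* ... program the translation table and publish the child in the
             *     parent (elided); on any failure the child umem is dropped
             *     again with ib_umem_odp_release(odp) before returning ... */
            return mr;
    }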
670 static int pagefault_real_mr(struct mlx5_ib_mr *mr, struct ib_umem_odp *odp, in pagefault_real_mr() argument
684 page_shift = odp->page_shift; in pagefault_real_mr()
685 start_idx = (user_va - ib_umem_start(odp)) >> page_shift; in pagefault_real_mr()
688 if (odp->umem.writable && !downgrade) in pagefault_real_mr()
691 np = ib_umem_odp_map_dma_and_lock(odp, user_va, bcnt, access_mask, fault); in pagefault_real_mr()
700 mutex_unlock(&odp->umem_mutex); in pagefault_real_mr()
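pagefault_real_mr() turns the faulting user VA into a page index relative to the umem start, builds the access mask (write access only when the umem is writable and the fault is not a downgrade), and calls ib_umem_odp_map_dma_and_lock(), which on success returns with odp->umem_mutex held so the new translations can be pushed to the device before unlocking. A hedged sketch; the flag and bit names not present in the matched lines (MLX5_PF_FLAGS_*, ODP_*_ALLOWED_BIT) and the elided XLT update are assumptions based on the surrounding driver:

    static int pagefault_real_mr(struct mlx5_ib_mr *mr, struct ib_umem_odp *odp,
                                 u64 user_va, size_t bcnt, u32 *bytes_mapped,
                                 u32 flags)
    {
            bool downgrade = flags & MLX5_PF_FLAGS_DOWNGRADE;
            bool fault = !(flags & MLX5_PF_FLAGS_SNAPSHOT);
            u64 access_mask, start_idx;
            unsigned int page_shift;
            int np;

            page_shift = odp->page_shift;
            start_idx = (user_va - ib_umem_start(odp)) >> page_shift;
            access_mask = ODP_READ_ALLOWED_BIT;

            if (odp->umem.writable && !downgrade)
                    access_mask |= ODP_WRITE_ALLOWED_BIT;

            /* maps the pages; on success returns with odp->umem_mutex held */
            np = ib_umem_odp_map_dma_and_lock(odp, user_va, bcnt, access_mask,
                                              fault);
            if (np < 0)
                    return np;

            /* ... write the np new translations into the mkey starting at
             *     start_idx and account them in *bytes_mapped (elided) ... */

            mutex_unlock(&odp->umem_mutex);
            return np;
    }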
812 struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem); in pagefault_mr() local
818 if (!odp->is_implicit_odp) { in pagefault_mr()
822 (u64)odp->umem.address, &user_va)) in pagefault_mr()
824 if (unlikely(user_va >= ib_umem_end(odp) || in pagefault_mr()
825 ib_umem_end(odp) - user_va < bcnt)) in pagefault_mr()
827 return pagefault_real_mr(mr, odp, user_va, bcnt, bytes_mapped, in pagefault_mr()
830 return pagefault_implicit_mr(mr, odp, io_virt, bcnt, bytes_mapped, in pagefault_mr()
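pagefault_mr() is the dispatcher for both MR flavours: for a regular ODP MR it converts the faulting IO virtual address into a user VA with an overflow check, bounds-checks it against the umem, and hands off to pagefault_real_mr(); an implicit MR goes to pagefault_implicit_mr(), which resolves the covering child MRs. The sketch below is assembled mostly from the matched lines; mr->mmkey.iova and the error codes are assumptions from the surrounding driver:

    static int pagefault_mr(struct mlx5_ib_mr *mr, u64 io_virt, size_t bcnt,
                            u32 *bytes_mapped, u32 flags)
    {
            struct ib_umem_odp *odp = to_ib_umem_odp(mr->umem);

            if (!odp->is_implicit_odp) {
                    u64 user_va;

                    /* translate the faulting iova into a VA inside the umem */
                    if (check_add_overflow(io_virt - mr->mmkey.iova,
                                           (u64)odp->umem.address, &user_va))
                            return -EFAULT;
                    if (unlikely(user_va >= ib_umem_end(odp) ||
                                 ib_umem_end(odp) - user_va < bcnt))
                            return -EFAULT;
                    return pagefault_real_mr(mr, odp, user_va, bcnt,
                                             bytes_mapped, flags);
            }
            return pagefault_implicit_mr(mr, odp, io_virt, bcnt, bytes_mapped,
                                         flags);
    }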
1750 struct ib_umem_odp *odp; in get_prefetchable_mr() local
1764 odp = to_ib_umem_odp(mr->umem); in get_prefetchable_mr()
1768 !odp->umem.writable) in get_prefetchable_mr()
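In get_prefetchable_mr() the two matches gate prefetching: once the MR has been looked up by lkey, its ODP umem is fetched and a write-prefetch request on a non-writable umem is refused. A hedged fragment of that check; the advice enum value and the NULL return are assumptions based on the prefetch path:

    /* inside get_prefetchable_mr(), after the lkey lookup (elided) */
    odp = to_ib_umem_odp(mr->umem);

    /* a prefetch that asks for write access needs a writable umem */
    if (advice == IB_UVERBS_ADVISE_MR_ADVICE_PREFETCH_WRITE &&
        !odp->umem.writable)
            return NULL;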