Searched refs:access_flags (Results 1 – 25 of 35) sorted by relevance

/drivers/infiniband/hw/mana/
mr.c
12 mana_ib_verbs_to_gdma_access_flags(int access_flags) in mana_ib_verbs_to_gdma_access_flags() argument
16 if (access_flags & IB_ACCESS_LOCAL_WRITE) in mana_ib_verbs_to_gdma_access_flags()
19 if (access_flags & IB_ACCESS_REMOTE_WRITE) in mana_ib_verbs_to_gdma_access_flags()
22 if (access_flags & IB_ACCESS_REMOTE_READ) in mana_ib_verbs_to_gdma_access_flags()
48 req.gva.access_flags = mr_params->gva.access_flags; in mana_ib_gd_create_mr()
104 u64 iova, int access_flags, in mana_ib_reg_user_mr() argument
119 start, iova, length, access_flags); in mana_ib_reg_user_mr()
121 access_flags &= ~IB_ACCESS_OPTIONAL; in mana_ib_reg_user_mr()
122 if (access_flags & ~VALID_MR_FLAGS) in mana_ib_reg_user_mr()
129 mr->umem = ib_umem_get(ibdev, start, length, access_flags); in mana_ib_reg_user_mr()
[all …]
mana_ib.h
116 struct ib_mr *mana_ib_get_dma_mr(struct ib_pd *ibpd, int access_flags);
119 u64 iova, int access_flags,
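The mana entries above show the usual provider pattern for memory registration: strip the optional IB access bits, reject anything outside the driver's supported set (the VALID_MR_FLAGS test), then translate each IB_ACCESS_* bit into the device's own encoding. The sketch below illustrates only the translation step; the DEV_ACCESS_* values and the flag constants are made-up placeholders, not the real mana/GDMA definitions.

#include <stdio.h>

/* Hypothetical stand-ins for the IB verbs access bits (values illustrative only). */
#define IB_ACCESS_LOCAL_WRITE   (1 << 0)
#define IB_ACCESS_REMOTE_WRITE  (1 << 1)
#define IB_ACCESS_REMOTE_READ   (1 << 2)

/* Hypothetical device-side flag encoding, standing in for the GDMA values. */
#define DEV_ACCESS_LOCAL_WRITE  (1 << 4)
#define DEV_ACCESS_REMOTE_WRITE (1 << 5)
#define DEV_ACCESS_REMOTE_READ  (1 << 6)

/* Translate verbs access flags into the device encoding one bit at a time,
 * mirroring the shape of mana_ib_verbs_to_gdma_access_flags() above. */
static unsigned int verbs_to_dev_access_flags(int access_flags)
{
	unsigned int dev_flags = 0;

	if (access_flags & IB_ACCESS_LOCAL_WRITE)
		dev_flags |= DEV_ACCESS_LOCAL_WRITE;
	if (access_flags & IB_ACCESS_REMOTE_WRITE)
		dev_flags |= DEV_ACCESS_REMOTE_WRITE;
	if (access_flags & IB_ACCESS_REMOTE_READ)
		dev_flags |= DEV_ACCESS_REMOTE_READ;

	return dev_flags;
}

int main(void)
{
	int acc = IB_ACCESS_LOCAL_WRITE | IB_ACCESS_REMOTE_READ;

	printf("verbs 0x%x -> device 0x%x\n", (unsigned int)acc,
	       verbs_to_dev_access_flags(acc));
	return 0;
}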
/drivers/char/agp/
frontend.c
350 clear_bit(AGP_FF_IS_VALID, &priv->access_flags); in agp_remove_all_clients()
351 clear_bit(AGP_FF_IS_CLIENT, &priv->access_flags); in agp_remove_all_clients()
416 set_bit(AGP_FF_IS_VALID, &priv->access_flags); in agp_controller_make_current()
417 set_bit(AGP_FF_IS_CLIENT, &priv->access_flags); in agp_controller_make_current()
430 clear_bit(AGP_FF_IS_VALID, &controller_priv->access_flags); in agp_controller_release_current()
439 clear_bit(AGP_FF_IS_VALID, &priv->access_flags); in agp_controller_release_current()
578 if (!(test_bit(AGP_FF_IS_VALID, &priv->access_flags))) in agp_mmap()
588 if (test_bit(AGP_FF_IS_CLIENT, &priv->access_flags)) { in agp_mmap()
613 if (test_bit(AGP_FF_IS_CONTROLLER, &priv->access_flags)) { in agp_mmap()
651 if (test_bit(AGP_FF_IS_CONTROLLER, &priv->access_flags)) { in agp_release()
[all …]
compat_ioctl.c
85 set_bit(AGP_FF_IS_CLIENT, &client_priv->access_flags); in compat_agpioc_reserve_wrap()
86 set_bit(AGP_FF_IS_VALID, &client_priv->access_flags); in compat_agpioc_reserve_wrap()
142 set_bit(AGP_FF_IS_CLIENT, &client_priv->access_flags); in compat_agpioc_reserve_wrap()
143 set_bit(AGP_FF_IS_VALID, &client_priv->access_flags); in compat_agpioc_reserve_wrap()
228 if (!(test_bit(AGP_FF_IS_CONTROLLER, &curr_priv->access_flags))) { in compat_agp_ioctl()
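In the AGP frontend, access_flags is not a copy of user-requested permissions but a per-file-descriptor state word: set_bit()/clear_bit() flip the AGP_FF_* role bits and test_bit() gates the mmap/release/ioctl paths. A small userspace sketch of the same test/set/clear pattern, using plain C bit operations instead of the kernel's atomic bitops:

#include <stdbool.h>
#include <stdio.h>

/* Bit positions standing in for AGP_FF_IS_VALID / AGP_FF_IS_CLIENT / AGP_FF_IS_CONTROLLER. */
enum { FF_IS_VALID, FF_IS_CLIENT, FF_IS_CONTROLLER };

struct file_priv {
	unsigned long access_flags;	/* role/state bits for one open file */
};

static void set_flag(struct file_priv *p, int bit)   { p->access_flags |=  (1UL << bit); }
static void clear_flag(struct file_priv *p, int bit) { p->access_flags &= ~(1UL << bit); }
static bool test_flag(const struct file_priv *p, int bit) { return p->access_flags & (1UL << bit); }

int main(void)
{
	struct file_priv priv = { 0 };

	/* "Make current" marks the descriptor as a valid client... */
	set_flag(&priv, FF_IS_VALID);
	set_flag(&priv, FF_IS_CLIENT);

	/* ...and mmap-style paths refuse descriptors that are not marked valid. */
	printf("mmap allowed: %d\n", test_flag(&priv, FF_IS_VALID));

	/* "Release current" drops the validity bit again. */
	clear_flag(&priv, FF_IS_VALID);
	printf("mmap allowed after release: %d\n", test_flag(&priv, FF_IS_VALID));
	return 0;
}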
/drivers/infiniband/hw/mlx5/
mr.c
56 u64 iova, int access_flags,
304 set_mkc_access_pd_addr_fields(mkc, ent->rb_key.access_flags, 0, in set_cache_mkc()
673 res = key1.access_flags - key2.access_flags; in cache_ent_key_cmp()
738 smallest->rb_key.access_flags == rb_key.access_flags && in mkey_cache_ent_from_rb_key()
746 int access_flags) in _mlx5_mr_cache_alloc() argument
782 int access_flags) in get_unchangeable_access_flags() argument
786 if ((access_flags & IB_ACCESS_REMOTE_ATOMIC) && in get_unchangeable_access_flags()
791 if ((access_flags & IB_ACCESS_RELAXED_ORDERING) && in get_unchangeable_access_flags()
796 if ((access_flags & IB_ACCESS_RELAXED_ORDERING) && in get_unchangeable_access_flags()
806 int access_flags, int access_mode, in mlx5_mr_cache_alloc() argument
[all …]
umr.c
381 unsigned int access_flags) in mlx5r_umr_set_access_flags() argument
383 bool ro_read = (access_flags & IB_ACCESS_RELAXED_ORDERING) && in mlx5r_umr_set_access_flags()
387 MLX5_SET(mkc, seg, a, !!(access_flags & IB_ACCESS_REMOTE_ATOMIC)); in mlx5r_umr_set_access_flags()
388 MLX5_SET(mkc, seg, rw, !!(access_flags & IB_ACCESS_REMOTE_WRITE)); in mlx5r_umr_set_access_flags()
389 MLX5_SET(mkc, seg, rr, !!(access_flags & IB_ACCESS_REMOTE_READ)); in mlx5r_umr_set_access_flags()
390 MLX5_SET(mkc, seg, lw, !!(access_flags & IB_ACCESS_LOCAL_WRITE)); in mlx5r_umr_set_access_flags()
393 !!(access_flags & IB_ACCESS_RELAXED_ORDERING)); in mlx5r_umr_set_access_flags()
398 int access_flags) in mlx5r_umr_rereg_pd_access() argument
409 mlx5r_umr_set_access_flags(dev, &wqe.mkey_seg, access_flags); in mlx5r_umr_rereg_pd_access()
419 mr->access_flags = access_flags; in mlx5r_umr_rereg_pd_access()
[all …]
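The umr.c hits above show mlx5r_umr_set_access_flags() turning each IB access bit into a separate boolean field of the mkey context (a, rw, rr, lw), and it appears to gate relaxed ordering on a device capability as well (the condition is truncated in the listing). A hedged sketch of that "one field per access bit, gated by capabilities" idea, with a plain struct standing in for the MLX5_SET() register layout:

#include <stdbool.h>
#include <stdio.h>

#define IB_ACCESS_LOCAL_WRITE       (1 << 0)
#define IB_ACCESS_REMOTE_WRITE      (1 << 1)
#define IB_ACCESS_REMOTE_READ       (1 << 2)
#define IB_ACCESS_REMOTE_ATOMIC     (1 << 3)
#define IB_ACCESS_RELAXED_ORDERING  (1 << 4)	/* illustrative values only */

/* Stand-in for the mkey context fields the driver writes with MLX5_SET(). */
struct mkey_seg {
	bool atomic_en, remote_write_en, remote_read_en, local_write_en, relaxed_ordering;
};

struct dev_caps {
	bool relaxed_ordering_supported;	/* hypothetical capability bit */
};

static void set_mkey_access_flags(const struct dev_caps *caps,
				  struct mkey_seg *seg, unsigned int access_flags)
{
	seg->atomic_en        = !!(access_flags & IB_ACCESS_REMOTE_ATOMIC);
	seg->remote_write_en  = !!(access_flags & IB_ACCESS_REMOTE_WRITE);
	seg->remote_read_en   = !!(access_flags & IB_ACCESS_REMOTE_READ);
	seg->local_write_en   = !!(access_flags & IB_ACCESS_LOCAL_WRITE);
	/* Relaxed ordering is requested by the caller but honoured only if the device supports it. */
	seg->relaxed_ordering = (access_flags & IB_ACCESS_RELAXED_ORDERING) &&
				caps->relaxed_ordering_supported;
}

int main(void)
{
	struct dev_caps caps = { .relaxed_ordering_supported = true };
	struct mkey_seg seg;

	set_mkey_access_flags(&caps, &seg,
			      IB_ACCESS_LOCAL_WRITE | IB_ACCESS_RELAXED_ORDERING);
	printf("lw=%d ro=%d rw=%d\n", seg.local_write_en, seg.relaxed_ordering,
	       seg.remote_write_en);
	return 0;
}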
mlx5_ib.h
649 unsigned int access_flags; member
712 int access_flags; member
1320 u64 virt_addr, int access_flags,
1324 int fd, int access_flags,
1335 int access_flags);
1339 u64 length, u64 virt_addr, int access_flags,
1377 u64 access_flags);
1388 int access_flags, int access_mode,
1671 struct ib_umem *umem, int access_flags) in mlx5_umem_needs_ats() argument
1675 return access_flags & IB_ACCESS_RELAXED_ORDERING; in mlx5_umem_needs_ats()
restrack.c
86 if (!(mr->access_flags & IB_ACCESS_ON_DEMAND)) in fill_stat_mr_entry()
128 if (!(mr->access_flags & IB_ACCESS_ON_DEMAND)) in fill_res_mr_entry()
mem.c
43 u64 access_flags) in mlx5_ib_populate_pas() argument
49 access_flags); in mlx5_ib_populate_pas()
odp.c
420 mr = mlx5_mr_cache_alloc(dev, imr->access_flags, in implicit_get_child_mr()
428 mr->access_flags = imr->access_flags; in implicit_get_child_mr()
482 int access_flags) in mlx5_ib_alloc_implicit_mr() argument
492 umem_odp = ib_umem_odp_alloc_implicit(&dev->ib_dev, access_flags); in mlx5_ib_alloc_implicit_mr()
496 imr = mlx5_mr_cache_alloc(dev, access_flags, MLX5_MKC_ACCESS_MODE_KSM, in mlx5_ib_alloc_implicit_mr()
503 imr->access_flags = access_flags; in mlx5_ib_alloc_implicit_mr()
umr.h
93 int access_flags);
devx.c
2182 struct devx_umem *obj, u32 access_flags) in devx_umem_get() argument
2192 err = ib_check_mr_access(&dev->ib_dev, access_flags); in devx_umem_get()
2206 &dev->ib_dev, addr, size, dmabuf_fd, access_flags); in devx_umem_get()
2211 obj->umem = ib_umem_get(&dev->ib_dev, addr, size, access_flags); in devx_umem_get()
2323 int access_flags; in UVERBS_HANDLER() local
2329 err = uverbs_get_flags32(&access_flags, attrs, in UVERBS_HANDLER()
2342 err = devx_umem_get(dev, &c->ibucontext, attrs, obj, access_flags); in UVERBS_HANDLER()
2346 err = devx_umem_reg_cmd_alloc(dev, attrs, obj, &cmd, access_flags); in UVERBS_HANDLER()
qp.c
3357 u32 access_flags; in set_qpc_atomic_flags() local
3365 access_flags = attr->qp_access_flags; in set_qpc_atomic_flags()
3367 access_flags = qp->trans_qp.atomic_rd_en; in set_qpc_atomic_flags()
3370 access_flags &= IB_ACCESS_REMOTE_WRITE; in set_qpc_atomic_flags()
3372 MLX5_SET(qpc, qpc, rre, !!(access_flags & IB_ACCESS_REMOTE_READ)); in set_qpc_atomic_flags()
3374 if (access_flags & IB_ACCESS_REMOTE_ATOMIC) { in set_qpc_atomic_flags()
3385 MLX5_SET(qpc, qpc, rwe, !!(access_flags & IB_ACCESS_REMOTE_WRITE)); in set_qpc_atomic_flags()
5003 u32 access_flags = 0; in mlx5_ib_dct_query_qp() local
5035 access_flags |= IB_ACCESS_REMOTE_READ; in mlx5_ib_dct_query_qp()
5037 access_flags |= IB_ACCESS_REMOTE_WRITE; in mlx5_ib_dct_query_qp()
[all …]
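The qp.c hits show the translation running in both directions: on modify-QP, the requested qp_access_flags (or the previously cached value) become per-capability enable bits in the QPC, and the query path maps hardware state back to IB_ACCESS_* flags. A rough two-way sketch, with hypothetical field names in place of the real QPC layout and MLX5_SET() accessors:

#include <stdbool.h>
#include <stdio.h>

#define IB_ACCESS_REMOTE_WRITE   (1 << 1)
#define IB_ACCESS_REMOTE_READ    (1 << 2)
#define IB_ACCESS_REMOTE_ATOMIC  (1 << 3)	/* illustrative values only */

/* Stand-in for the rre/rwe/rae enable bits written into the QPC. */
struct qpc {
	bool remote_read_en, remote_write_en, remote_atomic_en;
};

/* Forward direction: requested access flags -> hardware enable bits. */
static void set_qpc_access(struct qpc *qpc, unsigned int access_flags)
{
	qpc->remote_read_en   = !!(access_flags & IB_ACCESS_REMOTE_READ);
	qpc->remote_write_en  = !!(access_flags & IB_ACCESS_REMOTE_WRITE);
	qpc->remote_atomic_en = !!(access_flags & IB_ACCESS_REMOTE_ATOMIC);
}

/* Reverse direction: hardware enable bits -> access flags, as a query would report. */
static unsigned int query_qpc_access(const struct qpc *qpc)
{
	unsigned int access_flags = 0;

	if (qpc->remote_read_en)
		access_flags |= IB_ACCESS_REMOTE_READ;
	if (qpc->remote_write_en)
		access_flags |= IB_ACCESS_REMOTE_WRITE;
	if (qpc->remote_atomic_en)
		access_flags |= IB_ACCESS_REMOTE_ATOMIC;
	return access_flags;
}

int main(void)
{
	struct qpc qpc;

	set_qpc_access(&qpc, IB_ACCESS_REMOTE_READ | IB_ACCESS_REMOTE_WRITE);
	printf("round-trip flags: 0x%x\n", query_qpc_access(&qpc));
	return 0;
}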
/drivers/iommu/
iommu-sva.c
157 unsigned int access_flags = 0; in iommu_sva_handle_iopf() local
176 access_flags |= VM_READ; in iommu_sva_handle_iopf()
179 access_flags |= VM_WRITE; in iommu_sva_handle_iopf()
184 access_flags |= VM_EXEC; in iommu_sva_handle_iopf()
191 if (access_flags & ~vma->vm_flags) in iommu_sva_handle_iopf()
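iommu_sva_handle_iopf() builds a VM_READ/VM_WRITE/VM_EXEC mask from the fault's permission bits and then rejects the fault if it asks for anything the VMA does not allow, via the access_flags & ~vma->vm_flags test. That "requested & ~allowed" idiom is worth spelling out on its own; a small sketch with made-up permission constants:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative permission bits standing in for VM_READ / VM_WRITE / VM_EXEC. */
#define PERM_READ   (1u << 0)
#define PERM_WRITE  (1u << 1)
#define PERM_EXEC   (1u << 2)

/* A fault is serviceable only if every permission it requests is present in
 * the allowed set; any bit left after masking means "not permitted". */
static bool fault_allowed(unsigned int requested, unsigned int vma_flags)
{
	return !(requested & ~vma_flags);
}

int main(void)
{
	unsigned int vma_flags = PERM_READ | PERM_WRITE;	/* e.g. a writable, non-executable mapping */

	printf("read fault:      %d\n", fault_allowed(PERM_READ, vma_flags));			/* 1 */
	printf("read+exec fault: %d\n", fault_allowed(PERM_READ | PERM_EXEC, vma_flags));	/* 0 */
	return 0;
}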
/drivers/infiniband/core/
uverbs_std_types_mr.c
110 ret = uverbs_get_flags32(&attr.access_flags, attrs, in UVERBS_HANDLER()
116 if (!(attr.access_flags & IB_ZERO_BASED)) in UVERBS_HANDLER()
119 ret = ib_check_mr_access(ib_dev, attr.access_flags); in UVERBS_HANDLER()
196 u32 fd, access_flags; in UVERBS_HANDLER() local
226 ret = uverbs_get_flags32(&access_flags, attrs, in UVERBS_HANDLER()
236 ret = ib_check_mr_access(ib_dev, access_flags); in UVERBS_HANDLER()
241 access_flags, in UVERBS_HANDLER()
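In the core uverbs handlers, the user-supplied flags are first fetched with uverbs_get_flags32() and then passed through ib_check_mr_access() before any driver sees them. One consistency rule that check is commonly understood to enforce (stated here from general knowledge, not from the listing itself) is that remote write or remote atomic access only makes sense if the MR is also locally writable. A sketch of that rule with illustrative constants:

#include <errno.h>
#include <stdio.h>

#define ACCESS_LOCAL_WRITE    (1u << 0)
#define ACCESS_REMOTE_WRITE   (1u << 1)
#define ACCESS_REMOTE_ATOMIC  (1u << 3)	/* illustrative values only */

/* Remote write or remote atomic access implies local writability;
 * otherwise the registration request is rejected up front. */
static int check_mr_access(unsigned int access_flags)
{
	if ((access_flags & (ACCESS_REMOTE_WRITE | ACCESS_REMOTE_ATOMIC)) &&
	    !(access_flags & ACCESS_LOCAL_WRITE))
		return -EINVAL;
	return 0;
}

int main(void)
{
	printf("%d\n", check_mr_access(ACCESS_REMOTE_WRITE));				/* rejected */
	printf("%d\n", check_mr_access(ACCESS_REMOTE_WRITE | ACCESS_LOCAL_WRITE));	/* accepted */
	return 0;
}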
/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_mr.c
82 cmd->access_flags = acc; in pvrdma_get_dma_mr()
112 u64 virt_addr, int access_flags, in pvrdma_reg_user_mr() argument
129 umem = ib_umem_get(pd->device, start, length, access_flags); in pvrdma_reg_user_mr()
170 cmd->access_flags = access_flags; in pvrdma_reg_user_mr()
241 cmd->access_flags = 0; in pvrdma_alloc_mr()
pvrdma_dev_api.h
460 u32 access_flags; member
565 u32 access_flags; member
pvrdma_verbs.h
370 u64 virt_addr, int access_flags,
/drivers/infiniband/hw/mlx4/
mr.c
373 u64 length, int access_flags) in mlx4_get_umem_mr() argument
381 if (!ib_access_writable(access_flags)) { in mlx4_get_umem_mr()
395 access_flags |= IB_ACCESS_LOCAL_WRITE; in mlx4_get_umem_mr()
397 access_flags |= IB_ACCESS_LOCAL_WRITE; in mlx4_get_umem_mr()
403 return ib_umem_get(device, start, length, access_flags); in mlx4_get_umem_mr()
407 u64 virt_addr, int access_flags, in mlx4_ib_reg_user_mr() argument
420 mr->umem = mlx4_get_umem_mr(pd->device, start, length, access_flags); in mlx4_ib_reg_user_mr()
429 convert_access(access_flags), n, shift, &mr->mmr); in mlx4_ib_reg_user_mr()
/drivers/infiniband/hw/efa/
efa.h
155 u64 virt_addr, int access_flags,
159 int fd, int access_flags,
efa_verbs.c
1566 static struct efa_mr *efa_alloc_mr(struct ib_pd *ibpd, int access_flags, in efa_alloc_mr() argument
1585 access_flags &= ~IB_ACCESS_OPTIONAL; in efa_alloc_mr()
1586 if (access_flags & ~supp_access_flags) { in efa_alloc_mr()
1589 access_flags, supp_access_flags); in efa_alloc_mr()
1601 u64 length, u64 virt_addr, int access_flags) in efa_register_mr() argument
1614 params.permissions = access_flags; in efa_register_mr()
1663 int fd, int access_flags, in efa_reg_user_mr_dmabuf() argument
1671 mr = efa_alloc_mr(ibpd, access_flags, udata); in efa_reg_user_mr_dmabuf()
1678 access_flags); in efa_reg_user_mr_dmabuf()
1686 err = efa_register_mr(ibpd, mr, start, length, virt_addr, access_flags); in efa_reg_user_mr_dmabuf()
[all …]
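efa_alloc_mr() clears IB_ACCESS_OPTIONAL and then rejects any remaining bit outside supp_access_flags, the same mask-then-reject shape as the VALID_MR_FLAGS test in the mana listing. A compact sketch of that validation step, with a made-up supported mask and optional-bit range:

#include <errno.h>
#include <stdio.h>

#define ACCESS_LOCAL_WRITE   (1u << 0)
#define ACCESS_REMOTE_READ   (1u << 2)
#define ACCESS_OPTIONAL_MASK (0xFu << 20)	/* illustrative "optional" bit range */

/* Hypothetical supported set for some device. */
#define SUPPORTED_ACCESS_FLAGS (ACCESS_LOCAL_WRITE | ACCESS_REMOTE_READ)

static int check_access_flags(unsigned int access_flags)
{
	/* Optional bits are hints: drop them rather than fail on them. */
	access_flags &= ~ACCESS_OPTIONAL_MASK;

	/* Anything left that the device cannot honour is a hard error. */
	if (access_flags & ~SUPPORTED_ACCESS_FLAGS)
		return -EOPNOTSUPP;
	return 0;
}

int main(void)
{
	printf("%d\n", check_access_flags(ACCESS_LOCAL_WRITE | ACCESS_OPTIONAL_MASK));	/* 0 */
	printf("%d\n", check_access_flags(1u << 5));	/* rejected: unsupported bit */
	return 0;
}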
/drivers/infiniband/sw/rdmavt/
mr.c
313 mr->mr.access_flags = acc; in rvt_get_dma_mr()
365 mr->mr.access_flags = mr_access_flags; in rvt_reg_user_mr()
632 mr->mr.access_flags = access; in rvt_fast_reg_mr()
767 (mr->access_flags & acc) != acc)) in rvt_lkey_ok()
874 (mr->access_flags & acc) == 0)) in rvt_rkey_ok()
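The rdmavt checks run at data-path time, when a key is used rather than when it is registered: rvt_lkey_ok() fails unless every bit in acc is present ((mr->access_flags & acc) != acc), while rvt_rkey_ok() fails when none of the requested bits is set. A brief sketch of the stricter "all required bits present" form:

#include <stdbool.h>
#include <stdio.h>

#define ACCESS_LOCAL_WRITE   (1u << 0)
#define ACCESS_REMOTE_WRITE  (1u << 1)
#define ACCESS_REMOTE_READ   (1u << 2)	/* illustrative values only */

/* True only if every access bit required by the operation is set on the MR. */
static bool mr_access_ok(unsigned int mr_access_flags, unsigned int required)
{
	return (mr_access_flags & required) == required;
}

int main(void)
{
	unsigned int mr_flags = ACCESS_LOCAL_WRITE | ACCESS_REMOTE_READ;

	/* An RDMA read of this MR is fine, an RDMA write is not. */
	printf("remote read:  %d\n", mr_access_ok(mr_flags, ACCESS_REMOTE_READ));
	printf("remote write: %d\n", mr_access_ok(mr_flags, ACCESS_REMOTE_WRITE));
	return 0;
}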
/drivers/infiniband/hw/usnic/
usnic_ib_verbs.h
62 u64 virt_addr, int access_flags,
usnic_ib_verbs.c
594 u64 virt_addr, int access_flags, in usnic_ib_reg_mr() argument
608 access_flags, 0); in usnic_ib_reg_mr()
/drivers/infiniband/hw/mthca/
mthca_qp.c
332 u32 access_flags; in get_hw_access_flags() local
341 access_flags = attr->qp_access_flags; in get_hw_access_flags()
343 access_flags = qp->atomic_rd_en; in get_hw_access_flags()
346 access_flags &= IB_ACCESS_REMOTE_WRITE; in get_hw_access_flags()
348 if (access_flags & IB_ACCESS_REMOTE_READ) in get_hw_access_flags()
350 if (access_flags & IB_ACCESS_REMOTE_ATOMIC) in get_hw_access_flags()
352 if (access_flags & IB_ACCESS_REMOTE_WRITE) in get_hw_access_flags()
