
Searched refs:xa_load (Results 1 – 25 of 68) sorted by relevance
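Every hit below is a lookup through the XArray API: xa_load() takes an XArray and an index and returns the entry stored at that index, or NULL if the slot is empty. As a minimal, illustrative sketch (the names example_xa and example_index_is_populated are made up here, not taken from any of the files listed):

#include <linux/xarray.h>

/* Hypothetical XArray, for illustration only. */
static DEFINE_XARRAY(example_xa);

/*
 * xa_load() may be called from any context; it takes and releases the RCU
 * read lock internally. Hold rcu_read_lock() (or your own lock) for as long
 * as the returned pointer needs to remain valid.
 */
static bool example_index_is_populated(unsigned long index)
{
	void *entry;

	rcu_read_lock();
	entry = xa_load(&example_xa, index);	/* entry stored at index, or NULL */
	rcu_read_unlock();

	return entry != NULL;
}

Checking the returned pointer against NULL before using it (or asserting on it, as the XA_BUG_ON() tests do) is the pattern most of the call sites below follow.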


/kernel/linux/linux-5.10/lib/
test_xarray.c 63 XA_BUG_ON(xa, xa_load(xa, index) != NULL); in xa_erase_index()
153 void *entry = xa_load(xa, j); in check_xa_load()
164 void *entry = xa_load(xa, j); in check_xa_load()
344 XA_BUG_ON(xa, xa_load(xa, 1) != NULL); in check_xa_shrink()
349 XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0)); in check_xa_shrink()
356 XA_BUG_ON(xa, xa_load(xa, max) != xa_mk_value(0)); in check_xa_shrink()
357 XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL); in check_xa_shrink()
366 XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL); in check_xa_shrink()
379 XA_BUG_ON(xa, xa_load(xa, i - 1) != NULL); in check_insert()
380 XA_BUG_ON(xa, xa_load(xa, i + 1) != NULL); in check_insert()
[all …]
/kernel/linux/linux-5.10/drivers/iommu/
ioasid.c 274 ioasid_data = xa_load(&active_allocator->xa, ioasid); in ioasid_set_data()
357 ioasid_data = xa_load(&active_allocator->xa, ioasid); in ioasid_free()
398 ioasid_data = xa_load(&idata->xa, ioasid); in ioasid_find()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
mapping.c 108 mi = xa_load(&ctx->xarray, index); in mapping_remove()
131 mi = xa_load(&ctx->xarray, index); in mapping_find()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
pagealloc.c 87 root = xa_load(&dev->priv.page_root_xa, function); in page_root_per_function()
158 root = xa_load(&dev->priv.page_root_xa, function); in find_fw_page()
240 root = xa_load(&dev->priv.page_root_xa, fwp->function); in free_fwp()
410 root = xa_load(&dev->priv.page_root_xa, function); in release_all_pages()
470 root = xa_load(&dev->priv.page_root_xa, get_function(func_id, ec_function)); in reclaim_pages_cmd()
/kernel/linux/linux-5.10/drivers/infiniband/hw/cxgb4/
ev.c 127 qhp = xa_load(&dev->qps, CQE_QPID(err_cqe)); in c4iw_ev_dispatch()
229 chp = xa_load(&dev->cqs, qid); in c4iw_ev_handler()
iw_cxgb4.h 351 return xa_load(&rhp->cqs, cqid); in get_chp()
356 return xa_load(&rhp->qps, qpid); in get_qhp()
/kernel/linux/linux-5.10/arch/arm64/mm/
mteswap.c 51 void *tags = xa_load(&mte_pages, entry.val); in mte_restore_tags()
/kernel/linux/linux-5.10/drivers/gpu/drm/lima/
lima_ctx.c 72 ctx = xa_load(&mgr->handles, id); in lima_ctx_get()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
devx.c 1285 event = xa_load(&dev->devx_event_table.event_xa, in devx_cleanup_subscription()
1289 xa_val_level2 = xa_load(&event->object_ids, sub->xa_key_level2); in devx_cleanup_subscription()
1351 event = xa_load(&table->event_xa, MLX5_EVENT_TYPE_COMP); in devx_cq_comp()
1355 obj_event = xa_load(&event->object_ids, obj_id); in devx_cq_comp()
1761 event = xa_load(&devx_event_table->event_xa, key_level1); in subscribe_event_xa_dealloc()
1764 xa_val_level2 = xa_load(&event->object_ids, in subscribe_event_xa_dealloc()
1783 event = xa_load(&devx_event_table->event_xa, key_level1); in subscribe_event_xa_alloc()
1805 obj_event = xa_load(&event->object_ids, key_level2); in subscribe_event_xa_alloc()
2037 event = xa_load(&devx_event_table->event_xa, in UVERBS_HANDLER()
2047 obj_event = xa_load(&event->object_ids, obj_id); in UVERBS_HANDLER()
[all …]
odp.c 135 struct mlx5_ib_mr *mtt = xa_load(&imr->implicit_children, idx); in populate_klm()
744 mtt = xa_load(&imr->implicit_children, idx); in pagefault_implicit_mr()
912 mmkey = xa_load(&dev->odp_mkeys, mlx5_base_mkey(key)); in pagefault_single_data_segment()
1755 mmkey = xa_load(&dev->odp_mkeys, mlx5_base_mkey(lkey)); in get_prefetchable_mr()
srq_cmd.c 87 srq = xa_load(&table->array, srqn); in mlx5_cmd_get_srq()
669 srq = xa_load(&table->array, srqn); in srq_event_notifier()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
cm.c 275 ent = xa_load(&sriov->pv_id_table, *pv_cm_id); in id_map_get()
366 item = xa_load(&sriov->xa_rej_tmout, (unsigned long)rem_pv_cm_id); in alloc_rej_tmout()
411 item = xa_load(&sriov->xa_rej_tmout, (unsigned long)rem_pv_cm_id); in lookup_rej_tmout_slave()
/kernel/linux/linux-5.10/drivers/infiniband/core/
device.c 311 device = xa_load(&devices, index); in ib_device_get_by_index()
430 struct ib_client *client = xa_load(&clients, index); in ib_device_rename()
636 if (xa_load(&devices, device->index) == device) in ib_dealloc_device()
731 client_data = xa_load(&device->client_data, client_id); in remove_client_context()
733 client = xa_load(&clients, client_id); in remove_client_context()
903 cdev = xa_load(&device->compat_devs, rnet->id); in add_one_compat_dev()
1713 if (xa_load(&clients, highest_client_id - 1)) in remove_client_id()
1838 struct ib_client *client = xa_load(&clients, index); in __ib_get_client_nl_info()
2511 struct ib_client *client = xa_load(&clients, index); in ib_get_net_dev_by_params()
ib_core_uverbs.c 127 entry = xa_load(&ucontext->mmap_xa, pgoff); in rdma_user_mmap_entry_get_pgoff()
restrack.c 277 res = xa_load(&rt->xa, id); in rdma_restrack_get_byid()
/kernel/linux/linux-5.10/drivers/infiniband/hw/hns/
hns_roce_cq.c 353 hr_cq = xa_load(&hr_dev->cq_table.array, in hns_roce_cq_completion()
374 hr_cq = xa_load(&hr_dev->cq_table.array, in hns_roce_cq_event()
hns_roce_srq.c 18 srq = xa_load(&srq_table->xa, srqn & (hr_dev->caps.num_srqs - 1)); in hns_roce_srq_event()
/kernel/linux/linux-5.10/drivers/iommu/arm/arm-smmu-v3/
arm-smmu-v3-sva.c 28 cd = xa_load(&arm_smmu_asid_xa, asid); in arm_smmu_share_asid()
/kernel/linux/linux-5.10/fs/erofs/
utils.c 66 grp = xa_load(&sbi->managed_pslots, index); in erofs_find_workgroup()
/kernel/linux/linux-5.10/drivers/dma-buf/
dma-heap.c 71 heap = xa_load(&dma_heap_minors, iminor(inode)); in dma_heap_open()
/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/
netdev_rx.c 462 return xa_load(&priv->dev_tbl, id); in hfi1_netdev_get_data()
/kernel/linux/linux-5.10/drivers/infiniband/hw/qedr/
qedr_iw_cm.c 523 qp = xa_load(&dev->qps, qpn); in qedr_iw_load_qp()
817 return xa_load(&dev->qps, qpn); in qedr_iw_get_qp()
/kernel/linux/linux-5.10/drivers/infiniband/sw/siw/
siw_mem.c 53 mem = xa_load(&sdev->mem_xa, stag_index); in siw_mem_id2obj()
/kernel/linux/linux-5.10/mm/
readahead.c 199 struct page *page = xa_load(&mapping->i_pages, index + i); in page_cache_ra_unbounded()
memremap.c 486 pgmap = xa_load(&pgmap_array, PHYS_PFN(phys)); in get_dev_pagemap()
