
Searched for refs:nent (results 1 – 25 of 54), sorted by relevance


/kernel/linux/linux-5.10/tools/testing/selftests/kvm/x86_64/
hyperv_cpuid.c
52 int nent = 9; in test_hv_cpuid() local
56 nent += 1; /* 0x4000000A */ in test_hv_cpuid()
58 TEST_ASSERT(hv_cpuid_entries->nent == nent, in test_hv_cpuid()
61 nent, evmcs_enabled, hv_cpuid_entries->nent); in test_hv_cpuid()
63 for (i = 0; i < hv_cpuid_entries->nent; i++) { in test_hv_cpuid()
115 static struct kvm_cpuid2 cpuid = {.nent = 0}; in test_hv_cpuid_e2big()
128 int nent = 20; /* should be enough */ in kvm_get_supported_hv_cpuid() local
131 cpuid = malloc(sizeof(*cpuid) + nent * sizeof(struct kvm_cpuid_entry2)); in kvm_get_supported_hv_cpuid()
138 cpuid->nent = nent; in kvm_get_supported_hv_cpuid()
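
The selftest above sizes the kvm_cpuid2 buffer by hand: the struct carries a flexible array of kvm_cpuid_entry2 and nent tells KVM how many entries the buffer can hold. A minimal userspace sketch of the same allocation pattern (the helper name alloc_cpuid2 is ours, not the test's):

#include <stdlib.h>
#include <linux/kvm.h>

static struct kvm_cpuid2 *alloc_cpuid2(int nent)
{
        struct kvm_cpuid2 *cpuid;

        /* header plus nent trailing kvm_cpuid_entry2 slots */
        cpuid = calloc(1, sizeof(*cpuid) + nent * sizeof(struct kvm_cpuid_entry2));
        if (cpuid)
                cpuid->nent = nent; /* capacity in; KVM overwrites it with the count it fills */
        return cpuid;
}
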
/kernel/linux/linux-5.10/arch/x86/kvm/
cpuid.c
58 struct kvm_cpuid_entry2 *entries, int nent, u32 function, u32 index) in cpuid_entry2_find() argument
63 for (i = 0; i < nent; i++) { in cpuid_entry2_find()
74 static int kvm_check_cpuid(struct kvm_cpuid_entry2 *entries, int nent) in kvm_check_cpuid() argument
82 best = cpuid_entry2_find(entries, nent, 0x80000008, 0); in kvm_check_cpuid()
234 if (cpuid->nent > KVM_MAX_CPUID_ENTRIES) in kvm_vcpu_ioctl_set_cpuid()
237 if (cpuid->nent) { in kvm_vcpu_ioctl_set_cpuid()
238 e = vmemdup_user(entries, array_size(sizeof(*e), cpuid->nent)); in kvm_vcpu_ioctl_set_cpuid()
242 e2 = kvmalloc_array(cpuid->nent, sizeof(*e2), GFP_KERNEL_ACCOUNT); in kvm_vcpu_ioctl_set_cpuid()
248 for (i = 0; i < cpuid->nent; i++) { in kvm_vcpu_ioctl_set_cpuid()
261 r = kvm_check_cpuid(e2, cpuid->nent); in kvm_vcpu_ioctl_set_cpuid()
[all …]
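
cpuid_entry2_find() is essentially a linear scan over nent entries keyed by (function, index); the kernel version also consults the entry's significant-index flag, which this standalone model omits. The struct is a simplified stand-in, not the kernel's type:

#include <stddef.h>

struct ent { unsigned int function, index; };

static struct ent *find_ent(struct ent *entries, int nent,
                            unsigned int function, unsigned int index)
{
        int i;

        for (i = 0; i < nent; i++)
                if (entries[i].function == function && entries[i].index == index)
                        return &entries[i];
        return NULL; /* no entry for this leaf/subleaf */
}
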
hyperv.c
1982 int i, nent = ARRAY_SIZE(cpuid_entries); in kvm_vcpu_ioctl_get_hv_cpuid() local
1989 --nent; in kvm_vcpu_ioctl_get_hv_cpuid()
1991 if (cpuid->nent < nent) in kvm_vcpu_ioctl_get_hv_cpuid()
1994 if (cpuid->nent > nent) in kvm_vcpu_ioctl_get_hv_cpuid()
1995 cpuid->nent = nent; in kvm_vcpu_ioctl_get_hv_cpuid()
1997 for (i = 0; i < nent; i++) { in kvm_vcpu_ioctl_get_hv_cpuid()
2115 nent * sizeof(struct kvm_cpuid_entry2))) in kvm_vcpu_ioctl_get_hv_cpuid()
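
The nent handshake visible in kvm_vcpu_ioctl_get_hv_cpuid() is a common UAPI pattern: the caller's cpuid->nent is a capacity on input and the filled count on output, with E2BIG signalling a too-small buffer. A simplified sketch of that contract (fill_entries and struct ent are illustrative names, not kernel symbols):

#include <errno.h>
#include <string.h>

struct ent { unsigned int function; };

static int fill_entries(struct ent *dst, unsigned int *nent,
                        const struct ent *src, unsigned int src_nent)
{
        if (*nent < src_nent)
                return -E2BIG;  /* buffer too small: caller must retry with more room */
        *nent = src_nent;       /* report how many entries were actually written */
        memcpy(dst, src, src_nent * sizeof(*dst));
        return 0;
}
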
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/engine/gr/
gk20a.c
44 int nent; in gk20a_gr_av_to_init() local
52 nent = (blob.size / sizeof(struct gk20a_fw_av)); in gk20a_gr_av_to_init()
53 pack = vzalloc((sizeof(*pack) * 2) + (sizeof(*init) * (nent + 1))); in gk20a_gr_av_to_init()
62 for (i = 0; i < nent; i++) { in gk20a_gr_av_to_init()
94 int nent; in gk20a_gr_aiv_to_init() local
102 nent = (blob.size / sizeof(struct gk20a_fw_aiv)); in gk20a_gr_aiv_to_init()
103 pack = vzalloc((sizeof(*pack) * 2) + (sizeof(*init) * (nent + 1))); in gk20a_gr_aiv_to_init()
112 for (i = 0; i < nent; i++) { in gk20a_gr_aiv_to_init()
140 int nent; in gk20a_gr_av_to_method() local
148 nent = (blob.size / sizeof(struct gk20a_fw_av)); in gk20a_gr_av_to_method()
[all …]
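
gk20a_gr_av_to_init() derives nent by dividing the firmware blob size by the record size and allocates one extra slot as a terminator. A standalone model of that sizing arithmetic (types and the helper name are illustrative, not the driver's):

#include <stdlib.h>

struct fw_av { unsigned int addr, data; }; /* stand-in for struct gk20a_fw_av */

static struct fw_av *alloc_init_table(size_t blob_size, int *nent)
{
        *nent = blob_size / sizeof(struct fw_av);       /* whole records in the blob */
        return calloc(*nent + 1, sizeof(struct fw_av)); /* +1 zeroed entry as terminator */
}
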
/kernel/linux/linux-5.10/drivers/tty/vt/
conmakehash.c
82 int i, nuni, nent; in main() local
272 nent = 0; in main()
275 while ( nent >= unicount[fp0] ) in main()
278 nent = 0; in main()
280 printf("0x%04x", unitable[fp0][nent++]); in main()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
eq.c
272 for (i = 0; i < eq->nent; i++) { in init_eq_buf()
298 eq->nent = roundup_pow_of_two(param->nent + MLX5_NUM_SPARE_EQE); in create_map_eq()
300 err = mlx5_buf_alloc(dev, eq->nent * MLX5_EQE_SIZE, &eq->buf); in create_map_eq()
327 MLX5_SET(eqc, eqc, log_eq_size, ilog2(eq->nent)); in create_map_eq()
647 .nent = MLX5_NUM_CMD_EQE, in create_async_eqs()
660 .nent = MLX5_NUM_ASYNC_EQE, in create_async_eqs()
670 .nent = /* TODO: sriov max_vf + */ 1, in create_async_eqs()
764 eqe = get_eqe(eq, ci & (eq->nent - 1)); in mlx5_eq_get_eqe()
765 eqe = ((eqe->owner & 1) ^ !!(ci & eq->nent)) ? NULL : eqe; in mlx5_eq_get_eqe()
811 int nent; in create_comp_eqs() local
[all …]
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/lib/
eq.h
33 int nent; member
57 struct mlx5_eqe *eqe = get_eqe(eq, eq->cons_index & (eq->nent - 1)); in next_eqe_sw()
59 return ((eqe->owner & 1) ^ !!(eq->cons_index & eq->nent)) ? NULL : eqe; in next_eqe_sw()
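
The mlx5 hits above (and the mlx4 ones further down) share the same event-queue convention: nent is a power of two, the slot is the consumer index masked with (nent - 1), and an entry belongs to software only when its owner bit matches the wrap parity of the consumer index. A self-contained model of that validity check, with simplified types rather than the driver's:

#include <stdint.h>
#include <stddef.h>

struct eqe { uint8_t owner; /* ... payload ... */ };

static struct eqe *next_valid_eqe(struct eqe *ring, uint32_t nent, uint32_t ci)
{
        struct eqe *e = &ring[ci & (nent - 1)]; /* nent is a power of two */
        uint32_t sw_parity = !!(ci & nent);     /* flips each time ci wraps the ring */

        /* hardware toggles the owner bit on every wrap; a mismatch means "not ours yet" */
        return ((e->owner & 1) ^ sw_parity) ? NULL : e;
}
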
/kernel/linux/linux-5.10/drivers/infiniband/hw/qib/
qib_pcie.c
211 int qib_pcie_params(struct qib_devdata *dd, u32 minw, u32 *nent) in qib_pcie_params() argument
229 maxvec = (nent && *nent) ? *nent : 1; in qib_pcie_params()
239 if (nent) in qib_pcie_params()
240 *nent = !dd->pcidev->msix_enabled ? 0 : nvec; in qib_pcie_params()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/
mthca_allocator.c
160 int mthca_array_init(struct mthca_array *array, int nent) in mthca_array_init() argument
162 int npage = (nent * sizeof (void *) + PAGE_SIZE - 1) / PAGE_SIZE; in mthca_array_init()
178 void mthca_array_cleanup(struct mthca_array *array, int nent) in mthca_array_cleanup() argument
182 for (i = 0; i < (nent * sizeof (void *) + PAGE_SIZE - 1) / PAGE_SIZE; ++i) in mthca_array_cleanup()
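
The npage expression in mthca_array_init() is the usual round-up division: with 4 KiB pages, nent = 1000 pointer slots of 8 bytes each need (1000 * 8 + 4095) / 4096 = 2 pages. The same idiom as a generic helper:

static unsigned long div_round_up(unsigned long n, unsigned long d)
{
        return (n + d - 1) / d; /* e.g. div_round_up(1000 * 8, 4096) == 2 */
}
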
mthca_eq.c
184 mthca_write64(MTHCA_EQ_DB_SET_CI | eq->eqn, ci & (eq->nent - 1), in tavor_set_eq_ci()
230 unsigned long off = (entry & (eq->nent - 1)) * MTHCA_EQ_ENTRY_SIZE; in get_eqe()
466 int nent, in mthca_create_eq() argument
479 eq->nent = roundup_pow_of_two(max(nent, 2)); in mthca_create_eq()
480 npages = ALIGN(eq->nent * MTHCA_EQ_ENTRY_SIZE, PAGE_SIZE) / PAGE_SIZE; in mthca_create_eq()
511 for (i = 0; i < eq->nent; ++i) in mthca_create_eq()
535 eq_context->logsize_usrpage = cpu_to_be32((ffs(eq->nent) - 1) << 24); in mthca_create_eq()
560 eq->eqn, eq->nent); in mthca_create_eq()
593 int npages = (eq->nent * MTHCA_EQ_ENTRY_SIZE + PAGE_SIZE - 1) / in mthca_free_eq()
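
mthca_create_eq() rounds the requested nent up to a power of two (at least 2) and stores only log2(nent) in the EQ context; for a power of two, ffs(nent) - 1 is exactly that log, so a request for 500 entries becomes nent = 512 and logsize = 9. A standalone sketch of the same computation (not the driver's code):

#include <strings.h> /* ffs() */

static unsigned int roundup_pow2(unsigned int n)
{
        unsigned int p = 1;

        while (p < n) /* assumes n <= 1U << 31 */
                p <<= 1;
        return p;
}

static unsigned int eq_logsize(unsigned int requested)
{
        unsigned int nent = roundup_pow2(requested < 2 ? 2 : requested);

        return ffs(nent) - 1; /* log2(nent), valid because nent is a power of two */
}
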
mthca_cq.c
348 int mthca_alloc_cq_buf(struct mthca_dev *dev, struct mthca_cq_buf *buf, int nent) in mthca_alloc_cq_buf() argument
353 ret = mthca_buf_alloc(dev, nent * MTHCA_CQ_ENTRY_SIZE, in mthca_alloc_cq_buf()
360 for (i = 0; i < nent; ++i) in mthca_alloc_cq_buf()
768 int mthca_init_cq(struct mthca_dev *dev, int nent, in mthca_init_cq() argument
776 cq->ibcq.cqe = nent - 1; in mthca_init_cq()
814 err = mthca_alloc_cq_buf(dev, &cq->buf, nent); in mthca_init_cq()
828 cq_context->logsize_usrpage = cpu_to_be32((ffs(nent) - 1) << 24); in mthca_init_cq()
mthca_dev.h
421 int mthca_array_init(struct mthca_array *array, int nent);
422 void mthca_array_cleanup(struct mthca_array *array, int nent);
487 int mthca_init_cq(struct mthca_dev *dev, int nent,
498 int mthca_alloc_cq_buf(struct mthca_dev *dev, struct mthca_cq_buf *buf, int nent);
/kernel/linux/linux-5.10/fs/nfs/
nfs42xattr.c
73 atomic_long_t nent; member
298 atomic_long_set(&cache->nent, 0); in nfs4_xattr_alloc_cache()
407 atomic_long_set(&cache->nent, 0); in nfs4_xattr_discard_cache()
549 atomic_long_inc(&cache->nent); in nfs4_xattr_hash_add()
578 atomic_long_dec(&cache->nent); in nfs4_xattr_hash_remove()
832 if (atomic_long_read(&cache->nent) > 1) in cache_lru_isolate()
918 atomic_long_dec(&cache->nent); in entry_lru_isolate()
988 atomic_long_set(&cache->nent, 0); in nfs4_xattr_cache_init_once()
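
In nfs42xattr.c the nent field is simply an atomic count of cached entries: bumped on insert, dropped on removal or LRU eviction, and reset when the cache is discarded. A tiny C11 model of that bookkeeping (standalone, not the NFS code itself):

#include <stdatomic.h>

struct cache_model { atomic_long nent; };

static void cache_add(struct cache_model *c)    { atomic_fetch_add(&c->nent, 1); }
static void cache_remove(struct cache_model *c) { atomic_fetch_sub(&c->nent, 1); }
static void cache_reset(struct cache_model *c)  { atomic_store(&c->nent, 0); }
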
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/
eq.c
110 unsigned long offset = (entry & (eq->nent - 1)) * eqe_size; in get_eqe()
124 return !!(eqe->owner & 0x80) ^ !!(eq->cons_index & eq->nent) ? NULL : eqe; in next_eqe_sw()
782 eq->cons_index, eqe->owner, eq->nent, in mlx4_eq_int()
785 !!(eq->cons_index & eq->nent) ? "HW" : "SW"); in mlx4_eq_int()
807 eq->cons_index, eqe->owner, eq->nent, in mlx4_eq_int()
809 !!(eq->cons_index & eq->nent) ? "HW" : "SW"); in mlx4_eq_int()
819 eq->cons_index, eqe->owner, eq->nent, in mlx4_eq_int()
822 !!(eq->cons_index & eq->nent) ? "HW" : "SW"); in mlx4_eq_int()
969 static int mlx4_create_eq(struct mlx4_dev *dev, int nent, in mlx4_create_eq() argument
983 eq->nent = roundup_pow_of_two(max(nent, 2)); in mlx4_create_eq()
[all …]
cq.c
341 int mlx4_cq_alloc(struct mlx4_dev *dev, int nent, in mlx4_cq_alloc() argument
381 cpu_to_be32((ilog2(nent) << 24) | in mlx4_cq_alloc()
393 err = mlx4_init_user_cqes(buf_addr, nent, in mlx4_cq_alloc()
398 mlx4_init_kernel_cqes(buf_addr, nent, in mlx4_cq_alloc()
/kernel/linux/linux-5.10/lib/
scatterlist.c
573 unsigned int nent, nalloc; in sgl_alloc_order() local
576 nent = round_up(length, PAGE_SIZE << order) >> (PAGE_SHIFT + order); in sgl_alloc_order()
578 if (length > (nent << (PAGE_SHIFT + order))) in sgl_alloc_order()
580 nalloc = nent; in sgl_alloc_order()
608 *nent_p = nent; in sgl_alloc_order()
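
sgl_alloc_order() sizes the table so that each scatterlist entry covers a block of (PAGE_SIZE << order) bytes: nent is the length rounded up to that block size and divided by it (e.g. 3 MiB with order 0 gives nent = 768), and the follow-up comparison rejects requests whose rounded size no longer covers the length because the arithmetic wrapped. A standalone model, assuming 4 KiB pages:

#include <stdint.h>
#include <stdbool.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

static bool sgl_nent(uint64_t length, unsigned int order, uint64_t *nent)
{
        uint64_t block = PAGE_SIZE << order;                    /* bytes covered per entry */

        *nent = (length + block - 1) >> (PAGE_SHIFT + order);   /* round up, then divide */
        return length <= (*nent << (PAGE_SHIFT + order));       /* false if the math wrapped */
}
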
/kernel/linux/linux-5.10/drivers/tty/serial/
pch_uart.c
232 int nent; member
766 for (i = 0; i < priv->nent; i++, sg++) { in pch_dma_tx_complete()
774 priv->nent = 0; in pch_dma_tx_complete()
916 int nent; in dma_handle_tx() local
994 nent = dma_map_sg(port->dev, sg, num, DMA_TO_DEVICE); in dma_handle_tx()
995 if (!nent) { in dma_handle_tx()
1000 priv->nent = nent; in dma_handle_tx()
1002 for (i = 0; i < nent; i++, sg++) { in dma_handle_tx()
1007 if (i == (nent - 1)) in dma_handle_tx()
1014 priv->sg_tx_p, nent, DMA_MEM_TO_DEV, in dma_handle_tx()
[all …]
/kernel/linux/linux-5.10/drivers/iommu/
omap-iommu.c
705 int nent = 1; in iopgtable_clear_entry_core() local
719 nent *= 16; in iopgtable_clear_entry_core()
723 bytes *= nent; in iopgtable_clear_entry_core()
724 memset(iopte, 0, nent * sizeof(*iopte)); in iopgtable_clear_entry_core()
726 flush_iopte_range(obj->dev, pt_dma, pt_offset, nent); in iopgtable_clear_entry_core()
737 nent = 1; /* for the next L1 entry */ in iopgtable_clear_entry_core()
741 nent *= 16; in iopgtable_clear_entry_core()
745 bytes *= nent; in iopgtable_clear_entry_core()
747 memset(iopgd, 0, nent * sizeof(*iopgd)); in iopgtable_clear_entry_core()
748 flush_iopte_range(obj->dev, obj->pd_dma, pd_offset, nent); in iopgtable_clear_entry_core()
/kernel/linux/linux-5.10/include/linux/mlx5/
eq.h
17 int nent; member
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
cq.c
74 static u8 sw_ownership_bit(int n, int nent) in sw_ownership_bit() argument
76 return (n & nent) ? 1 : 0; in sw_ownership_bit()
660 int nent, in alloc_cq_frag_buf() argument
669 nent * cqe_size, in alloc_cq_frag_buf()
678 buf->nent = nent; in alloc_cq_frag_buf()
847 for (i = 0; i < buf->nent; i++) { in init_cq_frag_buf()
1217 (i + 1) & cq->resize_buf->nent); in copy_resize_cqes()
1219 sw_own = sw_ownership_bit(i + 1, cq->resize_buf->nent); in copy_resize_cqes()
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/asm-x86/asm/
kvm.h
199 __u32 nent; member
217 __u32 nent; member
/kernel/linux/linux-5.10/tools/arch/x86/include/uapi/asm/
kvm.h
227 __u32 nent; member
249 __u32 nent; member
/kernel/linux/linux-5.10/arch/x86/include/uapi/asm/
kvm.h
227 __u32 nent; member
249 __u32 nent; member
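
The matches in these three copies of kvm.h are the nent members of the legacy struct kvm_cpuid and of struct kvm_cpuid2; from memory, the 5.10 UAPI layout of the latter is approximately the following (check the header itself before relying on it):

struct kvm_cpuid2 {
        __u32 nent;                         /* number of entries (capacity on input for GET ioctls) */
        __u32 padding;
        struct kvm_cpuid_entry2 entries[0]; /* flexible array of CPUID leaves */
};
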
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/x86_64/
processor.c
625 int nent = 100; in allocate_kvm_cpuid2() local
629 size += nent * sizeof(struct kvm_cpuid_entry2); in allocate_kvm_cpuid2()
636 cpuid->nent = nent; in allocate_kvm_cpuid2()
693 for (i = 0; i < cpuid->nent; i++) { in kvm_get_supported_cpuid_index()
1236 for (i = 0; i < cpuid->nent; i++) { in set_cpuid()
/kernel/linux/linux-5.10/drivers/vdpa/mlx5/core/
mr.c
43 for_each_sg(mr->sg_head.sgl, sg, mr->nent, i) { in populate_mtts()
275 mr->nent = dma_map_sg_attrs(dma, mr->sg_head.sgl, mr->nsg, DMA_BIDIRECTIONAL, 0); in map_direct_mr()
276 if (!mr->nent) { in map_direct_mr()
