/drivers/net/ethernet/mellanox/mlx5/core/lib/ |
D | aso.c |
    45  static int mlx5_aso_alloc_cq(struct mlx5_core_dev *mdev, int numa_node,  in mlx5_aso_alloc_cq() argument
    53  param.buf_numa_node = numa_node;  in mlx5_aso_alloc_cq()
    54  param.db_numa_node = numa_node;  in mlx5_aso_alloc_cq()
   121  static int mlx5_aso_create_cq(struct mlx5_core_dev *mdev, int numa_node,  in mlx5_aso_create_cq() argument
   136  err = mlx5_aso_alloc_cq(mdev, numa_node, cqc_data, cq);  in mlx5_aso_create_cq()
   158  static int mlx5_aso_alloc_sq(struct mlx5_core_dev *mdev, int numa_node,  in mlx5_aso_alloc_sq() argument
   168  param.db_numa_node = numa_node;  in mlx5_aso_alloc_sq()
   169  param.buf_numa_node = numa_node;  in mlx5_aso_alloc_sq()
   269  static int mlx5_aso_create_sq(struct mlx5_core_dev *mdev, int numa_node,  in mlx5_aso_create_sq() argument
   284  err = mlx5_aso_alloc_sq(mdev, numa_node, sqc_data, sq);  in mlx5_aso_create_sq()
   [all …]
|
/drivers/dax/ |
D | kmem.c |
    59  int numa_node;  in dev_dax_kmem_probe() local
    67  numa_node = dev_dax->target_node;  in dev_dax_kmem_probe()
    68  if (numa_node < 0) {  in dev_dax_kmem_probe()
    70  numa_node);  in dev_dax_kmem_probe()
    91  init_node_memory_type(numa_node, dax_slowmem_type);  in dev_dax_kmem_probe()
   102  rc = memory_group_register_static(numa_node, PFN_UP(total_len));  in dev_dax_kmem_probe()
   170  clear_node_memory_type(numa_node, dax_slowmem_type);  in dev_dax_kmem_probe()
|
/drivers/virt/nitro_enclaves/ |
D | ne_misc_dev.c |
   124  int numa_node;  member
   186  int numa_node = -1;  in ne_setup_cpu_pool() local
   228  if (numa_node < 0) {  in ne_setup_cpu_pool()
   229  numa_node = cpu_to_node(cpu);  in ne_setup_cpu_pool()
   230  if (numa_node < 0) {  in ne_setup_cpu_pool()
   232  ne_misc_dev.name, numa_node);  in ne_setup_cpu_pool()
   239  if (numa_node != cpu_to_node(cpu)) {  in ne_setup_cpu_pool()
   353  ne_cpu_pool.numa_node = numa_node;  in ne_setup_cpu_pool()
   373  ne_cpu_pool.numa_node = -1;  in ne_setup_cpu_pool()
   416  ne_cpu_pool.numa_node = -1;  in ne_teardown_cpu_pool()
   [all …]
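
The ne_setup_cpu_pool() fragments above enforce that the offline CPU pool sits on a single NUMA node: the node of the first CPU is recorded, and every subsequent CPU must report the same value from cpu_to_node(). A minimal sketch of that check, assuming a generic cpumask rather than the driver's pool structure (check_pool_is_single_node is a hypothetical name, not Nitro Enclaves code):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/numa.h>
#include <linux/topology.h>

/* Illustrative only: verify that every CPU in a candidate pool maps to the
 * same NUMA node; returns that node or -EINVAL.
 */
static int check_pool_is_single_node(const struct cpumask *pool_mask)
{
	int numa_node = NUMA_NO_NODE;
	unsigned int cpu;

	for_each_cpu(cpu, pool_mask) {
		if (numa_node < 0) {
			/* Remember the node of the first CPU in the pool. */
			numa_node = cpu_to_node(cpu);
			if (numa_node < 0)
				return -EINVAL;
			continue;
		}
		/* Reject pools that span more than one node. */
		if (cpu_to_node(cpu) != numa_node)
			return -EINVAL;
	}

	return numa_node;
}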
|
D | ne_misc_dev.h | 77 int numa_node; member
|
/drivers/net/ethernet/fungible/funeth/ |
D | funeth_rx.c |
   623  int numa_node;  in fun_rxq_create_sw() local
   625  numa_node = fun_irq_node(irq);  in fun_rxq_create_sw()
   626  q = kzalloc_node(sizeof(*q), GFP_KERNEL, numa_node);  in fun_rxq_create_sw()
   634  q->numa_node = numa_node;  in fun_rxq_create_sw()
   640  sizeof(*q->bufs), false, numa_node,  in fun_rxq_create_sw()
   646  false, numa_node, &q->cq_dma_addr, NULL,  in fun_rxq_create_sw()
   651  err = fun_rxq_init_cache(&q->cache, nrqe, numa_node);  in fun_rxq_create_sw()
   655  err = fun_rxq_alloc_bufs(q, numa_node);  in fun_rxq_create_sw()
   752  q->numa_node, q->headroom);  in fun_rxq_create_dev()
|
D | funeth_tx.c |
   631  int numa_node;  in fun_txq_create_sw() local
   634  numa_node = fun_irq_node(irq); /* skb Tx queue */  in fun_txq_create_sw()
   636  numa_node = cpu_to_node(qidx); /* XDP Tx queue */  in fun_txq_create_sw()
   638  q = kzalloc_node(sizeof(*q), GFP_KERNEL, numa_node);  in fun_txq_create_sw()
   644  sizeof(*q->info), true, numa_node,  in fun_txq_create_sw()
   653  q->numa_node = numa_node;  in fun_txq_create_sw()
   720  q->ethid, q->numa_node);  in fun_txq_create_dev()
|
D | funeth_txrx.h |
   125  int numa_node;  member
   195  int numa_node;  member
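
The funeth entries above all follow the same pattern: resolve a NUMA node first (the IRQ's node for ordinary queues, cpu_to_node(qidx) for XDP Tx queues), allocate the software queue state there with kzalloc_node(), and stash the node in the queue for later ring and buffer allocations. A hedged sketch of that pattern with hypothetical my_queue/my_queue_alloc names, not the driver's structures:

#include <linux/numa.h>
#include <linux/slab.h>
#include <linux/topology.h>

/* Hypothetical per-queue structure, for illustration only. */
struct my_queue {
	int numa_node;
	/* ring pointers, counters, ... */
};

/* Place the software queue state on the node that will service the queue. */
static struct my_queue *my_queue_alloc(int numa_node)
{
	struct my_queue *q;

	if (numa_node == NUMA_NO_NODE)
		numa_node = numa_mem_id();	/* fall back to the local node */

	q = kzalloc_node(sizeof(*q), GFP_KERNEL, numa_node);
	if (!q)
		return NULL;

	/* Remember the placement so ring/buffer allocations can follow it. */
	q->numa_node = numa_node;
	return q;
}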
|
/drivers/nvdimm/ |
D | of_pmem.c |
    63  ndr_desc.numa_node = dev_to_node(&pdev->dev);  in of_pmem_region_probe()
    64  ndr_desc.target_node = ndr_desc.numa_node;  in of_pmem_region_probe()
|
D | e820.c | 28 ndr_desc.numa_node = numa_map_to_online_node(nid); in e820_register_one()
|
D | virtio_pmem.c | 82 ndr_desc.numa_node = nid; in virtio_pmem_probe()
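
The three nvdimm callers above fill nd_region_desc::numa_node from a device-derived node, and e820.c additionally passes it through numa_map_to_online_node() so a region attached to an offline or memoryless node still gets a usable one. A small sketch of that clamping step (region_pick_node is a hypothetical helper, not nvdimm code):

#include <linux/device.h>
#include <linux/numa.h>

/* Pick an online NUMA node for a region, starting from the device's node. */
static int region_pick_node(struct device *dev)
{
	int nid = dev_to_node(dev);

	/*
	 * dev_to_node() may return NUMA_NO_NODE or a node without memory;
	 * map it to the nearest node that is actually online.
	 */
	return numa_map_to_online_node(nid);
}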
|
/drivers/net/ethernet/fungible/funcore/ |
D | fun_queue.c |
    21  int numa_node, dma_addr_t *dma_addr, void **sw_va,  in fun_alloc_ring_mem() argument
    28  if (numa_node == NUMA_NO_NODE)  in fun_alloc_ring_mem()
    29  numa_node = dev_node;  in fun_alloc_ring_mem()
    36  set_dev_node(dma_dev, numa_node);  in fun_alloc_ring_mem()
    44  numa_node);  in fun_alloc_ring_mem()
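
fun_alloc_ring_mem() illustrates a common trick: a NUMA_NO_NODE argument falls back to the device's own node, and set_dev_node() temporarily retargets the DMA device so dma_alloc_coherent() places the ring memory on the chosen node. A minimal sketch of that pattern, assuming the original device node is restored afterwards (alloc_ring_on_node is an illustrative name, not the driver's function):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/numa.h>

/* Allocate a coherent ring buffer on a specific NUMA node. */
static void *alloc_ring_on_node(struct device *dma_dev, size_t size,
				int numa_node, dma_addr_t *dma_addr)
{
	int dev_node = dev_to_node(dma_dev);
	void *va;

	if (numa_node == NUMA_NO_NODE)
		numa_node = dev_node;		/* default to the device's node */

	/* dma_alloc_coherent() allocates near the device's node, so steer it. */
	set_dev_node(dma_dev, numa_node);
	va = dma_alloc_coherent(dma_dev, size, dma_addr, GFP_KERNEL);
	set_dev_node(dma_dev, dev_node);	/* restore the original node */

	return va;
}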
|
/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_crat.c |
  1902  int numa_node = NUMA_NO_NODE;  in kfd_find_numa_node_in_srat() local
  1948  numa_node = pxm_to_node(gpu->proximity_domain);  in kfd_find_numa_node_in_srat()
  1966  if (found && (numa_node < 0 ||  in kfd_find_numa_node_in_srat()
  1967  numa_node > pxm_to_node(max_pxm)))  in kfd_find_numa_node_in_srat()
  1968  numa_node = 0;  in kfd_find_numa_node_in_srat()
  1970  if (numa_node != NUMA_NO_NODE)  in kfd_find_numa_node_in_srat()
  1971  set_dev_node(&kdev->pdev->dev, numa_node);  in kfd_find_numa_node_in_srat()
  2032  if (kdev->pdev->dev.numa_node == NUMA_NO_NODE)  in kfd_fill_gpu_direct_io_link_to_cpu()
  2036  if (kdev->pdev->dev.numa_node == NUMA_NO_NODE)  in kfd_fill_gpu_direct_io_link_to_cpu()
  2039  sub_type_hdr->proximity_domain_to = kdev->pdev->dev.numa_node;  in kfd_fill_gpu_direct_io_link_to_cpu()
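
kfd_find_numa_node_in_srat() converts the GPU's ACPI proximity domain to a Linux node with pxm_to_node() and, when the result is valid, attaches it to the PCI device via set_dev_node() so that dev.numa_node can be consulted later (as the kfd_fill_gpu_direct_io_link_to_cpu() lines show). A condensed sketch of just that translation step; the SRAT walk is omitted, the helper name is hypothetical, and the header choice assumes CONFIG_ACPI_NUMA:

#include <acpi/acpi_numa.h>
#include <linux/device.h>
#include <linux/numa.h>

/* Map an ACPI proximity domain to a NUMA node and attach it to a device. */
static void tag_device_node_from_pxm(struct device *dev, int proximity_domain)
{
	int numa_node = pxm_to_node(proximity_domain);

	if (numa_node != NUMA_NO_NODE)
		set_dev_node(dev, numa_node);
}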
|
/drivers/hv/ |
D | channel_mgmt.c |
   739  int numa_node;  in init_vp_index() local
   760  numa_node = next_numa_node_id++;  in init_vp_index()
   761  if (numa_node == nr_node_ids) {  in init_vp_index()
   765  if (cpumask_empty(cpumask_of_node(numa_node)))  in init_vp_index()
   769  allocated_mask = &hv_context.hv_numa_map[numa_node];  in init_vp_index()
   772  cpumask_xor(available_mask, allocated_mask, cpumask_of_node(numa_node));  in init_vp_index()
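
init_vp_index() walks NUMA nodes round-robin, wraps at nr_node_ids, skips nodes whose cpumask is empty, and then XORs the node's cpumask with the per-node allocated mask to find CPUs not yet assigned a channel. A rough sketch of the node-selection loop only (pick_next_node_with_cpus and next_node are illustrative names; it assumes at least one node has online CPUs):

#include <linux/cpumask.h>
#include <linux/nodemask.h>
#include <linux/topology.h>

/* Round-robin over NUMA nodes, skipping nodes with no online CPUs.
 * Assumes at least one node has CPUs, otherwise this would loop forever.
 */
static int pick_next_node_with_cpus(int *next_node)
{
	int numa_node;

	for (;;) {
		numa_node = (*next_node)++;
		if (numa_node == nr_node_ids) {
			*next_node = 0;			/* wrap around */
			continue;
		}
		if (cpumask_empty(cpumask_of_node(numa_node)))
			continue;			/* memory-only node */
		return numa_node;
	}
}

The caller would then compute cpumask_xor(available_mask, allocated_mask, cpumask_of_node(numa_node)), as line 772 above does, to pick a not-yet-used CPU on that node.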
|
/drivers/net/ethernet/amazon/ena/ |
D | ena_eth_com.h |
   194  u8 numa_node)  in ena_com_update_numa_node() argument
   201  numa_cfg.numa_cfg = (numa_node & ENA_ETH_IO_NUMA_NODE_CFG_REG_NUMA_MASK)  in ena_com_update_numa_node()
|
D | ena_netdev.c |
   684  ring->numa_node = 0;  in ena_init_io_rings_common()
   788  tx_ring->numa_node = node;  in ena_setup_tx_resources()
   921  rx_ring->numa_node = node;  in ena_setup_rx_resources()
  1870  int numa_node;  in ena_update_ring_numa_node() local
  1880  numa_node = cpu_to_node(cpu);  in ena_update_ring_numa_node()
  1882  if (likely(tx_ring->numa_node == numa_node))  in ena_update_ring_numa_node()
  1887  if (numa_node != NUMA_NO_NODE) {  in ena_update_ring_numa_node()
  1888  ena_com_update_numa_node(tx_ring->ena_com_io_cq, numa_node);  in ena_update_ring_numa_node()
  1889  tx_ring->numa_node = numa_node;  in ena_update_ring_numa_node()
  1891  rx_ring->numa_node = numa_node;  in ena_update_ring_numa_node()
   [all …]
|
/drivers/net/ethernet/mellanox/mlx4/ |
D | icm.c |
   146  dev->numa_node);  in mlx4_alloc_icm()
   164  dev->numa_node);  in mlx4_alloc_icm()
   193  dev->numa_node);  in mlx4_alloc_icm()
|
/drivers/scsi/elx/efct/ |
D | efct_driver.h | 61 u32 numa_node; member
|
/drivers/platform/x86/intel/speed_select_if/ |
D | isst_if_common.c |
   287  int numa_node;  member
   333  if (node == isst_cpu_info[cpu].numa_node) {  in _isst_if_get_pci_dev()
   395  isst_cpu_info[cpu].numa_node = cpu_to_node(cpu);  in isst_if_cpu_online()
|
/drivers/dax/hmem/ |
D | device.c | 57 pdev->dev.numa_node = numa_map_to_online_node(target_nid); in hmem_register_device()
|
/drivers/net/ethernet/mellanox/mlx5/core/ |
D | eq.c |
   286  &eq->frag_buf, dev->priv.numa_node);  in create_map_eq()
   442  dev->priv.numa_node);  in mlx5_eq_table_init()
   735  dev->priv.numa_node);  in mlx5_eq_create_generic()
   837  cpus[i] = cpumask_local_spread(i, dev->priv.numa_node);  in comp_irqs_request()
   899  eq = kzalloc_node(sizeof(*eq), GFP_KERNEL, dev->priv.numa_node);  in create_comp_eqs()
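
The comp_irqs_request() line above spreads completion vectors with cpumask_local_spread(), which returns the i-th CPU while preferring CPUs on the given NUMA node before falling back to the rest of the system. A short sketch of building such a per-vector CPU hint table (build_irq_cpu_hints and nvec are illustrative, not mlx5 code):

#include <linux/cpumask.h>
#include <linux/slab.h>

/* Build per-vector CPU hints, preferring CPUs near the device's NUMA node. */
static int *build_irq_cpu_hints(int nvec, int numa_node)
{
	int *cpus;
	int i;

	cpus = kcalloc(nvec, sizeof(*cpus), GFP_KERNEL);
	if (!cpus)
		return NULL;

	for (i = 0; i < nvec; i++)
		cpus[i] = cpumask_local_spread(i, numa_node); /* i-th CPU, node-local first */

	return cpus;
}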
|
/drivers/net/ethernet/cavium/liquidio/ |
D | octeon_droq.c |
   229  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_init_droq() local
   284  numa_node);  in octeon_init_droq()
   930  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_create_droq() local
   939  droq = vmalloc_node(sizeof(*droq), numa_node);  in octeon_create_droq()
|
D | request_manager.c |
    65  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_init_instr_queue() local
    99  numa_node);  in octeon_init_instr_queue()
   207  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_setup_iq() local
   217  vzalloc_node(sizeof(struct octeon_instr_queue), numa_node);  in octeon_setup_iq()
|
D | octeon_device.c |
   869  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_setup_instr_queues() local
   882  numa_node);  in octeon_setup_instr_queues()
   913  int numa_node = dev_to_node(&oct->pci_dev->dev);  in octeon_setup_output_queues() local
   928  oct->droq[0] = vzalloc_node(sizeof(*oct->droq[0]), numa_node);  in octeon_setup_output_queues()
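
All three liquidio files derive the node from the PCI device with dev_to_node() and then use vmalloc_node()/vzalloc_node() so the large queue bookkeeping structures come from that node's memory. A hedged sketch of the pattern with a generic size in place of the octeon structures (alloc_ctrl_on_device_node is an illustrative name):

#include <linux/pci.h>
#include <linux/vmalloc.h>

/* Allocate a large, zeroed control structure on the PCI device's node. */
static void *alloc_ctrl_on_device_node(struct pci_dev *pdev, size_t size)
{
	int numa_node = dev_to_node(&pdev->dev);

	/* The node is a preference; vzalloc_node() falls back if it is full. */
	return vzalloc_node(size, numa_node);
}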
|
/drivers/block/mtip32xx/ |
D | mtip32xx.c |
  2730  dd->numa_node);  in mtip_hw_init()
  3346  unsigned int hctx_idx, unsigned int numa_node)  in mtip_init_cmd() argument
  3422  dd->tags.numa_node = dd->numa_node;  in mtip_block_initialize()
  3512  dd, dd->numa_node,  in mtip_block_initialize()
  3746  dd->numa_node = my_node;  in mtip_pci_probe()
  3760  node_mask = cpumask_of_node(dd->numa_node);  in mtip_pci_probe()
  3769  dd->numa_node,  in mtip_pci_probe()
  3771  nr_cpus_node(dd->numa_node),  in mtip_pci_probe()
  3776  dd->isr_binding = get_least_used_cpu_on_node(dd->numa_node);  in mtip_pci_probe()
  3782  dd->work[1].cpu_binding = get_least_used_cpu_on_node(dd->numa_node);  in mtip_pci_probe()
   [all …]
|
/drivers/mmc/core/ |
D | queue.c |
   205  unsigned int hctx_idx, unsigned int numa_node)  in mmc_mq_init_request() argument
   432  mq->tag_set.numa_node = NUMA_NO_NODE;  in mmc_init_queue()
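
Both mtip32xx (dd->tags.numa_node = dd->numa_node) and the MMC core (tag_set.numa_node = NUMA_NO_NODE) set the blk-mq tag set's NUMA node before registering it, which controls where requests, tags and hardware-context data are allocated. A minimal sketch of NUMA-aware tag-set setup; the queue depth and command size are placeholders, not values from either driver:

#include <linux/blk-mq.h>
#include <linux/device.h>
#include <linux/numa.h>
#include <linux/string.h>

/* Fill the parts of a blk-mq tag set that control NUMA placement. */
static void init_tag_set_on_node(struct blk_mq_tag_set *set,
				 struct device *dev,
				 const struct blk_mq_ops *ops)
{
	memset(set, 0, sizeof(*set));
	set->ops = ops;
	set->nr_hw_queues = 1;
	set->queue_depth = 128;		/* placeholder depth */
	set->cmd_size = 0;		/* placeholder per-request driver data */
	/* Requests, tags and hctx data will be allocated on this node. */
	set->numa_node = dev ? dev_to_node(dev) : NUMA_NO_NODE;
}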
|