/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/lib/aso.c
     45  static int mlx5_aso_alloc_cq(struct mlx5_core_dev *mdev, int numa_node,   in mlx5_aso_alloc_cq() argument
     53  param.buf_numa_node = numa_node;   in mlx5_aso_alloc_cq()
     54  param.db_numa_node = numa_node;   in mlx5_aso_alloc_cq()
    121  static int mlx5_aso_create_cq(struct mlx5_core_dev *mdev, int numa_node,   in mlx5_aso_create_cq() argument
    136  err = mlx5_aso_alloc_cq(mdev, numa_node, cqc_data, cq);   in mlx5_aso_create_cq()
    158  static int mlx5_aso_alloc_sq(struct mlx5_core_dev *mdev, int numa_node,   in mlx5_aso_alloc_sq() argument
    168  param.db_numa_node = numa_node;   in mlx5_aso_alloc_sq()
    169  param.buf_numa_node = numa_node;   in mlx5_aso_alloc_sq()
    269  static int mlx5_aso_create_sq(struct mlx5_core_dev *mdev, int numa_node,   in mlx5_aso_create_sq() argument
    284  err = mlx5_aso_alloc_sq(mdev, numa_node, sqc_data, sq);   in mlx5_aso_create_sq()
    [all …]

/linux-6.1.9/include/linux/topology.h
     83  DECLARE_PER_CPU(int, numa_node);
     89  return raw_cpu_read(numa_node);   in numa_node_id()
     96  return per_cpu(numa_node, cpu);   in cpu_to_node()
    103  this_cpu_write(numa_node, node);   in set_numa_node()
    110  per_cpu(numa_node, cpu) = node;   in set_cpu_numa_node()

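These topology.h hits are the generic per-CPU numa_node storage behind numa_node_id(), cpu_to_node() and set_cpu_numa_node(). A minimal sketch of how callers typically consume these helpers for node-local allocation; alloc_ctx_near_cpu() is a hypothetical name, not from the listing:

#include <linux/slab.h>      /* kzalloc_node() */
#include <linux/topology.h>  /* cpu_to_node(), numa_node_id() */

/* Hypothetical helper: place a per-queue context on the node of the
 * CPU that will service it.  cpu_to_node() reads the per-CPU
 * numa_node value that set_cpu_numa_node() published at boot.
 */
static void *alloc_ctx_near_cpu(size_t size, int cpu)
{
	return kzalloc_node(size, GFP_KERNEL, cpu_to_node(cpu));
}
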
/linux-6.1.9/drivers/dax/kmem.c
     59  int numa_node;   in dev_dax_kmem_probe() local
     67  numa_node = dev_dax->target_node;   in dev_dax_kmem_probe()
     68  if (numa_node < 0) {   in dev_dax_kmem_probe()
     70  numa_node);   in dev_dax_kmem_probe()
     91  init_node_memory_type(numa_node, dax_slowmem_type);   in dev_dax_kmem_probe()
    102  rc = memory_group_register_static(numa_node, total_len);   in dev_dax_kmem_probe()
    170  clear_node_memory_type(numa_node, dax_slowmem_type);   in dev_dax_kmem_probe()

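The kmem.c hits (lines 67-70) show the usual guard before a DAX region is hotplugged as system RAM: the firmware-provided target node must be valid. A sketch of that guard, assuming target_node stands in for dev_dax->target_node and check_target_node() is an illustrative name:

#include <linux/device.h>   /* dev_warn() */
#include <linux/errno.h>

/* Sketch of the probe-time guard: refuse to online the region as RAM
 * when firmware did not assign it a valid NUMA node.
 */
static int check_target_node(struct device *dev, int target_node)
{
	if (target_node < 0) {
		dev_warn(dev, "rejecting DAX region with invalid node: %d\n",
			 target_node);
		return -EINVAL;
	}
	return 0;
}
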
/linux-6.1.9/Documentation/driver-api/cxl/memory-devices.rst
     64  "numa_node":1,
     76  "numa_node":1,
     94  "numa_node":1,
    106  "numa_node":1,
    130  "numa_node":0,
    142  "numa_node":0,
    160  "numa_node":0,
    172  "numa_node":0,
    243  "numa_node":0,
    270  "numa_node":0,
    [all …]

/linux-6.1.9/drivers/virt/nitro_enclaves/ne_misc_dev.c
    124  int numa_node;   member
    186  int numa_node = -1;   in ne_setup_cpu_pool() local
    228  if (numa_node < 0) {   in ne_setup_cpu_pool()
    229  numa_node = cpu_to_node(cpu);   in ne_setup_cpu_pool()
    230  if (numa_node < 0) {   in ne_setup_cpu_pool()
    232  ne_misc_dev.name, numa_node);   in ne_setup_cpu_pool()
    239  if (numa_node != cpu_to_node(cpu)) {   in ne_setup_cpu_pool()
    353  ne_cpu_pool.numa_node = numa_node;   in ne_setup_cpu_pool()
    373  ne_cpu_pool.numa_node = -1;   in ne_setup_cpu_pool()
    416  ne_cpu_pool.numa_node = -1;   in ne_teardown_cpu_pool()
    [all …]

/linux-6.1.9/drivers/virt/nitro_enclaves/ne_misc_dev.h
     77  int numa_node;   member

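The ne_setup_cpu_pool() hits (lines 228-239) take the node of the first pool CPU and then insist every other pool CPU sits on the same node. A self-contained sketch of that check; pool_numa_node() is an illustrative name:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/numa.h>
#include <linux/topology.h>

/* Return the single NUMA node covering @pool, or -EINVAL if the pool
 * spans more than one node (mirrors the ne_setup_cpu_pool() check).
 */
static int pool_numa_node(const struct cpumask *pool)
{
	int cpu, node = NUMA_NO_NODE;

	for_each_cpu(cpu, pool) {
		if (node == NUMA_NO_NODE)
			node = cpu_to_node(cpu);
		else if (node != cpu_to_node(cpu))
			return -EINVAL;   /* pool spans NUMA nodes */
	}

	return node;
}
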
/linux-6.1.9/drivers/net/ethernet/fungible/funeth/funeth_rx.c
    623  int numa_node;   in fun_rxq_create_sw() local
    625  numa_node = fun_irq_node(irq);   in fun_rxq_create_sw()
    626  q = kzalloc_node(sizeof(*q), GFP_KERNEL, numa_node);   in fun_rxq_create_sw()
    634  q->numa_node = numa_node;   in fun_rxq_create_sw()
    640  sizeof(*q->bufs), false, numa_node,   in fun_rxq_create_sw()
    646  false, numa_node, &q->cq_dma_addr, NULL,   in fun_rxq_create_sw()
    651  err = fun_rxq_init_cache(&q->cache, nrqe, numa_node);   in fun_rxq_create_sw()
    655  err = fun_rxq_alloc_bufs(q, numa_node);   in fun_rxq_create_sw()
    752  q->numa_node, q->headroom);   in fun_rxq_create_dev()

/linux-6.1.9/drivers/net/ethernet/fungible/funeth/funeth_tx.c
    631  int numa_node;   in fun_txq_create_sw() local
    634  numa_node = fun_irq_node(irq); /* skb Tx queue */   in fun_txq_create_sw()
    636  numa_node = cpu_to_node(qidx); /* XDP Tx queue */   in fun_txq_create_sw()
    638  q = kzalloc_node(sizeof(*q), GFP_KERNEL, numa_node);   in fun_txq_create_sw()
    644  sizeof(*q->info), true, numa_node,   in fun_txq_create_sw()
    653  q->numa_node = numa_node;   in fun_txq_create_sw()
    720  q->ethid, q->numa_node);   in fun_txq_create_dev()

/linux-6.1.9/drivers/net/ethernet/fungible/funeth/funeth_txrx.h
    125  int numa_node;   member
    195  int numa_node;   member

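Both funeth queue constructors pick a node first (from the IRQ for regular Tx/Rx queues, from cpu_to_node(qidx) for XDP queues) and then allocate the queue state with kzalloc_node(). A sketch of that choice; struct my_txq and the irq_node parameter are assumptions standing in for the driver's own types and fun_irq_node():

#include <linux/slab.h>
#include <linux/topology.h>

struct my_txq {
	int numa_node;
	/* ... queue state ... */
};

/* Place the queue's software state on the node that will drive it:
 * the IRQ's node for skb queues, the CPU's node for XDP queues.
 */
static struct my_txq *txq_alloc(unsigned int qidx, int irq_node, bool is_xdp)
{
	int node = is_xdp ? cpu_to_node(qidx) : irq_node;
	struct my_txq *q = kzalloc_node(sizeof(*q), GFP_KERNEL, node);

	if (q)
		q->numa_node = node;
	return q;
}
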
/linux-6.1.9/drivers/nvdimm/of_pmem.c
     57  ndr_desc.numa_node = dev_to_node(&pdev->dev);   in of_pmem_region_probe()
     58  ndr_desc.target_node = ndr_desc.numa_node;   in of_pmem_region_probe()

/linux-6.1.9/arch/sparc/kernel/pci.c
    254  int numa_node)   in pci_init_dev_archdata() argument
    260  sd->numa_node = numa_node;   in pci_init_dev_archdata()
    279  pbm->numa_node);   in of_create_pci_dev()
    283  sd->numa_node = pbm->numa_node;   in of_create_pci_dev()
    772  return pbm->numa_node;   in pcibus_to_node()
    886  psd->numa_node);   in pcibios_device_add()

/linux-6.1.9/arch/sparc/kernel/of_device_common.c
     69  op->dev.archdata.numa_node = bus_sd->numa_node;   in of_propagate_archdata()

/linux-6.1.9/arch/sparc/kernel/iommu.c
     95  int numa_node)   in iommu_table_init() argument
    111  iommu->tbl.map = kzalloc_node(sz, GFP_KERNEL, numa_node);   in iommu_table_init()
    122  page = alloc_pages_node(numa_node, GFP_KERNEL, 0);   in iommu_table_init()
    133  page = alloc_pages_node(numa_node, GFP_KERNEL, order);   in iommu_table_init()
    213  nid = dev->archdata.numa_node;   in dma_4u_alloc_coherent()

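iommu_table_init() allocates both its software map (kzalloc_node()) and the hardware table pages (alloc_pages_node()) on the node reported by the PCI controller. A minimal sketch of the page-allocation half; the function name and order handling are illustrative:

#include <linux/gfp.h>   /* alloc_pages_node() */
#include <linux/mm.h>    /* page_address() */

/* Allocate a physically contiguous table on the given NUMA node and
 * return its kernel virtual address, or NULL on failure.
 */
static void *alloc_table_on_node(unsigned int order, int node)
{
	struct page *page = alloc_pages_node(node, GFP_KERNEL, order);

	return page ? page_address(page) : NULL;
}
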
/linux-6.1.9/tools/perf/util/env.h
     28  struct numa_node {   struct
     93  struct numa_node *numa_nodes;

/linux-6.1.9/kernel/bpf/ringbuf.c
     95  static struct bpf_ringbuf *bpf_ringbuf_area_alloc(size_t data_sz, int numa_node)   in bpf_ringbuf_area_alloc() argument
    125  pages = bpf_map_area_alloc(array_size, numa_node);   in bpf_ringbuf_area_alloc()
    130  page = alloc_pages_node(numa_node, flags, 0);   in bpf_ringbuf_area_alloc()
    163  static struct bpf_ringbuf *bpf_ringbuf_alloc(size_t data_sz, int numa_node)   in bpf_ringbuf_alloc() argument
    167  rb = bpf_ringbuf_area_alloc(data_sz, numa_node);   in bpf_ringbuf_alloc()
    207  rb_map->rb = bpf_ringbuf_alloc(attr->max_entries, rb_map->map.numa_node);   in ringbuf_map_alloc()

/linux-6.1.9/kernel/bpf/local_storage.c
    169  map->numa_node);   in cgroup_storage_update_elem()
    288  int numa_node = bpf_map_attr_numa_node(attr);   in cgroup_storage_map_alloc() local
    316  map = bpf_map_area_alloc(sizeof(struct bpf_cgroup_storage_map), numa_node);   in cgroup_storage_map_alloc()
    508  gfp, map->numa_node);   in bpf_cgroup_storage_alloc()
    514  map->numa_node);   in bpf_cgroup_storage_alloc()

/linux-6.1.9/kernel/bpf/bloom_filter.c
     94  int numa_node = bpf_map_attr_numa_node(attr);   in bloom_map_alloc() local
    145  bloom = bpf_map_area_alloc(sizeof(*bloom) + bitset_bytes, numa_node);   in bloom_map_alloc()

/linux-6.1.9/kernel/bpf/queue_stack_maps.c
     70  int numa_node = bpf_map_attr_numa_node(attr);   in queue_stack_map_alloc() local
     77  qs = bpf_map_area_alloc(queue_size, numa_node);   in queue_stack_map_alloc()

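Every BPF map allocator above follows the same two steps: read the map's node from the user attribute with bpf_map_attr_numa_node() (which yields NUMA_NO_NODE unless BPF_F_NUMA_NODE was set) and hand it to bpf_map_area_alloc(). A sketch for a hypothetical map type, struct my_map, used only for illustration:

#include <linux/bpf.h>
#include <linux/err.h>

struct my_map {
	struct bpf_map map;
	/* ... map-specific state ... */
};

/* Allocate the map structure itself on the node requested by user
 * space (or on any node when none was requested).
 */
static struct bpf_map *my_map_alloc(union bpf_attr *attr)
{
	int numa_node = bpf_map_attr_numa_node(attr);
	struct my_map *m;

	m = bpf_map_area_alloc(sizeof(*m), numa_node);
	if (!m)
		return ERR_PTR(-ENOMEM);

	bpf_map_init_from_attr(&m->map, attr);
	return &m->map;
}
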
/linux-6.1.9/net/xdp/xskmap.c
     64  int numa_node;   in xsk_map_alloc() local
     75  numa_node = bpf_map_attr_numa_node(attr);   in xsk_map_alloc()
     78  m = bpf_map_area_alloc(size, numa_node);   in xsk_map_alloc()

/linux-6.1.9/arch/sparc/include/asm/device.h
     18  int numa_node;   member

/linux-6.1.9/drivers/net/ethernet/fungible/funcore/fun_queue.c
     21  int numa_node, dma_addr_t *dma_addr, void **sw_va,   in fun_alloc_ring_mem() argument
     28  if (numa_node == NUMA_NO_NODE)   in fun_alloc_ring_mem()
     29  numa_node = dev_node;   in fun_alloc_ring_mem()
     36  set_dev_node(dma_dev, numa_node);   in fun_alloc_ring_mem()
     44  numa_node);   in fun_alloc_ring_mem()

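fun_alloc_ring_mem() shows the common fallback: a caller may pass NUMA_NO_NODE, in which case the device's own node is used, and the coherent DMA buffer is steered by temporarily overriding the device node. A sketch of that sequence; ring_alloc() and its parameters are illustrative, not the driver's real signature:

#include <linux/device.h>       /* dev_to_node(), set_dev_node() */
#include <linux/dma-mapping.h>
#include <linux/numa.h>

/* Allocate a coherent ring buffer on the requested node, falling back
 * to the DMA device's node, then restore the device's original node.
 */
static void *ring_alloc(struct device *dma_dev, size_t sz,
			dma_addr_t *dma_addr, int numa_node)
{
	int dev_node = dev_to_node(dma_dev);
	void *va;

	if (numa_node == NUMA_NO_NODE)
		numa_node = dev_node;

	set_dev_node(dma_dev, numa_node);
	va = dma_alloc_coherent(dma_dev, sz, dma_addr, GFP_KERNEL);
	set_dev_node(dma_dev, dev_node);   /* restore the original node */

	return va;
}
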
/linux-6.1.9/drivers/net/ethernet/amazon/ena/ena_eth_com.h
    194  u8 numa_node)   in ena_com_update_numa_node() argument
    201  numa_cfg.numa_cfg = (numa_node & ENA_ETH_IO_NUMA_NODE_CFG_REG_NUMA_MASK)   in ena_com_update_numa_node()

/linux-6.1.9/drivers/hv/channel_mgmt.c
    739  int numa_node;   in init_vp_index() local
    760  numa_node = next_numa_node_id++;   in init_vp_index()
    761  if (numa_node == nr_node_ids) {   in init_vp_index()
    765  if (cpumask_empty(cpumask_of_node(numa_node)))   in init_vp_index()
    769  allocated_mask = &hv_context.hv_numa_map[numa_node];   in init_vp_index()
    772  cpumask_xor(available_mask, allocated_mask, cpumask_of_node(numa_node));   in init_vp_index()

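init_vp_index() walks NUMA nodes round-robin, wrapping at nr_node_ids and skipping nodes that have no CPUs, before picking a target CPU from the chosen node's mask. A simplified sketch of just the node walk; next_populated_node() is a hypothetical name and the static counter is purely illustrative:

#include <linux/cpumask.h>
#include <linux/nodemask.h>   /* nr_node_ids */
#include <linux/numa.h>
#include <linux/topology.h>   /* cpumask_of_node() */

/* Advance a round-robin node counter, wrapping at nr_node_ids, and
 * return the first node that actually has CPUs.
 */
static int next_populated_node(void)
{
	static int next_node;
	int node, tries;

	for (tries = 0; tries < nr_node_ids; tries++) {
		node = next_node++;
		if (next_node == nr_node_ids)
			next_node = 0;
		if (!cpumask_empty(cpumask_of_node(node)))
			return node;
	}
	return NUMA_NO_NODE;   /* no node with CPUs found */
}
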
/linux-6.1.9/drivers/scsi/elx/efct/efct_driver.h
     61  u32 numa_node;   member

/linux-6.1.9/drivers/gpu/drm/amd/amdkfd/kfd_crat.c
   2073  int numa_node = NUMA_NO_NODE;   in kfd_find_numa_node_in_srat() local
   2119  numa_node = pxm_to_node(gpu->proximity_domain);   in kfd_find_numa_node_in_srat()
   2137  if (found && (numa_node < 0 ||   in kfd_find_numa_node_in_srat()
   2138  numa_node > pxm_to_node(max_pxm)))   in kfd_find_numa_node_in_srat()
   2139  numa_node = 0;   in kfd_find_numa_node_in_srat()
   2141  if (numa_node != NUMA_NO_NODE)   in kfd_find_numa_node_in_srat()
   2142  set_dev_node(&kdev->pdev->dev, numa_node);   in kfd_find_numa_node_in_srat()
   2203  if (kdev->pdev->dev.numa_node == NUMA_NO_NODE)   in kfd_fill_gpu_direct_io_link_to_cpu()
   2207  if (kdev->pdev->dev.numa_node == NUMA_NO_NODE)   in kfd_fill_gpu_direct_io_link_to_cpu()
   2210  sub_type_hdr->proximity_domain_to = kdev->pdev->dev.numa_node;   in kfd_fill_gpu_direct_io_link_to_cpu()

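kfd_find_numa_node_in_srat() converts the GPU's ACPI proximity domain to a NUMA node with pxm_to_node() and, when valid, stamps it on the PCI device so later dev.numa_node lookups resolve correctly. A small sketch of that final step; assign_node_from_pxm() is an illustrative name and a generic device pointer is assumed:

#include <linux/acpi.h>     /* pxm_to_node() */
#include <linux/device.h>   /* set_dev_node() */
#include <linux/numa.h>

/* Translate an ACPI proximity domain to a Linux NUMA node and record
 * it on the device when the translation yields a valid node.
 */
static void assign_node_from_pxm(struct device *dev, u32 pxm)
{
	int node = pxm_to_node(pxm);

	if (node != NUMA_NO_NODE)
		set_dev_node(dev, node);
}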