
Searched refs: node_mask (Results 1 – 7 of 7) sorted by relevance

/linux-6.1.9/tools/perf/util/
mmap.c
101 unsigned long *node_mask; in perf_mmap__aio_bind() local
109 node_mask = bitmap_zalloc(node_index + 1); in perf_mmap__aio_bind()
110 if (!node_mask) { in perf_mmap__aio_bind()
114 set_bit(node_index, node_mask); in perf_mmap__aio_bind()
115 if (mbind(data, mmap_len, MPOL_BIND, node_mask, node_index + 1 + 1, 0)) { in perf_mmap__aio_bind()
120 bitmap_free(node_mask); in perf_mmap__aio_bind()
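
The excerpt above allocates a node bitmap, sets the target node's bit, and hands the mask to mbind() so the AIO buffers are bound to that NUMA node. A minimal standalone sketch of the same pattern, assuming the mbind() wrapper from <numaif.h>; bind_buf_to_node() is an illustrative name, not a perf function:

/*
 * Sketch of the perf_mmap__aio_bind() pattern: build a one-node bitmap
 * and bind an already-mapped buffer to that node with mbind().
 */
#include <numaif.h>	/* mbind(), MPOL_BIND */
#include <stdio.h>
#include <stdlib.h>

static int bind_buf_to_node(void *data, size_t mmap_len, unsigned long node_index)
{
	unsigned long bits_per_long = 8 * sizeof(unsigned long);
	unsigned long nwords = node_index / bits_per_long + 1;
	unsigned long *node_mask = calloc(nwords, sizeof(*node_mask));

	if (!node_mask)
		return -1;

	/* Set the bit for the requested node, like set_bit() in the perf code. */
	node_mask[node_index / bits_per_long] |= 1UL << (node_index % bits_per_long);

	/* perf passes node_index + 1 + 1 as maxnode; mirror that margin here. */
	if (mbind(data, mmap_len, MPOL_BIND, node_mask, node_index + 2, 0)) {
		perror("mbind");
		free(node_mask);
		return -1;
	}

	free(node_mask);
	return 0;
}
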
/linux-6.1.9/net/netfilter/
xt_cluster.c
118 return !!((1 << hash) & info->node_mask) ^ in xt_cluster_mt()
132 if (info->node_mask >= (1ULL << info->total_nodes)) { in xt_cluster_mt_checkentry()
/linux-6.1.9/include/uapi/linux/netfilter/
xt_cluster.h
13 __u32 node_mask; member
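
Here node_mask is a plain __u32 member of the xt_cluster match info: xt_cluster_mt() tests the bit selected by the packet hash, and the checkentry hook rejects masks that have bits set at or above total_nodes. A hedged userspace sketch of those two checks; the helper names are illustrative, not kernel symbols:

#include <stdbool.h>
#include <stdint.h>

static bool cluster_hash_matches(uint32_t hash, uint32_t node_mask)
{
	/* True when the bit chosen by the packet hash is set in node_mask. */
	return !!((1u << hash) & node_mask);
}

static bool cluster_mask_valid(uint32_t node_mask, uint32_t total_nodes)
{
	/* Mirrors the checkentry test: no bits may be set at or above total_nodes. */
	return node_mask < (1ULL << total_nodes);
}
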
/linux-6.1.9/tools/perf/bench/
numa.c
394 struct bitmask *node_mask; in bind_to_memnode() local
400 node_mask = numa_allocate_nodemask(); in bind_to_memnode()
401 BUG_ON(!node_mask); in bind_to_memnode()
403 numa_bitmask_clearall(node_mask); in bind_to_memnode()
404 numa_bitmask_setbit(node_mask, node); in bind_to_memnode()
406 ret = set_mempolicy(MPOL_BIND, node_mask->maskp, node_mask->size + 1); in bind_to_memnode()
407 dprintf("binding to node %d, mask: %016lx => %d\n", node, *node_mask->maskp, ret); in bind_to_memnode()
409 numa_bitmask_free(node_mask); in bind_to_memnode()
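
bind_to_memnode() in the perf NUMA benchmark uses the libnuma bitmask helpers rather than a raw unsigned long array. A minimal sketch of that pattern, assuming libnuma is available; bind_mem_to_node() is an illustrative name, and the program must be linked with -lnuma:

/*
 * Sketch of the bind_to_memnode() pattern: allocate a libnuma nodemask,
 * set a single node, and install it as the MPOL_BIND memory policy.
 */
#include <numa.h>	/* struct bitmask, numa_* helpers */
#include <numaif.h>	/* set_mempolicy(), MPOL_BIND */
#include <stdio.h>

static int bind_mem_to_node(int node)
{
	struct bitmask *node_mask = numa_allocate_nodemask();
	int ret;

	if (!node_mask)
		return -1;

	numa_bitmask_clearall(node_mask);
	numa_bitmask_setbit(node_mask, node);

	/* node_mask->size is in bits; the bench code passes size + 1 as maxnode. */
	ret = set_mempolicy(MPOL_BIND, node_mask->maskp, node_mask->size + 1);
	if (ret)
		perror("set_mempolicy");

	numa_bitmask_free(node_mask);
	return ret;
}
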
/linux-6.1.9/drivers/scsi/
storvsc_drv.c
1361 const struct cpumask *node_mask; in get_og_chn() local
1378 node_mask = cpumask_of_node(cpu_to_node(q_num)); in get_og_chn()
1382 if (cpumask_test_cpu(tgt_cpu, node_mask)) in get_og_chn()
1395 if (!cpumask_test_cpu(tgt_cpu, node_mask)) in get_og_chn()
1416 const struct cpumask *node_mask; in storvsc_do_io() local
1438 node_mask = cpumask_of_node(cpu_to_node(q_num)); in storvsc_do_io()
1441 if (!cpumask_test_cpu(tgt_cpu, node_mask)) in storvsc_do_io()
1472 if (cpumask_test_cpu(tgt_cpu, node_mask)) in storvsc_do_io()
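
In storvsc both functions resolve the queue's CPU to its NUMA node with cpumask_of_node(cpu_to_node(q_num)) and then test candidate CPUs against that mask. A short kernel-style sketch of that membership test; cpu_on_queue_node() is an illustrative helper, not a storvsc function:

#include <linux/cpumask.h>
#include <linux/topology.h>

static bool cpu_on_queue_node(unsigned int tgt_cpu, unsigned int q_num)
{
	const struct cpumask *node_mask = cpumask_of_node(cpu_to_node(q_num));

	/* True when tgt_cpu sits on the same NUMA node as q_num's CPU. */
	return cpumask_test_cpu(tgt_cpu, node_mask);
}
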
/linux-6.1.9/drivers/infiniband/hw/hfi1/
affinity.c
1001 const struct cpumask *node_mask, in hfi1_get_proc_affinity() local
1112 node_mask = cpumask_of_node(node); in hfi1_get_proc_affinity()
1114 cpumask_pr_args(node_mask)); in hfi1_get_proc_affinity()
1117 cpumask_and(available_mask, hw_thread_mask, node_mask); in hfi1_get_proc_affinity()
1146 cpumask_andnot(available_mask, available_mask, node_mask); in hfi1_get_proc_affinity()
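
hfi1_get_proc_affinity() intersects the hardware-thread mask with the device node's CPU mask to build the preferred CPU set, and removes node-local CPUs with cpumask_andnot() once they are exhausted. A simplified sketch of that selection step; pick_cpu_on_node() is an illustrative name and omits most of the real function's bookkeeping:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/topology.h>

static int pick_cpu_on_node(int node, const struct cpumask *hw_thread_mask)
{
	const struct cpumask *node_mask = cpumask_of_node(node);
	cpumask_var_t available_mask;
	int cpu;

	if (!zalloc_cpumask_var(&available_mask, GFP_KERNEL))
		return -ENOMEM;

	/* Prefer CPUs that are both on the device's node and on the HW thread mask. */
	cpumask_and(available_mask, hw_thread_mask, node_mask);
	cpu = cpumask_first(available_mask);

	if (cpu >= nr_cpu_ids) {
		/* No node-local CPU left: fall back to off-node CPUs. */
		cpumask_andnot(available_mask, hw_thread_mask, node_mask);
		cpu = cpumask_first(available_mask);
	}

	free_cpumask_var(available_mask);
	return cpu < nr_cpu_ids ? cpu : -ENODEV;
}
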
/linux-6.1.9/drivers/block/mtip32xx/
mtip32xx.c
3596 const struct cpumask *node_mask; in get_least_used_cpu_on_node() local
3598 node_mask = cpumask_of_node(node); in get_least_used_cpu_on_node()
3599 least_used_cpu = cpumask_first(node_mask); in get_least_used_cpu_on_node()
3603 for_each_cpu(cpu, node_mask) { in get_least_used_cpu_on_node()
3699 const struct cpumask *node_mask; in mtip_pci_probe() local
3760 node_mask = cpumask_of_node(dd->numa_node); in mtip_pci_probe()
3761 if (!cpumask_empty(node_mask)) { in mtip_pci_probe()
3762 for_each_cpu(cpu, node_mask) in mtip_pci_probe()
3770 topology_physical_package_id(cpumask_first(node_mask)), in mtip_pci_probe()
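
mtip32xx uses cpumask_of_node() twice: get_least_used_cpu_on_node() walks the node's CPUs to pick the least-loaded one, and mtip_pci_probe() iterates the same mask when spreading work across the device's node. A hedged sketch of the first pattern; the cpu_use[] counters stand in for the driver's per-CPU bookkeeping and are an assumption here:

#include <linux/cpumask.h>
#include <linux/topology.h>

static int pick_least_used_cpu_on_node(int node, const int *cpu_use)
{
	const struct cpumask *node_mask = cpumask_of_node(node);
	int cpu, least_used_cpu = cpumask_first(node_mask);

	/* Walk every CPU on the node and keep the one with the lowest use count. */
	for_each_cpu(cpu, node_mask) {
		if (cpu_use[cpu] < cpu_use[least_used_cpu])
			least_used_cpu = cpu;
	}

	return least_used_cpu;
}
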