/linux-6.1.9/tools/lib/perf/ |
D | cpumap.c |
  13  static struct perf_cpu_map *perf_cpu_map__alloc(int nr_cpus)    in perf_cpu_map__alloc() argument
  15  struct perf_cpu_map *cpus = malloc(sizeof(*cpus) + sizeof(struct perf_cpu) * nr_cpus);    in perf_cpu_map__alloc()
  18  cpus->nr = nr_cpus;    in perf_cpu_map__alloc()
  60  int nr_cpus;    in cpu_map__default_new() local
  62  nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);    in cpu_map__default_new()
  63  if (nr_cpus < 0)    in cpu_map__default_new()
  66  cpus = perf_cpu_map__alloc(nr_cpus);    in cpu_map__default_new()
  70  for (i = 0; i < nr_cpus; ++i)    in cpu_map__default_new()
  90  static struct perf_cpu_map *cpu_map__trim_new(int nr_cpus, const struct perf_cpu *tmp_cpus)    in cpu_map__trim_new() argument
  92  size_t payload_size = nr_cpus * sizeof(struct perf_cpu);    in cpu_map__trim_new()
  [all …]
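The cpumap.c hits show the libperf pattern: a single allocation holds the map header plus one struct perf_cpu slot per CPU, with the default CPU count taken from sysconf(_SC_NPROCESSORS_ONLN). A minimal, self-contained sketch of that layout follows; the simplified cpu_map/cpu_entry types are illustrative stand-ins, not libperf's real definitions.

    #include <stdio.h>
    #include <stdlib.h>
    #include <unistd.h>

    struct cpu_entry { int cpu; };

    struct cpu_map {
        int nr;                  /* number of entries that follow */
        struct cpu_entry map[];  /* flexible array, one slot per CPU */
    };

    static struct cpu_map *cpu_map__alloc(int nr_cpus)
    {
        /* one allocation: header followed by nr_cpus entries */
        struct cpu_map *cpus = malloc(sizeof(*cpus) +
                                      sizeof(struct cpu_entry) * nr_cpus);

        if (cpus)
            cpus->nr = nr_cpus;
        return cpus;
    }

    int main(void)
    {
        long nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);  /* online CPUs */
        struct cpu_map *cpus;
        int i;

        if (nr_cpus < 0)
            return 1;

        cpus = cpu_map__alloc(nr_cpus);
        if (!cpus)
            return 1;

        for (i = 0; i < cpus->nr; i++)
            cpus->map[i].cpu = i;  /* default map: CPUs 0..nr_cpus-1 */

        printf("%d cpus\n", cpus->nr);
        free(cpus);
        return 0;
    }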
|
/linux-6.1.9/tools/testing/selftests/bpf/prog_tests/ |
D | map_lookup_percpu_elem.c |
  11  int ret, i, nr_cpus = libbpf_num_possible_cpus();    in test_map_lookup_percpu_elem() local
  14  buf = malloc(nr_cpus*sizeof(__u64));    in test_map_lookup_percpu_elem()
  18  for (i = 0; i < nr_cpus; i++)    in test_map_lookup_percpu_elem()
  20  sum = (nr_cpus - 1) * nr_cpus / 2;    in test_map_lookup_percpu_elem()
  27  skel->rodata->nr_cpus = nr_cpus;    in test_map_lookup_percpu_elem()
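These hits size the user-space buffer for a per-CPU BPF map element: a lookup returns one __u64 per possible CPU, so the buffer needs nr_cpus slots, and storing 0..nr_cpus-1 across the CPUs gives the expected sum (nr_cpus - 1) * nr_cpus / 2. A hedged sketch of that buffer handling with libbpf; the map name and one-element layout are illustrative, not the selftest's, and running it needs CAP_BPF or root.

    #include <stdio.h>
    #include <stdlib.h>
    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>

    int main(void)
    {
        int nr_cpus = libbpf_num_possible_cpus();
        __u64 *values;
        __u32 key = 0;
        int map_fd, i;

        if (nr_cpus < 0)
            return 1;

        /* one-element per-CPU array: each lookup/update moves nr_cpus values */
        map_fd = bpf_map_create(BPF_MAP_TYPE_PERCPU_ARRAY, "pcpu_demo",
                                sizeof(__u32), sizeof(__u64), 1, NULL);
        if (map_fd < 0)
            return 1;

        values = calloc(nr_cpus, sizeof(*values));
        if (!values)
            return 1;

        for (i = 0; i < nr_cpus; i++)
            values[i] = i;  /* value i lands in CPU i's slot */

        if (bpf_map_update_elem(map_fd, &key, values, BPF_ANY) ||
            bpf_map_lookup_elem(map_fd, &key, values))
            return 1;

        /* the sum of all per-CPU slots, as computed in the selftest */
        printf("expected sum = %d\n", (nr_cpus - 1) * nr_cpus / 2);
        free(values);
        return 0;
    }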
|
D | map_init.c |
  10  static int nr_cpus;    variable
  22  pcpu_map_value_t value[nr_cpus];    in map_populate()
  26  for (i = 0; i < nr_cpus; i++)    in map_populate()
  105  for (i = 0; i < nr_cpus; i++) {    in check_values_one_cpu()
  131  pcpu_map_value_t value[nr_cpus];    in test_pcpu_map_init()
  171  pcpu_map_value_t value[nr_cpus];    in test_pcpu_lru_map_init()
  203  nr_cpus = bpf_num_possible_cpus();    in test_map_init()
  204  if (nr_cpus <= 1) {    in test_map_init()
|
D | lookup_and_delete.c |
  11  static int nr_cpus;    variable
  29  __u64 key, value[nr_cpus];    in fill_values_percpu()
  32  for (i = 0; i < nr_cpus; i++)    in fill_values_percpu()
  137  __u64 key, val, value[nr_cpus];    in test_lookup_and_delete_percpu_hash()
  156  for (i = 0; i < nr_cpus; i++) {    in test_lookup_and_delete_percpu_hash()
  223  __u64 key, val, value[nr_cpus];    in test_lookup_and_delete_lru_percpu_hash()
  242  for (i = 0; i < nr_cpus; i++)    in test_lookup_and_delete_lru_percpu_hash()
  252  for (i = 0; i < nr_cpus; i++) {    in test_lookup_and_delete_lru_percpu_hash()
  281  nr_cpus = bpf_num_possible_cpus();    in test_lookup_and_delete()
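For the per-CPU hash variants tested here, the value passed to bpf_map_lookup_and_delete_elem() likewise has to provide one slot per possible CPU. A small sketch of that call, assuming map_fd refers to a per-CPU hash map with __u64 keys and values and that the caller sized out with libbpf_num_possible_cpus():

    #include <bpf/bpf.h>

    /* Read-and-remove one entry of a per-CPU hash map.
     * 'out' must have room for one __u64 per possible CPU;
     * the kernel fills every CPU's slot, then deletes the key. */
    static int pop_percpu_entry(int map_fd, __u64 key, __u64 *out)
    {
        return bpf_map_lookup_and_delete_elem(map_fd, &key, out);
    }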
|
D | perf_buffer.c |
  48  int err, on_len, nr_on_cpus = 0, nr_cpus, i, j;    in serial_test_perf_buffer() local
  56  nr_cpus = libbpf_num_possible_cpus();    in serial_test_perf_buffer()
  57  if (CHECK(nr_cpus < 0, "nr_cpus", "err %d\n", nr_cpus))    in serial_test_perf_buffer()
  94  for (i = 0; i < nr_cpus; i++) {    in serial_test_perf_buffer()
  117  for (i = 0, j = 0; i < nr_cpus; i++) {    in serial_test_perf_buffer()
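serial_test_perf_buffer() distinguishes possible CPUs (used to size per-CPU buffers) from CPUs that are actually online (the only ones that deliver events). A trivial sketch of the two counts it compares; the test itself also parses /sys/devices/system/cpu/online, which is omitted here.

    #include <stdio.h>
    #include <unistd.h>
    #include <bpf/libbpf.h>

    int main(void)
    {
        int possible = libbpf_num_possible_cpus();    /* sizes per-CPU buffers */
        long online = sysconf(_SC_NPROCESSORS_ONLN);  /* CPUs online right now */

        if (possible < 0 || online < 0)
            return 1;

        /* only online CPUs produce perf-buffer events, but per-CPU map
         * values still come back one-per-possible-CPU */
        printf("possible=%d online=%ld\n", possible, online);
        return 0;
    }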
|
D | xdp_noinline.c |
  8  unsigned int nr_cpus = bpf_num_possible_cpus();    in test_xdp_noinline() local
  19  } stats[nr_cpus];    in test_xdp_noinline()
  66  for (i = 0; i < nr_cpus; i++) {    in test_xdp_noinline()
|
/linux-6.1.9/tools/testing/selftests/bpf/map_tests/ |
D | array_map_batch_ops.c |
  13  static int nr_cpus;    variable
  28  cpu_offset = i * nr_cpus;    in map_batch_update()
  29  for (j = 0; j < nr_cpus; j++)    in map_batch_update()
  49  cpu_offset = i * nr_cpus;    in map_batch_verify()
  50  for (j = 0; j < nr_cpus; j++) {    in map_batch_verify()
  90  value_size *= nr_cpus;    in __test_map_lookup_and_update_batch()
  158  nr_cpus = libbpf_num_possible_cpus();    in test_array_map_batch_ops()
  160  CHECK(nr_cpus < 0, "nr_cpus checking",    in test_array_map_batch_ops()
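The value_size *= nr_cpus hit is the key detail of the batch test: for a per-CPU array, every key in a batched lookup yields nr_cpus consecutive value slots. A hedged sketch of one batched lookup over such a map, assuming __u32 keys, __u64 per-CPU values, and a caller-supplied map_fd:

    #include <stdlib.h>
    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>

    /* Batched lookup over a per-CPU array map: every key owns nr_cpus
     * consecutive __u64 slots in 'values'. */
    static int dump_percpu_array(int map_fd, __u32 max_entries)
    {
        int nr_cpus = libbpf_num_possible_cpus();
        __u32 *keys, batch = 0, count = max_entries;
        __u64 *values;
        int err;

        if (nr_cpus < 0)
            return -1;

        keys = calloc(max_entries, sizeof(*keys));
        values = calloc((size_t)max_entries * nr_cpus, sizeof(*values));
        if (!keys || !values) {
            free(keys);
            free(values);
            return -1;
        }

        /* NULL in_batch starts at the first element; 'batch' returns the cursor.
         * An ENOENT result only means the cursor reached the end of the map. */
        err = bpf_map_lookup_batch(map_fd, NULL, &batch, keys, values,
                                   &count, NULL);

        free(keys);
        free(values);
        return err;
    }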
|
/linux-6.1.9/tools/testing/selftests/bpf/ |
D | test_lru_map.c |
  26  static int nr_cpus;    variable
  96  unsigned long long value0[nr_cpus], value1[nr_cpus];    in map_subset()
  127  while (next < nr_cpus) {    in sched_next_online()
  150  unsigned long long key, value[nr_cpus];    in test_lru_sanity0()
  160  lru_map_fd = create_map(map_type, map_flags, 2 * nr_cpus);    in test_lru_sanity0()
  241  unsigned long long key, end_key, value[nr_cpus];    in test_lru_sanity1()
  317  unsigned long long key, value[nr_cpus];    in test_lru_sanity2()
  424  unsigned long long key, end_key, value[nr_cpus];    in test_lru_sanity3()
  489  unsigned long long key, value[nr_cpus];    in test_lru_sanity4()
  500  3 * tgt_free * nr_cpus);    in test_lru_sanity4()
  [all …]
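sched_next_online() (the line 127 hit) pins the test to one CPU at a time so that LRU eviction exercises a predictable per-CPU free list. A simplified sketch of that pinning idiom; the real helper's interface and bookkeeping differ slightly.

    #define _GNU_SOURCE
    #include <sched.h>

    /* Pin the calling thread to the first CPU >= 'start' that accepts it.
     * Returns the CPU chosen, or -1 if none of the nr_cpus CPUs worked. */
    static int pin_to_next_cpu(int start, int nr_cpus)
    {
        cpu_set_t set;
        int cpu;

        for (cpu = start; cpu < nr_cpus; cpu++) {
            CPU_ZERO(&set);
            CPU_SET(cpu, &set);
            if (!sched_setaffinity(0, sizeof(set), &set))
                return cpu;  /* pinned: LRU updates now hit one CPU's free list */
        }
        return -1;
    }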
|
/linux-6.1.9/tools/tracing/rtla/src/ |
D | osnoise_hist.c |
  57  int nr_cpus;    member
  69  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in osnoise_free_histogram()
  85  *osnoise_alloc_histogram(int nr_cpus, int entries, int bucket_size)    in osnoise_alloc_histogram() argument
  96  data->nr_cpus = nr_cpus;    in osnoise_alloc_histogram()
  98  data->hist = calloc(1, sizeof(*data->hist) * nr_cpus);    in osnoise_alloc_histogram()
  102  for (cpu = 0; cpu < nr_cpus; cpu++) {    in osnoise_alloc_histogram()
  109  for (cpu = 0; cpu < nr_cpus; cpu++)    in osnoise_alloc_histogram()
  267  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in osnoise_hist_header()
  298  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in osnoise_print_summary()
  312  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in osnoise_print_summary()
  [all …]
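osnoise_alloc_histogram() allocates a per-CPU array of histograms and then a bucket array for each CPU. A stripped-down sketch of that two-level allocation with cleanup on failure; the struct names are simplified stand-ins for rtla's types.

    #include <stdlib.h>

    struct cpu_hist {
        int *samples;  /* one bucket count per histogram entry */
    };

    struct hist_data {
        int nr_cpus;
        int entries;
        struct cpu_hist *hist;  /* one histogram per CPU */
    };

    static struct hist_data *hist_alloc(int nr_cpus, int entries)
    {
        struct hist_data *data;
        int cpu;

        data = calloc(1, sizeof(*data));
        if (!data)
            return NULL;

        data->nr_cpus = nr_cpus;
        data->entries = entries;
        data->hist = calloc(nr_cpus, sizeof(*data->hist));
        if (!data->hist)
            goto cleanup;

        for (cpu = 0; cpu < nr_cpus; cpu++) {
            data->hist[cpu].samples = calloc(entries, sizeof(int));
            if (!data->hist[cpu].samples)
                goto cleanup;
        }
        return data;

    cleanup:
        if (data->hist) {
            for (cpu = 0; cpu < nr_cpus; cpu++)
                free(data->hist[cpu].samples);
            free(data->hist);
        }
        free(data);
        return NULL;
    }

In rtla itself the CPU count comes from sysconf(_SC_NPROCESSORS_CONF), as the osnoise_top.c and timerlat_top.c entries below show.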
|
D | timerlat_hist.c |
  65  int nr_cpus;    member
  77  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in timerlat_free_histogram()
  96  *timerlat_alloc_histogram(int nr_cpus, int entries, int bucket_size)    in timerlat_alloc_histogram() argument
  107  data->nr_cpus = nr_cpus;    in timerlat_alloc_histogram()
  110  data->hist = calloc(1, sizeof(*data->hist) * nr_cpus);    in timerlat_alloc_histogram()
  115  for (cpu = 0; cpu < nr_cpus; cpu++) {    in timerlat_alloc_histogram()
  125  for (cpu = 0; cpu < nr_cpus; cpu++) {    in timerlat_alloc_histogram()
  224  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in timerlat_hist_header()
  260  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in timerlat_print_summary()
  280  for (cpu = 0; cpu < data->nr_cpus; cpu++) {    in timerlat_print_summary()
  [all …]
|
D | timerlat_top.c |
  54  int nr_cpus;    member
  70  static struct timerlat_top_data *timerlat_alloc_top(int nr_cpus)    in timerlat_alloc_top() argument
  79  data->nr_cpus = nr_cpus;    in timerlat_alloc_top()
  82  data->cpu_data = calloc(1, sizeof(*data->cpu_data) * nr_cpus);    in timerlat_alloc_top()
  87  for (cpu = 0; cpu < nr_cpus; cpu++) {    in timerlat_alloc_top()
  241  static int nr_cpus = -1;    in timerlat_print_stats() local
  244  if (nr_cpus == -1)    in timerlat_print_stats()
  245  nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in timerlat_print_stats()
  252  for (i = 0; i < nr_cpus; i++) {    in timerlat_print_stats()
  549  int nr_cpus;    in timerlat_init_top() local
  [all …]
|
D | osnoise_top.c |
  54  int nr_cpus;    member
  70  static struct osnoise_top_data *osnoise_alloc_top(int nr_cpus)    in osnoise_alloc_top() argument
  78  data->nr_cpus = nr_cpus;    in osnoise_alloc_top()
  81  data->cpu_data = calloc(1, sizeof(*data->cpu_data) * nr_cpus);    in osnoise_alloc_top()
  220  static int nr_cpus = -1;    in osnoise_print_stats() local
  223  if (nr_cpus == -1)    in osnoise_print_stats()
  224  nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in osnoise_print_stats()
  231  for (i = 0; i < nr_cpus; i++) {    in osnoise_print_stats()
  510  int nr_cpus;    in osnoise_init_top() local
  512  nr_cpus = sysconf(_SC_NPROCESSORS_CONF);    in osnoise_init_top()
  [all …]
|
/linux-6.1.9/samples/bpf/ |
D | test_lru_dist.c |
  31  static int nr_cpus;    variable
  227  if (next_to_try == nr_cpus)    in sched_next_online()
  230  while (next_to_try < nr_cpus) {    in sched_next_online()
  323  nr_cpus * lru_size);    in test_parallel_lru_dist()
  339  unsigned long long key, value[nr_cpus];    in test_lru_loss0()
  351  map_fd = create_map(map_type, map_flags, 900 * nr_cpus);    in test_lru_loss0()
  393  unsigned long long key, value[nr_cpus];    in test_lru_loss1()
  403  map_fd = create_map(map_type, map_flags, 1000 * nr_cpus);    in test_lru_loss1()
  431  unsigned long long key, value[nr_cpus];    in do_test_parallel_lru_loss()
  480  nr_cpus * (1000 + 200));    in test_parallel_lru_loss()
  [all …]
|
D | tracex3_user.c |
  19  unsigned int nr_cpus = bpf_num_possible_cpus();    in clear_stats() local
  20  __u64 values[nr_cpus];    in clear_stats()
  76  unsigned int nr_cpus = bpf_num_possible_cpus();    in print_hist() local
  78  long values[nr_cpus];    in print_hist()
  88  for (i = 0; i < nr_cpus; i++)    in print_hist()
|
D | xdp_sample_user.c |
  317  unsigned int nr_cpus = libbpf_num_possible_cpus();    in alloc_record_per_cpu() local
  320  array = calloc(nr_cpus, sizeof(*array));    in alloc_record_per_cpu()
  323  nr_cpus);    in alloc_record_per_cpu()
  343  unsigned int nr_cpus = libbpf_num_possible_cpus();    in map_collect_percpu() local
  356  for (i = 0; i < nr_cpus; i++) {    in map_collect_percpu()
  382  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_percpu_devmap() local
  392  values = calloc(count * nr_cpus, sizeof(struct datarec));    in map_collect_percpu_devmap()
  414  arr = &values[i * nr_cpus];    in map_collect_percpu_devmap()
  650  unsigned int nr_cpus, struct sample_output *out)    in stats_get_rx_cnt() argument
  660  for (i = 0; i < nr_cpus; i++) {    in stats_get_rx_cnt()
  [all …]
|
D | sampleip_user.c |
  27  static int nr_cpus;    variable
  49  for (i = 0; i < nr_cpus; i++) {    in sampling_start()
  72  for (i = 0; i < nr_cpus; i++)    in sampling_end()
  173  nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);    in main()
  174  links = calloc(nr_cpus, sizeof(struct bpf_link *));    in main()
|
D | lwt_len_hist_user.c |
  31  unsigned int nr_cpus = bpf_num_possible_cpus();    in main() local
  33  uint64_t values[nr_cpus], sum, max_value = 0, data[MAX_INDEX] = {};    in main()
  54  for (i = 0; i < nr_cpus; i++)    in main()
|
D | xdp_rxq_info_user.c |
  208  unsigned int nr_cpus = bpf_num_possible_cpus();    in alloc_record_per_cpu() local
  211  array = calloc(nr_cpus, sizeof(struct datarec));    in alloc_record_per_cpu()
  213  fprintf(stderr, "Mem alloc error (nr_cpus:%u)\n", nr_cpus);    in alloc_record_per_cpu()
  267  unsigned int nr_cpus = bpf_num_possible_cpus();    in map_collect_percpu() local
  268  struct datarec values[nr_cpus];    in map_collect_percpu()
  282  for (i = 0; i < nr_cpus; i++) {    in map_collect_percpu()
  348  unsigned int nr_cpus = bpf_num_possible_cpus();    in stats_print() local
  371  for (i = 0; i < nr_cpus; i++) {    in stats_print()
  405  for (i = 0; i < nr_cpus; i++) {    in stats_print()
|
/linux-6.1.9/drivers/powercap/ |
D | dtpm_cpu.c |
  49  int i, nr_cpus;    in set_pd_power_limit() local
  52  nr_cpus = cpumask_weight(&cpus);    in set_pd_power_limit()
  56  power = pd->table[i].power * nr_cpus;    in set_pd_power_limit()
  66  power_limit = pd->table[i - 1].power * nr_cpus;    in set_pd_power_limit()
  119  int nr_cpus;    in update_pd_power_uw() local
  122  nr_cpus = cpumask_weight(&cpus);    in update_pd_power_uw()
  126  dtpm->power_min *= nr_cpus;    in update_pd_power_uw()
  130  dtpm->power_max *= nr_cpus;    in update_pd_power_uw()
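Both dtpm_cpu.c functions scale a single-CPU power figure by the number of CPUs counted in the domain's cpumask via cpumask_weight(). A hedged, kernel-flavoured sketch of just that scaling step; the surrounding dtpm and energy-model structures are omitted.

    #include <linux/cpumask.h>

    /* Scale a single-CPU power figure (uW) by the CPUs of the domain that
     * are currently online, as set_pd_power_limit()/update_pd_power_uw() do. */
    static u64 scale_domain_power(u64 power_one_cpu_uw,
                                  const struct cpumask *domain)
    {
        struct cpumask cpus;
        int nr_cpus;

        cpumask_and(&cpus, domain, cpu_online_mask);
        nr_cpus = cpumask_weight(&cpus);

        return power_one_cpu_uw * nr_cpus;
    }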
|
/linux-6.1.9/tools/testing/selftests/kvm/lib/aarch64/ |
D | gic.c |
  26  gic_dist_init(enum gic_type type, unsigned int nr_cpus, void *dist_base)    in gic_dist_init() argument
  43  gic_ops->gic_init(nr_cpus, dist_base);    in gic_dist_init()
  52  void gic_init(enum gic_type type, unsigned int nr_cpus,    in gic_init() argument
  60  GUEST_ASSERT(nr_cpus);    in gic_init()
  62  gic_dist_init(type, nr_cpus, dist_base);    in gic_init()
|
/linux-6.1.9/tools/testing/selftests/bpf/progs/ |
D | test_map_lookup_percpu_elem.c |
  10  const volatile int nr_cpus;    variable
  60  bpf_loop(nr_cpus, read_percpu_elem_callback, &map_ctx, 0);    in sysenter_getuid()
  65  bpf_loop(nr_cpus, read_percpu_elem_callback, &map_ctx, 0);    in sysenter_getuid()
  70  bpf_loop(nr_cpus, read_percpu_elem_callback, &map_ctx, 0);    in sysenter_getuid()
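On the BPF side, nr_cpus is a const volatile global patched in from user space (the skel->rodata->nr_cpus hit in the prog_tests entry above), and bpf_loop() walks it so each iteration can read one CPU's slot with bpf_map_lookup_percpu_elem(). A hedged sketch of that pattern; the map, section name and callback are illustrative, and the helper requires a recent kernel (roughly v5.19+).

    // SPDX-License-Identifier: GPL-2.0
    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    const volatile int nr_cpus;  /* patched via skel->rodata before load */
    __u64 percpu_sum;

    struct {
        __uint(type, BPF_MAP_TYPE_PERCPU_ARRAY);
        __uint(max_entries, 1);
        __type(key, __u32);
        __type(value, __u64);
    } pcpu_map SEC(".maps");

    static int sum_one_cpu(__u32 cpu, void *ctx)
    {
        __u32 key = 0;
        __u64 *val = bpf_map_lookup_percpu_elem(&pcpu_map, &key, cpu);

        if (val)
            *(__u64 *)ctx += *val;
        return 0;  /* 0 = continue looping */
    }

    SEC("tp/syscalls/sys_enter_getuid")
    int sum_percpu(void *ctx)
    {
        __u64 sum = 0;

        bpf_loop(nr_cpus, sum_one_cpu, &sum, 0);
        percpu_sum = sum;
        return 0;
    }

    char _license[] SEC("license") = "GPL";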
|
/linux-6.1.9/arch/mips/loongson64/ |
D | env.c |
  139  loongson_sysconf.nr_cpus = ecpu->nr_cpus;    in prom_lefi_init_env()
  142  if (ecpu->nr_cpus > NR_CPUS || ecpu->nr_cpus == 0)    in prom_lefi_init_env()
  143  loongson_sysconf.nr_cpus = NR_CPUS;    in prom_lefi_init_env()
  144  loongson_sysconf.nr_nodes = (loongson_sysconf.nr_cpus +    in prom_lefi_init_env()
|
/linux-6.1.9/arch/arm64/kvm/vgic/ |
D | vgic-debug.c |
  26  int nr_cpus;    member
  45  ++iter->vcpu_id < iter->nr_cpus)    in iter_next()
  58  int nr_cpus = atomic_read(&kvm->online_vcpus);    in iter_init() local
  62  iter->nr_cpus = nr_cpus;    in iter_init()
  78  iter->vcpu_id == iter->nr_cpus &&    in end_of_vgic()
  246  if (iter->vcpu_id < iter->nr_cpus)    in vgic_debug_show()
|
/linux-6.1.9/tools/perf/util/ |
D | svghelper.c |
  700  int *pos, int nr_cpus)    in scan_thread_topology() argument
  709  for_each_set_bit(thr, cpumask_bits(&t->sib_thr[i]), nr_cpus)    in scan_thread_topology()
  715  static void scan_core_topology(int *map, struct topology *t, int nr_cpus)    in scan_core_topology() argument
  722  for_each_set_bit(cpu, cpumask_bits(&t->sib_core[i]), nr_cpus)    in scan_core_topology()
  723  scan_thread_topology(map, t, cpu, &pos, nr_cpus);    in scan_core_topology()
  726  static int str_to_bitmap(char *s, cpumask_t *b, int nr_cpus)    in str_to_bitmap() argument
  739  if (c.cpu >= nr_cpus) {    in str_to_bitmap()
  754  int i, nr_cpus;    in svg_build_topology_map() local
  758  nr_cpus = min(env->nr_cpus_online, MAX_NR_CPUS);    in svg_build_topology_map()
  774  if (str_to_bitmap(sib_core, &t.sib_core[i], nr_cpus)) {    in svg_build_topology_map()
  [all …]
|
/linux-6.1.9/drivers/pci/controller/ |
D | pcie-iproc-msi.c |
  98  int nr_cpus;    member
  197  return (hwirq % msi->nr_cpus);    in hwirq_to_cpu()
  253  if (msi->nr_cpus > 1 && nr_irqs > 1)    in iproc_msi_irq_domain_alloc()
  263  order_base_2(msi->nr_cpus * nr_irqs));    in iproc_msi_irq_domain_alloc()
  291  order_base_2(msi->nr_cpus * nr_irqs));    in iproc_msi_irq_domain_free()
  478  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {    in iproc_msi_irq_free()
  490  for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {    in iproc_msi_irq_setup()
  542  msi->nr_cpus = num_possible_cpus();    in iproc_msi_init()
  544  if (msi->nr_cpus == 1)    in iproc_msi_init()
  559  if (msi->nr_irqs < msi->nr_cpus) {    in iproc_msi_init()
  [all …]
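In the iProc MSI driver, vector hwirq is serviced by CPU hwirq % nr_cpus, and the per-CPU setup/free loops visit each CPU's vectors with a stride of nr_cpus. A tiny stand-alone sketch of that round-robin split; the sizes are made up for illustration only.

    #include <stdio.h>

    /* Round-robin split of MSI vectors across CPUs: vector hwirq is handled
     * by CPU hwirq % nr_cpus, so CPU n owns n, n + nr_cpus, n + 2*nr_cpus, ... */
    int main(void)
    {
        int nr_irqs = 8, nr_cpus = 3;  /* illustrative sizes only */
        int cpu, hwirq;

        for (cpu = 0; cpu < nr_cpus; cpu++) {
            printf("cpu%d:", cpu);
            for (hwirq = cpu; hwirq < nr_irqs; hwirq += nr_cpus)
                printf(" %d", hwirq);
            printf("\n");
        }
        return 0;
    }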
|