Searched refs:cpu_map (Results 1 – 25 of 35) sorted by relevance

/linux-6.1.9/kernel/sched/
topology.c:304 static void perf_domain_debug(const struct cpumask *cpu_map, in perf_domain_debug() argument
310 printk(KERN_DEBUG "root_domain %*pbl:", cpumask_pr_args(cpu_map)); in perf_domain_debug()
372 static bool build_perf_domains(const struct cpumask *cpu_map) in build_perf_domains() argument
374 int i, nr_pd = 0, nr_ps = 0, nr_cpus = cpumask_weight(cpu_map); in build_perf_domains()
376 int cpu = cpumask_first(cpu_map); in build_perf_domains()
388 cpumask_pr_args(cpu_map)); in build_perf_domains()
396 cpumask_pr_args(cpu_map)); in build_perf_domains()
403 cpumask_pr_args(cpu_map)); in build_perf_domains()
408 for_each_cpu(i, cpu_map) { in build_perf_domains()
422 cpumask_pr_args(cpu_map)); in build_perf_domains()
[all …]
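
The kernel/sched hits above are plain cpumask operations. As a rough, illustrative sketch (not the kernel's code; dump_cpu_map is a made-up name), the same idioms — cpumask_weight(), cpumask_first(), for_each_cpu(), and the %*pbl printk format fed by cpumask_pr_args() — fit together like this:

#include <linux/cpumask.h>
#include <linux/printk.h>

/* Illustrative only: inspect a root-domain style cpu_map the way
 * perf_domain_debug()/build_perf_domains() above inspect theirs. */
static void dump_cpu_map(const struct cpumask *cpu_map)
{
	unsigned int cpu, nr_cpus = cpumask_weight(cpu_map);

	printk(KERN_DEBUG "cpu_map %*pbl: first CPU %u, %u CPUs total\n",
	       cpumask_pr_args(cpu_map), cpumask_first(cpu_map), nr_cpus);

	for_each_cpu(cpu, cpu_map)	/* visits each set bit in the mask */
		printk(KERN_DEBUG "  cpu%u is in the map\n", cpu);
}
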
/linux-6.1.9/arch/mips/kernel/
cacheinfo.c:58 static void fill_cpumask_siblings(int cpu, cpumask_t *cpu_map) in fill_cpumask_siblings() argument
64 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_siblings()
67 static void fill_cpumask_cluster(int cpu, cpumask_t *cpu_map) in fill_cpumask_cluster() argument
74 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_cluster()
/linux-6.1.9/tools/power/x86/intel-speed-select/
isst-config.c:68 struct _cpu_map *cpu_map; variable
309 if (cpu_map && cpu_map[cpu].initialized) in get_physical_package_id()
310 return cpu_map[cpu].pkg_id; in get_physical_package_id()
333 if (cpu_map && cpu_map[cpu].initialized) in get_physical_core_id()
334 return cpu_map[cpu].core_id; in get_physical_core_id()
357 if (cpu_map && cpu_map[cpu].initialized) in get_physical_die_id()
358 return cpu_map[cpu].die_id; in get_physical_die_id()
599 cpu_map[i].punit_cpu_core > max_id) in get_max_punit_core_id()
600 max_id = cpu_map[i].punit_cpu_core; in get_max_punit_core_id()
623 cpu_map = calloc(topo_max_cpus, sizeof(*cpu_map)); in create_cpu_map()
[all …]
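
isst-config.c keeps a calloc()'d per-CPU cache and consults it before touching sysfs again. A minimal sketch of that pattern, restricted to the fields visible in the hits above (the real struct _cpu_map in the tool has more members, and create_cpu_map here is simplified):

#include <stdlib.h>

struct _cpu_map {			/* reduced to the fields shown above */
	int initialized;
	int pkg_id;
	int core_id;
	int die_id;
	int punit_cpu_core;
};

static struct _cpu_map *cpu_map;

static int create_cpu_map(int topo_max_cpus)
{
	cpu_map = calloc(topo_max_cpus, sizeof(*cpu_map));
	return cpu_map ? 0 : -1;
}

static int get_physical_package_id(int cpu)
{
	if (cpu_map && cpu_map[cpu].initialized)
		return cpu_map[cpu].pkg_id;	/* cached answer */
	return -1;	/* the real tool re-reads sysfs here */
}
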
/linux-6.1.9/arch/ia64/mm/
discontig.c:184 unsigned int *cpu_map; in setup_per_cpu_areas() local
194 cpu_map = ai->groups[0].cpu_map; in setup_per_cpu_areas()
208 cpu_map[unit++] = cpu; in setup_per_cpu_areas()
233 cpu = cpu_map[unit]; in setup_per_cpu_areas()
245 gi->cpu_map = &cpu_map[unit]; in setup_per_cpu_areas()
contig.c:115 gi->cpu_map[gi->nr_units++] = cpu; in setup_per_cpu_areas()
/linux-6.1.9/tools/testing/selftests/bpf/progs/
test_xdp_with_cpumap_helpers.c:13 } cpu_map SEC(".maps");
18 return bpf_redirect_map(&cpu_map, 1, 0); in xdp_redir_prog()
test_xdp_with_cpumap_frags_helpers.c:13 } cpu_map SEC(".maps");
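
Both selftest programs above declare a BPF_MAP_TYPE_CPUMAP and redirect into it from XDP. A self-contained sketch of that program side, assuming the usual libbpf build environment (UAPI linux/bpf.h plus bpf/bpf_helpers.h); the map sizing mirrors the selftest:

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_CPUMAP);
	__uint(key_size, sizeof(__u32));
	__uint(value_size, sizeof(struct bpf_cpumap_val));
	__uint(max_entries, 4);
} cpu_map SEC(".maps");

SEC("xdp")
int xdp_redir_prog(struct xdp_md *ctx)
{
	/* Steer every packet to the CPU registered at index 1. */
	return bpf_redirect_map(&cpu_map, 1, 0);
}

char _license[] SEC("license") = "GPL";
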
/linux-6.1.9/samples/bpf/
xdp_redirect_cpu.bpf.c:15 } cpu_map SEC(".maps");
164 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum0_no_touch()
206 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum1_touch_data()
251 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum2_round_robin()
317 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum3_proto_separate()
390 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum4_ddos_filter_pktgen()
491 return bpf_redirect_map(&cpu_map, cpu_dest, 0); in xdp_prognum5_lb_hash_ip_pairs()
xdp_redirect_cpu_user.c:364 if (bpf_map__set_max_entries(skel->maps.cpu_map, n_cpus) < 0) { in main()
497 ret = bpf_obj_get_info_by_fd(bpf_map__fd(skel->maps.cpu_map), &info, &infosz); in main()
506 map_fd = bpf_map__fd(skel->maps.cpu_map); in main()
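
On the user side, xdp_redirect_cpu_user.c resizes the map with bpf_map__set_max_entries() before the skeleton is loaded (line 364 above) and then fills each slot so bpf_redirect_map() has somewhere to steer packets. A hedged sketch of that post-load step: register_cpus is a made-up helper, the fd would come from bpf_map__fd(skel->maps.cpu_map) as in the sample, and the queue size is an arbitrary assumption.

#include <bpf/bpf.h>
#include <linux/bpf.h>

/* Illustrative only: register each CPU in the CPUMAP after load. */
static int register_cpus(int map_fd, unsigned int n_cpus)
{
	struct bpf_cpumap_val value = { .qsize = 2048 };	/* assumed queue size */
	__u32 cpu;

	for (cpu = 0; cpu < n_cpus; cpu++)
		if (bpf_map_update_elem(map_fd, &cpu, &value, 0))
			return -1;
	return 0;
}
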
/linux-6.1.9/drivers/platform/x86/intel/speed_select_if/
isst_if_common.c:466 struct isst_if_cpu_map *cpu_map; in isst_if_proc_phyid_req() local
468 cpu_map = (struct isst_if_cpu_map *)cmd_ptr; in isst_if_proc_phyid_req()
469 if (cpu_map->logical_cpu >= nr_cpu_ids || in isst_if_proc_phyid_req()
470 cpu_map->logical_cpu >= num_possible_cpus()) in isst_if_proc_phyid_req()
474 cpu_map->physical_cpu = isst_cpu_info[cpu_map->logical_cpu].punit_cpu_id; in isst_if_proc_phyid_req()
597 cmd_cb.offset = offsetof(struct isst_if_cpu_maps, cpu_map); in isst_if_def_ioctl()
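
isst_if_proc_phyid_req() above is the kernel half of the logical-to-punit CPU mapping. A hedged sketch of the matching userspace call, assuming the /dev/isst_interface node and the ISST_IF_GET_PHY_ID ioctl from include/uapi/linux/isst_if.h (query_punit_cpu is a made-up name; error handling is minimal):

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/isst_if.h>

static int query_punit_cpu(int logical_cpu)
{
	struct isst_if_cpu_maps maps = {
		.cmd_count = 1,
		.cpu_map[0] = { .logical_cpu = logical_cpu },
	};
	int ret, fd = open("/dev/isst_interface", O_RDWR);

	if (fd < 0)
		return -1;
	ret = ioctl(fd, ISST_IF_GET_PHY_ID, &maps);
	close(fd);
	return ret < 0 ? -1 : (int)maps.cpu_map[0].physical_cpu;
}
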
/linux-6.1.9/tools/perf/tests/
cpumap.c:19 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_mask()
55 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_cpus()
81 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_range_cpus()
/linux-6.1.9/tools/perf/util/
mmap.c:249 const struct perf_cpu_map *cpu_map = NULL; in build_node_mask() local
251 cpu_map = cpu_map__online(); in build_node_mask()
252 if (!cpu_map) in build_node_mask()
255 nr_cpus = perf_cpu_map__nr(cpu_map); in build_node_mask()
257 cpu = perf_cpu_map__cpu(cpu_map, idx); /* map c index to online cpu index */ in build_node_mask()
tool.h:75 cpu_map, member
session.c:547 if (tool->cpu_map == NULL) in perf_tool__fill_defaults()
548 tool->cpu_map = process_event_cpu_map_stub; in perf_tool__fill_defaults()
918 struct perf_record_cpu_map_data *data = &event->cpu_map.data; in perf_event__cpu_map_swap()
1708 return tool->cpu_map(session, event); in perf_session__process_user_event()
/linux-6.1.9/kernel/bpf/
cpumap.c:79 struct bpf_cpu_map_entry __rcu **cpu_map; member
113 cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries * in cpu_map_alloc()
116 if (!cmap->cpu_map) in cpu_map_alloc()
536 old_rcpu = unrcu_pointer(xchg(&cmap->cpu_map[key_cpu], RCU_INITIALIZER(rcpu))); in __cpu_map_entry_replace()
618 rcpu = rcu_dereference_raw(cmap->cpu_map[i]); in cpu_map_free()
625 bpf_map_area_free(cmap->cpu_map); in cpu_map_free()
641 rcpu = rcu_dereference_check(cmap->cpu_map[key], in __cpu_map_lookup_elem()
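
kernel/bpf/cpumap.c stores entries in an array of __rcu pointers: lookups dereference under RCU, updates publish with an atomic xchg() and free the old entry only after a grace period. An illustrative reduction of that pattern (struct cpu_map_sketch and both helpers are made-up names; allocation and the deferred free are elided):

#include <linux/atomic.h>
#include <linux/rcupdate.h>
#include <linux/types.h>

struct bpf_cpu_map_entry;

struct cpu_map_sketch {
	struct bpf_cpu_map_entry __rcu **cpu_map;	/* one slot per key */
	u32 max_entries;
};

/* Reader side: caller holds rcu_read_lock(). */
static struct bpf_cpu_map_entry *
cpu_map_lookup(struct cpu_map_sketch *cmap, u32 key)
{
	if (key >= cmap->max_entries)
		return NULL;
	return rcu_dereference(cmap->cpu_map[key]);
}

/* Writer side: swap in the new entry, hand the old one back to the caller,
 * which must free it only after an RCU grace period. */
static struct bpf_cpu_map_entry *
cpu_map_replace(struct cpu_map_sketch *cmap, u32 key,
		struct bpf_cpu_map_entry *rcpu)
{
	return unrcu_pointer(xchg(&cmap->cpu_map[key], RCU_INITIALIZER(rcpu)));
}
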
/linux-6.1.9/tools/testing/selftests/bpf/prog_tests/
xdp_cpumap_attach.c:35 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_helpers()
87 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_frags_helpers()
/linux-6.1.9/mm/
percpu.c:2445 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2446 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2454 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2457 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2523 if (gi->cpu_map[unit] != NR_CPUS) in pcpu_dump_alloc_info()
2525 cpu_width, gi->cpu_map[unit]); in pcpu_dump_alloc_info()
2670 cpu = gi->cpu_map[i]; in pcpu_setup_first_chunk()
2867 unsigned int *cpu_map; in pcpu_build_alloc_info() local
2959 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
2962 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
[all …]
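
mm/percpu.c treats each group's cpu_map as a unit-to-CPU table, with NR_CPUS marking unassigned units (see the initialisation at line 2457 above). An illustrative walk of one group, following what pcpu_dump_alloc_info()/pcpu_setup_first_chunk() do; walk_group_units is a made-up name:

#include <linux/percpu.h>
#include <linux/printk.h>

static void walk_group_units(const struct pcpu_group_info *gi)
{
	int unit;

	for (unit = 0; unit < gi->nr_units; unit++) {
		unsigned int cpu = gi->cpu_map[unit];

		if (cpu == NR_CPUS)		/* unit not mapped to any CPU */
			continue;
		pr_info("unit %d -> cpu%u\n", unit, cpu);
	}
}
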
/linux-6.1.9/tools/perf/arch/arm/util/
cs-etm.c:716 struct perf_cpu_map *cpu_map; in cs_etm_info_fill() local
731 cpu_map = online_cpus; in cs_etm_info_fill()
742 cpu_map = event_cpus; in cs_etm_info_fill()
745 nr_cpu = perf_cpu_map__nr(cpu_map); in cs_etm_info_fill()
761 if (perf_cpu_map__has(cpu_map, cpu)) in cs_etm_info_fill()
/linux-6.1.9/tools/perf/python/
twatch.py:12 cpus = perf.cpu_map()
tracepoint.py:19 cpus = perf.cpu_map()
/linux-6.1.9/include/uapi/linux/
isst_if.h:63 struct isst_if_cpu_map cpu_map[1]; member
/linux-6.1.9/include/linux/
percpu.h:70 unsigned int *cpu_map; /* unit->cpu map, empty member
/linux-6.1.9/drivers/hwmon/
coretemp.c:91 u16 cpu_map[NUM_REAL_CORES]; member
473 pdata->cpu_map[index] = topology_core_id(cpu); in create_core_data()
692 if (pd->cpu_map[i] == topology_core_id(cpu)) { in coretemp_cpu_offline()
/linux-6.1.9/tools/lib/perf/include/perf/
event.h:495 struct perf_record_cpu_map cpu_map; member
/linux-6.1.9/drivers/scsi/lpfc/
lpfc_init.c:1307 hdwq = &phba->sli4_hba.hdwq[phba->sli4_hba.cpu_map[i].hdwq]; in lpfc_idle_stat_delay_work()
8385 phba->sli4_hba.cpu_map = kcalloc(phba->sli4_hba.num_possible_cpu, in lpfc_sli4_driver_resource_setup()
8388 if (!phba->sli4_hba.cpu_map) { in lpfc_sli4_driver_resource_setup()
8460 kfree(phba->sli4_hba.cpu_map); in lpfc_sli4_driver_resource_setup()
8507 kfree(phba->sli4_hba.cpu_map); in lpfc_sli4_driver_resource_unset()
10531 cpup = &phba->sli4_hba.cpu_map[cpu]; in lpfc_sli4_queue_create()
10564 cpup = &phba->sli4_hba.cpu_map[cpu]; in lpfc_sli4_queue_create()
10577 eqcpup = &phba->sli4_hba.cpu_map[eqcpu]; in lpfc_sli4_queue_create()
11138 cpup = &phba->sli4_hba.cpu_map[cpu]; in lpfc_sli4_queue_setup()
11173 cpup = &phba->sli4_hba.cpu_map[cpu]; in lpfc_sli4_queue_setup()
[all …]
