
Searched refs:cpu_hw_events (Results 1 – 22 of 22) sorted by relevance
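Note: almost every hit below is the same per-CPU access pattern. Each architecture keeps a per-CPU struct cpu_hw_events and reaches it with this_cpu_ptr() on the local CPU, or with per_cpu()/per_cpu_ptr() for a given CPU index. A minimal sketch of that pattern follows (kernel context assumed; the n_events field and the example_* helpers are illustrative placeholders, not taken from the files listed).

/*
 * Minimal sketch of the static per-CPU pattern seen in these results.
 * The n_events field and example_* helpers are hypothetical, not
 * copied from the listed files.
 */
#include <linux/percpu.h>

struct cpu_hw_events {
	int n_events;			/* illustrative bookkeeping field */
};

static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);

/*
 * Local-CPU access, as in the *_pmu_add()/*_pmu_del() hits below.
 * PMU callbacks run with preemption (and usually IRQs) disabled,
 * which is what makes this_cpu_ptr() safe here.
 */
static void example_local_access(void)
{
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

	cpuc->n_events++;
}

/* Access by CPU index, as in the *_cpu_prepare()/*_cpu_dead() hits. */
static void example_remote_access(int cpu)
{
	struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);

	cpuc->n_events = 0;
}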

/linux-6.1.9/arch/x86/events/amd/
brs.c
208 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_enable()
224 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_enable_all()
231 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_disable()
260 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_disable_all()
286 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_drain()
389 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_brs_sched_task()
410 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in perf_amd_brs_lopwr_cb()
lbr.c
99 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_filter()
162 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_read()
324 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_reset()
343 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_add()
363 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_del()
378 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_sched_task()
391 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_enable_all()
412 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_disable_all()
core.c
364 static inline int amd_has_nb(struct cpu_hw_events *cpuc) in amd_has_nb()
392 static void __amd_put_nb_event_constraints(struct cpu_hw_events *cpuc, in __amd_put_nb_event_constraints()
449 __amd_get_nb_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event, in __amd_get_nb_event_constraints()
543 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in amd_pmu_cpu_prepare()
567 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in amd_pmu_cpu_starting()
581 nb = per_cpu(cpu_hw_events, i).amd_nb; in amd_pmu_cpu_starting()
600 struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu); in amd_pmu_cpu_dead()
691 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_check_overflow()
723 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_enable_all()
853 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_handle_irq()
[all …]
/linux-6.1.9/arch/sh/kernel/
perf_event.c
28 struct cpu_hw_events {
34 DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
201 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_stop()
219 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_start()
236 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_del()
246 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_add()
336 struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu); in sh_pmu_prepare_cpu()
338 memset(cpuhw, 0, sizeof(struct cpu_hw_events)); in sh_pmu_prepare_cpu()
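The sh_pmu_prepare_cpu() hit above shows the usual hotplug wiring: a CPU-hotplug prepare callback zeroes the per-CPU structure before the CPU is brought up. A hedged sketch of that wiring, reusing the cpu_hw_events declaration from the sketch near the top; the hotplug state name and the example_* functions are hypothetical.

/*
 * Sketch of a CPU-hotplug prepare hook that resets per-CPU state, in
 * the spirit of sh_pmu_prepare_cpu() above. Reuses the cpu_hw_events
 * per-CPU declaration from the earlier sketch; names are hypothetical.
 */
#include <linux/cpuhotplug.h>
#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/string.h>

static int example_pmu_prepare_cpu(unsigned int cpu)
{
	struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);

	memset(cpuhw, 0, sizeof(*cpuhw));
	return 0;
}

static int __init example_pmu_init(void)
{
	/* Dynamic prepare-stage state; no teardown callback needed here. */
	int ret = cpuhp_setup_state(CPUHP_BP_PREPARE_DYN,
				    "perf/example:prepare",
				    example_pmu_prepare_cpu, NULL);

	return ret < 0 ? ret : 0;
}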
/linux-6.1.9/arch/x86/events/intel/
lbr.c
105 static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
122 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_lbr_enable()
191 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_reset()
361 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_restore()
426 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_lbr_restore()
453 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_save()
503 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_lbr_save()
543 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_sched_task()
580 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_add()
622 struct cpu_hw_events *cpuc; in release_lbr_buffers()
[all …]
ds.c
385 struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds; in init_debug_store_on_cpu()
397 if (!per_cpu(cpu_hw_events, cpu).ds) in fini_debug_store_on_cpu()
456 struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); in alloc_pebs_buffer()
494 struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); in release_pebs_buffer()
512 struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); in alloc_bts_buffer()
541 struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); in release_bts_buffer()
559 per_cpu(cpu_hw_events, cpu).ds = ds; in alloc_ds_buffer()
565 per_cpu(cpu_hw_events, cpu).ds = NULL; in release_ds_buffer()
685 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_disable_bts()
702 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_drain_bts_buffer()
[all …]
core.c
2181 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_disable_all()
2198 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_enable_all()
2232 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_snapshot_branch_stack()
2285 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_nhm_workaround()
2350 static void intel_set_tfa(struct cpu_hw_events *cpuc, bool on) in intel_set_tfa()
2360 static void intel_tfa_commit_scheduling(struct cpu_hw_events *cpuc, int idx, int cntr) in intel_tfa_commit_scheduling()
2371 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_tfa_pmu_enable_all()
2404 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_set_masks()
2416 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_clear_masks()
2425 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_disable_fixed()
[all …]
bts.c
147 struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds; in bts_config_buffer()
186 struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds; in bts_update()
262 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_start()
307 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_stop()
452 struct debug_store *ds = this_cpu_ptr(&cpu_hw_events)->ds; in intel_bts_interrupt()
522 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_add()
knc.c
216 struct cpu_hw_events *cpuc; in knc_pmu_handle_irq()
221 cpuc = this_cpu_ptr(&cpu_hw_events); in knc_pmu_handle_irq()
p4.c
919 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_disable_all()
998 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_enable_all()
1035 struct cpu_hw_events *cpuc; in p4_pmu_handle_irq()
1041 cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_handle_irq()
1240 static int p4_pmu_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign) in p4_pmu_schedule_events()
/linux-6.1.9/arch/powerpc/perf/
core-fsl-emb.c
19 struct cpu_hw_events {
25 static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
192 struct cpu_hw_events *cpuhw; in fsl_emb_pmu_disable()
196 cpuhw = this_cpu_ptr(&cpu_hw_events); in fsl_emb_pmu_disable()
231 struct cpu_hw_events *cpuhw; in fsl_emb_pmu_enable()
235 cpuhw = this_cpu_ptr(&cpu_hw_events); in fsl_emb_pmu_enable()
278 struct cpu_hw_events *cpuhw; in fsl_emb_pmu_add()
285 cpuhw = &get_cpu_var(cpu_hw_events); in fsl_emb_pmu_add()
331 put_cpu_var(cpu_hw_events); in fsl_emb_pmu_add()
339 struct cpu_hw_events *cpuhw; in fsl_emb_pmu_del()
[all …]
core-book3s.c
32 struct cpu_hw_events {
64 static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
128 static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw) in ebb_switch_in()
136 static inline void power_pmu_bhrb_read(struct perf_event *event, struct cpu_hw_events *cpuhw) {} in power_pmu_bhrb_read()
157 struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events); in get_pmcs_ext_regs()
416 struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events); in power_pmu_bhrb_enable()
432 struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events); in power_pmu_bhrb_disable()
490 static void power_pmu_bhrb_read(struct perf_event *event, struct cpu_hw_events *cpuhw) in power_pmu_bhrb_read()
647 static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw) in ebb_switch_in()
795 struct cpu_hw_events *cpuhw; in power_pmu_wants_prompt_pmi()
[all …]
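fsl_emb_pmu_add() above uses the get_cpu_var()/put_cpu_var() pair rather than this_cpu_ptr(): the access itself disables preemption instead of relying on the caller's context. A minimal sketch of that pairing, again reusing the per-CPU declaration from the first sketch; example_add() is a hypothetical helper.

/*
 * Sketch of the get_cpu_var()/put_cpu_var() pairing seen in
 * fsl_emb_pmu_add() above: preemption is disabled across the access
 * and re-enabled afterwards. Builds on the earlier cpu_hw_events
 * per-CPU declaration; example_add() is hypothetical.
 */
#include <linux/percpu.h>

static void example_add(void)
{
	struct cpu_hw_events *cpuhw = &get_cpu_var(cpu_hw_events);

	cpuhw->n_events++;

	put_cpu_var(cpu_hw_events);
}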
/linux-6.1.9/arch/alpha/kernel/
perf_event.c
34 struct cpu_hw_events {
53 DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
391 static void maybe_change_configuration(struct cpu_hw_events *cpuc) in maybe_change_configuration()
435 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_add()
487 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_del()
535 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_stop()
555 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_start()
722 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_enable()
748 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in alpha_pmu_disable()
807 struct cpu_hw_events *cpuc; in alpha_perf_event_irq_handler()
[all …]
/linux-6.1.9/arch/loongarch/kernel/
perf_event.c
89 struct cpu_hw_events {
104 static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
253 static int loongarch_pmu_alloc_counter(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc) in loongarch_pmu_alloc_counter()
269 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_enable_event()
288 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_disable_event()
388 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_add()
422 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_del()
481 static void handle_associated_event(struct cpu_hw_events *cpuc, int idx, in handle_associated_event()
503 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in pmu_handle_irq()
642 struct cpu_hw_events fake_cpuc; in validate_group()
[all …]
/linux-6.1.9/arch/x86/events/
core.c
50 DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
679 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable_all()
719 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable()
736 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_enable_all()
766 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in x86_get_pmu()
976 int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign) in x86_schedule_events()
1114 static int add_nr_metric_event(struct cpu_hw_events *cpuc, in add_nr_metric_event()
1127 static void del_nr_metric_event(struct cpu_hw_events *cpuc, in del_nr_metric_event()
1134 static int collect_event(struct cpu_hw_events *cpuc, struct perf_event *event, in collect_event()
1158 static int collect_events(struct cpu_hw_events *cpuc, struct perf_event *leader, bool dogrp) in collect_events()
[all …]
perf_event.h
229 struct cpu_hw_events {
753 int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
772 (*get_event_constraints)(struct cpu_hw_events *cpuc,
776 void (*put_event_constraints)(struct cpu_hw_events *cpuc,
779 void (*start_scheduling)(struct cpu_hw_events *cpuc);
781 void (*commit_scheduling)(struct cpu_hw_events *cpuc, int idx, int cntr);
783 void (*stop_scheduling)(struct cpu_hw_events *cpuc);
882 void (*lbr_read)(struct cpu_hw_events *cpuc);
1065 DECLARE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
1142 u64 disable_mask = __this_cpu_read(cpu_hw_events.perf_ctr_virt_mask); in __x86_pmu_enable_event()
[all …]
/linux-6.1.9/arch/sparc/kernel/
perf_event.c
78 struct cpu_hw_events {
115 static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = { .enabled = 1, };
827 static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in… in sparc_pmu_enable_event()
845 static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i… in sparc_pmu_disable_event()
923 static void read_in_all_counters(struct cpu_hw_events *cpuc) in read_in_all_counters()
947 static void calculate_single_pcr(struct cpu_hw_events *cpuc) in calculate_single_pcr()
983 static void calculate_multiple_pcrs(struct cpu_hw_events *cpuc) in calculate_multiple_pcrs()
1017 static void update_pcrs_for_enable(struct cpu_hw_events *cpuc) in update_pcrs_for_enable()
1031 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sparc_pmu_enable()
1049 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sparc_pmu_disable()
[all …]
/linux-6.1.9/arch/mips/kernel/
perf_event_mipsxx.c
31 struct cpu_hw_events {
48 DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
314 static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc, in mipsxx_pmu_alloc_counter()
351 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_enable_event()
397 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_disable_event()
501 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipspmu_add()
537 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipspmu_del()
763 struct cpu_hw_events fake_cpuc; in validate_group()
782 static void handle_associated_event(struct cpu_hw_events *cpuc, in handle_associated_event()
1548 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in pause_local_counters()
[all …]
/linux-6.1.9/include/linux/perf/
riscv_pmu.h
29 struct cpu_hw_events {
57 struct cpu_hw_events __percpu *hw_events;
/linux-6.1.9/drivers/perf/
riscv_pmu.c
200 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pmu_add()
224 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pmu_del()
290 struct cpu_hw_events *cpuc; in riscv_pmu_alloc()
296 pmu->hw_events = alloc_percpu_gfp(struct cpu_hw_events, GFP_KERNEL); in riscv_pmu_alloc()
riscv_pmu_sbi.c
271 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in pmu_sbi_ctr_get_idx()
318 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in pmu_sbi_ctr_clear_idx()
510 struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events); in pmu_sbi_stop_hw_ctrs()
527 struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events); in pmu_sbi_start_overflow_mask()
573 struct cpu_hw_events *cpu_hw_evt = dev; in pmu_sbi_ovf_handler()
653 struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events); in pmu_sbi_starting_cpu()
690 struct cpu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_sbi_setup_irqs()
736 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pm_pmu_notify()
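Unlike the architectures above, the RISC-V driver has no static DEFINE_PER_CPU variable: riscv_pmu_alloc() obtains the per-CPU storage with alloc_percpu_gfp() and the callbacks reach it through a __percpu pointer held in the pmu structure. A hedged sketch of that variant; struct example_pmu and the example_* helpers are illustrative, and cpu_hw_events is the struct from the first sketch.

/*
 * Sketch of the dynamically allocated per-CPU variant used by the
 * RISC-V driver above. struct example_pmu and the example_* helpers
 * are illustrative; cpu_hw_events is the struct from the first sketch.
 */
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/percpu.h>

struct example_pmu {
	struct cpu_hw_events __percpu *hw_events;
};

static int example_pmu_alloc(struct example_pmu *pmu)
{
	pmu->hw_events = alloc_percpu_gfp(struct cpu_hw_events, GFP_KERNEL);
	if (!pmu->hw_events)
		return -ENOMEM;
	return 0;
}

/*
 * PMU callbacks run with preemption disabled, so this_cpu_ptr() on the
 * percpu pointer is safe here, mirroring riscv_pmu_add() above.
 */
static void example_pmu_add(struct example_pmu *pmu)
{
	struct cpu_hw_events *cpuc = this_cpu_ptr(pmu->hw_events);

	cpuc->n_events++;
}

static void example_pmu_free(struct example_pmu *pmu)
{
	free_percpu(pmu->hw_events);
}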
/linux-6.1.9/arch/x86/events/zhaoxin/
core.c
357 struct cpu_hw_events *cpuc; in zhaoxin_pmu_handle_irq()
362 cpuc = this_cpu_ptr(&cpu_hw_events); in zhaoxin_pmu_handle_irq()
422 zhaoxin_get_event_constraints(struct cpu_hw_events *cpuc, int idx, in zhaoxin_get_event_constraints()