
Search results for refs:cpuc (results 1 – 25 of 26), sorted by relevance
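
Across these results, cpuc is the conventional local name for a pointer to the per-CPU PMU bookkeeping structure (cpu_hw_events on most architectures, pmu_hw_events in the ARM drivers, srcu_data in the one RCU hit), almost always fetched with this_cpu_ptr(). A minimal userspace model of the idiom, with stand-in types and a trivial this_cpu_ptr macro; the names here are illustrative, not the kernel's definitions:

    #include <stdio.h>

    struct cpu_hw_events {
        int enabled;     /* PMU globally enabled on this CPU */
        int n_events;    /* events currently scheduled */
    };

    /* In the kernel this is a DEFINE_PER_CPU variable and this_cpu_ptr()
     * resolves it for the executing CPU; one instance and a no-op macro
     * stand in for both here. */
    static struct cpu_hw_events cpu_hw_events;
    #define this_cpu_ptr(p) (p)

    static void pmu_disable_model(void)
    {
        struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

        if (!cpuc->enabled)          /* same shape as x86_pmu_disable() */
            return;
        cpuc->n_events = 0;
        cpuc->enabled = 0;
    }

    int main(void)
    {
        cpu_hw_events.enabled = 1;
        cpu_hw_events.n_events = 3;
        pmu_disable_model();
        printf("enabled=%d n_events=%d\n",
               cpu_hw_events.enabled, cpu_hw_events.n_events);
        return 0;
    }
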


/linux-6.6.21/arch/alpha/kernel/
perf_event.c
391 static void maybe_change_configuration(struct cpu_hw_events *cpuc) in maybe_change_configuration() argument
395 if (cpuc->n_added == 0) in maybe_change_configuration()
399 for (j = 0; j < cpuc->n_events; j++) { in maybe_change_configuration()
400 struct perf_event *pe = cpuc->event[j]; in maybe_change_configuration()
402 if (cpuc->current_idx[j] != PMC_NO_INDEX && in maybe_change_configuration()
403 cpuc->current_idx[j] != pe->hw.idx) { in maybe_change_configuration()
404 alpha_perf_event_update(pe, &pe->hw, cpuc->current_idx[j], 0); in maybe_change_configuration()
405 cpuc->current_idx[j] = PMC_NO_INDEX; in maybe_change_configuration()
410 cpuc->idx_mask = 0; in maybe_change_configuration()
411 for (j = 0; j < cpuc->n_events; j++) { in maybe_change_configuration()
[all …]
/linux-6.6.21/arch/x86/events/amd/
lbr.c
99 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_lbr_filter() local
100 int br_sel = cpuc->br_sel, offset, type, i, j; in amd_pmu_lbr_filter()
110 for (i = 0; i < cpuc->lbr_stack.nr; i++) { in amd_pmu_lbr_filter()
111 from = cpuc->lbr_entries[i].from; in amd_pmu_lbr_filter()
112 to = cpuc->lbr_entries[i].to; in amd_pmu_lbr_filter()
121 cpuc->lbr_entries[i].from += offset; in amd_pmu_lbr_filter()
128 cpuc->lbr_entries[i].from = 0; /* mark invalid */ in amd_pmu_lbr_filter()
133 cpuc->lbr_entries[i].type = common_branch_type(type); in amd_pmu_lbr_filter()
140 for (i = 0; i < cpuc->lbr_stack.nr; ) { in amd_pmu_lbr_filter()
141 if (!cpuc->lbr_entries[i].from) { in amd_pmu_lbr_filter()
[all …]
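
The amd_pmu_lbr_filter() hits show a two-pass pattern: entries that fail the branch filter are marked invalid by zeroing their from address, and a second pass (truncated in the excerpt) removes them. A sketch of the compaction pass under that assumption, with simplified stand-in types:

    #include <stdio.h>

    struct lbr_entry { unsigned long from, to; };

    /* Remove entries previously marked invalid (from == 0) by shifting
     * the tail down; generic compaction, not the kernel's exact loop. */
    static int compact_lbr(struct lbr_entry *e, int nr)
    {
        for (int i = 0; i < nr; ) {
            if (!e[i].from) {
                for (int j = i + 1; j < nr; j++)
                    e[j - 1] = e[j];
                nr--;              /* stay at i: a new entry slid into it */
            } else {
                i++;
            }
        }
        return nr;
    }

    int main(void)
    {
        struct lbr_entry stack[4] = { {0x10, 0x20}, {0, 0}, {0x30, 0x40}, {0, 0} };
        int nr = compact_lbr(stack, 4);
        printf("kept %d entries, first from=0x%lx\n", nr, stack[0].from);
        return 0;
    }
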
core.c
364 static inline int amd_has_nb(struct cpu_hw_events *cpuc) in amd_has_nb() argument
366 struct amd_nb *nb = cpuc->amd_nb; in amd_has_nb()
392 static void __amd_put_nb_event_constraints(struct cpu_hw_events *cpuc, in __amd_put_nb_event_constraints() argument
395 struct amd_nb *nb = cpuc->amd_nb; in __amd_put_nb_event_constraints()
449 __amd_get_nb_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event, in __amd_get_nb_event_constraints() argument
453 struct amd_nb *nb = cpuc->amd_nb; in __amd_get_nb_event_constraints()
460 if (cpuc->is_fake) in __amd_get_nb_event_constraints()
547 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in amd_pmu_cpu_prepare() local
549 cpuc->lbr_sel = kzalloc_node(sizeof(struct er_account), GFP_KERNEL, in amd_pmu_cpu_prepare()
551 if (!cpuc->lbr_sel) in amd_pmu_cpu_prepare()
[all …]
brs.c
205 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_enable() local
209 if (++cpuc->brs_active > 1) in amd_brs_enable()
221 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_enable_all() local
222 if (cpuc->lbr_users) in amd_brs_enable_all()
228 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_disable() local
232 if (!cpuc->brs_active) in amd_brs_disable()
236 if (--cpuc->brs_active) in amd_brs_disable()
257 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_disable_all() local
258 if (cpuc->lbr_users) in amd_brs_disable_all()
283 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_brs_drain() local
[all …]
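
The brs.c hits show reference-counted hardware control: brs_active is bumped on every enable, and the hardware is touched only on the 0 -> 1 and 1 -> 0 transitions. A minimal model of that counting discipline (printf stands in for the MSR writes):

    #include <stdio.h>

    static int brs_active;

    static void brs_enable(void)
    {
        if (++brs_active > 1)        /* already running: just count the user */
            return;
        printf("program BRS hardware (0 -> 1)\n");
    }

    static void brs_disable(void)
    {
        if (!brs_active)             /* nothing to do */
            return;
        if (--brs_active)            /* other users remain */
            return;
        printf("stop BRS hardware (1 -> 0)\n");
    }

    int main(void)
    {
        brs_enable();   /* programs hardware */
        brs_enable();   /* count only */
        brs_disable();  /* count only */
        brs_disable();  /* stops hardware */
        return 0;
    }
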
/linux-6.6.21/arch/x86/events/intel/
lbr.c
105 static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
122 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_lbr_enable() local
136 if (cpuc->lbr_sel) in __intel_pmu_lbr_enable()
137 lbr_select = cpuc->lbr_sel->config & x86_pmu.lbr_sel_mask; in __intel_pmu_lbr_enable()
138 if (!static_cpu_has(X86_FEATURE_ARCH_LBR) && !pmi && cpuc->lbr_sel) in __intel_pmu_lbr_enable()
191 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_reset() local
198 cpuc->last_task_ctx = NULL; in intel_pmu_lbr_reset()
199 cpuc->last_log_id = 0; in intel_pmu_lbr_reset()
200 if (!static_cpu_has(X86_FEATURE_ARCH_LBR) && cpuc->lbr_select) in intel_pmu_lbr_reset()
361 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_restore() local
[all …]
ds.c
740 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_disable_bts() local
743 if (!cpuc->ds) in intel_pmu_disable_bts()
757 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_drain_bts_buffer() local
758 struct debug_store *ds = cpuc->ds; in intel_pmu_drain_bts_buffer()
764 struct perf_event *event = cpuc->events[INTEL_PMC_IDX_FIXED_BTS]; in intel_pmu_drain_bts_buffer()
1120 static inline bool pebs_needs_sched_cb(struct cpu_hw_events *cpuc) in pebs_needs_sched_cb() argument
1122 if (cpuc->n_pebs == cpuc->n_pebs_via_pt) in pebs_needs_sched_cb()
1125 return cpuc->n_pebs && (cpuc->n_pebs == cpuc->n_large_pebs); in pebs_needs_sched_cb()
1130 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_pebs_sched_task() local
1132 if (!sched_in && pebs_needs_sched_cb(cpuc)) in intel_pmu_pebs_sched_task()
[all …]
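
The pebs_needs_sched_cb() hit is nearly complete in the excerpt: the context-switch callback is wanted only when PEBS events exist, all of them use the large-PEBS buffer, and they are not all routed through PT. Restated as a standalone predicate; the early return false on the via-PT branch is elided above and assumed here:

    #include <stdbool.h>
    #include <stdio.h>

    static bool pebs_needs_sched_cb(int n_pebs, int n_pebs_via_pt, int n_large_pebs)
    {
        if (n_pebs == n_pebs_via_pt)     /* everything routed through PT */
            return false;
        return n_pebs && (n_pebs == n_large_pebs);
    }

    int main(void)
    {
        printf("%d\n", pebs_needs_sched_cb(2, 0, 2));  /* 1: all large-PEBS */
        printf("%d\n", pebs_needs_sched_cb(2, 0, 1));  /* 0: mixed sizes */
        return 0;
    }
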
core.c
2212 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_disable_all() local
2216 if (bts && test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) in __intel_pmu_disable_all()
2229 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_enable_all() local
2230 u64 intel_ctrl = hybrid(cpuc->pmu, intel_ctrl); in __intel_pmu_enable_all()
2234 if (cpuc->fixed_ctrl_val != cpuc->active_fixed_ctrl_val) { in __intel_pmu_enable_all()
2235 wrmsrl(MSR_ARCH_PERFMON_FIXED_CTR_CTRL, cpuc->fixed_ctrl_val); in __intel_pmu_enable_all()
2236 cpuc->active_fixed_ctrl_val = cpuc->fixed_ctrl_val; in __intel_pmu_enable_all()
2240 intel_ctrl & ~cpuc->intel_ctrl_guest_mask); in __intel_pmu_enable_all()
2242 if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) { in __intel_pmu_enable_all()
2244 cpuc->events[INTEL_PMC_IDX_FIXED_BTS]; in __intel_pmu_enable_all()
[all …]
bts.c
262 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_start() local
273 bts->ds_back.bts_buffer_base = cpuc->ds->bts_buffer_base; in bts_event_start()
274 bts->ds_back.bts_absolute_maximum = cpuc->ds->bts_absolute_maximum; in bts_event_start()
275 bts->ds_back.bts_interrupt_threshold = cpuc->ds->bts_interrupt_threshold; in bts_event_start()
307 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_stop() local
332 cpuc->ds->bts_index = bts->ds_back.bts_buffer_base; in bts_event_stop()
333 cpuc->ds->bts_buffer_base = bts->ds_back.bts_buffer_base; in bts_event_stop()
334 cpuc->ds->bts_absolute_maximum = bts->ds_back.bts_absolute_maximum; in bts_event_stop()
335 cpuc->ds->bts_interrupt_threshold = bts->ds_back.bts_interrupt_threshold; in bts_event_stop()
522 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_add() local
[all …]
knc.c
216 struct cpu_hw_events *cpuc; in knc_pmu_handle_irq() local
221 cpuc = this_cpu_ptr(&cpu_hw_events); in knc_pmu_handle_irq()
243 struct perf_event *event = cpuc->events[bit]; in knc_pmu_handle_irq()
247 if (!test_bit(bit, cpuc->active_mask)) in knc_pmu_handle_irq()
268 if (cpuc->enabled) in knc_pmu_handle_irq()
p4.c
919 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_disable_all() local
923 struct perf_event *event = cpuc->events[idx]; in p4_pmu_disable_all()
924 if (!test_bit(idx, cpuc->active_mask)) in p4_pmu_disable_all()
998 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_enable_all() local
1002 struct perf_event *event = cpuc->events[idx]; in p4_pmu_enable_all()
1003 if (!test_bit(idx, cpuc->active_mask)) in p4_pmu_enable_all()
1035 struct cpu_hw_events *cpuc; in p4_pmu_handle_irq() local
1041 cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_handle_irq()
1046 if (!test_bit(idx, cpuc->active_mask)) { in p4_pmu_handle_irq()
1053 event = cpuc->events[idx]; in p4_pmu_handle_irq()
[all …]
/linux-6.6.21/arch/x86/events/
core.c
679 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable_all() local
683 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_disable_all()
686 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_disable_all()
719 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable() local
724 if (!cpuc->enabled) in x86_pmu_disable()
727 cpuc->n_added = 0; in x86_pmu_disable()
728 cpuc->enabled = 0; in x86_pmu_disable()
736 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_enable_all() local
740 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all()
742 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_enable_all()
[all …]
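
x86_pmu_disable_all() shows the canonical disable walk: iterate counter indices and skip any slot whose bit is clear in cpuc->active_mask. A compact model with a plain bitmask standing in for the kernel's bitmap helpers:

    #include <stdio.h>

    #define NUM_COUNTERS 8

    static unsigned long active_mask;    /* one bit per counter index */

    static void pmu_disable_all_model(void)
    {
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!(active_mask & (1UL << idx)))    /* kernel: test_bit() */
                continue;
            printf("disable counter %d\n", idx);  /* kernel: clear the
                                                     enable bit in the
                                                     control MSR */
        }
    }

    int main(void)
    {
        active_mask = 0x5;       /* counters 0 and 2 active */
        pmu_disable_all_model();
        return 0;
    }
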
perf_event.h
757 int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
776 (*get_event_constraints)(struct cpu_hw_events *cpuc,
780 void (*put_event_constraints)(struct cpu_hw_events *cpuc,
783 void (*start_scheduling)(struct cpu_hw_events *cpuc);
785 void (*commit_scheduling)(struct cpu_hw_events *cpuc, int idx, int cntr);
787 void (*stop_scheduling)(struct cpu_hw_events *cpuc);
886 void (*lbr_read)(struct cpu_hw_events *cpuc);
1166 int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign);
1333 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in amd_pmu_brs_add() local
1336 cpuc->lbr_users++; in amd_pmu_brs_add()
[all …]
/linux-6.6.21/arch/sparc/kernel/
perf_event.c
827 static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in… in sparc_pmu_enable_event() argument
835 enc = perf_event_get_enc(cpuc->events[idx]); in sparc_pmu_enable_event()
837 val = cpuc->pcr[pcr_index]; in sparc_pmu_enable_event()
840 cpuc->pcr[pcr_index] = val; in sparc_pmu_enable_event()
842 pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]); in sparc_pmu_enable_event()
845 static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i… in sparc_pmu_disable_event() argument
855 val = cpuc->pcr[pcr_index]; in sparc_pmu_disable_event()
858 cpuc->pcr[pcr_index] = val; in sparc_pmu_disable_event()
860 pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]); in sparc_pmu_disable_event()
923 static void read_in_all_counters(struct cpu_hw_events *cpuc) in read_in_all_counters() argument
[all …]
/linux-6.6.21/arch/loongarch/kernel/
perf_event.c
253 static int loongarch_pmu_alloc_counter(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc) in loongarch_pmu_alloc_counter() argument
258 if (!test_and_set_bit(i, cpuc->used_mask)) in loongarch_pmu_alloc_counter()
269 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_enable_event() local
274 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base) | in loongarch_pmu_enable_event()
288 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_disable_event() local
293 cpuc->saved_ctrl[idx] = loongarch_pmu_read_control(idx) & in loongarch_pmu_disable_event()
295 loongarch_pmu_write_control(idx, cpuc->saved_ctrl[idx]); in loongarch_pmu_disable_event()
388 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in loongarch_pmu_add() local
394 idx = loongarch_pmu_alloc_counter(cpuc, hwc); in loongarch_pmu_add()
406 cpuc->events[idx] = event; in loongarch_pmu_add()
[all …]
/linux-6.6.21/arch/sh/kernel/
perf_event.c
201 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_stop() local
207 cpuc->events[idx] = NULL; in sh_pmu_stop()
219 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_start() local
229 cpuc->events[idx] = event; in sh_pmu_start()
236 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_del() local
239 __clear_bit(event->hw.idx, cpuc->used_mask); in sh_pmu_del()
246 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_add() local
253 if (__test_and_set_bit(idx, cpuc->used_mask)) { in sh_pmu_add()
254 idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events); in sh_pmu_add()
258 __set_bit(idx, cpuc->used_mask); in sh_pmu_add()
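
The sh hits (and the loongarch, arm, and mips ones) share a counter-allocation pattern: claim a slot by setting its bit in cpuc->used_mask, preferring the index the event held last time and falling back to the first free bit. A non-atomic model of that logic; the kernel uses test_and_set_bit()/find_first_zero_bit():

    #include <stdio.h>

    static int alloc_counter(unsigned long *used_mask, int prev_idx, int num)
    {
        /* Fast path: reuse the slot this event held last time. */
        if (!(*used_mask & (1UL << prev_idx))) {
            *used_mask |= 1UL << prev_idx;
            return prev_idx;
        }
        /* Slow path: first free slot. */
        for (int idx = 0; idx < num; idx++) {
            if (!(*used_mask & (1UL << idx))) {
                *used_mask |= 1UL << idx;
                return idx;
            }
        }
        return -1;                        /* all counters busy */
    }

    int main(void)
    {
        unsigned long used = 0;
        printf("%d %d %d\n",
               alloc_counter(&used, 1, 2),   /* 1: preferred slot free */
               alloc_counter(&used, 1, 2),   /* 0: fall back to first free */
               alloc_counter(&used, 1, 2));  /* -1: full */
        return 0;
    }
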
/linux-6.6.21/arch/arm/kernel/
perf_event_xscale.c
149 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_handle_irq() local
174 struct perf_event *event = cpuc->events[idx]; in xscale1pmu_handle_irq()
275 xscale1pmu_get_event_idx(struct pmu_hw_events *cpuc, in xscale1pmu_get_event_idx() argument
280 if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask)) in xscale1pmu_get_event_idx()
285 if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask)) in xscale1pmu_get_event_idx()
288 if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask)) in xscale1pmu_get_event_idx()
295 static void xscalepmu_clear_event_idx(struct pmu_hw_events *cpuc, in xscalepmu_clear_event_idx() argument
298 clear_bit(event->hw.idx, cpuc->used_mask); in xscalepmu_clear_event_idx()
501 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_handle_irq() local
520 struct perf_event *event = cpuc->events[idx]; in xscale2pmu_handle_irq()
[all …]
perf_event_v6.c
310 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv6pmu_handle_irq() local
327 struct perf_event *event = cpuc->events[idx]; in armv6pmu_handle_irq()
388 armv6pmu_get_event_idx(struct pmu_hw_events *cpuc, in armv6pmu_get_event_idx() argument
394 if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask)) in armv6pmu_get_event_idx()
403 if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask)) in armv6pmu_get_event_idx()
406 if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask)) in armv6pmu_get_event_idx()
414 static void armv6pmu_clear_event_idx(struct pmu_hw_events *cpuc, in armv6pmu_clear_event_idx() argument
417 clear_bit(event->hw.idx, cpuc->used_mask); in armv6pmu_clear_event_idx()
perf_event_v7.c
953 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv7pmu_handle_irq() local
974 struct perf_event *event = cpuc->events[idx]; in armv7pmu_handle_irq()
1032 static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc, in armv7pmu_get_event_idx() argument
1042 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv7pmu_get_event_idx()
1053 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv7pmu_get_event_idx()
1061 static void armv7pmu_clear_event_idx(struct pmu_hw_events *cpuc, in armv7pmu_clear_event_idx() argument
1064 clear_bit(event->hw.idx, cpuc->used_mask); in armv7pmu_clear_event_idx()
1606 static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc, in krait_pmu_get_event_idx() argument
1626 if (test_and_set_bit(bit, cpuc->used_mask)) in krait_pmu_get_event_idx()
1630 idx = armv7pmu_get_event_idx(cpuc, event); in krait_pmu_get_event_idx()
[all …]
/linux-6.6.21/drivers/perf/
riscv_pmu.c
264 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pmu_add() local
273 cpuc->events[idx] = event; in riscv_pmu_add()
274 cpuc->n_events++; in riscv_pmu_add()
288 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pmu_del() local
292 cpuc->events[hwc->idx] = NULL; in riscv_pmu_del()
296 cpuc->n_events--; in riscv_pmu_del()
390 struct cpu_hw_events *cpuc; in riscv_pmu_alloc() local
403 cpuc = per_cpu_ptr(pmu->hw_events, cpuid); in riscv_pmu_alloc()
404 cpuc->n_events = 0; in riscv_pmu_alloc()
406 cpuc->events[i] = NULL; in riscv_pmu_alloc()
arm_pmuv3.c
709 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv8pmu_enable_user_access() local
712 for_each_clear_bit(i, cpuc->used_mask, cpu_pmu->num_events) { in armv8pmu_enable_user_access()
770 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv8pmu_handle_irq() local
796 struct perf_event *event = cpuc->events[idx]; in armv8pmu_handle_irq()
829 static int armv8pmu_get_single_idx(struct pmu_hw_events *cpuc, in armv8pmu_get_single_idx() argument
835 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv8pmu_get_single_idx()
841 static int armv8pmu_get_chain_idx(struct pmu_hw_events *cpuc, in armv8pmu_get_chain_idx() argument
851 if (!test_and_set_bit(idx, cpuc->used_mask)) { in armv8pmu_get_chain_idx()
853 if (!test_and_set_bit(idx - 1, cpuc->used_mask)) in armv8pmu_get_chain_idx()
856 clear_bit(idx, cpuc->used_mask); in armv8pmu_get_chain_idx()
[all …]
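
armv8pmu_get_chain_idx() above handles chained 64-bit events, which need an adjacent counter pair with an even low half: claim the odd index, then its lower neighbour, undoing the first claim if the neighbour is busy. A sketch of that claim-and-roll-back logic over a plain bitmask:

    #include <stdio.h>

    static int get_chain_idx(unsigned long *used_mask, int num_counters)
    {
        for (int idx = 1; idx < num_counters; idx += 2) {  /* odd indices */
            if (!(*used_mask & (1UL << idx))) {
                *used_mask |= 1UL << idx;
                if (!(*used_mask & (1UL << (idx - 1)))) {
                    *used_mask |= 1UL << (idx - 1);
                    return idx;                      /* pair claimed */
                }
                *used_mask &= ~(1UL << idx);         /* partner busy: undo */
            }
        }
        return -1;                                   /* kernel: -EAGAIN */
    }

    int main(void)
    {
        unsigned long used = 1UL << 0;               /* counter 0 taken */
        printf("chain idx = %d\n", get_chain_idx(&used, 8));  /* 3 (pair 2+3) */
        return 0;
    }
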
apple_m1_cpu_pmu.c
384 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in m1_pmu_handle_irq() local
404 struct perf_event *event = cpuc->events[idx]; in m1_pmu_handle_irq()
435 static int m1_pmu_get_event_idx(struct pmu_hw_events *cpuc, in m1_pmu_get_event_idx() argument
451 if (!test_and_set_bit(idx, cpuc->used_mask)) in m1_pmu_get_event_idx()
458 static void m1_pmu_clear_event_idx(struct pmu_hw_events *cpuc, in m1_pmu_clear_event_idx() argument
461 clear_bit(event->hw.idx, cpuc->used_mask); in m1_pmu_clear_event_idx()
riscv_pmu_sbi.c
345 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in pmu_sbi_ctr_get_idx() local
389 if (!test_and_set_bit(idx, cpuc->used_fw_ctrs)) in pmu_sbi_ctr_get_idx()
392 if (!test_and_set_bit(idx, cpuc->used_hw_ctrs)) in pmu_sbi_ctr_get_idx()
404 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in pmu_sbi_ctr_clear_idx() local
408 clear_bit(idx, cpuc->used_fw_ctrs); in pmu_sbi_ctr_clear_idx()
410 clear_bit(idx, cpuc->used_hw_ctrs); in pmu_sbi_ctr_clear_idx()
851 struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events); in riscv_pm_pmu_notify() local
852 int enabled = bitmap_weight(cpuc->used_hw_ctrs, RISCV_MAX_COUNTERS); in riscv_pm_pmu_notify()
860 event = cpuc->events[idx]; in riscv_pm_pmu_notify()
/linux-6.6.21/arch/mips/kernel/
perf_event_mipsxx.c
314 static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc, in mipsxx_pmu_alloc_counter() argument
341 !test_and_set_bit(i, cpuc->used_mask)) in mipsxx_pmu_alloc_counter()
351 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_enable_event() local
357 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0x3ff) | in mipsxx_pmu_enable_event()
362 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) | in mipsxx_pmu_enable_event()
369 cpuc->saved_ctrl[idx] |= in mipsxx_pmu_enable_event()
374 cpuc->saved_ctrl[idx] |= M_TC_EN_ALL; in mipsxx_pmu_enable_event()
387 cpuc->saved_ctrl[idx] |= ctrl; in mipsxx_pmu_enable_event()
397 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_disable_event() local
403 cpuc->saved_ctrl[idx] = mipsxx_pmu_read_control(idx) & in mipsxx_pmu_disable_event()
[all …]
/linux-6.6.21/arch/x86/events/zhaoxin/
core.c
357 struct cpu_hw_events *cpuc; in zhaoxin_pmu_handle_irq() local
362 cpuc = this_cpu_ptr(&cpu_hw_events); in zhaoxin_pmu_handle_irq()
387 struct perf_event *event = cpuc->events[bit]; in zhaoxin_pmu_handle_irq()
391 if (!test_bit(bit, cpuc->active_mask)) in zhaoxin_pmu_handle_irq()
422 zhaoxin_get_event_constraints(struct cpu_hw_events *cpuc, int idx, in zhaoxin_get_event_constraints() argument
/linux-6.6.21/kernel/rcu/
srcutree.c
427 struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); in srcu_readers_lock_idx() local
429 sum += atomic_long_read(&cpuc->srcu_lock_count[idx]); in srcu_readers_lock_idx()
445 struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); in srcu_readers_unlock_idx() local
447 sum += atomic_long_read(&cpuc->srcu_unlock_count[idx]); in srcu_readers_unlock_idx()
449 mask = mask | READ_ONCE(cpuc->srcu_nmi_safety); in srcu_readers_unlock_idx()
553 struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); in srcu_readers_active() local
555 sum += atomic_long_read(&cpuc->srcu_lock_count[0]); in srcu_readers_active()
556 sum += atomic_long_read(&cpuc->srcu_lock_count[1]); in srcu_readers_active()
557 sum -= atomic_long_read(&cpuc->srcu_unlock_count[0]); in srcu_readers_active()
558 sum -= atomic_long_read(&cpuc->srcu_unlock_count[1]); in srcu_readers_active()
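
The srcutree.c hits use cpuc for a per-CPU srcu_data rather than a PMU structure, but the shape is the same: sum per-CPU lock counts minus unlock counts across both index slots to decide whether readers are still in flight. A model with plain arrays standing in for per_cpu_ptr() and atomic_long_read():

    #include <stdio.h>

    #define NR_CPUS_MODEL 4

    struct srcu_data_model {
        long srcu_lock_count[2];
        long srcu_unlock_count[2];
    };

    static struct srcu_data_model sda[NR_CPUS_MODEL];

    static long srcu_readers_active_model(void)
    {
        long sum = 0;

        for (int cpu = 0; cpu < NR_CPUS_MODEL; cpu++) {
            sum += sda[cpu].srcu_lock_count[0];
            sum += sda[cpu].srcu_lock_count[1];
            sum -= sda[cpu].srcu_unlock_count[0];
            sum -= sda[cpu].srcu_unlock_count[1];
        }
        return sum;               /* non-zero => readers still in flight */
    }

    int main(void)
    {
        sda[0].srcu_lock_count[0] = 2;       /* two readers entered on CPU 0 */
        sda[1].srcu_unlock_count[0] = 1;     /* one exited on CPU 1 */
        printf("active readers: %ld\n", srcu_readers_active_model());
        return 0;
    }
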
