Searched refs: X86_PMC_IDX_FIXED  (Results 1 – 4 of 4, sorted by relevance)
Result 1:
 12   #define X86_PMC_IDX_FIXED 32                                      (macro definition)
124   #define X86_PMC_IDX_FIXED_INSTRUCTIONS  (X86_PMC_IDX_FIXED + 0)
128   #define X86_PMC_IDX_FIXED_CPU_CYCLES    (X86_PMC_IDX_FIXED + 1)
132   #define X86_PMC_IDX_FIXED_REF_CYCLES    (X86_PMC_IDX_FIXED + 2)
142   #define X86_PMC_IDX_FIXED_BTS           (X86_PMC_IDX_FIXED + 16)
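The definition result shows how the x86 PMU carves up its global counter index space: general-purpose counters occupy indices below 32, the architectural fixed-function counters start at X86_PMC_IDX_FIXED (instructions, CPU cycles, reference cycles at offsets 0-2), and the BTS pseudo-counter sits at offset 16. Below is a minimal user-space sketch of that layout, reusing the defines above and the "((1ull << nr_fixed) - 1) << X86_PMC_IDX_FIXED" mask pattern built in results 2 and 3; fixed_counter_mask() is a hypothetical helper, not a kernel function.

    #include <stdint.h>
    #include <stdio.h>

    /* Index-space layout copied from the hits above. */
    #define X86_PMC_IDX_FIXED               32
    #define X86_PMC_IDX_FIXED_INSTRUCTIONS  (X86_PMC_IDX_FIXED + 0)
    #define X86_PMC_IDX_FIXED_CPU_CYCLES    (X86_PMC_IDX_FIXED + 1)
    #define X86_PMC_IDX_FIXED_REF_CYCLES    (X86_PMC_IDX_FIXED + 2)
    #define X86_PMC_IDX_FIXED_BTS           (X86_PMC_IDX_FIXED + 16)

    /* Hypothetical helper: build a 64-bit mask with one bit per usable fixed
     * counter, mirroring the shift pattern seen in kvm_pmu_cpuid_update()
     * and init_hw_perf_events() below. */
    static uint64_t fixed_counter_mask(unsigned int nr_fixed)
    {
        return ((1ull << nr_fixed) - 1) << X86_PMC_IDX_FIXED;
    }

    int main(void)
    {
        /* Three architectural fixed counters -> bits 32, 33 and 34 set. */
        printf("mask for 3 fixed counters: 0x%llx\n",
               (unsigned long long)fixed_counter_mask(3));
        return 0;
    }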
Result 2:
 83   if (idx < X86_PMC_IDX_FIXED)                                          in global_idx_to_pmc()
 86   return get_fixed_pmc_idx(pmu, idx - X86_PMC_IDX_FIXED);               in global_idx_to_pmc()
294   int fidx = idx - X86_PMC_IDX_FIXED;                                   in reprogram_idx()
471   (((1ull << pmu->nr_arch_fixed_counters) - 1) << X86_PMC_IDX_FIXED);   in kvm_pmu_cpuid_update()
489   pmu->fixed_counters[i].idx = i + X86_PMC_IDX_FIXED;                   in kvm_pmu_init()
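All of the KVM vPMU hits perform the same translation: a global counter index at or above X86_PMC_IDX_FIXED denotes a fixed counter, and its slot number is obtained by subtracting the base (kvm_pmu_init() goes the other way, storing i + X86_PMC_IDX_FIXED as each fixed counter's index). A standalone sketch of that mapping, assuming a simplified PMU structure; struct demo_pmu and global_idx_is_fixed() are illustrative names, not the KVM ones.

    #include <stdbool.h>
    #include <stdio.h>

    #define X86_PMC_IDX_FIXED 32

    /* Illustrative stand-in for the PMU state touched by the KVM hits. */
    struct demo_pmu {
        int nr_arch_fixed_counters;
    };

    /* Mirror of the global_idx_to_pmc() logic in the hits: indices below
     * X86_PMC_IDX_FIXED are general-purpose counters; everything at or
     * above it maps to fixed slot (idx - X86_PMC_IDX_FIXED). */
    static bool global_idx_is_fixed(const struct demo_pmu *pmu, int idx, int *fixed_slot)
    {
        if (idx < X86_PMC_IDX_FIXED)
            return false;                 /* general-purpose counter */
        *fixed_slot = idx - X86_PMC_IDX_FIXED;
        return *fixed_slot < pmu->nr_arch_fixed_counters;
    }

    int main(void)
    {
        struct demo_pmu pmu = { .nr_arch_fixed_counters = 3 };
        int slot;

        /* Global index 33 -> fixed counter 1, the inverse of kvm_pmu_init()
         * setting fixed_counters[1].idx = 1 + X86_PMC_IDX_FIXED. */
        if (global_idx_is_fixed(&pmu, 33, &slot))
            printf("global idx 33 is fixed counter %d\n", slot);
        return 0;
    }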
Result 3:
 644   idx = X86_PMC_IDX_FIXED;                                                          in __perf_sched_find_counter()
 652   for_each_set_bit_from(idx, c->idxmsk, X86_PMC_IDX_FIXED) {                        in __perf_sched_find_counter()
 833   } else if (hwc->idx >= X86_PMC_IDX_FIXED) {                                       in x86_assign_hw_event()
 835   hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - X86_PMC_IDX_FIXED);   in x86_assign_hw_event()
1372   ((1LL << x86_pmu.num_counters_fixed)-1) << X86_PMC_IDX_FIXED;                     in init_hw_perf_events()
1630   if (x86_pmu.num_counters_fixed && idx >= X86_PMC_IDX_FIXED) {                     in x86_pmu_event_idx()
1631   idx -= X86_PMC_IDX_FIXED;                                                         in x86_pmu_event_idx()
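In the core x86 perf code the base index is used both to detect a fixed-counter assignment and to locate its counter MSR: x86_assign_hw_event() computes the event base as MSR_ARCH_PERFMON_FIXED_CTR0 plus the offset from X86_PMC_IDX_FIXED, and x86_pmu_event_idx() subtracts the base again when reporting the index to user space. A sketch of the MSR arithmetic only, assuming the architectural value 0x309 for MSR_ARCH_PERFMON_FIXED_CTR0 (IA32_FIXED_CTR0); fixed_event_base() is a hypothetical helper.

    #include <stdint.h>
    #include <stdio.h>

    #define X86_PMC_IDX_FIXED           32
    /* Architectural MSR of the first fixed-function counter (IA32_FIXED_CTR0). */
    #define MSR_ARCH_PERFMON_FIXED_CTR0 0x309

    /* Mirror of the x86_assign_hw_event() hit: a hardware index at or above
     * X86_PMC_IDX_FIXED maps onto the fixed-counter MSR block, one MSR per
     * counter starting at MSR_ARCH_PERFMON_FIXED_CTR0. */
    static uint32_t fixed_event_base(int hw_idx)
    {
        return MSR_ARCH_PERFMON_FIXED_CTR0 + (hw_idx - X86_PMC_IDX_FIXED);
    }

    int main(void)
    {
        /* Fixed counter 1 (CPU cycles) lives at global index 33 -> MSR 0x30a. */
        printf("event_base for idx 33: 0x%x\n", fixed_event_base(33));
        return 0;
    }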
Result 4:
880   int idx = hwc->idx - X86_PMC_IDX_FIXED;   in intel_pmu_disable_fixed()
924   int idx = hwc->idx - X86_PMC_IDX_FIXED;   in intel_pmu_enable_fixed()
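The Intel-specific enable/disable paths subtract X86_PMC_IDX_FIXED for a different purpose: each fixed counter owns a 4-bit enable/PMI field in the IA32_FIXED_CTR_CTRL MSR, so the resulting counter number selects which 4-bit field to shift a value or mask into. A sketch of that bit arithmetic under that assumption; fixed_ctrl_field_mask() is illustrative, not the kernel routine.

    #include <stdint.h>
    #include <stdio.h>

    #define X86_PMC_IDX_FIXED 32

    /* Each fixed counter has a 4-bit control field in IA32_FIXED_CTR_CTRL.
     * This mirrors the (idx * 4) shift applied after the subtraction shown
     * in intel_pmu_enable_fixed()/intel_pmu_disable_fixed() above. */
    static uint64_t fixed_ctrl_field_mask(int hw_idx)
    {
        int idx = hw_idx - X86_PMC_IDX_FIXED;   /* fixed counter number */

        return 0xfULL << (idx * 4);
    }

    int main(void)
    {
        /* Fixed counter 2 (reference cycles) -> bits 8..11 of the control MSR. */
        printf("ctrl mask for idx 34: 0x%llx\n",
               (unsigned long long)fixed_ctrl_field_mask(34));
        return 0;
    }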