Searched refs:intel_ctrl (Results 1 – 4 of 4) sorted by relevance
arch/x86/events/intel/core.c
2199  u64 intel_ctrl = hybrid(cpuc->pmu, intel_ctrl);                    in __intel_pmu_enable_all() local
2209  intel_ctrl & ~cpuc->intel_ctrl_guest_mask);                        in __intel_pmu_enable_all()
2935  u64 intel_ctrl = hybrid(cpuc->pmu, intel_ctrl);                    in handle_pmi_common() local
2979  status &= intel_ctrl | GLOBAL_STATUS_TRACE_TOPAPMI;                in handle_pmi_common()
4025  u64 intel_ctrl = hybrid(cpuc->pmu, intel_ctrl);                    in intel_guest_get_msrs() local
4033  .host = intel_ctrl & ~cpuc->intel_ctrl_guest_mask,                 in intel_guest_get_msrs()
4034  .guest = intel_ctrl & (~cpuc->intel_ctrl_host_mask | ~pebs_mask),  in intel_guest_get_msrs()
4533  pmu->intel_ctrl);                                                  in init_hybrid_pmu()
4585  x86_pmu.intel_ctrl &= ~(1ULL << GLOBAL_CTRL_EN_PERF_METRICS);      in intel_pmu_cpu_starting()
5616  u64 *intel_ctrl, u64 fixed_mask)                                   in intel_pmu_check_num_counters() argument
[all …]
arch/x86/events/zhaoxin/core.c
262   wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, x86_pmu.intel_ctrl);                               in zhaoxin_pmu_enable_all()
601   x86_pmu.intel_ctrl = (1 << (x86_pmu.num_counters)) - 1;                              in zhaoxin_pmu_init()
602   x86_pmu.intel_ctrl |= ((1LL << x86_pmu.num_counters_fixed)-1) << INTEL_PMC_IDX_FIXED;  in zhaoxin_pmu_init()
arch/x86/events/perf_event.h
657   u64 intel_ctrl;                                        member
820   u64 intel_ctrl;                                        member
1181  u64 intel_ctrl);
1295  u64 intel_ctrl = hybrid(pmu, intel_ctrl);              in fixed_counter_disabled() local
1297  return !(intel_ctrl >> (i + INTEL_PMC_IDX_FIXED));     in fixed_counter_disabled()
arch/x86/events/core.c
2042  u64 intel_ctrl)                                         in x86_pmu_show_pmu_cap() argument
2051  << INTEL_PMC_IDX_FIXED) & intel_ctrl));                 in x86_pmu_show_pmu_cap()
2052  pr_info("... event mask: %016Lx\n", intel_ctrl);        in x86_pmu_show_pmu_cap()
2116  if (!x86_pmu.intel_ctrl)                                 in init_hw_perf_events()
2117  x86_pmu.intel_ctrl = (1 << x86_pmu.num_counters) - 1;    in init_hw_perf_events()
2136  x86_pmu.intel_ctrl);                                     in init_hw_perf_events()
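
The matches above show intel_ctrl being used as the enable mask for MSR_CORE_PERF_GLOBAL_CTRL: it is built from the number of general-purpose and fixed counters (zhaoxin_pmu_init, init_hw_perf_events) and later consulted per fixed-counter index (fixed_counter_disabled). A minimal stand-alone C sketch of that bit arithmetic follows; the offset of 32 mirrors INTEL_PMC_IDX_FIXED, but the helper names and counter counts are illustrative stand-ins, not the kernel's own code.

#include <stdint.h>
#include <stdio.h>

/* Offset of the fixed counters within the global-control mask; the
 * architectural value, mirroring INTEL_PMC_IDX_FIXED in the kernel headers. */
#define IDX_FIXED 32

/* Build the enable mask the way zhaoxin_pmu_init() and init_hw_perf_events()
 * do above: one bit per general-purpose counter from bit 0, then one bit per
 * fixed counter starting at the fixed-counter offset. */
static uint64_t build_global_ctrl(int num_counters, int num_counters_fixed)
{
        uint64_t ctrl = (1ULL << num_counters) - 1;

        ctrl |= ((1ULL << num_counters_fixed) - 1) << IDX_FIXED;
        return ctrl;
}

/* Same test as fixed_counter_disabled() above: fixed counter i is treated as
 * absent when no enable bit is set at or above its position in the mask. */
static int fixed_counter_disabled(int i, uint64_t ctrl)
{
        return !(ctrl >> (i + IDX_FIXED));
}

int main(void)
{
        /* Hypothetical CPU with 4 general-purpose and 3 fixed counters. */
        uint64_t ctrl = build_global_ctrl(4, 3);

        printf("global ctrl mask: %016llx\n", (unsigned long long)ctrl);
        printf("fixed counter 2 disabled: %d\n", fixed_counter_disabled(2, ctrl));
        printf("fixed counter 3 disabled: %d\n", fixed_counter_disabled(3, ctrl));
        return 0;
}

On real hardware this is the mask that __intel_pmu_enable_all() and zhaoxin_pmu_enable_all() write to MSR_CORE_PERF_GLOBAL_CTRL, optionally filtered through the guest/host masks seen in intel_guest_get_msrs() above.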