/linux-6.1.9/arch/sh/kernel/

  perf_event.c
    30  unsigned long used_mask[BITS_TO_LONGS(MAX_HWEVENTS)];   (member)
   239  __clear_bit(event->hw.idx, cpuc->used_mask);   in sh_pmu_del()
   253  if (__test_and_set_bit(idx, cpuc->used_mask)) {   in sh_pmu_add()
   254  idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);   in sh_pmu_add()
   258  __set_bit(idx, cpuc->used_mask);   in sh_pmu_add()

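Almost every perf entry in this listing follows the allocation pattern visible in the sh driver above: used_mask is a bitmap with one bit per hardware counter, a bit is claimed when an event is scheduled in the PMU's add() callback and cleared again in del(). The sketch below models that pattern as self-contained userspace C; the demo_* names, DEMO_MAX_HWEVENTS and the single-word bit helpers are illustrative stand-ins for the kernel's <linux/bitmap.h> primitives, not the driver code itself.

/* Userspace model of the add()/del() counter bookkeeping shown above. */
#include <stdio.h>

#define DEMO_MAX_HWEVENTS 6

struct demo_cpu_hw_events {
    unsigned long used_mask;            /* one bit per hardware counter */
};

/* rough single-word equivalent of find_first_zero_bit() */
static int demo_find_free(unsigned long mask, int num_events)
{
    for (int idx = 0; idx < num_events; idx++)
        if (!(mask & (1UL << idx)))
            return idx;
    return -1;                          /* all counters busy */
}

/* add(): claim a counter, falling back if the preferred slot is taken */
static int demo_pmu_add(struct demo_cpu_hw_events *cpuc, int idx)
{
    if (cpuc->used_mask & (1UL << idx)) {       /* __test_and_set_bit() */
        idx = demo_find_free(cpuc->used_mask, DEMO_MAX_HWEVENTS);
        if (idx < 0)
            return -1;
    }
    cpuc->used_mask |= 1UL << idx;              /* __set_bit() */
    return idx;
}

/* del(): release the counter */
static void demo_pmu_del(struct demo_cpu_hw_events *cpuc, int idx)
{
    cpuc->used_mask &= ~(1UL << idx);           /* __clear_bit() */
}

int main(void)
{
    struct demo_cpu_hw_events cpuc = { 0 };
    int a = demo_pmu_add(&cpuc, 0);
    int b = demo_pmu_add(&cpuc, 0);     /* slot 0 busy, falls back to 1 */

    printf("a=%d b=%d used_mask=%#lx\n", a, b, cpuc.used_mask);
    demo_pmu_del(&cpuc, a);
    printf("after del: used_mask=%#lx\n", cpuc.used_mask);
    return 0;
}

The real drivers differ mainly in how the first candidate index is chosen (a dedicated cycle-counter slot, a fixed preference order, or a plain first-fit scan), not in the bookkeeping itself.
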
/linux-6.1.9/drivers/hwtracing/coresight/

  coresight-cti-platform.c
   139  tc->con_in->used_mask = 0xF0; /* sigs <4,5,6,7> */   in cti_plat_create_v8_etm_connection()
   140  tc->con_out->used_mask = 0xF0; /* sigs <4,5,6,7> */   in cti_plat_create_v8_etm_connection()
   197  tc->con_in->used_mask = 0x3; /* sigs <0 1> */   in cti_plat_create_v8_connections()
   200  tc->con_out->used_mask = 0x7; /* sigs <0 1 2 > */   in cti_plat_create_v8_connections()
   260  tgrp->used_mask |= BIT(values[idx]);   in cti_plat_read_trig_group()
   334  drvdata->config.trig_out_filter |= tg->used_mask;   in cti_plat_process_filter_sigs()

  coresight-cti-core.c
   266  drvdata->config.trig_in_use |= tc->con_in->used_mask;   in cti_add_connection_entry()
   267  drvdata->config.trig_out_use |= tc->con_out->used_mask;   in cti_add_connection_entry()
   321  tc->con_in->used_mask = n_trig_mask;   in cti_add_default_connection()
   322  tc->con_out->used_mask = n_trig_mask;   in cti_add_default_connection()

  coresight-cti.h
    70  u32 used_mask;   (member)

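The CoreSight CTI entries are the odd ones out: here used_mask is not a counter allocator but a bitmask of the trigger signal numbers carried by one CTI connection, built one BIT() per signal and OR-ed into the device-wide trig_in_use/trig_out_use and filter masks. A small stand-alone model of that accumulation; the signal numbers come from the v8 ETM comment above, everything else is illustrative:

#include <stdio.h>

#define BIT(n) (1UL << (n))

int main(void)
{
    /* sigs <4,5,6,7> -> 0xF0, as in cti_plat_create_v8_etm_connection() */
    const int sigs[] = { 4, 5, 6, 7 };
    unsigned long used_mask = 0;
    unsigned long trig_out_use = 0, trig_out_filter = 0;

    /* cti_plat_read_trig_group(): one bit per listed signal */
    for (unsigned long i = 0; i < sizeof(sigs) / sizeof(sigs[0]); i++)
        used_mask |= BIT(sigs[i]);

    /* cti_add_connection_entry() and cti_plat_process_filter_sigs() */
    trig_out_use |= used_mask;
    trig_out_filter |= used_mask;

    printf("used_mask=%#lx use=%#lx filter=%#lx\n",
           used_mask, trig_out_use, trig_out_filter);
    return 0;
}
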
/linux-6.1.9/drivers/perf/

  arm_dsu_pmu.c
    94  DECLARE_BITMAP(used_mask, DSU_PMU_MAX_HW_CNTRS);
   305  unsigned long *used_mask = hw_events->used_mask;   in dsu_pmu_get_event_idx()  (local)
   308  if (test_and_set_bit(DSU_PMU_IDX_CYCLE_COUNTER, used_mask))   in dsu_pmu_get_event_idx()
   313  idx = find_first_zero_bit(used_mask, dsu_pmu->num_counters);   in dsu_pmu_get_event_idx()
   316  set_bit(idx, hw_events->used_mask);   in dsu_pmu_get_event_idx()
   475  clear_bit(idx, hw_events->used_mask);   in dsu_pmu_del()
   486  if (bitmap_empty(dsu_pmu->hw_events.used_mask, DSU_PMU_MAX_HW_CNTRS))   in dsu_pmu_enable()
   533  memset(fake_hw.used_mask, 0, sizeof(fake_hw.used_mask));   in dsu_pmu_validate_group()

  arm-cci.c
    73  unsigned long *used_mask;   (member)
   320  if (test_and_set_bit(CCI400_PMU_CYCLE_CNTR_IDX, hw->used_mask))   in cci400_get_event_idx()
   327  if (!test_and_set_bit(idx, hw->used_mask))   in cci400_get_event_idx()
   649  for_each_set_bit(i, cci_pmu->hw_events.used_mask, cci_pmu->num_cntrs) {   in cci_pmu_sync_counters()
   810  if (!test_and_set_bit(idx, hw->used_mask))   in pmu_get_event_idx()
  1099  bool enabled = !bitmap_empty(hw_events->used_mask, cci_pmu->num_cntrs);   in cci_pmu_enable()
  1223  clear_bit(idx, hw_events->used_mask);   in cci_pmu_del()
  1262  .used_mask = mask,   in validate_group()
  1632  cci_pmu->hw_events.used_mask = devm_bitmap_zalloc(dev,   in cci_pmu_alloc()
  1635  if (!cci_pmu->hw_events.used_mask)   in cci_pmu_alloc()

  arm_pmu.c
   398  memset(&fake_pmu.used_mask, 0, sizeof(fake_pmu.used_mask));   in validate_group()
   527  bool enabled = !bitmap_empty(hw_events->used_mask, armpmu->num_events);   in armpmu_enable()
   788  bool enabled = !bitmap_empty(hw_events->used_mask, armpmu->num_events);   in cpu_pm_pmu_notify()

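The three arm_pmu.c hits show the two remaining roles used_mask plays in most of these drivers: validate_group() test-schedules every member of a new event group against a scratch pmu_hw_events whose used_mask has been zeroed (the memset at line 398), and armpmu_enable()/cpu_pm_pmu_notify() use bitmap_empty() to leave the hardware alone when no counter is claimed. A minimal sketch of the group-validation idea, with a made-up first-fit get_event_idx() standing in for the per-driver callback:

#include <stdio.h>
#include <string.h>

#define NUM_COUNTERS 4

struct fake_pmu_hw_events {
    unsigned long used_mask;
};

/* stand-in for the driver's get_event_idx() callback: plain first-fit */
static int get_event_idx(struct fake_pmu_hw_events *hw)
{
    for (int idx = 0; idx < NUM_COUNTERS; idx++) {
        if (!(hw->used_mask & (1UL << idx))) {
            hw->used_mask |= 1UL << idx;
            return idx;
        }
    }
    return -1;
}

/* can nr_events counters ever be scheduled at the same time? */
static int validate_group(int nr_events)
{
    struct fake_pmu_hw_events fake_pmu;

    memset(&fake_pmu.used_mask, 0, sizeof(fake_pmu.used_mask));

    for (int i = 0; i < nr_events; i++)
        if (get_event_idx(&fake_pmu) < 0)
            return -1;          /* group can never fit */
    return 0;
}

int main(void)
{
    printf("group of 3: %s\n", validate_group(3) ? "rejected" : "ok");
    printf("group of 5: %s\n", validate_group(5) ? "rejected" : "ok");
    return 0;
}

Because the scratch mask starts out empty, the check only asks whether the group could ever fit on the counters, not whether they happen to be free right now.
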
  arm_dmc620_pmu.c
    94  DECLARE_BITMAP(used_mask, DMC620_PMU_MAX_COUNTERS);
   280  if (!test_and_set_bit(idx, dmc620_pmu->used_mask))   in dmc620_get_event_idx()
   599  clear_bit(idx, dmc620_pmu->used_mask);   in dmc620_pmu_del()

  qcom_l3_pmu.c
   159  unsigned long used_mask[BITS_TO_LONGS(L3_NUM_COUNTERS)];   (member)
   557  idx = bitmap_find_free_region(l3pmu->used_mask, L3_NUM_COUNTERS, order);   in qcom_l3_cache__event_add()
   584  bitmap_release_region(l3pmu->used_mask, hwc->idx, order);   in qcom_l3_cache__event_del()

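The Qualcomm L3 PMU is the one entry here that claims more than one bit at a time: a double-width event needs a pair of adjacent counters, so the add path asks bitmap_find_free_region() for 2^order naturally aligned bits and the del path hands them back with bitmap_release_region(). A single-word model of that allocator (the function names below are local stand-ins for the kernel helpers):

#include <stdio.h>

/* claim 2^order adjacent, naturally aligned bits; returns the first index */
static int find_free_region(unsigned long *mask, int nbits, int order)
{
    int size = 1 << order;

    for (int idx = 0; idx + size <= nbits; idx += size) {
        unsigned long region = ((1UL << size) - 1) << idx;

        if (!(*mask & region)) {
            *mask |= region;            /* claim the whole region */
            return idx;
        }
    }
    return -1;
}

static void release_region(unsigned long *mask, int idx, int order)
{
    *mask &= ~(((1UL << (1 << order)) - 1) << idx);
}

int main(void)
{
    unsigned long used_mask = 0;
    int a = find_free_region(&used_mask, 8, 0);   /* one counter          */
    int b = find_free_region(&used_mask, 8, 1);   /* aligned counter pair */

    printf("a=%d b=%d used_mask=%#lx\n", a, b, used_mask);
    release_region(&used_mask, b, 1);
    printf("after release: used_mask=%#lx\n", used_mask);
    return 0;
}

Stepping idx by the region size is what gives natural alignment, so an order-1 request can only land on an even/odd counter pair.
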
  alibaba_uncore_drw_pmu.c
   101  DECLARE_BITMAP(used_mask, ALI_DRW_PMU_COMMON_MAX_COUNTERS);
   253  if (!test_and_set_bit(idx, drw_pmu->used_mask))   in ali_drw_get_counter_idx()
   634  clear_bit(idx, drw_pmu->used_mask);   in ali_drw_pmu_del()

  apple_m1_cpu_pmu.c
   451  if (!test_and_set_bit(idx, cpuc->used_mask))   in m1_pmu_get_event_idx()
   461  clear_bit(event->hw.idx, cpuc->used_mask);   in m1_pmu_clear_event_idx()

/linux-6.1.9/arch/xtensa/kernel/

  perf_event.c
    54  unsigned long used_mask[BITS_TO_LONGS(XCHAL_NUM_PERF_COUNTERS)];   (member)
   287  if (__test_and_set_bit(idx, ev->used_mask)) {   in xtensa_pmu_add()
   288  idx = find_first_zero_bit(ev->used_mask,   in xtensa_pmu_add()
   293  __set_bit(idx, ev->used_mask);   in xtensa_pmu_add()
   312  __clear_bit(event->hw.idx, ev->used_mask);   in xtensa_pmu_del()
   365  for_each_set_bit(i, ev->used_mask, XCHAL_NUM_PERF_COUNTERS) {   in xtensa_pmu_irq_handler()

/linux-6.1.9/drivers/perf/hisilicon/

  hisi_uncore_pmu.c
   102  unsigned long *used_mask = hisi_pmu->pmu_events.used_mask;   in hisi_uncore_pmu_get_event_idx()  (local)
   106  idx = find_first_zero_bit(used_mask, num_counters);   in hisi_uncore_pmu_get_event_idx()
   110  set_bit(idx, used_mask);   in hisi_uncore_pmu_get_event_idx()
   128  clear_bit(idx, hisi_pmu->pmu_events.used_mask);   in hisi_uncore_pmu_clear_event_idx()
   396  bool enabled = !bitmap_empty(hisi_pmu->pmu_events.used_mask,   in hisi_uncore_pmu_enable()

  hisi_uncore_ddrc_pmu.c
   170  unsigned long *used_mask = ddrc_pmu->pmu_events.used_mask;   in hisi_ddrc_pmu_v1_get_event_idx()  (local)
   175  if (test_bit(idx, used_mask))   in hisi_ddrc_pmu_v1_get_event_idx()
   178  set_bit(idx, used_mask);   in hisi_ddrc_pmu_v1_get_event_idx()

  hisi_uncore_pmu.h
    67  DECLARE_BITMAP(used_mask, HISI_MAX_COUNTERS);

/linux-6.1.9/arch/arm/kernel/

  perf_event_xscale.c
   280  if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))   in xscale1pmu_get_event_idx()
   285  if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))   in xscale1pmu_get_event_idx()
   288  if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))   in xscale1pmu_get_event_idx()
   298  clear_bit(event->hw.idx, cpuc->used_mask);   in xscalepmu_clear_event_idx()
   656  if (!test_and_set_bit(XSCALE_COUNTER3, cpuc->used_mask))   in xscale2pmu_get_event_idx()
   658  else if (!test_and_set_bit(XSCALE_COUNTER2, cpuc->used_mask))   in xscale2pmu_get_event_idx()

  perf_event_v6.c
   394  if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))   in armv6pmu_get_event_idx()
   403  if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))   in armv6pmu_get_event_idx()
   406  if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))   in armv6pmu_get_event_idx()
   417  clear_bit(event->hw.idx, cpuc->used_mask);   in armv6pmu_clear_event_idx()

  perf_event_v7.c
  1042  if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))   in armv7pmu_get_event_idx()
  1053  if (!test_and_set_bit(idx, cpuc->used_mask))   in armv7pmu_get_event_idx()
  1064  clear_bit(event->hw.idx, cpuc->used_mask);   in armv7pmu_clear_event_idx()
  1626  if (test_and_set_bit(bit, cpuc->used_mask))   in krait_pmu_get_event_idx()
  1632  clear_bit(bit, cpuc->used_mask);   in krait_pmu_get_event_idx()
  1650  clear_bit(bit, cpuc->used_mask);   in krait_pmu_clear_event_idx()
  1956  if (test_and_set_bit(bit, cpuc->used_mask))   in scorpion_pmu_get_event_idx()
  1962  clear_bit(bit, cpuc->used_mask);   in scorpion_pmu_get_event_idx()
  1980  clear_bit(bit, cpuc->used_mask);   in scorpion_pmu_clear_event_idx()

/linux-6.1.9/arch/arm64/kernel/

  perf_event.c
   755  for_each_clear_bit(i, cpuc->used_mask, cpu_pmu->num_events) {   in armv8pmu_enable_user_access()
   896  if (!test_and_set_bit(idx, cpuc->used_mask))   in armv8pmu_get_single_idx()
   912  if (!test_and_set_bit(idx, cpuc->used_mask)) {   in armv8pmu_get_chain_idx()
   914  if (!test_and_set_bit(idx - 1, cpuc->used_mask))   in armv8pmu_get_chain_idx()
   917  clear_bit(idx, cpuc->used_mask);   in armv8pmu_get_chain_idx()
   932  if (!test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))   in armv8pmu_get_event_idx()
   954  clear_bit(idx, cpuc->used_mask);   in armv8pmu_clear_event_idx()
   956  clear_bit(idx - 1, cpuc->used_mask);   in armv8pmu_clear_event_idx()

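The arm64 driver above has the other multi-counter case: a 64-bit chained event needs two adjacent counters, so armv8pmu_get_chain_idx() claims counter idx, then tries idx - 1, and releases idx again when the partner is busy (lines 912-917); armv8pmu_clear_event_idx() later clears both bits. A sketch of that claim-and-roll-back step; the starting index, step and counter count here are simplified for the demo, not the driver's exact indexing:

#include <stdbool.h>
#include <stdio.h>

/* single-word stand-in for test_and_set_bit() */
static bool test_and_set(unsigned long *mask, int bit)
{
    bool was_set = *mask & (1UL << bit);

    *mask |= 1UL << bit;
    return was_set;
}

static int get_chain_idx(unsigned long *used_mask, int num_counters)
{
    for (int idx = 1; idx < num_counters; idx += 2) {
        if (!test_and_set(used_mask, idx)) {
            /* got the high half, now try its lower neighbour */
            if (!test_and_set(used_mask, idx - 1))
                return idx;
            /* partner busy: roll back the bit we just claimed */
            *used_mask &= ~(1UL << idx);
        }
    }
    return -1;
}

int main(void)
{
    unsigned long used_mask = 1UL << 0;   /* counter 0 already in use */
    int idx = get_chain_idx(&used_mask, 6);

    printf("chained event got idx=%d, used_mask=%#lx\n", idx, used_mask);
    return 0;
}
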
/linux-6.1.9/arch/loongarch/kernel/

  perf_event.c
    97  unsigned long used_mask[BITS_TO_LONGS(LOONGARCH_MAX_HWEVENTS)];   (member)
   258  if (!test_and_set_bit(i, cpuc->used_mask))   in loongarch_pmu_alloc_counter()
   430  clear_bit(idx, cpuc->used_mask);   in loongarch_pmu_del()
   519  if (test_bit(n, cpuc->used_mask)) {   in pmu_handle_irq()

/linux-6.1.9/arch/arc/kernel/

  perf_event.c
   211  unsigned long used_mask[BITS_TO_LONGS(ARC_PERF_MAX_COUNTERS)];   (member)
   514  __clear_bit(event->hw.idx, pmu_cpu->used_mask);   in arc_pmu_del()
   528  idx = ffz(pmu_cpu->used_mask[0]);   in arc_pmu_add()
   532  __set_bit(idx, pmu_cpu->used_mask);   in arc_pmu_add()

/linux-6.1.9/arch/x86/events/intel/

  p4.c
  1226  static int p4_next_cntr(int thread, unsigned long *used_mask,   in p4_next_cntr()  (argument)
  1233  if (j != -1 && !test_bit(j, used_mask))   in p4_next_cntr()
  1242  unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];   in p4_pmu_schedule_events()  (local)
  1252  bitmap_zero(used_mask, X86_PMC_IDX_MAX);   in p4_pmu_schedule_events()
  1282  cntr_idx = p4_next_cntr(thread, used_mask, bind);   in p4_pmu_schedule_events()
  1316  set_bit(cntr_idx, used_mask);   in p4_pmu_schedule_events()

/linux-6.1.9/include/linux/perf/

  arm_pmu.h
    58  DECLARE_BITMAP(used_mask, ARMPMU_MAX_HWEVENTS);

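Two declaration styles show up throughout this listing: an explicit unsigned long used_mask[BITS_TO_LONGS(...)] array (sh, xtensa, arc, mips, loongarch, qcom_l3) and DECLARE_BITMAP(used_mask, ...) as in arm_pmu.h above. They declare the same thing; DECLARE_BITMAP simply wraps the array form, with BITS_TO_LONGS rounding the bit count up to whole unsigned longs. Roughly, and compilable on its own (the macro bodies below are simplified stand-ins for the real ones in linux/types.h and linux/bitops.h):

#include <stdio.h>

#define BITS_PER_LONG           (8 * sizeof(long))
#define BITS_TO_LONGS(nr)       (((nr) + BITS_PER_LONG - 1) / BITS_PER_LONG)
#define DECLARE_BITMAP(name, bits) \
        unsigned long name[BITS_TO_LONGS(bits)]

#define ARMPMU_MAX_HWEVENTS     32      /* value used by arm_pmu.h */

int main(void)
{
    /* expands to: unsigned long used_mask[1] on a 64-bit build */
    DECLARE_BITMAP(used_mask, ARMPMU_MAX_HWEVENTS);

    printf("used_mask holds %zu unsigned long(s), %zu bits\n",
           sizeof(used_mask) / sizeof(used_mask[0]),
           8 * sizeof(used_mask));
    return 0;
}
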
/linux-6.1.9/drivers/dma/idxd/

  perfmon.c
   140  if (!test_and_set_bit(i, idxd_pmu->used_mask))   in perfmon_assign_event()
   401  clear_bit(cntr, idxd->idxd_pmu->used_mask);   in perfmon_pmu_event_stop()

/linux-6.1.9/arch/mips/kernel/

  perf_event_mipsxx.c
    39  unsigned long used_mask[BITS_TO_LONGS(MIPS_MAX_HWEVENTS)];   (member)
   341  !test_and_set_bit(i, cpuc->used_mask))   in mipsxx_pmu_alloc_counter()
   545  clear_bit(idx, cpuc->used_mask);   in mipspmu_del()
  1601  if (!test_bit(n, cpuc->used_mask))   in mipsxx_pmu_handle_shared_irq()

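Finally, the mips, xtensa and loongarch entries read used_mask in the opposite direction: their overflow interrupt handlers walk the bitmap (for_each_set_bit(), or test_bit() per counter) so that only counters with an event actually scheduled on them get serviced. A compact model of that loop; the counter count and the overflow source are invented for the demo:

#include <stdio.h>

#define NUM_COUNTERS 8

/* stand-in for the for_each_set_bit() walk in the handlers above */
static void handle_irq(unsigned long used_mask, unsigned long overflow_status)
{
    for (int i = 0; i < NUM_COUNTERS; i++) {
        if (!(used_mask & (1UL << i)))
            continue;                   /* no event on this counter */
        if (overflow_status & (1UL << i))
            printf("counter %d overflowed, update its event\n", i);
    }
}

int main(void)
{
    unsigned long used_mask = 0x0b;     /* counters 0, 1 and 3 in use  */
    unsigned long overflow  = 0x09;     /* counters 0 and 3 overflowed */

    handle_irq(used_mask, overflow);
    return 0;
}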