Searched refs: used_mask (results 1 – 11 of 11, sorted by relevance)
/linux-3.4.99/arch/sh/kernel/
    perf_event.c
        33    unsigned long used_mask[BITS_TO_LONGS(MAX_HWEVENTS)];  (member)
        268   __clear_bit(event->hw.idx, cpuc->used_mask);  in sh_pmu_del()
        282   if (__test_and_set_bit(idx, cpuc->used_mask)) {  in sh_pmu_add()
        283   idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);  in sh_pmu_add()
        287   __set_bit(idx, cpuc->used_mask);  in sh_pmu_add()

/linux-3.4.99/arch/blackfin/kernel/
    perf_event.c
        232   unsigned long used_mask[BITS_TO_LONGS(MAX_HWEVENTS)];  (member)
        341   __clear_bit(event->hw.idx, cpuc->used_mask);  in bfin_pmu_del()
        355   if (__test_and_set_bit(idx, cpuc->used_mask)) {  in bfin_pmu_add()
        356   idx = find_first_zero_bit(cpuc->used_mask, MAX_HWEVENTS);  in bfin_pmu_add()
        360   __set_bit(idx, cpuc->used_mask);  in bfin_pmu_add()

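The SH and Blackfin hits above are the same per-CPU counter-allocation idiom: used_mask is a bitmap with one bit per hardware counter, pmu_add claims a bit and pmu_del releases it. A minimal sketch of that idiom follows; the demo_* names and DEMO_MAX_HWEVENTS are illustrative stand-ins, while the bitops (__test_and_set_bit, find_first_zero_bit, __set_bit, __clear_bit) are the real <linux/bitops.h> helpers shown in the results.

    #include <linux/bitops.h>
    #include <linux/errno.h>

    #define DEMO_MAX_HWEVENTS 8   /* stand-in for MAX_HWEVENTS */

    struct demo_cpu_events {
            /* one bit per hardware counter, set while the counter is in use */
            unsigned long used_mask[BITS_TO_LONGS(DEMO_MAX_HWEVENTS)];
    };

    /* pmu_add side: claim a counter, preferring the hinted index. */
    static int demo_pmu_add(struct demo_cpu_events *cpuc, int idx)
    {
            if (__test_and_set_bit(idx, cpuc->used_mask)) {
                    /* hinted counter is busy: fall back to the first free one */
                    idx = find_first_zero_bit(cpuc->used_mask, DEMO_MAX_HWEVENTS);
                    if (idx == DEMO_MAX_HWEVENTS)
                            return -EAGAIN;
                    __set_bit(idx, cpuc->used_mask);
            }
            return idx;
    }

    /* pmu_del side: release the counter so a later add can reuse it. */
    static void demo_pmu_del(struct demo_cpu_events *cpuc, int idx)
    {
            __clear_bit(idx, cpuc->used_mask);
    }
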
/linux-3.4.99/arch/arm/kernel/
    perf_event.c
        42    static DEFINE_PER_CPU(unsigned long [BITS_TO_LONGS(ARMPMU_MAX_HWEVENTS)], used_mask);
        276   clear_bit(idx, hw_events->used_mask);  in armpmu_del()
        351   fake_pmu.used_mask = fake_used_mask;  in validate_group()
        586   int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);  in armpmu_enable()
        690   events->used_mask = per_cpu(used_mask, cpu);  in cpu_pmu_init()

    perf_event_xscale.c
        356   if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask))  in xscale1pmu_get_event_idx()
        361   if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask))  in xscale1pmu_get_event_idx()
        364   if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask))  in xscale1pmu_get_event_idx()
        727   if (!test_and_set_bit(XSCALE_COUNTER3, cpuc->used_mask))  in xscale2pmu_get_event_idx()
        729   else if (!test_and_set_bit(XSCALE_COUNTER2, cpuc->used_mask))  in xscale2pmu_get_event_idx()

    perf_event_v6.c
        564   if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask))  in armv6pmu_get_event_idx()
        573   if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask))  in armv6pmu_get_event_idx()
        576   if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask))  in armv6pmu_get_event_idx()

    perf_event_v7.c
        1150  if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))  in armv7pmu_get_event_idx()
        1161  if (!test_and_set_bit(idx, cpuc->used_mask))  in armv7pmu_get_event_idx()

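On ARM, used_mask lives in a per-CPU array that cpu_pmu_init() wires into the hw_events structure, and the per-version get_event_idx() callbacks all share one shape: an event counting CPU cycles tries the dedicated cycle counter first, anything else takes the first free general-purpose counter. A rough sketch of that selection, with the DEMO_* constants and the is_cycle_event flag as illustrative stand-ins for the XSCALE_*/ARMV6_*/ARMV7_IDX_* definitions:

    #include <linux/bitops.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    #define DEMO_IDX_CYCLE_COUNTER  0   /* dedicated cycle counter */
    #define DEMO_IDX_COUNTER0       1   /* first general-purpose counter */
    #define DEMO_NUM_COUNTERS       5

    static int demo_get_event_idx(unsigned long *used_mask, bool is_cycle_event)
    {
            int idx;

            if (is_cycle_event) {
                    /* only one event at a time may own the cycle counter */
                    if (test_and_set_bit(DEMO_IDX_CYCLE_COUNTER, used_mask))
                            return -EAGAIN;
                    return DEMO_IDX_CYCLE_COUNTER;
            }

            /* otherwise take the first free general-purpose counter */
            for (idx = DEMO_IDX_COUNTER0; idx < DEMO_NUM_COUNTERS; idx++)
                    if (!test_and_set_bit(idx, used_mask))
                            return idx;

            return -EAGAIN;
    }

Note that, as the hits show, the ARM paths use the atomic bitops (test_and_set_bit, clear_bit) where the SH and Blackfin code uses the non-atomic __ variants.
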
/linux-3.4.99/arch/arm/include/asm/
    pmu.h
        95    unsigned long *used_mask;  (member)

/linux-3.4.99/arch/x86/kernel/cpu/
    perf_event_p4.c
        1193  static int p4_next_cntr(int thread, unsigned long *used_mask,  in p4_next_cntr()  (argument)
        1200  if (j != -1 && !test_bit(j, used_mask))  in p4_next_cntr()
        1209  unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];  in p4_pmu_schedule_events()  (local)
        1219  bitmap_zero(used_mask, X86_PMC_IDX_MAX);  in p4_pmu_schedule_events()
        1249  cntr_idx = p4_next_cntr(thread, used_mask, bind);  in p4_pmu_schedule_events()
        1266  set_bit(cntr_idx, used_mask);  in p4_pmu_schedule_events()

    perf_event.c
        730   unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];  in x86_schedule_events()  (local)
        734   bitmap_zero(used_mask, X86_PMC_IDX_MAX);  in x86_schedule_events()
        759   if (test_bit(hwc->idx, used_mask))  in x86_schedule_events()
        762   __set_bit(hwc->idx, used_mask);  in x86_schedule_events()

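The x86 hits use used_mask differently: it is a function-local bitmap inside the event-scheduling pass (p4_pmu_schedule_events() / x86_schedule_events()), zeroed with bitmap_zero() at the top and consulted so that no two events get planned onto the same counter. A simplified sketch of that shape; demo_schedule() and DEMO_PMC_IDX_MAX are illustrative stand-ins for the real routines and X86_PMC_IDX_MAX:

    #include <linux/bitmap.h>
    #include <linux/bitops.h>
    #include <linux/errno.h>

    #define DEMO_PMC_IDX_MAX 64   /* stand-in for X86_PMC_IDX_MAX */

    /*
     * Plan n events onto counters.  wanted[] holds each event's preferred
     * counter index; assign[] receives the final assignment.
     */
    static int demo_schedule(const int *wanted, int n, int *assign)
    {
            unsigned long used_mask[BITS_TO_LONGS(DEMO_PMC_IDX_MAX)];
            int i;

            bitmap_zero(used_mask, DEMO_PMC_IDX_MAX);

            for (i = 0; i < n; i++) {
                    /*
                     * Counter already claimed in this pass: the kernel falls
                     * back to a slower constraint-solving pass here; the
                     * sketch simply fails.
                     */
                    if (test_bit(wanted[i], used_mask))
                            return -EINVAL;
                    __set_bit(wanted[i], used_mask);
                    assign[i] = wanted[i];
            }
            return 0;
    }
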
/linux-3.4.99/arch/mips/kernel/
    perf_event_mipsxx.c
        40    unsigned long used_mask[BITS_TO_LONGS(MIPS_MAX_HWEVENTS)];  (member)
        331   !test_and_set_bit(i, cpuc->used_mask))  in mipsxx_pmu_alloc_counter()
        499   clear_bit(idx, cpuc->used_mask);  in mipspmu_del()
        1333  if (test_bit(n, cpuc->used_mask)) { \  in mipsxx_pmu_handle_shared_irq()

/linux-3.4.99/mm/
    page_alloc.c
        3198  nodemask_t used_mask;  in build_zonelists()  (local)
        3214  nodes_clear(used_mask);  in build_zonelists()
        3219  while ((node = find_next_best_node(local_node, &used_mask)) >= 0) {  in build_zonelists()

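The final hit, in mm/page_alloc.c, is unrelated to perf: there used_mask is a nodemask_t recording which NUMA nodes build_zonelists() has already placed, and find_next_best_node() keeps returning and marking nodes until none remain. A toy version of that loop; demo_next_node() is an illustrative stand-in that ignores the NUMA-distance preference the real find_next_best_node() applies:

    #include <linux/nodemask.h>

    #define DEMO_NR_NODES 4

    /* Return the lowest node id not yet in *used, mark it used, or -1. */
    static int demo_next_node(nodemask_t *used)
    {
            int node;

            for (node = 0; node < DEMO_NR_NODES; node++) {
                    if (!node_isset(node, *used)) {
                            node_set(node, *used);
                            return node;
                    }
            }
            return -1;
    }

    /* build_zonelists()-style loop: visit every node exactly once. */
    static void demo_order_nodes(int *order, int *count)
    {
            nodemask_t used_mask;
            int node;

            nodes_clear(used_mask);
            *count = 0;
            while ((node = demo_next_node(&used_mask)) >= 0)
                    order[(*count)++] = node;
    }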