Lines matching refs: box

239 static inline bool uncore_mmio_is_valid_offset(struct intel_uncore_box *box,  in uncore_mmio_is_valid_offset()  argument
242 if (offset < box->pmu->type->mmio_map_size) in uncore_mmio_is_valid_offset()
246 offset, box->pmu->type->name); in uncore_mmio_is_valid_offset()
252 unsigned int uncore_mmio_box_ctl(struct intel_uncore_box *box) in uncore_mmio_box_ctl() argument
254 return box->pmu->type->box_ctl + in uncore_mmio_box_ctl()
255 box->pmu->type->mmio_offset * box->pmu->pmu_idx; in uncore_mmio_box_ctl()
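The two MMIO helpers above compute a box's control-register address as the type's box_ctl base plus a per-box stride (mmio_offset) times the PMU index, and reject any offset that falls outside the mapped window (mmio_map_size). A minimal standalone sketch of that arithmetic follows; the struct names (mmio_type, mmio_box) and the example offsets in main() are illustrative stand-ins, not the kernel's intel_uncore_* definitions.

/*
 * Illustrative sketch only -- simplified stand-ins for the kernel's
 * intel_uncore_type/_pmu/_box structures.
 */
#include <stdbool.h>
#include <stdio.h>

struct mmio_type {
	unsigned int box_ctl;        /* control register of box 0          */
	unsigned int mmio_offset;    /* distance between consecutive boxes */
	unsigned int mmio_map_size;  /* size of the mapped MMIO window     */
};

struct mmio_box {
	const struct mmio_type *type;
	int pmu_idx;                 /* which instance of this box type    */
};

/* box N's control register = base + N * per-box stride */
static unsigned int mmio_box_ctl(const struct mmio_box *box)
{
	return box->type->box_ctl + box->type->mmio_offset * box->pmu_idx;
}

/* an offset is only usable if it lies inside the mapped window */
static bool mmio_is_valid_offset(const struct mmio_box *box, unsigned long offset)
{
	if (offset < box->type->mmio_map_size)
		return true;

	fprintf(stderr, "offset 0x%lx is past the mapped window\n", offset);
	return false;
}

int main(void)
{
	/* example numbers only, not taken from a real uncore unit */
	struct mmio_type t = { .box_ctl = 0x2900, .mmio_offset = 0x100,
			       .mmio_map_size = 0x4000 };
	struct mmio_box b = { .type = &t, .pmu_idx = 2 };

	printf("box ctl = 0x%x, valid = %d\n",
	       mmio_box_ctl(&b), mmio_is_valid_offset(&b, mmio_box_ctl(&b)));
	return 0;
}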
258 static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box) in uncore_pci_box_ctl() argument
260 return box->pmu->type->box_ctl; in uncore_pci_box_ctl()
263 static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box) in uncore_pci_fixed_ctl() argument
265 return box->pmu->type->fixed_ctl; in uncore_pci_fixed_ctl()
268 static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box) in uncore_pci_fixed_ctr() argument
270 return box->pmu->type->fixed_ctr; in uncore_pci_fixed_ctr()
274 unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx) in uncore_pci_event_ctl() argument
276 if (test_bit(UNCORE_BOX_FLAG_CTL_OFFS8, &box->flags)) in uncore_pci_event_ctl()
277 return idx * 8 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
279 return idx * 4 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
283 unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_pci_perf_ctr() argument
285 return idx * 8 + box->pmu->type->perf_ctr; in uncore_pci_perf_ctr()
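For PCI-attached boxes, the fixed control and counter offsets come straight from the type, while per-index event controls sit 4 bytes apart (8 bytes when UNCORE_BOX_FLAG_CTL_OFFS8 is set) and counters sit 8 bytes apart. A hedged sketch of that spacing, again with hypothetical type names and made-up base offsets:

/* Illustrative sketch, not the kernel code: per-index PCI config offsets. */
#include <stdbool.h>
#include <stdio.h>

struct pci_uncore_type {
	unsigned int event_ctl;  /* offset of event control reg 0 */
	unsigned int perf_ctr;   /* offset of counter 0           */
};

static unsigned int pci_event_ctl(const struct pci_uncore_type *t, int idx,
				  bool ctl_offs8)
{
	/* controls are normally 4 bytes apart; some boxes use an 8-byte stride */
	return (ctl_offs8 ? idx * 8 : idx * 4) + t->event_ctl;
}

static unsigned int pci_perf_ctr(const struct pci_uncore_type *t, int idx)
{
	/* 64-bit counters, so always an 8-byte stride */
	return idx * 8 + t->perf_ctr;
}

int main(void)
{
	/* example base offsets only -- not taken from any real uncore unit */
	struct pci_uncore_type t = { .event_ctl = 0xd8, .perf_ctr = 0xa0 };

	for (int idx = 0; idx < 4; idx++)
		printf("ctl[%d]=0x%x ctr[%d]=0x%x\n", idx,
		       pci_event_ctl(&t, idx, false), idx, pci_perf_ctr(&t, idx));
	return 0;
}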
288 static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box) in uncore_msr_box_offset() argument
290 struct intel_uncore_pmu *pmu = box->pmu; in uncore_msr_box_offset()
296 static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box) in uncore_msr_box_ctl() argument
298 if (!box->pmu->type->box_ctl) in uncore_msr_box_ctl()
300 return box->pmu->type->box_ctl + uncore_msr_box_offset(box); in uncore_msr_box_ctl()
303 static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box) in uncore_msr_fixed_ctl() argument
305 if (!box->pmu->type->fixed_ctl) in uncore_msr_fixed_ctl()
307 return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box); in uncore_msr_fixed_ctl()
310 static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box) in uncore_msr_fixed_ctr() argument
312 return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box); in uncore_msr_fixed_ctr()
357 unsigned int uncore_freerunning_counter(struct intel_uncore_box *box, in uncore_freerunning_counter() argument
362 struct intel_uncore_pmu *pmu = box->pmu; in uncore_freerunning_counter()
372 unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx) in uncore_msr_event_ctl() argument
374 if (test_bit(UNCORE_BOX_FLAG_CFL8_CBOX_MSR_OFFS, &box->flags)) { in uncore_msr_event_ctl()
376 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_event_ctl()
378 return box->pmu->type->event_ctl + in uncore_msr_event_ctl()
379 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_event_ctl()
380 uncore_msr_box_offset(box); in uncore_msr_event_ctl()
385 unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_msr_perf_ctr() argument
387 if (test_bit(UNCORE_BOX_FLAG_CFL8_CBOX_MSR_OFFS, &box->flags)) { in uncore_msr_perf_ctr()
389 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_perf_ctr()
391 return box->pmu->type->perf_ctr + in uncore_msr_perf_ctr()
392 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_perf_ctr()
393 uncore_msr_box_offset(box); in uncore_msr_perf_ctr()
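The MSR-side helpers compose three terms: the type's base MSR, an index stride that doubles when control and counter MSRs are interleaved (pair_ctr_ctl), and a per-box offset from uncore_msr_box_offset(), whose body is largely elided in the listing above. The sketch below assumes the simple layout in which the per-box offset is msr_offset * pmu_idx; the real code also supports other per-box layouts, so treat this as one possible arrangement only.

/* Illustrative sketch of the MSR address composition used above. */
#include <stdbool.h>
#include <stdio.h>

struct msr_uncore_type {
	unsigned int event_ctl;    /* first event-select MSR of box 0     */
	unsigned int perf_ctr;     /* first counter MSR of box 0          */
	unsigned int msr_offset;   /* MSR distance between adjacent boxes */
	bool pair_ctr_ctl;         /* ctl/ctr MSRs interleaved?           */
};

struct msr_uncore_box {
	const struct msr_uncore_type *type;
	int pmu_idx;
};

/* assumption: a fixed per-box stride; offset tables are also possible */
static unsigned int msr_box_offset(const struct msr_uncore_box *box)
{
	return box->type->msr_offset * box->pmu_idx;
}

static unsigned int msr_event_ctl(const struct msr_uncore_box *box, int idx)
{
	unsigned int stride = box->type->pair_ctr_ctl ? 2 * idx : idx;

	return box->type->event_ctl + stride + msr_box_offset(box);
}

static unsigned int msr_perf_ctr(const struct msr_uncore_box *box, int idx)
{
	unsigned int stride = box->type->pair_ctr_ctl ? 2 * idx : idx;

	return box->type->perf_ctr + stride + msr_box_offset(box);
}

int main(void)
{
	/* example MSR numbers only */
	struct msr_uncore_type t = {
		.event_ctl = 0xe01, .perf_ctr = 0xe08,
		.msr_offset = 0x10, .pair_ctr_ctl = false,
	};
	struct msr_uncore_box b = { .type = &t, .pmu_idx = 1 };

	printf("ctl[2]=0x%x ctr[2]=0x%x\n",
	       msr_event_ctl(&b, 2), msr_perf_ctr(&b, 2));
	return 0;
}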
398 unsigned uncore_fixed_ctl(struct intel_uncore_box *box) in uncore_fixed_ctl() argument
400 if (box->pci_dev || box->io_addr) in uncore_fixed_ctl()
401 return uncore_pci_fixed_ctl(box); in uncore_fixed_ctl()
403 return uncore_msr_fixed_ctl(box); in uncore_fixed_ctl()
407 unsigned uncore_fixed_ctr(struct intel_uncore_box *box) in uncore_fixed_ctr() argument
409 if (box->pci_dev || box->io_addr) in uncore_fixed_ctr()
410 return uncore_pci_fixed_ctr(box); in uncore_fixed_ctr()
412 return uncore_msr_fixed_ctr(box); in uncore_fixed_ctr()
416 unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx) in uncore_event_ctl() argument
418 if (box->pci_dev || box->io_addr) in uncore_event_ctl()
419 return uncore_pci_event_ctl(box, idx); in uncore_event_ctl()
421 return uncore_msr_event_ctl(box, idx); in uncore_event_ctl()
425 unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_perf_ctr() argument
427 if (box->pci_dev || box->io_addr) in uncore_perf_ctr()
428 return uncore_pci_perf_ctr(box, idx); in uncore_perf_ctr()
430 return uncore_msr_perf_ctr(box, idx); in uncore_perf_ctr()
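The generic uncore_fixed_ctl()/uncore_fixed_ctr()/uncore_event_ctl()/uncore_perf_ctr() wrappers pick an access scheme per box: a box reached through PCI config space (pci_dev set) or an MMIO mapping (io_addr set) uses the byte-offset helpers, anything else falls back to the MSR helpers. A minimal sketch of that dispatch, with hypothetical stand-in helpers:

/* Illustrative dispatch sketch: PCI/MMIO boxes vs. MSR boxes. */
#include <stddef.h>
#include <stdio.h>

struct generic_box {
	void *pci_dev;   /* non-NULL when the box lives in PCI config space */
	void *io_addr;   /* non-NULL when the box is MMIO mapped            */
};

/* hypothetical stand-ins for the real per-scheme helpers */
static unsigned int pci_event_ctl(const struct generic_box *box, int idx)
{
	(void)box;
	return 0xd8 + idx * 4;
}

static unsigned int msr_event_ctl(const struct generic_box *box, int idx)
{
	(void)box;
	return 0xe01 + idx;
}

static unsigned int event_ctl(const struct generic_box *box, int idx)
{
	/* PCI and MMIO boxes share the byte-offset layout; the rest use MSRs */
	if (box->pci_dev || box->io_addr)
		return pci_event_ctl(box, idx);
	return msr_event_ctl(box, idx);
}

int main(void)
{
	int dummy_pci_dev;
	struct generic_box msr_box = { 0 };
	struct generic_box pci_box = { .pci_dev = &dummy_pci_dev };

	printf("msr box ctl[0]=0x%x, pci box ctl[0]=0x%x\n",
	       event_ctl(&msr_box, 0), event_ctl(&pci_box, 0));
	return 0;
}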
433 static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box) in uncore_perf_ctr_bits() argument
435 return box->pmu->type->perf_ctr_bits; in uncore_perf_ctr_bits()
438 static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box) in uncore_fixed_ctr_bits() argument
440 return box->pmu->type->fixed_ctr_bits; in uncore_fixed_ctr_bits()
444 unsigned int uncore_freerunning_bits(struct intel_uncore_box *box, in uncore_freerunning_bits() argument
449 return box->pmu->type->freerunning[type].bits; in uncore_freerunning_bits()
452 static inline int uncore_num_freerunning(struct intel_uncore_box *box, in uncore_num_freerunning() argument
457 return box->pmu->type->freerunning[type].num_counters; in uncore_num_freerunning()
460 static inline int uncore_num_freerunning_types(struct intel_uncore_box *box, in uncore_num_freerunning_types() argument
463 return box->pmu->type->num_freerunning_types; in uncore_num_freerunning_types()
466 static inline bool check_valid_freerunning_event(struct intel_uncore_box *box, in check_valid_freerunning_event() argument
472 return (type < uncore_num_freerunning_types(box, event)) && in check_valid_freerunning_event()
473 (idx < uncore_num_freerunning(box, event)); in check_valid_freerunning_event()
476 static inline int uncore_num_counters(struct intel_uncore_box *box) in uncore_num_counters() argument
478 return box->pmu->type->num_counters; in uncore_num_counters()
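check_valid_freerunning_event() accepts a free-running event only when its type index is within the type's freerunning table and its counter index is below that entry's num_counters. A short sketch of the same bounds check; how the type and counter indices are decoded from the event is treated as given here, since that part is not visible in the listing:

/* Illustrative bounds check for free-running counters. */
#include <stdbool.h>
#include <stdio.h>

struct freerunning_type {
	unsigned int num_counters;  /* counters of this free-running type */
	unsigned int bits;          /* counter width                      */
};

struct fr_box_type {
	const struct freerunning_type *freerunning;
	unsigned int num_freerunning_types;
};

static bool check_valid_freerunning(const struct fr_box_type *t,
				    unsigned int type, unsigned int idx)
{
	/* both the type index and the counter index must be in range */
	return type < t->num_freerunning_types &&
	       idx < t->freerunning[type].num_counters;
}

int main(void)
{
	/* example table: one bandwidth-style type with four 48-bit counters */
	struct freerunning_type table[] = { { .num_counters = 4, .bits = 48 } };
	struct fr_box_type t = { .freerunning = table, .num_freerunning_types = 1 };

	printf("%d %d\n", check_valid_freerunning(&t, 0, 3),
	       check_valid_freerunning(&t, 0, 4));
	return 0;
}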
490 static inline int uncore_freerunning_hw_config(struct intel_uncore_box *box, in uncore_freerunning_hw_config() argument
499 static inline void uncore_disable_event(struct intel_uncore_box *box, in uncore_disable_event() argument
502 box->pmu->type->ops->disable_event(box, event); in uncore_disable_event()
505 static inline void uncore_enable_event(struct intel_uncore_box *box, in uncore_enable_event() argument
508 box->pmu->type->ops->enable_event(box, event); in uncore_enable_event()
511 static inline u64 uncore_read_counter(struct intel_uncore_box *box, in uncore_read_counter() argument
514 return box->pmu->type->ops->read_counter(box, event); in uncore_read_counter()
517 static inline void uncore_box_init(struct intel_uncore_box *box) in uncore_box_init() argument
519 if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { in uncore_box_init()
520 if (box->pmu->type->ops->init_box) in uncore_box_init()
521 box->pmu->type->ops->init_box(box); in uncore_box_init()
525 static inline void uncore_box_exit(struct intel_uncore_box *box) in uncore_box_exit() argument
527 if (test_and_clear_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { in uncore_box_exit()
528 if (box->pmu->type->ops->exit_box) in uncore_box_exit()
529 box->pmu->type->ops->exit_box(box); in uncore_box_exit()
533 static inline bool uncore_box_is_fake(struct intel_uncore_box *box) in uncore_box_is_fake() argument
535 return (box->dieid < 0); in uncore_box_is_fake()
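uncore_box_init() and uncore_box_exit() gate the type's init_box/exit_box callbacks behind a test-and-set / test-and-clear of UNCORE_BOX_FLAG_INITIATED, so each callback runs at most once per box regardless of how many events come and go. A userspace sketch of the same idempotent pattern, using C11 atomics in place of the kernel's test_and_set_bit()/test_and_clear_bit():

/* Illustrative once-only init/exit via an atomic flag bit (C11 atomics). */
#include <stdatomic.h>
#include <stdio.h>

#define BOX_FLAG_INITIATED (1u << 0)

struct demo_box {
	atomic_uint flags;
	void (*init_box)(struct demo_box *box);
	void (*exit_box)(struct demo_box *box);
};

static void box_init(struct demo_box *box)
{
	/* only the caller that flips the bit from 0 to 1 runs the callback */
	unsigned int old = atomic_fetch_or(&box->flags, BOX_FLAG_INITIATED);

	if (!(old & BOX_FLAG_INITIATED) && box->init_box)
		box->init_box(box);
}

static void box_exit(struct demo_box *box)
{
	/* only the caller that clears the bit runs the callback */
	unsigned int old = atomic_fetch_and(&box->flags, ~BOX_FLAG_INITIATED);

	if ((old & BOX_FLAG_INITIATED) && box->exit_box)
		box->exit_box(box);
}

static void say_init(struct demo_box *box) { (void)box; puts("init once"); }
static void say_exit(struct demo_box *box) { (void)box; puts("exit once"); }

int main(void)
{
	struct demo_box b = { .init_box = say_init, .exit_box = say_exit };

	box_init(&b);
	box_init(&b);  /* no second "init once" */
	box_exit(&b);
	box_exit(&b);  /* no second "exit once" */
	return 0;
}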
549 u64 uncore_msr_read_counter(struct intel_uncore_box *box, struct perf_event *event);
550 void uncore_mmio_exit_box(struct intel_uncore_box *box);
551 u64 uncore_mmio_read_counter(struct intel_uncore_box *box,
553 void uncore_pmu_start_hrtimer(struct intel_uncore_box *box);
554 void uncore_pmu_cancel_hrtimer(struct intel_uncore_box *box);
560 void uncore_perf_event_update(struct intel_uncore_box *box, struct perf_event *event);
562 uncore_get_constraint(struct intel_uncore_box *box, struct perf_event *event);
563 void uncore_put_constraint(struct intel_uncore_box *box, struct perf_event *event);
564 u64 uncore_shared_reg_config(struct intel_uncore_box *box, int idx);
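uncore_perf_event_update() is only declared in the lines above; its body is not shown, but it presumably follows the usual x86 perf pattern of reading the raw counter, computing the delta against the last saved value modulo the width reported by uncore_perf_ctr_bits()/uncore_fixed_ctr_bits(), and accumulating that delta. The sketch below shows just that wrap-tolerant delta arithmetic; counter_delta() is a hypothetical helper, not a kernel function:

/* Illustrative delta computation for a counter narrower than 64 bits. */
#include <stdint.h>
#include <stdio.h>

/* delta of two raw reads of a `bits`-wide counter, tolerating one wrap */
static uint64_t counter_delta(uint64_t prev, uint64_t now, unsigned int bits)
{
	unsigned int shift = 64 - bits;

	/* shift both values up so the subtraction wraps at the counter width */
	return ((now << shift) - (prev << shift)) >> shift;
}

int main(void)
{
	/* 48-bit counter that wrapped between the two reads */
	uint64_t prev = 0xFFFFFFFFFFF0ULL;   /* near the top of 48 bits */
	uint64_t now  = 0x000000000010ULL;   /* just past the wrap      */

	printf("delta = %llu\n",
	       (unsigned long long)counter_delta(prev, now, 48));
	return 0;
}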