Lines matching refs: stream

Each entry below is one reference to stream: the source line number, the matching line of code, and the enclosing function; a trailing "argument" or "local" marks lines that declare stream as a function parameter or local variable.

415 static u32 gen12_oa_hw_tail_read(struct i915_perf_stream *stream)  in gen12_oa_hw_tail_read()  argument
417 struct intel_uncore *uncore = stream->uncore; in gen12_oa_hw_tail_read()
423 static u32 gen8_oa_hw_tail_read(struct i915_perf_stream *stream) in gen8_oa_hw_tail_read() argument
425 struct intel_uncore *uncore = stream->uncore; in gen8_oa_hw_tail_read()
430 static u32 gen7_oa_hw_tail_read(struct i915_perf_stream *stream) in gen7_oa_hw_tail_read() argument
432 struct intel_uncore *uncore = stream->uncore; in gen7_oa_hw_tail_read()
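The three *_oa_hw_tail_read() bodies are cut off by the matcher above; on Gen8..Gen11 the hook is essentially a masked register read of the OA tail pointer, with the Gen7 and Gen12 variants reading their own OASTATUS1 / OAG registers instead. A hedged completion of the Gen8 case, assuming the GEN8_OATAILPTR and GEN8_OATAILPTR_MASK definitions from i915_reg.h:

static u32 gen8_oa_hw_tail_read_sketch(struct i915_perf_stream *stream)
{
        struct intel_uncore *uncore = stream->uncore;

        /* GGTT address of the newest report the OA unit claims to have written. */
        return intel_uncore_read(uncore, GEN8_OATAILPTR) & GEN8_OATAILPTR_MASK;
}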
462 static bool oa_buffer_check_unlocked(struct i915_perf_stream *stream) in oa_buffer_check_unlocked() argument
464 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in oa_buffer_check_unlocked()
465 int report_size = stream->oa_buffer.format_size; in oa_buffer_check_unlocked()
475 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in oa_buffer_check_unlocked()
477 hw_tail = stream->perf->ops.oa_hw_tail_read(stream); in oa_buffer_check_unlocked()
486 if (hw_tail == stream->oa_buffer.aging_tail && in oa_buffer_check_unlocked()
487 (now - stream->oa_buffer.aging_timestamp) > OA_TAIL_MARGIN_NSEC) { in oa_buffer_check_unlocked()
492 stream->oa_buffer.tail = stream->oa_buffer.aging_tail; in oa_buffer_check_unlocked()
500 head = stream->oa_buffer.head - gtt_offset; in oa_buffer_check_unlocked()
501 aged_tail = stream->oa_buffer.tail - gtt_offset; in oa_buffer_check_unlocked()
518 u32 *report32 = (void *)(stream->oa_buffer.vaddr + tail); in oa_buffer_check_unlocked()
527 __ratelimit(&stream->perf->tail_pointer_race)) in oa_buffer_check_unlocked()
532 stream->oa_buffer.tail = gtt_offset + tail; in oa_buffer_check_unlocked()
533 stream->oa_buffer.aging_tail = gtt_offset + hw_tail; in oa_buffer_check_unlocked()
534 stream->oa_buffer.aging_timestamp = now; in oa_buffer_check_unlocked()
537 pollin = OA_TAKEN(stream->oa_buffer.tail - gtt_offset, in oa_buffer_check_unlocked()
538 stream->oa_buffer.head - gtt_offset) >= report_size; in oa_buffer_check_unlocked()
540 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in oa_buffer_check_unlocked()
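oa_buffer_check_unlocked() is where the "aging tail" trick lives: the tail register can run ahead of the memory writes backing it, so a freshly read hardware tail is only promoted to the tail that read()/poll() trust once it has stayed unchanged for OA_TAIL_MARGIN_NSEC (the real function additionally walks back over all-zero report headers to catch the tail-pointer race, hence the __ratelimit(&stream->perf->tail_pointer_race) line above). A minimal, self-contained model of that rule, using an illustrative struct rather than the kernel's layout; OA_BUFFER_SIZE and OA_TAIL_MARGIN_NSEC are the driver's own constants:

struct oa_tail_state {
        u32 head;               /* next offset the CPU will read from */
        u32 tail;               /* newest offset we trust to be complete */
        u32 aging_tail;         /* most recently read HW tail, still settling */
        u64 aging_timestamp;    /* when aging_tail was first observed */
};

/* Circular-buffer distance from head to tail. */
#define OA_TAKEN(tail, head) (((tail) - (head)) & (OA_BUFFER_SIZE - 1))

static bool oa_tail_has_data(struct oa_tail_state *s, u32 hw_tail, u64 now,
                             u32 report_size)
{
        if (hw_tail == s->aging_tail &&
            now - s->aging_timestamp > OA_TAIL_MARGIN_NSEC) {
                /* Unchanged for the whole margin: safe to expose to readers. */
                s->tail = s->aging_tail;
        } else if (hw_tail != s->aging_tail) {
                /* The OA unit moved on: restart the settling window. */
                s->aging_tail = hw_tail;
                s->aging_timestamp = now;
        }

        return OA_TAKEN(s->tail, s->head) >= report_size;
}

The boolean result is what becomes stream->pollin (lines 537-538 above), which poll() and the blocking read path ultimately wait on.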
560 static int append_oa_status(struct i915_perf_stream *stream, in append_oa_status() argument
596 static int append_oa_sample(struct i915_perf_stream *stream, in append_oa_sample() argument
602 int report_size = stream->oa_buffer.format_size; in append_oa_sample()
607 header.size = stream->sample_size; in append_oa_sample()
646 static int gen8_append_oa_reports(struct i915_perf_stream *stream, in gen8_append_oa_reports() argument
651 struct intel_uncore *uncore = stream->uncore; in gen8_append_oa_reports()
652 int report_size = stream->oa_buffer.format_size; in gen8_append_oa_reports()
653 u8 *oa_buf_base = stream->oa_buffer.vaddr; in gen8_append_oa_reports()
654 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen8_append_oa_reports()
662 if (drm_WARN_ON(&uncore->i915->drm, !stream->enabled)) in gen8_append_oa_reports()
665 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
667 head = stream->oa_buffer.head; in gen8_append_oa_reports()
668 tail = stream->oa_buffer.tail; in gen8_append_oa_reports()
670 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
728 (GRAPHICS_VER(stream->perf->i915) == 12 ? in gen8_append_oa_reports()
732 ctx_id = report32[2] & stream->specific_ctx_id_mask; in gen8_append_oa_reports()
742 if (!(report32[0] & stream->perf->gen8_valid_ctx_bit) && in gen8_append_oa_reports()
743 GRAPHICS_VER(stream->perf->i915) <= 11) in gen8_append_oa_reports()
777 if (!stream->perf->exclusive_stream->ctx || in gen8_append_oa_reports()
778 stream->specific_ctx_id == ctx_id || in gen8_append_oa_reports()
779 stream->oa_buffer.last_ctx_id == stream->specific_ctx_id || in gen8_append_oa_reports()
786 if (stream->perf->exclusive_stream->ctx && in gen8_append_oa_reports()
787 stream->specific_ctx_id != ctx_id) { in gen8_append_oa_reports()
791 ret = append_oa_sample(stream, buf, count, offset, in gen8_append_oa_reports()
796 stream->oa_buffer.last_ctx_id = ctx_id; in gen8_append_oa_reports()
810 oaheadptr = GRAPHICS_VER(stream->perf->i915) == 12 ? in gen8_append_oa_reports()
813 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
822 stream->oa_buffer.head = head; in gen8_append_oa_reports()
824 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
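Condensed paraphrase of the loop these gen8_append_oa_reports() fragments come from: head and tail are snapshotted under oa_buffer.ptr_lock, every completed report between them is either copied to userspace or dropped by the per-context filter, and the new head is published at the end (the real code also mirrors it into the OA head pointer register and applies a subtler context-switch filter than the one shown here). Offsets are kept buffer-relative in this sketch:

static int gen8_append_oa_reports_sketch(struct i915_perf_stream *stream,
                                         char __user *buf, size_t count,
                                         size_t *offset)
{
        int report_size = stream->oa_buffer.format_size;
        u8 *oa_buf_base = stream->oa_buffer.vaddr;
        u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma);
        unsigned long flags;
        u32 head, tail;
        int ret = 0;

        /* Snapshot the window of completed reports. */
        spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags);
        head = stream->oa_buffer.head - gtt_offset;
        tail = stream->oa_buffer.tail - gtt_offset;
        spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags);

        for (; OA_TAKEN(tail, head);
             head = (head + report_size) & (OA_BUFFER_SIZE - 1)) {
                u32 *report32 = (void *)(oa_buf_base + head);
                u32 ctx_id = report32[2] & stream->specific_ctx_id_mask;

                /* Per-context streams only hand back their own reports. */
                if (stream->ctx && stream->specific_ctx_id != ctx_id)
                        continue;

                ret = append_oa_sample(stream, buf, count, offset,
                                       (u8 *)report32);
                if (ret)
                        break;

                stream->oa_buffer.last_ctx_id = ctx_id;
        }

        /* Publish the new head so the buffer space can be reused. */
        spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags);
        stream->oa_buffer.head = gtt_offset + head;
        spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags);

        return ret;
}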
850 static int gen8_oa_read(struct i915_perf_stream *stream, in gen8_oa_read() argument
855 struct intel_uncore *uncore = stream->uncore; in gen8_oa_read()
860 if (drm_WARN_ON(&uncore->i915->drm, !stream->oa_buffer.vaddr)) in gen8_oa_read()
863 oastatus_reg = GRAPHICS_VER(stream->perf->i915) == 12 ? in gen8_oa_read()
883 ret = append_oa_status(stream, buf, count, offset, in gen8_oa_read()
889 stream->period_exponent); in gen8_oa_read()
891 stream->perf->ops.oa_disable(stream); in gen8_oa_read()
892 stream->perf->ops.oa_enable(stream); in gen8_oa_read()
902 ret = append_oa_status(stream, buf, count, offset, in gen8_oa_read()
915 return gen8_append_oa_reports(stream, buf, count, offset); in gen8_oa_read()
939 static int gen7_append_oa_reports(struct i915_perf_stream *stream, in gen7_append_oa_reports() argument
944 struct intel_uncore *uncore = stream->uncore; in gen7_append_oa_reports()
945 int report_size = stream->oa_buffer.format_size; in gen7_append_oa_reports()
946 u8 *oa_buf_base = stream->oa_buffer.vaddr; in gen7_append_oa_reports()
947 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen7_append_oa_reports()
955 if (drm_WARN_ON(&uncore->i915->drm, !stream->enabled)) in gen7_append_oa_reports()
958 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
960 head = stream->oa_buffer.head; in gen7_append_oa_reports()
961 tail = stream->oa_buffer.tail; in gen7_append_oa_reports()
963 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1013 if (__ratelimit(&stream->perf->spurious_report_rs)) in gen7_append_oa_reports()
1018 ret = append_oa_sample(stream, buf, count, offset, report); in gen7_append_oa_reports()
1030 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1040 stream->oa_buffer.head = head; in gen7_append_oa_reports()
1042 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1064 static int gen7_oa_read(struct i915_perf_stream *stream, in gen7_oa_read() argument
1069 struct intel_uncore *uncore = stream->uncore; in gen7_oa_read()
1073 if (drm_WARN_ON(&uncore->i915->drm, !stream->oa_buffer.vaddr)) in gen7_oa_read()
1083 oastatus1 &= ~stream->perf->gen7_latched_oastatus1; in gen7_oa_read()
1106 ret = append_oa_status(stream, buf, count, offset, in gen7_oa_read()
1112 stream->period_exponent); in gen7_oa_read()
1114 stream->perf->ops.oa_disable(stream); in gen7_oa_read()
1115 stream->perf->ops.oa_enable(stream); in gen7_oa_read()
1121 ret = append_oa_status(stream, buf, count, offset, in gen7_oa_read()
1125 stream->perf->gen7_latched_oastatus1 |= in gen7_oa_read()
1129 return gen7_append_oa_reports(stream, buf, count, offset); in gen7_oa_read()
1146 static int i915_oa_wait_unlocked(struct i915_perf_stream *stream) in i915_oa_wait_unlocked() argument
1149 if (!stream->periodic) in i915_oa_wait_unlocked()
1152 return wait_event_interruptible(stream->poll_wq, in i915_oa_wait_unlocked()
1153 oa_buffer_check_unlocked(stream)); in i915_oa_wait_unlocked()
1166 static void i915_oa_poll_wait(struct i915_perf_stream *stream, in i915_oa_poll_wait() argument
1170 poll_wait(file, &stream->poll_wq, wait); in i915_oa_poll_wait()
1185 static int i915_oa_read(struct i915_perf_stream *stream, in i915_oa_read() argument
1190 return stream->perf->ops.read(stream, buf, count, offset); in i915_oa_read()
1193 static struct intel_context *oa_pin_context(struct i915_perf_stream *stream) in oa_pin_context() argument
1196 struct i915_gem_context *ctx = stream->ctx; in oa_pin_context()
1202 if (ce->engine != stream->engine) /* first match! */ in oa_pin_context()
1230 stream->pinned_ctx = ce; in oa_pin_context()
1231 return stream->pinned_ctx; in oa_pin_context()
1244 static int oa_get_render_ctx_id(struct i915_perf_stream *stream) in oa_get_render_ctx_id() argument
1248 ce = oa_pin_context(stream); in oa_get_render_ctx_id()
1258 stream->specific_ctx_id = i915_ggtt_offset(ce->state); in oa_get_render_ctx_id()
1259 stream->specific_ctx_id_mask = 0; in oa_get_render_ctx_id()
1276 stream->specific_ctx_id = ce->lrc.lrca >> 12; in oa_get_render_ctx_id()
1282 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1285 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1287 stream->specific_ctx_id = stream->specific_ctx_id_mask; in oa_get_render_ctx_id()
1294 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1297 stream->specific_ctx_id = in oa_get_render_ctx_id()
1301 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1308 stream->specific_ctx_id = in oa_get_render_ctx_id()
1317 ce->tag = stream->specific_ctx_id; in oa_get_render_ctx_id()
1319 drm_dbg(&stream->perf->i915->drm, in oa_get_render_ctx_id()
1321 stream->specific_ctx_id, in oa_get_render_ctx_id()
1322 stream->specific_ctx_id_mask); in oa_get_render_ctx_id()
1334 static void oa_put_render_ctx_id(struct i915_perf_stream *stream) in oa_put_render_ctx_id() argument
1338 ce = fetch_and_zero(&stream->pinned_ctx); in oa_put_render_ctx_id()
1344 stream->specific_ctx_id = INVALID_CTX_ID; in oa_put_render_ctx_id()
1345 stream->specific_ctx_id_mask = 0; in oa_put_render_ctx_id()
1349 free_oa_buffer(struct i915_perf_stream *stream) in free_oa_buffer() argument
1351 i915_vma_unpin_and_release(&stream->oa_buffer.vma, in free_oa_buffer()
1354 stream->oa_buffer.vaddr = NULL; in free_oa_buffer()
1358 free_oa_configs(struct i915_perf_stream *stream) in free_oa_configs() argument
1362 i915_oa_config_put(stream->oa_config); in free_oa_configs()
1363 llist_for_each_entry_safe(oa_bo, tmp, stream->oa_config_bos.first, node) in free_oa_configs()
1368 free_noa_wait(struct i915_perf_stream *stream) in free_noa_wait() argument
1370 i915_vma_unpin_and_release(&stream->noa_wait, 0); in free_noa_wait()
1373 static void i915_oa_stream_destroy(struct i915_perf_stream *stream) in i915_oa_stream_destroy() argument
1375 struct i915_perf *perf = stream->perf; in i915_oa_stream_destroy()
1377 BUG_ON(stream != perf->exclusive_stream); in i915_oa_stream_destroy()
1386 perf->ops.disable_metric_set(stream); in i915_oa_stream_destroy()
1388 free_oa_buffer(stream); in i915_oa_stream_destroy()
1390 intel_uncore_forcewake_put(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_destroy()
1391 intel_engine_pm_put(stream->engine); in i915_oa_stream_destroy()
1393 if (stream->ctx) in i915_oa_stream_destroy()
1394 oa_put_render_ctx_id(stream); in i915_oa_stream_destroy()
1396 free_oa_configs(stream); in i915_oa_stream_destroy()
1397 free_noa_wait(stream); in i915_oa_stream_destroy()
1405 static void gen7_init_oa_buffer(struct i915_perf_stream *stream) in gen7_init_oa_buffer() argument
1407 struct intel_uncore *uncore = stream->uncore; in gen7_init_oa_buffer()
1408 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen7_init_oa_buffer()
1411 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_init_oa_buffer()
1418 stream->oa_buffer.head = gtt_offset; in gen7_init_oa_buffer()
1426 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen7_init_oa_buffer()
1427 stream->oa_buffer.tail = gtt_offset; in gen7_init_oa_buffer()
1429 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_init_oa_buffer()
1435 stream->perf->gen7_latched_oastatus1 = 0; in gen7_init_oa_buffer()
1448 memset(stream->oa_buffer.vaddr, 0, OA_BUFFER_SIZE); in gen7_init_oa_buffer()
1451 static void gen8_init_oa_buffer(struct i915_perf_stream *stream) in gen8_init_oa_buffer() argument
1453 struct intel_uncore *uncore = stream->uncore; in gen8_init_oa_buffer()
1454 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen8_init_oa_buffer()
1457 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_init_oa_buffer()
1461 stream->oa_buffer.head = gtt_offset; in gen8_init_oa_buffer()
1478 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen8_init_oa_buffer()
1479 stream->oa_buffer.tail = gtt_offset; in gen8_init_oa_buffer()
1486 stream->oa_buffer.last_ctx_id = INVALID_CTX_ID; in gen8_init_oa_buffer()
1488 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_init_oa_buffer()
1502 memset(stream->oa_buffer.vaddr, 0, OA_BUFFER_SIZE); in gen8_init_oa_buffer()
1505 static void gen12_init_oa_buffer(struct i915_perf_stream *stream) in gen12_init_oa_buffer() argument
1507 struct intel_uncore *uncore = stream->uncore; in gen12_init_oa_buffer()
1508 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen12_init_oa_buffer()
1511 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen12_init_oa_buffer()
1516 stream->oa_buffer.head = gtt_offset; in gen12_init_oa_buffer()
1532 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen12_init_oa_buffer()
1533 stream->oa_buffer.tail = gtt_offset; in gen12_init_oa_buffer()
1540 stream->oa_buffer.last_ctx_id = INVALID_CTX_ID; in gen12_init_oa_buffer()
1542 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen12_init_oa_buffer()
1556 memset(stream->oa_buffer.vaddr, 0, in gen12_init_oa_buffer()
1557 stream->oa_buffer.vma->size); in gen12_init_oa_buffer()
1560 static int alloc_oa_buffer(struct i915_perf_stream *stream) in alloc_oa_buffer() argument
1562 struct drm_i915_private *i915 = stream->perf->i915; in alloc_oa_buffer()
1567 if (drm_WARN_ON(&i915->drm, stream->oa_buffer.vma)) in alloc_oa_buffer()
1573 bo = i915_gem_object_create_shmem(stream->perf->i915, OA_BUFFER_SIZE); in alloc_oa_buffer()
1587 stream->oa_buffer.vma = vma; in alloc_oa_buffer()
1589 stream->oa_buffer.vaddr = in alloc_oa_buffer()
1591 if (IS_ERR(stream->oa_buffer.vaddr)) { in alloc_oa_buffer()
1592 ret = PTR_ERR(stream->oa_buffer.vaddr); in alloc_oa_buffer()
1604 stream->oa_buffer.vaddr = NULL; in alloc_oa_buffer()
1605 stream->oa_buffer.vma = NULL; in alloc_oa_buffer()
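alloc_oa_buffer() has to leave behind the two handles everything above relies on: oa_buffer.vma (the fixed GGTT address programmed into the OABUFFER registers for the hardware to write through) and oa_buffer.vaddr (the CPU mapping the append paths read from). An outline only; the pin and map helpers used here exist in i915, but their exact signatures and flags differ between kernel versions:

static int alloc_oa_buffer_sketch(struct i915_perf_stream *stream)
{
        struct drm_i915_gem_object *bo;
        struct i915_vma *vma;
        void *vaddr;

        /* One power-of-two shmem object backs the whole OA ring. */
        bo = i915_gem_object_create_shmem(stream->perf->i915, OA_BUFFER_SIZE);
        if (IS_ERR(bo))
                return PTR_ERR(bo);

        /* Pin it into the GGTT so the OA unit gets a stable address. */
        vma = i915_gem_object_ggtt_pin(bo, NULL, 0, SZ_16M, 0);
        if (IS_ERR(vma)) {
                i915_gem_object_put(bo);
                return PTR_ERR(vma);
        }

        /* CPU view used by oa_buffer_check_unlocked() and the read paths. */
        vaddr = i915_gem_object_pin_map(bo, I915_MAP_WB);
        if (IS_ERR(vaddr)) {
                i915_vma_unpin_and_release(&vma, 0);
                return PTR_ERR(vaddr);
        }

        stream->oa_buffer.vma = vma;
        stream->oa_buffer.vaddr = vaddr;
        return 0;
}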
1610 static u32 *save_restore_register(struct i915_perf_stream *stream, u32 *cs, in save_restore_register() argument
1619 if (GRAPHICS_VER(stream->perf->i915) >= 8) in save_restore_register()
1625 *cs++ = intel_gt_scratch_offset(stream->engine->gt, in save_restore_register()
1633 static int alloc_noa_wait(struct i915_perf_stream *stream) in alloc_noa_wait() argument
1635 struct drm_i915_private *i915 = stream->perf->i915; in alloc_noa_wait()
1639 intel_gt_ns_to_clock_interval(to_gt(stream->perf->i915), in alloc_noa_wait()
1640 atomic64_read(&stream->perf->noa_programming_delay)); in alloc_noa_wait()
1641 const u32 base = stream->engine->mmio_base; in alloc_noa_wait()
1688 stream, cs, true /* save */, CS_GPR(i), in alloc_noa_wait()
1691 stream, cs, true /* save */, MI_PREDICATE_RESULT_1(RENDER_RING_BASE), in alloc_noa_wait()
1795 stream, cs, false /* restore */, CS_GPR(i), in alloc_noa_wait()
1798 stream, cs, false /* restore */, MI_PREDICATE_RESULT_1(RENDER_RING_BASE), in alloc_noa_wait()
1809 stream->noa_wait = vma; in alloc_noa_wait()
1860 alloc_oa_config_buffer(struct i915_perf_stream *stream, in alloc_oa_config_buffer() argument
1880 obj = i915_gem_object_create_shmem(stream->perf->i915, config_length); in alloc_oa_config_buffer()
1909 *cs++ = (GRAPHICS_VER(stream->perf->i915) < 8 ? in alloc_oa_config_buffer()
1912 *cs++ = i915_ggtt_offset(stream->noa_wait); in alloc_oa_config_buffer()
1919 &stream->engine->gt->ggtt->vm, in alloc_oa_config_buffer()
1927 llist_add(&oa_bo->node, &stream->oa_config_bos); in alloc_oa_config_buffer()
1948 get_oa_vma(struct i915_perf_stream *stream, struct i915_oa_config *oa_config) in get_oa_vma() argument
1956 llist_for_each_entry(oa_bo, stream->oa_config_bos.first, node) { in get_oa_vma()
1964 oa_bo = alloc_oa_config_buffer(stream, oa_config); in get_oa_vma()
1973 emit_oa_config(struct i915_perf_stream *stream, in emit_oa_config() argument
1983 vma = get_oa_vma(stream, oa_config); in emit_oa_config()
2045 static struct intel_context *oa_context(struct i915_perf_stream *stream) in oa_context() argument
2047 return stream->pinned_ctx ?: stream->engine->kernel_context; in oa_context()
2051 hsw_enable_metric_set(struct i915_perf_stream *stream, in hsw_enable_metric_set() argument
2054 struct intel_uncore *uncore = stream->uncore; in hsw_enable_metric_set()
2071 return emit_oa_config(stream, in hsw_enable_metric_set()
2072 stream->oa_config, oa_context(stream), in hsw_enable_metric_set()
2076 static void hsw_disable_metric_set(struct i915_perf_stream *stream) in hsw_disable_metric_set() argument
2078 struct intel_uncore *uncore = stream->uncore; in hsw_disable_metric_set()
2118 const struct i915_perf_stream *stream) in gen8_update_reg_state_unlocked() argument
2120 u32 ctx_oactxctrl = stream->perf->ctx_oactxctrl_offset; in gen8_update_reg_state_unlocked()
2121 u32 ctx_flexeu0 = stream->perf->ctx_flexeu0_offset; in gen8_update_reg_state_unlocked()
2136 (stream->period_exponent << GEN8_OA_TIMER_PERIOD_SHIFT) | in gen8_update_reg_state_unlocked()
2137 (stream->periodic ? GEN8_OA_TIMER_ENABLE : 0) | in gen8_update_reg_state_unlocked()
2142 oa_config_flex_reg(stream->oa_config, flex_regs[i]); in gen8_update_reg_state_unlocked()
2278 static int gen12_configure_oar_context(struct i915_perf_stream *stream, in gen12_configure_oar_context() argument
2282 struct intel_context *ce = stream->pinned_ctx; in gen12_configure_oar_context()
2283 u32 format = stream->oa_buffer.format; in gen12_configure_oar_context()
2287 stream->perf->ctx_oactxctrl_offset + 1, in gen12_configure_oar_context()
2352 oa_configure_all_contexts(struct i915_perf_stream *stream, in oa_configure_all_contexts() argument
2357 struct drm_i915_private *i915 = stream->perf->i915; in oa_configure_all_contexts()
2362 lockdep_assert_held(&stream->perf->lock); in oa_configure_all_contexts()
2421 gen12_configure_all_contexts(struct i915_perf_stream *stream, in gen12_configure_all_contexts() argument
2432 return oa_configure_all_contexts(stream, in gen12_configure_all_contexts()
2438 lrc_configure_all_contexts(struct i915_perf_stream *stream, in lrc_configure_all_contexts() argument
2443 const u32 ctx_flexeu0 = stream->perf->ctx_flexeu0_offset; in lrc_configure_all_contexts()
2452 stream->perf->ctx_oactxctrl_offset + 1, in lrc_configure_all_contexts()
2466 (stream->period_exponent << GEN8_OA_TIMER_PERIOD_SHIFT) | in lrc_configure_all_contexts()
2467 (stream->periodic ? GEN8_OA_TIMER_ENABLE : 0) | in lrc_configure_all_contexts()
2473 return oa_configure_all_contexts(stream, in lrc_configure_all_contexts()
2479 gen8_enable_metric_set(struct i915_perf_stream *stream, in gen8_enable_metric_set() argument
2482 struct intel_uncore *uncore = stream->uncore; in gen8_enable_metric_set()
2483 struct i915_oa_config *oa_config = stream->oa_config; in gen8_enable_metric_set()
2509 if (IS_GRAPHICS_VER(stream->perf->i915, 9, 11)) { in gen8_enable_metric_set()
2520 ret = lrc_configure_all_contexts(stream, oa_config, active); in gen8_enable_metric_set()
2524 return emit_oa_config(stream, in gen8_enable_metric_set()
2525 stream->oa_config, oa_context(stream), in gen8_enable_metric_set()
2529 static u32 oag_report_ctx_switches(const struct i915_perf_stream *stream) in oag_report_ctx_switches() argument
2532 (stream->sample_flags & SAMPLE_OA_REPORT) ? in oag_report_ctx_switches()
2537 gen12_enable_metric_set(struct i915_perf_stream *stream, in gen12_enable_metric_set() argument
2540 struct intel_uncore *uncore = stream->uncore; in gen12_enable_metric_set()
2541 struct i915_oa_config *oa_config = stream->oa_config; in gen12_enable_metric_set()
2542 bool periodic = stream->periodic; in gen12_enable_metric_set()
2543 u32 period_exponent = stream->period_exponent; in gen12_enable_metric_set()
2554 oag_report_ctx_switches(stream)); in gen12_enable_metric_set()
2567 ret = gen12_configure_all_contexts(stream, oa_config, active); in gen12_enable_metric_set()
2576 if (stream->ctx) { in gen12_enable_metric_set()
2577 ret = gen12_configure_oar_context(stream, active); in gen12_enable_metric_set()
2582 return emit_oa_config(stream, in gen12_enable_metric_set()
2583 stream->oa_config, oa_context(stream), in gen12_enable_metric_set()
2587 static void gen8_disable_metric_set(struct i915_perf_stream *stream) in gen8_disable_metric_set() argument
2589 struct intel_uncore *uncore = stream->uncore; in gen8_disable_metric_set()
2592 lrc_configure_all_contexts(stream, NULL, NULL); in gen8_disable_metric_set()
2597 static void gen11_disable_metric_set(struct i915_perf_stream *stream) in gen11_disable_metric_set() argument
2599 struct intel_uncore *uncore = stream->uncore; in gen11_disable_metric_set()
2602 lrc_configure_all_contexts(stream, NULL, NULL); in gen11_disable_metric_set()
2608 static void gen12_disable_metric_set(struct i915_perf_stream *stream) in gen12_disable_metric_set() argument
2610 struct intel_uncore *uncore = stream->uncore; in gen12_disable_metric_set()
2613 gen12_configure_all_contexts(stream, NULL, NULL); in gen12_disable_metric_set()
2616 if (stream->ctx) in gen12_disable_metric_set()
2617 gen12_configure_oar_context(stream, NULL); in gen12_disable_metric_set()
2623 static void gen7_oa_enable(struct i915_perf_stream *stream) in gen7_oa_enable() argument
2625 struct intel_uncore *uncore = stream->uncore; in gen7_oa_enable()
2626 struct i915_gem_context *ctx = stream->ctx; in gen7_oa_enable()
2627 u32 ctx_id = stream->specific_ctx_id; in gen7_oa_enable()
2628 bool periodic = stream->periodic; in gen7_oa_enable()
2629 u32 period_exponent = stream->period_exponent; in gen7_oa_enable()
2630 u32 report_format = stream->oa_buffer.format; in gen7_oa_enable()
2641 gen7_init_oa_buffer(stream); in gen7_oa_enable()
2653 static void gen8_oa_enable(struct i915_perf_stream *stream) in gen8_oa_enable() argument
2655 struct intel_uncore *uncore = stream->uncore; in gen8_oa_enable()
2656 u32 report_format = stream->oa_buffer.format; in gen8_oa_enable()
2667 gen8_init_oa_buffer(stream); in gen8_oa_enable()
2679 static void gen12_oa_enable(struct i915_perf_stream *stream) in gen12_oa_enable() argument
2681 struct intel_uncore *uncore = stream->uncore; in gen12_oa_enable()
2682 u32 report_format = stream->oa_buffer.format; in gen12_oa_enable()
2688 if (!(stream->sample_flags & SAMPLE_OA_REPORT)) in gen12_oa_enable()
2691 gen12_init_oa_buffer(stream); in gen12_oa_enable()
2707 static void i915_oa_stream_enable(struct i915_perf_stream *stream) in i915_oa_stream_enable() argument
2709 stream->pollin = false; in i915_oa_stream_enable()
2711 stream->perf->ops.oa_enable(stream); in i915_oa_stream_enable()
2713 if (stream->sample_flags & SAMPLE_OA_REPORT) in i915_oa_stream_enable()
2714 hrtimer_start(&stream->poll_check_timer, in i915_oa_stream_enable()
2715 ns_to_ktime(stream->poll_oa_period), in i915_oa_stream_enable()
2719 static void gen7_oa_disable(struct i915_perf_stream *stream) in gen7_oa_disable() argument
2721 struct intel_uncore *uncore = stream->uncore; in gen7_oa_disable()
2727 drm_err(&stream->perf->i915->drm, in gen7_oa_disable()
2731 static void gen8_oa_disable(struct i915_perf_stream *stream) in gen8_oa_disable() argument
2733 struct intel_uncore *uncore = stream->uncore; in gen8_oa_disable()
2739 drm_err(&stream->perf->i915->drm, in gen8_oa_disable()
2743 static void gen12_oa_disable(struct i915_perf_stream *stream) in gen12_oa_disable() argument
2745 struct intel_uncore *uncore = stream->uncore; in gen12_oa_disable()
2752 drm_err(&stream->perf->i915->drm, in gen12_oa_disable()
2760 drm_err(&stream->perf->i915->drm, in gen12_oa_disable()
2772 static void i915_oa_stream_disable(struct i915_perf_stream *stream) in i915_oa_stream_disable() argument
2774 stream->perf->ops.oa_disable(stream); in i915_oa_stream_disable()
2776 if (stream->sample_flags & SAMPLE_OA_REPORT) in i915_oa_stream_disable()
2777 hrtimer_cancel(&stream->poll_check_timer); in i915_oa_stream_disable()
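i915_oa_stream_enable()/i915_oa_stream_disable() reconstructed from the fragments above: enabling resets the cached pollin state, lets the per-generation oa_enable hook program the OA unit, and arms the hrtimer that periodically re-runs oa_buffer_check_unlocked(); disabling undoes it in reverse. Close paraphrase; the hrtimer mode flag is an assumption:

static void i915_oa_stream_enable_sketch(struct i915_perf_stream *stream)
{
        stream->pollin = false;

        stream->perf->ops.oa_enable(stream);

        if (stream->sample_flags & SAMPLE_OA_REPORT)
                hrtimer_start(&stream->poll_check_timer,
                              ns_to_ktime(stream->poll_oa_period),
                              HRTIMER_MODE_REL_PINNED);
}

static void i915_oa_stream_disable_sketch(struct i915_perf_stream *stream)
{
        stream->perf->ops.oa_disable(stream);

        if (stream->sample_flags & SAMPLE_OA_REPORT)
                hrtimer_cancel(&stream->poll_check_timer);
}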
2789 static int i915_perf_stream_enable_sync(struct i915_perf_stream *stream) in i915_perf_stream_enable_sync() argument
2798 err = stream->perf->ops.enable_metric_set(stream, active); in i915_perf_stream_enable_sync()
2856 static int i915_oa_stream_init(struct i915_perf_stream *stream, in i915_oa_stream_init() argument
2860 struct drm_i915_private *i915 = stream->perf->i915; in i915_oa_stream_init()
2861 struct i915_perf *perf = stream->perf; in i915_oa_stream_init()
2881 (GRAPHICS_VER(perf->i915) < 12 || !stream->ctx)) { in i915_oa_stream_init()
2906 stream->engine = props->engine; in i915_oa_stream_init()
2907 stream->uncore = stream->engine->gt->uncore; in i915_oa_stream_init()
2909 stream->sample_size = sizeof(struct drm_i915_perf_record_header); in i915_oa_stream_init()
2913 stream->sample_flags = props->sample_flags; in i915_oa_stream_init()
2914 stream->sample_size += format_size; in i915_oa_stream_init()
2916 stream->oa_buffer.format_size = format_size; in i915_oa_stream_init()
2917 if (drm_WARN_ON(&i915->drm, stream->oa_buffer.format_size == 0)) in i915_oa_stream_init()
2920 stream->hold_preemption = props->hold_preemption; in i915_oa_stream_init()
2922 stream->oa_buffer.format = in i915_oa_stream_init()
2925 stream->periodic = props->oa_periodic; in i915_oa_stream_init()
2926 if (stream->periodic) in i915_oa_stream_init()
2927 stream->period_exponent = props->oa_period_exponent; in i915_oa_stream_init()
2929 if (stream->ctx) { in i915_oa_stream_init()
2930 ret = oa_get_render_ctx_id(stream); in i915_oa_stream_init()
2937 ret = alloc_noa_wait(stream); in i915_oa_stream_init()
2943 stream->oa_config = i915_perf_get_oa_config(perf, props->metrics_set); in i915_oa_stream_init()
2944 if (!stream->oa_config) { in i915_oa_stream_init()
2962 intel_engine_pm_get(stream->engine); in i915_oa_stream_init()
2963 intel_uncore_forcewake_get(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_init()
2965 ret = alloc_oa_buffer(stream); in i915_oa_stream_init()
2969 stream->ops = &i915_oa_stream_ops; in i915_oa_stream_init()
2972 WRITE_ONCE(perf->exclusive_stream, stream); in i915_oa_stream_init()
2974 ret = i915_perf_stream_enable_sync(stream); in i915_oa_stream_init()
2981 stream->oa_config->uuid); in i915_oa_stream_init()
2983 hrtimer_init(&stream->poll_check_timer, in i915_oa_stream_init()
2985 stream->poll_check_timer.function = oa_poll_check_timer_cb; in i915_oa_stream_init()
2986 init_waitqueue_head(&stream->poll_wq); in i915_oa_stream_init()
2987 spin_lock_init(&stream->oa_buffer.ptr_lock); in i915_oa_stream_init()
2993 perf->ops.disable_metric_set(stream); in i915_oa_stream_init()
2995 free_oa_buffer(stream); in i915_oa_stream_init()
2998 free_oa_configs(stream); in i915_oa_stream_init()
3000 intel_uncore_forcewake_put(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_init()
3001 intel_engine_pm_put(stream->engine); in i915_oa_stream_init()
3004 free_noa_wait(stream); in i915_oa_stream_init()
3007 if (stream->ctx) in i915_oa_stream_init()
3008 oa_put_render_ctx_id(stream); in i915_oa_stream_init()
3016 struct i915_perf_stream *stream; in i915_oa_init_reg_state() local
3022 stream = READ_ONCE(engine->i915->perf.exclusive_stream); in i915_oa_init_reg_state()
3023 if (stream && GRAPHICS_VER(stream->perf->i915) < 12) in i915_oa_init_reg_state()
3024 gen8_update_reg_state_unlocked(ce, stream); in i915_oa_init_reg_state()
3050 struct i915_perf_stream *stream = file->private_data; in i915_perf_read() local
3051 struct i915_perf *perf = stream->perf; in i915_perf_read()
3059 if (!stream->enabled || !(stream->sample_flags & SAMPLE_OA_REPORT)) in i915_perf_read()
3071 ret = stream->ops->wait_unlocked(stream); in i915_perf_read()
3076 ret = stream->ops->read(stream, buf, count, &offset); in i915_perf_read()
3081 ret = stream->ops->read(stream, buf, count, &offset); in i915_perf_read()
3097 stream->pollin = false; in i915_perf_read()
3105 struct i915_perf_stream *stream = in oa_poll_check_timer_cb() local
3106 container_of(hrtimer, typeof(*stream), poll_check_timer); in oa_poll_check_timer_cb()
3108 if (oa_buffer_check_unlocked(stream)) { in oa_poll_check_timer_cb()
3109 stream->pollin = true; in oa_poll_check_timer_cb()
3110 wake_up(&stream->poll_wq); in oa_poll_check_timer_cb()
3114 ns_to_ktime(stream->poll_oa_period)); in oa_poll_check_timer_cb()
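How poll() wakeups actually happen, pieced together from the oa_poll_check_timer_cb() and i915_perf_read() fragments: the hrtimer armed at stream enable re-checks the OA buffer every poll_oa_period nanoseconds, latches stream->pollin and wakes poll_wq; read() clears pollin again when it comes back empty (line 3097 above) so a false-positive EPOLLIN cannot degenerate into a busy loop. The callback below is a near-verbatim reconstruction:

static enum hrtimer_restart oa_poll_check_timer_cb_sketch(struct hrtimer *hrtimer)
{
        struct i915_perf_stream *stream =
                container_of(hrtimer, typeof(*stream), poll_check_timer);

        if (oa_buffer_check_unlocked(stream)) {
                stream->pollin = true;
                wake_up(&stream->poll_wq);
        }

        /* Re-arm relative to now and keep the timer running. */
        hrtimer_forward_now(hrtimer, ns_to_ktime(stream->poll_oa_period));

        return HRTIMER_RESTART;
}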
3134 static __poll_t i915_perf_poll_locked(struct i915_perf_stream *stream, in i915_perf_poll_locked() argument
3140 stream->ops->poll_wait(stream, file, wait); in i915_perf_poll_locked()
3148 if (stream->pollin) in i915_perf_poll_locked()
3169 struct i915_perf_stream *stream = file->private_data; in i915_perf_poll() local
3170 struct i915_perf *perf = stream->perf; in i915_perf_poll()
3174 ret = i915_perf_poll_locked(stream, file, wait); in i915_perf_poll()
3190 static void i915_perf_enable_locked(struct i915_perf_stream *stream) in i915_perf_enable_locked() argument
3192 if (stream->enabled) in i915_perf_enable_locked()
3196 stream->enabled = true; in i915_perf_enable_locked()
3198 if (stream->ops->enable) in i915_perf_enable_locked()
3199 stream->ops->enable(stream); in i915_perf_enable_locked()
3201 if (stream->hold_preemption) in i915_perf_enable_locked()
3202 intel_context_set_nopreempt(stream->pinned_ctx); in i915_perf_enable_locked()
3219 static void i915_perf_disable_locked(struct i915_perf_stream *stream) in i915_perf_disable_locked() argument
3221 if (!stream->enabled) in i915_perf_disable_locked()
3225 stream->enabled = false; in i915_perf_disable_locked()
3227 if (stream->hold_preemption) in i915_perf_disable_locked()
3228 intel_context_clear_nopreempt(stream->pinned_ctx); in i915_perf_disable_locked()
3230 if (stream->ops->disable) in i915_perf_disable_locked()
3231 stream->ops->disable(stream); in i915_perf_disable_locked()
3234 static long i915_perf_config_locked(struct i915_perf_stream *stream, in i915_perf_config_locked() argument
3238 long ret = stream->oa_config->id; in i915_perf_config_locked()
3240 config = i915_perf_get_oa_config(stream->perf, metrics_set); in i915_perf_config_locked()
3244 if (config != stream->oa_config) { in i915_perf_config_locked()
3256 err = emit_oa_config(stream, config, oa_context(stream), NULL); in i915_perf_config_locked()
3258 config = xchg(&stream->oa_config, config); in i915_perf_config_locked()
3280 static long i915_perf_ioctl_locked(struct i915_perf_stream *stream, in i915_perf_ioctl_locked() argument
3286 i915_perf_enable_locked(stream); in i915_perf_ioctl_locked()
3289 i915_perf_disable_locked(stream); in i915_perf_ioctl_locked()
3292 return i915_perf_config_locked(stream, arg); in i915_perf_ioctl_locked()
3313 struct i915_perf_stream *stream = file->private_data; in i915_perf_ioctl() local
3314 struct i915_perf *perf = stream->perf; in i915_perf_ioctl()
3318 ret = i915_perf_ioctl_locked(stream, cmd, arg); in i915_perf_ioctl()
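The stream-fd ioctl surface is small: i915_perf_ioctl_locked() is effectively a three-way dispatch onto the enable/disable/config helpers listed above, keyed on the UAPI commands I915_PERF_IOCTL_ENABLE, I915_PERF_IOCTL_DISABLE and I915_PERF_IOCTL_CONFIG. A reconstruction of that dispatch:

static long i915_perf_ioctl_locked_sketch(struct i915_perf_stream *stream,
                                          unsigned int cmd, unsigned long arg)
{
        switch (cmd) {
        case I915_PERF_IOCTL_ENABLE:
                i915_perf_enable_locked(stream);
                return 0;
        case I915_PERF_IOCTL_DISABLE:
                i915_perf_disable_locked(stream);
                return 0;
        case I915_PERF_IOCTL_CONFIG:
                /* Switch to another metrics set on a live stream. */
                return i915_perf_config_locked(stream, arg);
        }

        return -EINVAL;
}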
3334 static void i915_perf_destroy_locked(struct i915_perf_stream *stream) in i915_perf_destroy_locked() argument
3336 if (stream->enabled) in i915_perf_destroy_locked()
3337 i915_perf_disable_locked(stream); in i915_perf_destroy_locked()
3339 if (stream->ops->destroy) in i915_perf_destroy_locked()
3340 stream->ops->destroy(stream); in i915_perf_destroy_locked()
3342 if (stream->ctx) in i915_perf_destroy_locked()
3343 i915_gem_context_put(stream->ctx); in i915_perf_destroy_locked()
3345 kfree(stream); in i915_perf_destroy_locked()
3361 struct i915_perf_stream *stream = file->private_data; in i915_perf_release() local
3362 struct i915_perf *perf = stream->perf; in i915_perf_release()
3365 i915_perf_destroy_locked(stream); in i915_perf_release()
3420 struct i915_perf_stream *stream = NULL; in i915_perf_open_ioctl_locked() local
3493 stream = kzalloc(sizeof(*stream), GFP_KERNEL); in i915_perf_open_ioctl_locked()
3494 if (!stream) { in i915_perf_open_ioctl_locked()
3499 stream->perf = perf; in i915_perf_open_ioctl_locked()
3500 stream->ctx = specific_ctx; in i915_perf_open_ioctl_locked()
3501 stream->poll_oa_period = props->poll_oa_period; in i915_perf_open_ioctl_locked()
3503 ret = i915_oa_stream_init(stream, param, props); in i915_perf_open_ioctl_locked()
3511 if (WARN_ON(stream->sample_flags != props->sample_flags)) { in i915_perf_open_ioctl_locked()
3521 stream_fd = anon_inode_getfd("[i915_perf]", &fops, stream, f_flags); in i915_perf_open_ioctl_locked()
3528 i915_perf_enable_locked(stream); in i915_perf_open_ioctl_locked()
3538 if (stream->ops->destroy) in i915_perf_open_ioctl_locked()
3539 stream->ops->destroy(stream); in i915_perf_open_ioctl_locked()
3541 kfree(stream); in i915_perf_open_ioctl_locked()
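For context, a hedged userspace sketch of exercising this whole path through the documented i915 perf UAPI: open a stream with DRM_IOCTL_I915_PERF_OPEN (which lands in i915_perf_open_ioctl_locked() above and returns a new fd), enable it, and read raw records. The metrics-set id is normally read from /sys/class/drm/card0/metrics/<uuid>/id and is hard-coded here as a placeholder; the program needs CAP_PERFMON or a relaxed dev.i915.perf_stream_paranoid, and assumes the kernel UAPI header is visible as <drm/i915_drm.h>:

#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>

#include <drm/i915_drm.h>

int main(void)
{
        int drm_fd = open("/dev/dri/card0", O_RDWR | O_CLOEXEC);
        if (drm_fd < 0)
                return 1;

        uint64_t properties[] = {
                /* Include the raw OA report in every sample record. */
                DRM_I915_PERF_PROP_SAMPLE_OA, 1,
                /* Metrics set id from sysfs; 1 is only a placeholder. */
                DRM_I915_PERF_PROP_OA_METRICS_SET, 1,
                /* Report layout (this one exists on Gen8+). */
                DRM_I915_PERF_PROP_OA_FORMAT, I915_OA_FORMAT_A32u40_A4u32_B8_C8,
                /* Periodic sampling: period is roughly 2^exponent timestamp ticks. */
                DRM_I915_PERF_PROP_OA_EXPONENT, 20,
        };
        struct drm_i915_perf_open_param param = {
                .flags = I915_PERF_FLAG_FD_CLOEXEC | I915_PERF_FLAG_DISABLED,
                .num_properties = sizeof(properties) / (2 * sizeof(uint64_t)),
                .properties_ptr = (uintptr_t)properties,
        };

        /* Handled by i915_perf_open_ioctl_locked(); returns the stream fd. */
        int stream_fd = ioctl(drm_fd, DRM_IOCTL_I915_PERF_OPEN, &param);
        if (stream_fd < 0)
                return 1;

        /* I915_PERF_IOCTL_ENABLE -> i915_perf_enable_locked(). */
        ioctl(stream_fd, I915_PERF_IOCTL_ENABLE, 0);

        /* Blocking read: sleeps in i915_oa_wait_unlocked() until data ages out. */
        uint8_t buf[64 * 1024];
        ssize_t len = read(stream_fd, buf, sizeof(buf));

        /* Records are a stream of drm_i915_perf_record_header + payload. */
        ssize_t pos = 0;
        while (pos + (ssize_t)sizeof(struct drm_i915_perf_record_header) <= len) {
                struct drm_i915_perf_record_header hdr;

                memcpy(&hdr, buf + pos, sizeof(hdr));
                if (hdr.size < sizeof(hdr))
                        break;
                if (hdr.type == DRM_I915_PERF_RECORD_SAMPLE)
                        printf("OA sample, %u bytes\n", (unsigned int)hdr.size);
                pos += hdr.size;
        }

        ioctl(stream_fd, I915_PERF_IOCTL_DISABLE, 0);
        close(stream_fd);
        close(drm_fd);
        return 0;
}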