Lines matching references to 'ce':
849 const struct intel_context *ce, in init_common_regs() argument
865 regs[CTX_TIMESTAMP] = ce->stats.runtime.last; in init_common_regs()
929 const struct intel_context *ce, in __lrc_init_regs() argument
949 init_common_regs(regs, ce, engine, inhibit); in __lrc_init_regs()
950 init_ppgtt_regs(regs, vm_alias(ce->vm)); in __lrc_init_regs()
957 void lrc_init_regs(const struct intel_context *ce, in lrc_init_regs() argument
961 __lrc_init_regs(ce->lrc_reg_state, ce, engine, inhibit); in lrc_init_regs()
964 void lrc_reset_regs(const struct intel_context *ce, in lrc_reset_regs() argument
967 __reset_stop_ring(ce->lrc_reg_state, engine); in lrc_reset_regs()
995 static u32 context_wa_bb_offset(const struct intel_context *ce) in context_wa_bb_offset() argument
997 return PAGE_SIZE * ce->wa_bb_page; in context_wa_bb_offset()
1000 static u32 *context_indirect_bb(const struct intel_context *ce) in context_indirect_bb() argument
1004 GEM_BUG_ON(!ce->wa_bb_page); in context_indirect_bb()
1006 ptr = ce->lrc_reg_state; in context_indirect_bb()
1008 ptr += context_wa_bb_offset(ce); in context_indirect_bb()
1013 void lrc_init_state(struct intel_context *ce, in lrc_init_state() argument
1024 __set_bit(CONTEXT_VALID_BIT, &ce->flags); in lrc_init_state()
1032 if (ce->wa_bb_page) in lrc_init_state()
1033 memset(state + context_wa_bb_offset(ce), 0, PAGE_SIZE); in lrc_init_state()
1039 __lrc_init_regs(state + LRC_STATE_OFFSET, ce, engine, inhibit); in lrc_init_state()
1042 u32 lrc_indirect_bb(const struct intel_context *ce) in lrc_indirect_bb() argument
1044 return i915_ggtt_offset(ce->state) + context_wa_bb_offset(ce); in lrc_indirect_bb()
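Note: the helpers above are pure address arithmetic — the per-context workaround batch buffer sits ce->wa_bb_page pages into the context image; context_wa_bb_offset() converts that page index to a byte offset, and lrc_indirect_bb() adds it to the context VMA's GGTT offset to get the buffer's GPU-visible address. A minimal userspace sketch of that arithmetic, with made-up values for the page index and GGTT base:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096u                          /* example page size */

static const uint32_t wa_bb_page = 3;            /* hypothetical ce->wa_bb_page */
static const uint32_t ggtt_offset = 0x00100000u; /* hypothetical i915_ggtt_offset(ce->state) */

static uint32_t context_wa_bb_offset(void)
{
	return PAGE_SIZE * wa_bb_page;           /* byte offset inside the context image */
}

int main(void)
{
	printf("wa_bb byte offset into context image: %u\n", context_wa_bb_offset());
	printf("wa_bb GGTT address: 0x%08x\n", ggtt_offset + context_wa_bb_offset());
	return 0;
}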
1047 static u32 *setup_predicate_disable_wa(const struct intel_context *ce, u32 *cs) in setup_predicate_disable_wa() argument
1051 *cs++ = lrc_indirect_bb(ce) + DG2_PREDICATE_RESULT_WA; in setup_predicate_disable_wa()
1061 *cs++ = lrc_indirect_bb(ce) + DG2_PREDICATE_RESULT_WA; in setup_predicate_disable_wa()
1072 __lrc_alloc_state(struct intel_context *ce, struct intel_engine_cs *engine) in __lrc_alloc_state() argument
1084 ce->wa_bb_page = context_size / PAGE_SIZE; in __lrc_alloc_state()
1088 if (intel_context_is_parent(ce) && intel_engine_uses_guc(engine)) { in __lrc_alloc_state()
1089 ce->parallel.guc.parent_page = context_size / PAGE_SIZE; in __lrc_alloc_state()
1119 pinned_timeline(struct intel_context *ce, struct intel_engine_cs *engine) in pinned_timeline() argument
1121 struct intel_timeline *tl = fetch_and_zero(&ce->timeline); in pinned_timeline()
1126 int lrc_alloc(struct intel_context *ce, struct intel_engine_cs *engine) in lrc_alloc() argument
1132 GEM_BUG_ON(ce->state); in lrc_alloc()
1134 vma = __lrc_alloc_state(ce, engine); in lrc_alloc()
1138 ring = intel_engine_create_ring(engine, ce->ring_size); in lrc_alloc()
1144 if (!page_mask_bits(ce->timeline)) { in lrc_alloc()
1151 if (unlikely(ce->timeline)) in lrc_alloc()
1152 tl = pinned_timeline(ce, engine); in lrc_alloc()
1160 ce->timeline = tl; in lrc_alloc()
1163 ce->ring = ring; in lrc_alloc()
1164 ce->state = vma; in lrc_alloc()
1175 void lrc_reset(struct intel_context *ce) in lrc_reset() argument
1177 GEM_BUG_ON(!intel_context_is_pinned(ce)); in lrc_reset()
1179 intel_ring_reset(ce->ring, ce->ring->emit); in lrc_reset()
1182 lrc_init_regs(ce, ce->engine, true); in lrc_reset()
1183 ce->lrc.lrca = lrc_update_regs(ce, ce->engine, ce->ring->tail); in lrc_reset()
1187 lrc_pre_pin(struct intel_context *ce, in lrc_pre_pin() argument
1192 GEM_BUG_ON(!ce->state); in lrc_pre_pin()
1193 GEM_BUG_ON(!i915_vma_is_pinned(ce->state)); in lrc_pre_pin()
1195 *vaddr = i915_gem_object_pin_map(ce->state->obj, in lrc_pre_pin()
1196 intel_gt_coherent_map_type(ce->engine->gt, in lrc_pre_pin()
1197 ce->state->obj, in lrc_pre_pin()
1205 lrc_pin(struct intel_context *ce, in lrc_pin() argument
1209 ce->lrc_reg_state = vaddr + LRC_STATE_OFFSET; in lrc_pin()
1211 if (!__test_and_set_bit(CONTEXT_INIT_BIT, &ce->flags)) in lrc_pin()
1212 lrc_init_state(ce, engine, vaddr); in lrc_pin()
1214 ce->lrc.lrca = lrc_update_regs(ce, engine, ce->ring->tail); in lrc_pin()
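Note: lrc_pin() defers state initialisation to the first pin — CONTEXT_INIT_BIT is atomically test-and-set, so lrc_init_state() runs exactly once per context while later pins skip it. A minimal sketch of that one-time-init pattern using C11 atomics; the bit index and helper names here are placeholders, not the kernel API:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define CONTEXT_INIT_BIT 0            /* placeholder bit index */

static atomic_ulong flags;

/* atomically set the bit and return its previous value */
static bool test_and_set_bit(int bit, atomic_ulong *addr)
{
	unsigned long mask = 1ul << bit;

	return atomic_fetch_or(addr, mask) & mask;
}

static void init_state(void)
{
	puts("initialising context state (first pin only)");
}

static void pin(void)
{
	if (!test_and_set_bit(CONTEXT_INIT_BIT, &flags))
		init_state();         /* runs once; repeat pins skip it */
	puts("pinned");
}

int main(void)
{
	pin();   /* first pin: initialises state */
	pin();   /* second pin: initialisation is skipped */
	return 0;
}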
1218 void lrc_unpin(struct intel_context *ce) in lrc_unpin() argument
1220 if (unlikely(ce->parallel.last_rq)) { in lrc_unpin()
1221 i915_request_put(ce->parallel.last_rq); in lrc_unpin()
1222 ce->parallel.last_rq = NULL; in lrc_unpin()
1224 check_redzone((void *)ce->lrc_reg_state - LRC_STATE_OFFSET, in lrc_unpin()
1225 ce->engine); in lrc_unpin()
1228 void lrc_post_unpin(struct intel_context *ce) in lrc_post_unpin() argument
1230 i915_gem_object_unpin_map(ce->state->obj); in lrc_post_unpin()
1233 void lrc_fini(struct intel_context *ce) in lrc_fini() argument
1235 if (!ce->state) in lrc_fini()
1238 intel_ring_put(fetch_and_zero(&ce->ring)); in lrc_fini()
1239 i915_vma_put(fetch_and_zero(&ce->state)); in lrc_fini()
1244 struct intel_context *ce = container_of(kref, typeof(*ce), ref); in lrc_destroy() local
1246 GEM_BUG_ON(!i915_active_is_idle(&ce->active)); in lrc_destroy()
1247 GEM_BUG_ON(intel_context_is_pinned(ce)); in lrc_destroy()
1249 lrc_fini(ce); in lrc_destroy()
1251 intel_context_fini(ce); in lrc_destroy()
1252 intel_context_free(ce); in lrc_destroy()
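Note: lrc_destroy() is the release callback for the context's embedded reference count — container_of() recovers the intel_context from its kref before lrc_fini() and intel_context_free() tear it down. A minimal userspace sketch of that embedded-refcount/container_of pattern; the struct and field names are placeholders:

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kref { int refcount; };

struct ctx {
	int id;
	struct kref ref;                       /* embedded refcount, as in struct intel_context */
};

static void ctx_release(struct kref *kref)
{
	/* recover the containing object from the embedded kref */
	struct ctx *c = container_of(kref, struct ctx, ref);

	printf("releasing ctx %d\n", c->id);
	free(c);
}

int main(void)
{
	struct ctx *c = calloc(1, sizeof(*c));

	c->id = 42;
	c->ref.refcount = 1;
	if (--c->ref.refcount == 0)            /* last reference dropped */
		ctx_release(&c->ref);
	return 0;
}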
1256 gen12_emit_timestamp_wa(const struct intel_context *ce, u32 *cs) in gen12_emit_timestamp_wa() argument
1262 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_timestamp_wa()
1282 gen12_emit_restore_scratch(const struct intel_context *ce, u32 *cs) in gen12_emit_restore_scratch() argument
1284 GEM_BUG_ON(lrc_ring_gpr0(ce->engine) == -1); in gen12_emit_restore_scratch()
1290 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_restore_scratch()
1291 (lrc_ring_gpr0(ce->engine) + 1) * sizeof(u32); in gen12_emit_restore_scratch()
1298 gen12_emit_cmd_buf_wa(const struct intel_context *ce, u32 *cs) in gen12_emit_cmd_buf_wa() argument
1300 GEM_BUG_ON(lrc_ring_cmd_buf_cctl(ce->engine) == -1); in gen12_emit_cmd_buf_wa()
1306 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_cmd_buf_wa()
1307 (lrc_ring_cmd_buf_cctl(ce->engine) + 1) * sizeof(u32); in gen12_emit_cmd_buf_wa()
1325 dg2_emit_rcs_hang_wabb(const struct intel_context *ce, u32 *cs) in dg2_emit_rcs_hang_wabb() argument
1328 *cs++ = i915_mmio_reg_offset(GEN12_STATE_ACK_DEBUG(ce->engine->mmio_base)); in dg2_emit_rcs_hang_wabb()
1332 *cs++ = i915_mmio_reg_offset(RING_NOPID(ce->engine->mmio_base)); in dg2_emit_rcs_hang_wabb()
1336 *cs++ = i915_mmio_reg_offset(RING_NOPID(ce->engine->mmio_base)); in dg2_emit_rcs_hang_wabb()
1360 gen12_emit_indirect_ctx_rcs(const struct intel_context *ce, u32 *cs) in gen12_emit_indirect_ctx_rcs() argument
1362 cs = gen12_emit_timestamp_wa(ce, cs); in gen12_emit_indirect_ctx_rcs()
1363 cs = gen12_emit_cmd_buf_wa(ce, cs); in gen12_emit_indirect_ctx_rcs()
1364 cs = gen12_emit_restore_scratch(ce, cs); in gen12_emit_indirect_ctx_rcs()
1367 if (IS_DG2_GRAPHICS_STEP(ce->engine->i915, G10, STEP_A0, STEP_B0) || in gen12_emit_indirect_ctx_rcs()
1368 IS_DG2_GRAPHICS_STEP(ce->engine->i915, G11, STEP_A0, STEP_B0)) in gen12_emit_indirect_ctx_rcs()
1369 cs = dg2_emit_rcs_hang_wabb(ce, cs); in gen12_emit_indirect_ctx_rcs()
1372 if (IS_DG2_GRAPHICS_STEP(ce->engine->i915, G10, STEP_B0, STEP_C0) || in gen12_emit_indirect_ctx_rcs()
1373 IS_DG2_G11(ce->engine->i915)) in gen12_emit_indirect_ctx_rcs()
1376 cs = gen12_emit_aux_table_inv(ce->engine, cs); in gen12_emit_indirect_ctx_rcs()
1379 if (IS_MTL_GRAPHICS_STEP(ce->engine->i915, M, STEP_A0, STEP_B0) || in gen12_emit_indirect_ctx_rcs()
1380 IS_MTL_GRAPHICS_STEP(ce->engine->i915, P, STEP_A0, STEP_B0) || in gen12_emit_indirect_ctx_rcs()
1381 IS_DG2(ce->engine->i915)) in gen12_emit_indirect_ctx_rcs()
1388 gen12_emit_indirect_ctx_xcs(const struct intel_context *ce, u32 *cs) in gen12_emit_indirect_ctx_xcs() argument
1390 cs = gen12_emit_timestamp_wa(ce, cs); in gen12_emit_indirect_ctx_xcs()
1391 cs = gen12_emit_restore_scratch(ce, cs); in gen12_emit_indirect_ctx_xcs()
1394 if (IS_DG2_GRAPHICS_STEP(ce->engine->i915, G10, STEP_B0, STEP_C0) || in gen12_emit_indirect_ctx_xcs()
1395 IS_DG2_G11(ce->engine->i915)) in gen12_emit_indirect_ctx_xcs()
1396 if (ce->engine->class == COMPUTE_CLASS) in gen12_emit_indirect_ctx_xcs()
1401 return gen12_emit_aux_table_inv(ce->engine, cs); in gen12_emit_indirect_ctx_xcs()
1405 setup_indirect_ctx_bb(const struct intel_context *ce, in setup_indirect_ctx_bb() argument
1409 u32 * const start = context_indirect_bb(ce); in setup_indirect_ctx_bb()
1412 cs = emit(ce, start); in setup_indirect_ctx_bb()
1418 setup_predicate_disable_wa(ce, start + DG2_PREDICATE_RESULT_BB / sizeof(*start)); in setup_indirect_ctx_bb()
1420 lrc_setup_indirect_ctx(ce->lrc_reg_state, engine, in setup_indirect_ctx_bb()
1421 lrc_indirect_bb(ce), in setup_indirect_ctx_bb()
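Note: every gen12_emit_* helper above follows one convention — take the current command-stream cursor (u32 *cs), write some dwords, return the advanced cursor — which is what lets setup_indirect_ctx_bb() chain them and invoke the right one through a function pointer. A minimal sketch of that builder pattern; the dword values are placeholders, not real MI commands:

#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;

/* each emitter writes dwords at *cs and returns the advanced cursor */
static u32 *emit_timestamp(u32 *cs)
{
	*cs++ = 0x11111111;                    /* placeholder dword */
	return cs;
}

static u32 *emit_scratch(u32 *cs)
{
	*cs++ = 0x22222222;
	*cs++ = 0x33333333;
	return cs;
}

static u32 *emit_all(u32 *cs)
{
	cs = emit_timestamp(cs);               /* chained like gen12_emit_indirect_ctx_rcs() */
	cs = emit_scratch(cs);
	return cs;
}

int main(void)
{
	u32 buf[16];
	/* select the emitter through a pointer, as setup_indirect_ctx_bb() does */
	u32 *(*fn)(u32 *cs) = emit_all;
	u32 *end = fn(buf);

	printf("emitted %td dwords\n", end - buf);
	return 0;
}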
1459 static u32 lrc_descriptor(const struct intel_context *ce) in lrc_descriptor() argument
1464 if (i915_vm_is_4lvl(ce->vm)) in lrc_descriptor()
1469 if (GRAPHICS_VER(ce->vm->i915) == 8) in lrc_descriptor()
1472 return i915_ggtt_offset(ce->state) | desc; in lrc_descriptor()
1475 u32 lrc_update_regs(const struct intel_context *ce, in lrc_update_regs() argument
1479 struct intel_ring *ring = ce->ring; in lrc_update_regs()
1480 u32 *regs = ce->lrc_reg_state; in lrc_update_regs()
1493 intel_sseu_make_rpcs(engine->gt, &ce->sseu); in lrc_update_regs()
1495 i915_oa_init_reg_state(ce, engine); in lrc_update_regs()
1498 if (ce->wa_bb_page) { in lrc_update_regs()
1499 u32 *(*fn)(const struct intel_context *ce, u32 *cs); in lrc_update_regs()
1502 if (ce->engine->class == RENDER_CLASS) in lrc_update_regs()
1507 setup_indirect_ctx_bb(ce, engine, fn); in lrc_update_regs()
1510 return lrc_descriptor(ce) | CTX_DESC_FORCE_RESTORE; in lrc_update_regs()
1513 void lrc_update_offsets(struct intel_context *ce, in lrc_update_offsets() argument
1516 set_offsets(ce->lrc_reg_state, reg_offsets(engine), engine, false); in lrc_update_offsets()
1519 void lrc_check_regs(const struct intel_context *ce, in lrc_check_regs() argument
1523 const struct intel_ring *ring = ce->ring; in lrc_check_regs()
1524 u32 *regs = ce->lrc_reg_state; in lrc_check_regs()
1883 static u32 lrc_get_runtime(const struct intel_context *ce) in lrc_get_runtime() argument
1891 return READ_ONCE(ce->lrc_reg_state[CTX_TIMESTAMP]); in lrc_get_runtime()
1894 void lrc_update_runtime(struct intel_context *ce) in lrc_update_runtime() argument
1896 struct intel_context_stats *stats = &ce->stats; in lrc_update_runtime()
1901 stats->runtime.last = lrc_get_runtime(ce); in lrc_update_runtime()
1907 CE_TRACE(ce, "runtime underflow: last=%u, new=%u, delta=%d\n", in lrc_update_runtime()
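Note: lrc_update_runtime() reads the 32-bit CTX_TIMESTAMP value saved in the register state, takes the signed difference against the previous sample, and reports a negative delta as the underflow traced above. A minimal sketch of that u32 delta arithmetic, with made-up sample values:

#include <stdint.h>
#include <stdio.h>

static uint32_t runtime_last;                    /* previous CTX_TIMESTAMP sample */

static void update_runtime(uint32_t now)
{
	uint32_t old = runtime_last;
	int32_t dt = (int32_t)(now - old);       /* signed delta over u32 samples */

	runtime_last = now;
	if (dt < 0)
		printf("runtime underflow: last=%u, new=%u, delta=%d\n", old, now, dt);
	else
		printf("accumulate %d ticks\n", dt);
}

int main(void)
{
	update_runtime(1000);   /* normal forward progress */
	update_runtime(1500);
	update_runtime(900);    /* timestamp went backwards: reported as underflow */
	return 0;
}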