Lines matching refs: ce (references to the struct intel_context *ce argument/local in i915's logical ring context code, intel_lrc.c)
762 const struct intel_context *ce, in init_common_regs() argument
777 regs[CTX_TIMESTAMP] = ce->runtime.last; in init_common_regs()
837 const struct intel_context *ce, in __lrc_init_regs() argument
857 init_common_regs(regs, ce, engine, inhibit); in __lrc_init_regs()
858 init_ppgtt_regs(regs, vm_alias(ce->vm)); in __lrc_init_regs()
865 void lrc_init_regs(const struct intel_context *ce, in lrc_init_regs() argument
869 __lrc_init_regs(ce->lrc_reg_state, ce, engine, inhibit); in lrc_init_regs()
872 void lrc_reset_regs(const struct intel_context *ce, in lrc_reset_regs() argument
875 __reset_stop_ring(ce->lrc_reg_state, engine); in lrc_reset_regs()
903 void lrc_init_state(struct intel_context *ce, in lrc_init_state() argument
914 __set_bit(CONTEXT_VALID_BIT, &ce->flags); in lrc_init_state()
925 __lrc_init_regs(state + LRC_STATE_OFFSET, ce, engine, inhibit); in lrc_init_state()
929 __lrc_alloc_state(struct intel_context *ce, struct intel_engine_cs *engine) in __lrc_alloc_state() argument
941 ce->wa_bb_page = context_size / PAGE_SIZE; in __lrc_alloc_state()
945 if (intel_context_is_parent(ce) && intel_engine_uses_guc(engine)) { in __lrc_alloc_state()
946 ce->parallel.guc.parent_page = context_size / PAGE_SIZE; in __lrc_alloc_state()
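The two assignments above place the workaround batch page and, for a GuC parallel parent context, the parent scratch page directly after the base context image: each records the index of the next free page before the allocation size is bumped. A minimal standalone sketch of that bookkeeping follows; the base size and the exact size increments are illustrative assumptions, not the driver's values (the "context_size += ..." lines do not appear above because they do not reference ce).

#include <stdbool.h>
#include <stdio.h>

#define MODEL_PAGE_SIZE 4096u

int main(void)
{
	unsigned int context_size = 20 * MODEL_PAGE_SIZE; /* illustrative base image size */
	unsigned int wa_bb_page, parent_page = 0;
	bool guc_parallel_parent = true;

	/* record the index of the next free page, then grow the allocation */
	wa_bb_page = context_size / MODEL_PAGE_SIZE;
	context_size += MODEL_PAGE_SIZE;

	if (guc_parallel_parent) {
		parent_page = context_size / MODEL_PAGE_SIZE;
		context_size += MODEL_PAGE_SIZE;
	}

	printf("wa_bb_page=%u parent_page=%u total=%u pages\n",
	       wa_bb_page, parent_page, context_size / MODEL_PAGE_SIZE);
	return 0;
}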
967 pinned_timeline(struct intel_context *ce, struct intel_engine_cs *engine) in pinned_timeline() argument
969 struct intel_timeline *tl = fetch_and_zero(&ce->timeline); in pinned_timeline()
974 int lrc_alloc(struct intel_context *ce, struct intel_engine_cs *engine) in lrc_alloc() argument
980 GEM_BUG_ON(ce->state); in lrc_alloc()
982 vma = __lrc_alloc_state(ce, engine); in lrc_alloc()
986 ring = intel_engine_create_ring(engine, ce->ring_size); in lrc_alloc()
992 if (!page_mask_bits(ce->timeline)) { in lrc_alloc()
999 if (unlikely(ce->timeline)) in lrc_alloc()
1000 tl = pinned_timeline(ce, engine); in lrc_alloc()
1008 ce->timeline = tl; in lrc_alloc()
1011 ce->ring = ring; in lrc_alloc()
1012 ce->state = vma; in lrc_alloc()
1023 void lrc_reset(struct intel_context *ce) in lrc_reset() argument
1025 GEM_BUG_ON(!intel_context_is_pinned(ce)); in lrc_reset()
1027 intel_ring_reset(ce->ring, ce->ring->emit); in lrc_reset()
1030 lrc_init_regs(ce, ce->engine, true); in lrc_reset()
1031 ce->lrc.lrca = lrc_update_regs(ce, ce->engine, ce->ring->tail); in lrc_reset()
1035 lrc_pre_pin(struct intel_context *ce, in lrc_pre_pin() argument
1040 GEM_BUG_ON(!ce->state); in lrc_pre_pin()
1041 GEM_BUG_ON(!i915_vma_is_pinned(ce->state)); in lrc_pre_pin()
1043 *vaddr = i915_gem_object_pin_map(ce->state->obj, in lrc_pre_pin()
1044 i915_coherent_map_type(ce->engine->i915, in lrc_pre_pin()
1045 ce->state->obj, in lrc_pre_pin()
1053 lrc_pin(struct intel_context *ce, in lrc_pin() argument
1057 ce->lrc_reg_state = vaddr + LRC_STATE_OFFSET; in lrc_pin()
1059 if (!__test_and_set_bit(CONTEXT_INIT_BIT, &ce->flags)) in lrc_pin()
1060 lrc_init_state(ce, engine, vaddr); in lrc_pin()
1062 ce->lrc.lrca = lrc_update_regs(ce, engine, ce->ring->tail); in lrc_pin()
1066 void lrc_unpin(struct intel_context *ce) in lrc_unpin() argument
1068 check_redzone((void *)ce->lrc_reg_state - LRC_STATE_OFFSET, in lrc_unpin()
1069 ce->engine); in lrc_unpin()
1072 void lrc_post_unpin(struct intel_context *ce) in lrc_post_unpin() argument
1074 i915_gem_object_unpin_map(ce->state->obj); in lrc_post_unpin()
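Taken together, the references above outline the pin lifecycle: lrc_pre_pin() maps the state object, lrc_pin() points lrc_reg_state at vaddr + LRC_STATE_OFFSET, performs the one-time image initialisation guarded by __test_and_set_bit(CONTEXT_INIT_BIT) and refreshes the descriptor from the current ring tail, and lrc_post_unpin() drops the mapping again. A standalone model of the once-only init step; the structure, sizes and single flag bit are illustrative assumptions, not the driver's types.

#include <stdint.h>
#include <stdio.h>

#define MODEL_PAGE_SIZE        4096u
#define MODEL_LRC_STATE_OFFSET MODEL_PAGE_SIZE	/* illustrative offset */

struct context_model {
	unsigned long flags;		/* bit 0 stands in for CONTEXT_INIT_BIT */
	uint32_t *lrc_reg_state;	/* register state inside the mapped image */
};

/* models lrc_pin(): point at the register state, initialise the image once */
static void model_pin(struct context_model *ce, char *vaddr)
{
	ce->lrc_reg_state = (uint32_t *)(vaddr + MODEL_LRC_STATE_OFFSET);

	if (!(ce->flags & 1ul)) {	/* !__test_and_set_bit(CONTEXT_INIT_BIT) */
		ce->flags |= 1ul;
		printf("first pin: writing the default register state\n");
	} else {
		printf("repin: image already initialised, only refresh registers\n");
	}
}

int main(void)
{
	/* stands in for the lrc_pre_pin() mapping of the state object */
	static uint32_t image[4 * MODEL_PAGE_SIZE / sizeof(uint32_t)];
	struct context_model ce = { 0 };

	model_pin(&ce, (char *)image);	/* first pin initialises the image */
	model_pin(&ce, (char *)image);	/* later pins skip the init */
	return 0;
}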
1077 void lrc_fini(struct intel_context *ce) in lrc_fini() argument
1079 if (!ce->state) in lrc_fini()
1082 intel_ring_put(fetch_and_zero(&ce->ring)); in lrc_fini()
1083 i915_vma_put(fetch_and_zero(&ce->state)); in lrc_fini()
1088 struct intel_context *ce = container_of(kref, typeof(*ce), ref); in lrc_destroy() local
1090 GEM_BUG_ON(!i915_active_is_idle(&ce->active)); in lrc_destroy()
1091 GEM_BUG_ON(intel_context_is_pinned(ce)); in lrc_destroy()
1093 lrc_fini(ce); in lrc_destroy()
1095 intel_context_fini(ce); in lrc_destroy()
1096 intel_context_free(ce); in lrc_destroy()
1100 gen12_emit_timestamp_wa(const struct intel_context *ce, u32 *cs) in gen12_emit_timestamp_wa() argument
1106 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_timestamp_wa()
1126 gen12_emit_restore_scratch(const struct intel_context *ce, u32 *cs) in gen12_emit_restore_scratch() argument
1128 GEM_BUG_ON(lrc_ring_gpr0(ce->engine) == -1); in gen12_emit_restore_scratch()
1134 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_restore_scratch()
1135 (lrc_ring_gpr0(ce->engine) + 1) * sizeof(u32); in gen12_emit_restore_scratch()
1142 gen12_emit_cmd_buf_wa(const struct intel_context *ce, u32 *cs) in gen12_emit_cmd_buf_wa() argument
1144 GEM_BUG_ON(lrc_ring_cmd_buf_cctl(ce->engine) == -1); in gen12_emit_cmd_buf_wa()
1150 *cs++ = i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + in gen12_emit_cmd_buf_wa()
1151 (lrc_ring_cmd_buf_cctl(ce->engine) + 1) * sizeof(u32); in gen12_emit_cmd_buf_wa()
1164 gen12_emit_indirect_ctx_rcs(const struct intel_context *ce, u32 *cs) in gen12_emit_indirect_ctx_rcs() argument
1166 cs = gen12_emit_timestamp_wa(ce, cs); in gen12_emit_indirect_ctx_rcs()
1167 cs = gen12_emit_cmd_buf_wa(ce, cs); in gen12_emit_indirect_ctx_rcs()
1168 cs = gen12_emit_restore_scratch(ce, cs); in gen12_emit_indirect_ctx_rcs()
1174 gen12_emit_indirect_ctx_xcs(const struct intel_context *ce, u32 *cs) in gen12_emit_indirect_ctx_xcs() argument
1176 cs = gen12_emit_timestamp_wa(ce, cs); in gen12_emit_indirect_ctx_xcs()
1177 cs = gen12_emit_restore_scratch(ce, cs); in gen12_emit_indirect_ctx_xcs()
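The address computations in the gen12 workaround emitters above follow one pattern: the saved register state is a sequence of (register offset, value) dword pairs, so for an entry whose offset dword sits at index idx (as returned by the lrc_ring_*() helpers), the value dword is at idx + 1, and its GGTT address is i915_ggtt_offset(ce->state) + LRC_STATE_OFFSET + (idx + 1) * sizeof(u32). A small standalone sketch of that arithmetic; the state offset, the GGTT base and the GPR0 index used in main() are hypothetical values.

#include <stdint.h>
#include <stdio.h>

#define MODEL_LRC_STATE_OFFSET 4096u	/* illustrative offset of the reg state */

/* GGTT address of the value dword for the reg-state entry at index idx */
static uint32_t reg_value_ggtt_addr(uint32_t state_ggtt, unsigned int idx)
{
	return state_ggtt + MODEL_LRC_STATE_OFFSET +
	       (idx + 1) * (uint32_t)sizeof(uint32_t);
}

int main(void)
{
	/* hypothetical GGTT offset of ce->state and a hypothetical GPR0 index */
	printf("GPR0 scratch slot at %#x\n",
	       reg_value_ggtt_addr(0x00200000, 0x74));
	return 0;
}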
1182 static u32 context_wa_bb_offset(const struct intel_context *ce) in context_wa_bb_offset() argument
1184 return PAGE_SIZE * ce->wa_bb_page; in context_wa_bb_offset()
1187 static u32 *context_indirect_bb(const struct intel_context *ce) in context_indirect_bb() argument
1191 GEM_BUG_ON(!ce->wa_bb_page); in context_indirect_bb()
1193 ptr = ce->lrc_reg_state; in context_indirect_bb()
1195 ptr += context_wa_bb_offset(ce); in context_indirect_bb()
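context_indirect_bb() turns the CPU pointer to the register state back into a pointer to the per-context workaround batch page: ce->lrc_reg_state sits LRC_STATE_OFFSET bytes into the mapped image, so the code steps back to the start of the image (the intermediate "ptr -= LRC_STATE_OFFSET" line is not shown above because it does not reference ce) and then forward by wa_bb_page whole pages. A standalone model of that pointer arithmetic; the page size and state offset below are illustrative.

#include <stdint.h>
#include <stdio.h>

#define MODEL_PAGE_SIZE        4096u
#define MODEL_LRC_STATE_OFFSET MODEL_PAGE_SIZE	/* illustrative, not the real value */

/* models context_indirect_bb(): reg-state pointer -> workaround batch page */
static uint32_t *indirect_bb(uint32_t *lrc_reg_state, unsigned int wa_bb_page)
{
	char *ptr = (char *)lrc_reg_state;

	ptr -= MODEL_LRC_STATE_OFFSET;			/* back to start of the image */
	ptr += (size_t)wa_bb_page * MODEL_PAGE_SIZE;	/* context_wa_bb_offset() */

	return (uint32_t *)ptr;
}

int main(void)
{
	static uint32_t image[8 * MODEL_PAGE_SIZE / sizeof(uint32_t)];
	uint32_t *reg_state = (uint32_t *)((char *)image + MODEL_LRC_STATE_OFFSET);

	printf("wa_bb page starts at image + %td bytes\n",
	       (char *)indirect_bb(reg_state, 3) - (char *)image);
	return 0;
}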
1201 setup_indirect_ctx_bb(const struct intel_context *ce, in setup_indirect_ctx_bb() argument
1205 u32 * const start = context_indirect_bb(ce); in setup_indirect_ctx_bb()
1208 cs = emit(ce, start); in setup_indirect_ctx_bb()
1213 lrc_setup_indirect_ctx(ce->lrc_reg_state, engine, in setup_indirect_ctx_bb()
1214 i915_ggtt_offset(ce->state) + in setup_indirect_ctx_bb()
1215 context_wa_bb_offset(ce), in setup_indirect_ctx_bb()
1253 static u32 lrc_descriptor(const struct intel_context *ce) in lrc_descriptor() argument
1258 if (i915_vm_is_4lvl(ce->vm)) in lrc_descriptor()
1263 if (GRAPHICS_VER(ce->vm->i915) == 8) in lrc_descriptor()
1266 return i915_ggtt_offset(ce->state) | desc; in lrc_descriptor()
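lrc_descriptor() builds the context descriptor by OR'ing a handful of flag bits, chosen from the VM's addressing depth and the graphics version, into the page-aligned GGTT offset of the context state object. A standalone sketch of that composition; the flag names and bit positions below are placeholders, not the driver's GEN8_CTX_* values.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CTX_FLAG_VALID      (1u << 0)	/* hypothetical bit positions */
#define CTX_FLAG_ADDR_64B   (1u << 3)
#define CTX_FLAG_GEN8_QUIRK (1u << 5)

static uint32_t make_descriptor(uint32_t state_ggtt_offset,
				bool vm_is_4lvl, unsigned int graphics_ver)
{
	uint32_t desc = CTX_FLAG_VALID;

	if (vm_is_4lvl)			/* 64b vs 32b legacy addressing */
		desc |= CTX_FLAG_ADDR_64B;

	if (graphics_ver == 8)		/* gen8-only descriptor bit */
		desc |= CTX_FLAG_GEN8_QUIRK;

	/* the state offset is page aligned, so the flag bits fit below it */
	return state_ggtt_offset | desc;
}

int main(void)
{
	printf("desc=%#x\n", make_descriptor(0x00123000, true, 12));
	return 0;
}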
1269 u32 lrc_update_regs(const struct intel_context *ce, in lrc_update_regs() argument
1273 struct intel_ring *ring = ce->ring; in lrc_update_regs()
1274 u32 *regs = ce->lrc_reg_state; in lrc_update_regs()
1287 intel_sseu_make_rpcs(engine->gt, &ce->sseu); in lrc_update_regs()
1289 i915_oa_init_reg_state(ce, engine); in lrc_update_regs()
1292 if (ce->wa_bb_page) { in lrc_update_regs()
1293 u32 *(*fn)(const struct intel_context *ce, u32 *cs); in lrc_update_regs()
1296 if (ce->engine->class == RENDER_CLASS) in lrc_update_regs()
1301 setup_indirect_ctx_bb(ce, engine, fn); in lrc_update_regs()
1304 return lrc_descriptor(ce) | CTX_DESC_FORCE_RESTORE; in lrc_update_regs()
1307 void lrc_update_offsets(struct intel_context *ce, in lrc_update_offsets() argument
1310 set_offsets(ce->lrc_reg_state, reg_offsets(engine), engine, false); in lrc_update_offsets()
1313 void lrc_check_regs(const struct intel_context *ce, in lrc_check_regs() argument
1317 const struct intel_ring *ring = ce->ring; in lrc_check_regs()
1318 u32 *regs = ce->lrc_reg_state; in lrc_check_regs()
1676 static void st_update_runtime_underflow(struct intel_context *ce, s32 dt) in st_update_runtime_underflow() argument
1679 ce->runtime.num_underflow++; in st_update_runtime_underflow()
1680 ce->runtime.max_underflow = max_t(u32, ce->runtime.max_underflow, -dt); in st_update_runtime_underflow()
1684 void lrc_update_runtime(struct intel_context *ce) in lrc_update_runtime() argument
1689 if (intel_context_is_barrier(ce)) in lrc_update_runtime()
1692 old = ce->runtime.last; in lrc_update_runtime()
1693 ce->runtime.last = lrc_get_runtime(ce); in lrc_update_runtime()
1694 dt = ce->runtime.last - old; in lrc_update_runtime()
1697 CE_TRACE(ce, "runtime underflow: last=%u, new=%u, delta=%d\n", in lrc_update_runtime()
1698 old, ce->runtime.last, dt); in lrc_update_runtime()
1699 st_update_runtime_underflow(ce, dt); in lrc_update_runtime()
1703 ewma_runtime_add(&ce->runtime.avg, dt); in lrc_update_runtime()
1704 ce->runtime.total += dt; in lrc_update_runtime()
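lrc_update_runtime() samples CTX_TIMESTAMP (a free-running 32-bit counter, restored from ce->runtime.last by init_common_regs() above), subtracts the previous sample as a u32 so counter wraparound is handled, and treats a negative s32 result as an underflow to be traced rather than accumulated; otherwise the delta feeds the EWMA and the running total. A standalone model of just that delta logic, with the EWMA and the barrier-context check omitted.

#include <stdint.h>
#include <stdio.h>

struct runtime_model {
	uint32_t last;   /* most recent CTX_TIMESTAMP sample */
	uint64_t total;  /* accumulated busy time, in timestamp ticks */
};

static void update_runtime(struct runtime_model *rt, uint32_t sample)
{
	uint32_t old = rt->last;
	int32_t dt;

	rt->last = sample;
	dt = (int32_t)(sample - old);	/* wraparound-safe u32 delta */

	if (dt < 0) {
		/* the driver traces and counts this case as an underflow */
		printf("runtime underflow: last=%u, new=%u, delta=%d\n",
		       old, sample, dt);
		return;
	}

	rt->total += dt;
}

int main(void)
{
	struct runtime_model rt = { .last = 0xfffffff0, .total = 0 };

	update_runtime(&rt, 0x00000010);	/* counter wraps, still yields +32 */
	update_runtime(&rt, 0x00000001);	/* goes backwards: underflow */
	printf("total=%llu ticks\n", (unsigned long long)rt.total);
	return 0;
}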