Lines matching references to cs
79 u32 *cs; in emit_semaphore_signal() local
85 cs = intel_ring_begin(rq, 4); in emit_semaphore_signal()
86 if (IS_ERR(cs)) { in emit_semaphore_signal()
88 return PTR_ERR(cs); in emit_semaphore_signal()
91 *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT; in emit_semaphore_signal()
92 *cs++ = offset; in emit_semaphore_signal()
93 *cs++ = 0; in emit_semaphore_signal()
94 *cs++ = 1; in emit_semaphore_signal()
96 intel_ring_advance(rq, cs); in emit_semaphore_signal()
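
The listing elides the request setup and teardown around these four dwords. A minimal sketch of the whole emitter, assuming the usual intel_context_create_request()/intel_ring_begin()/intel_ring_advance() pattern and a semaphore slot living inside the engine's status page (the error unwind is reconstructed, not taken from the listing):

static int emit_semaphore_signal(struct intel_context *ce, void *slot)
{
        const u32 offset =
                i915_ggtt_offset(ce->engine->status_page.vma) +
                offset_in_page(slot);
        struct i915_request *rq;
        u32 *cs;

        rq = intel_context_create_request(ce);
        if (IS_ERR(rq))
                return PTR_ERR(rq);

        cs = intel_ring_begin(rq, 4);           /* reserve 4 dwords */
        if (IS_ERR(cs)) {
                i915_request_add(rq);
                return PTR_ERR(cs);
        }

        *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;  /* immediate store */
        *cs++ = offset;                                 /* GGTT address (low) */
        *cs++ = 0;                                      /* GGTT address (high) */
        *cs++ = 1;                                      /* value: wake any waiter */

        intel_ring_advance(rq, cs);
        i915_request_add(rq);
        return 0;
}
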
370 u32 *cs; in __live_lrc_state() local
392 cs = intel_ring_begin(rq, 4 * MAX_IDX); in __live_lrc_state()
393 if (IS_ERR(cs)) { in __live_lrc_state()
394 err = PTR_ERR(cs); in __live_lrc_state()
399 *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT; in __live_lrc_state()
400 *cs++ = i915_mmio_reg_offset(RING_START(engine->mmio_base)); in __live_lrc_state()
401 *cs++ = i915_ggtt_offset(scratch) + RING_START_IDX * sizeof(u32); in __live_lrc_state()
402 *cs++ = 0; in __live_lrc_state()
406 *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT; in __live_lrc_state()
407 *cs++ = i915_mmio_reg_offset(RING_TAIL(engine->mmio_base)); in __live_lrc_state()
408 *cs++ = i915_ggtt_offset(scratch) + RING_TAIL_IDX * sizeof(u32); in __live_lrc_state()
409 *cs++ = 0; in __live_lrc_state()
428 cs = i915_gem_object_pin_map(scratch->obj, I915_MAP_WB); in __live_lrc_state()
429 if (IS_ERR(cs)) { in __live_lrc_state()
430 err = PTR_ERR(cs); in __live_lrc_state()
435 if (cs[n] != expected[n]) { in __live_lrc_state()
437 engine->name, n, cs[n], expected[n]); in __live_lrc_state()
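
Only the cs-bearing lines of __live_lrc_state() survive the filter. The two hypothetical helpers below sketch what they add up to: the GPU side stores RING_START and RING_TAIL into a scratch page, and the CPU side reads the page back and compares it with caller-supplied expected[] values. The enum, the error message text and the helper names are assumptions.

enum { RING_START_IDX = 0, RING_TAIL_IDX, MAX_IDX };    /* as implied by the listing */

static u32 *emit_lrc_state_probe(u32 *cs, struct intel_engine_cs *engine,
                                 struct i915_vma *scratch)
{
        *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT;
        *cs++ = i915_mmio_reg_offset(RING_START(engine->mmio_base));
        *cs++ = i915_ggtt_offset(scratch) + RING_START_IDX * sizeof(u32);
        *cs++ = 0;

        *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT;
        *cs++ = i915_mmio_reg_offset(RING_TAIL(engine->mmio_base));
        *cs++ = i915_ggtt_offset(scratch) + RING_TAIL_IDX * sizeof(u32);
        *cs++ = 0;

        return cs;
}

static int check_lrc_state_probe(struct intel_engine_cs *engine,
                                 struct i915_vma *scratch,
                                 const u32 *expected)
{
        u32 *cs;
        int n, err = 0;

        cs = i915_gem_object_pin_map(scratch->obj, I915_MAP_WB);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        for (n = 0; n < MAX_IDX; n++) {
                if (cs[n] != expected[n]) {
                        pr_err("%s: stored register[%d] %08x did not match expected %08x\n",
                               engine->name, n, cs[n], expected[n]);
                        err = -EINVAL;
                }
        }

        i915_gem_object_unpin_map(scratch->obj);
        return err;
}
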
493 u32 *cs; in gpr_make_dirty() local
500 cs = intel_ring_begin(rq, 2 * NUM_GPR_DW + 2); in gpr_make_dirty()
501 if (IS_ERR(cs)) { in gpr_make_dirty()
503 return PTR_ERR(cs); in gpr_make_dirty()
506 *cs++ = MI_LOAD_REGISTER_IMM(NUM_GPR_DW); in gpr_make_dirty()
508 *cs++ = CS_GPR(ce->engine, n); in gpr_make_dirty()
509 *cs++ = STACK_MAGIC; in gpr_make_dirty()
511 *cs++ = MI_NOOP; in gpr_make_dirty()
513 intel_ring_advance(rq, cs); in gpr_make_dirty()
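
gpr_make_dirty() appears here almost in full; a sketch of the complete function under the same assumptions (CS_GPR(), NUM_GPR_DW and STACK_MAGIC are the selftest's own definitions, and the request bookkeeping is reconstructed):

static int gpr_make_dirty(struct intel_context *ce)
{
        struct i915_request *rq;
        u32 *cs;
        int n;

        rq = intel_context_create_request(ce);
        if (IS_ERR(rq))
                return PTR_ERR(rq);

        cs = intel_ring_begin(rq, 2 * NUM_GPR_DW + 2);
        if (IS_ERR(cs)) {
                i915_request_add(rq);
                return PTR_ERR(cs);
        }

        *cs++ = MI_LOAD_REGISTER_IMM(NUM_GPR_DW);
        for (n = 0; n < NUM_GPR_DW; n++) {
                *cs++ = CS_GPR(ce->engine, n);  /* register offset */
                *cs++ = STACK_MAGIC;            /* recognisable poison value */
        }
        *cs++ = MI_NOOP;                        /* pad to an even dword count */

        intel_ring_advance(rq, cs);
        i915_request_add(rq);
        return 0;
}
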
528 u32 *cs; in __gpr_read() local
536 cs = intel_ring_begin(rq, 6 + 4 * NUM_GPR_DW); in __gpr_read()
537 if (IS_ERR(cs)) { in __gpr_read()
539 return ERR_CAST(cs); in __gpr_read()
542 *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE; in __gpr_read()
543 *cs++ = MI_NOOP; in __gpr_read()
545 *cs++ = MI_SEMAPHORE_WAIT | in __gpr_read()
549 *cs++ = 0; in __gpr_read()
550 *cs++ = offset; in __gpr_read()
551 *cs++ = 0; in __gpr_read()
554 *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT; in __gpr_read()
555 *cs++ = CS_GPR(ce->engine, n); in __gpr_read()
556 *cs++ = i915_ggtt_offset(scratch) + n * sizeof(u32); in __gpr_read()
557 *cs++ = 0; in __gpr_read()
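
For __gpr_read() the flag lines of the MI_SEMAPHORE_WAIT header are elided by the filter; the sketch below fills them in as an assumption (poll until the slot becomes non-zero) and omits the scratch-vma activity tracking the real test presumably performs:

static struct i915_request *
__gpr_read(struct intel_context *ce, struct i915_vma *scratch, u32 *slot)
{
        const u32 offset =
                i915_ggtt_offset(ce->engine->status_page.vma) +
                offset_in_page(slot);
        struct i915_request *rq;
        u32 *cs;
        int n;

        rq = intel_context_create_request(ce);
        if (IS_ERR(rq))
                return rq;

        cs = intel_ring_begin(rq, 6 + 4 * NUM_GPR_DW);
        if (IS_ERR(cs)) {
                i915_request_add(rq);
                return ERR_CAST(cs);
        }

        *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;  /* allow preemption while parked */
        *cs++ = MI_NOOP;

        *cs++ = MI_SEMAPHORE_WAIT |             /* flags assumed; elided by the listing */
                MI_SEMAPHORE_GLOBAL_GTT |
                MI_SEMAPHORE_POLL |
                MI_SEMAPHORE_SAD_NEQ_SDD;
        *cs++ = 0;                              /* data to compare against */
        *cs++ = offset;                         /* GGTT address of the slot */
        *cs++ = 0;

        for (n = 0; n < NUM_GPR_DW; n++) {
                *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT;
                *cs++ = CS_GPR(ce->engine, n);
                *cs++ = i915_ggtt_offset(scratch) + n * sizeof(u32);
                *cs++ = 0;
        }

        intel_ring_advance(rq, cs);
        return rq;                              /* caller adds and waits on the request */
}
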
583 u32 *cs; in __live_lrc_gpr() local
630 cs = i915_gem_object_pin_map_unlocked(scratch->obj, I915_MAP_WB); in __live_lrc_gpr()
631 if (IS_ERR(cs)) { in __live_lrc_gpr()
632 err = PTR_ERR(cs); in __live_lrc_gpr()
637 if (cs[n]) { in __live_lrc_gpr()
641 cs[n]); in __live_lrc_gpr()
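
The __live_lrc_gpr() lines are the CPU-side readback. A hypothetical checker capturing the same logic, assuming the test expects a fresh context to observe zeroed GPRs even after another context ran gpr_make_dirty():

static int check_gpr_clear(struct intel_engine_cs *engine,
                           struct i915_vma *scratch)
{
        u32 *cs;
        int n, err = 0;

        cs = i915_gem_object_pin_map_unlocked(scratch->obj, I915_MAP_WB);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        for (n = 0; n < NUM_GPR_DW; n++) {
                if (cs[n]) {                    /* anything non-zero leaked in */
                        pr_err("%s: GPR[%d] was not zero, found 0x%08x!\n",
                               engine->name, n, cs[n]);
                        err = -EINVAL;
                        break;
                }
        }

        i915_gem_object_unpin_map(scratch->obj);
        return err;
}
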
705 u32 *cs; in create_timestamp() local
712 cs = intel_ring_begin(rq, 10); in create_timestamp()
713 if (IS_ERR(cs)) { in create_timestamp()
714 err = PTR_ERR(cs); in create_timestamp()
718 *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE; in create_timestamp()
719 *cs++ = MI_NOOP; in create_timestamp()
721 *cs++ = MI_SEMAPHORE_WAIT | in create_timestamp()
725 *cs++ = 0; in create_timestamp()
726 *cs++ = offset; in create_timestamp()
727 *cs++ = 0; in create_timestamp()
729 *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT; in create_timestamp()
730 *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(rq->engine->mmio_base)); in create_timestamp()
731 *cs++ = offset + idx * sizeof(u32); in create_timestamp()
732 *cs++ = 0; in create_timestamp()
734 intel_ring_advance(rq, cs); in create_timestamp()
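
create_timestamp() combines the semaphore-wait preamble with a single SRM of RING_CTX_TIMESTAMP; a sketch with the semaphore flags and the request bookkeeping reconstructed as assumptions:

static struct i915_request *
create_timestamp(struct intel_context *ce, void *slot, int idx)
{
        const u32 offset =
                i915_ggtt_offset(ce->engine->status_page.vma) +
                offset_in_page(slot);
        struct i915_request *rq;
        u32 *cs;
        int err;

        rq = intel_context_create_request(ce);
        if (IS_ERR(rq))
                return rq;

        cs = intel_ring_begin(rq, 10);
        if (IS_ERR(cs)) {
                err = PTR_ERR(cs);
                goto err;
        }

        *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;
        *cs++ = MI_NOOP;

        *cs++ = MI_SEMAPHORE_WAIT |             /* flags assumed; elided by the listing */
                MI_SEMAPHORE_GLOBAL_GTT |
                MI_SEMAPHORE_POLL |
                MI_SEMAPHORE_SAD_NEQ_SDD;
        *cs++ = 0;
        *cs++ = offset;
        *cs++ = 0;

        *cs++ = MI_STORE_REGISTER_MEM_GEN8 | MI_USE_GGTT;
        *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(rq->engine->mmio_base));
        *cs++ = offset + idx * sizeof(u32);     /* store next to the semaphore slot */
        *cs++ = 0;

        intel_ring_advance(rq, cs);

        err = 0;
err:
        i915_request_get(rq);
        i915_request_add(rq);
        if (err) {
                i915_request_put(rq);
                return ERR_PTR(err);
        }
        return rq;
}
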
916 u32 dw, x, *cs, *hw; in store_context() local
923 cs = i915_gem_object_pin_map_unlocked(batch->obj, I915_MAP_WC); in store_context()
924 if (IS_ERR(cs)) { in store_context()
926 return ERR_CAST(cs); in store_context()
956 *cs++ = MI_STORE_REGISTER_MEM_GEN8; in store_context()
957 *cs++ = hw[dw]; in store_context()
958 *cs++ = lower_32_bits(scratch->node.start + x); in store_context()
959 *cs++ = upper_32_bits(scratch->node.start + x); in store_context()
967 *cs++ = MI_BATCH_BUFFER_END; in store_context()
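
store_context() builds a capture batch, but its walk over the MI_LOAD_REGISTER_IMM headers of the engine's context image is not visible here. The sketch below replaces that walk with a caller-supplied list of register offsets; store_context_sketch, hw[] and count are hypothetical names:

static struct i915_vma *store_context_sketch(struct i915_vma *batch,
                                             struct i915_vma *scratch,
                                             const u32 *hw, u32 count)
{
        u32 x = 0, n, *cs;

        cs = i915_gem_object_pin_map_unlocked(batch->obj, I915_MAP_WC);
        if (IS_ERR(cs))
                return ERR_CAST(cs);

        /* One SRM per register named in the context image. */
        for (n = 0; n < count; n++) {
                *cs++ = MI_STORE_REGISTER_MEM_GEN8;
                *cs++ = hw[n];                                  /* register offset */
                *cs++ = lower_32_bits(scratch->node.start + x); /* destination (low) */
                *cs++ = upper_32_bits(scratch->node.start + x); /* destination (high) */
                x += sizeof(u32);
        }

        *cs++ = MI_BATCH_BUFFER_END;

        i915_gem_object_flush_map(batch->obj);
        i915_gem_object_unpin_map(batch->obj);

        return batch;
}
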
1000 u32 *cs; in record_registers() local
1033 cs = intel_ring_begin(rq, 14); in record_registers()
1034 if (IS_ERR(cs)) { in record_registers()
1035 err = PTR_ERR(cs); in record_registers()
1039 *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE; in record_registers()
1040 *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8); in record_registers()
1041 *cs++ = lower_32_bits(b_before->node.start); in record_registers()
1042 *cs++ = upper_32_bits(b_before->node.start); in record_registers()
1044 *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE; in record_registers()
1045 *cs++ = MI_SEMAPHORE_WAIT | in record_registers()
1049 *cs++ = 0; in record_registers()
1050 *cs++ = i915_ggtt_offset(ce->engine->status_page.vma) + in record_registers()
1052 *cs++ = 0; in record_registers()
1053 *cs++ = MI_NOOP; in record_registers()
1055 *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE; in record_registers()
1056 *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8); in record_registers()
1057 *cs++ = lower_32_bits(b_after->node.start); in record_registers()
1058 *cs++ = upper_32_bits(b_after->node.start); in record_registers()
1060 intel_ring_advance(rq, cs); in record_registers()
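
record_registers() emits this 14-dword ring payload: run the "before" capture batch, park on a semaphore so the context state can be poisoned from outside, then run the "after" capture batch. Restated below in context; the semaphore flags, offset_in_page(sema) and the unwind label are filled in as assumptions, and rq, b_before and b_after come from the surrounding function:

        cs = intel_ring_begin(rq, 14);
        if (IS_ERR(cs)) {
                err = PTR_ERR(cs);
                goto err_rq;                    /* hypothetical unwind label */
        }

        *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
        *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8);    /* second-level batch */
        *cs++ = lower_32_bits(b_before->node.start);
        *cs++ = upper_32_bits(b_before->node.start);

        *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;
        *cs++ = MI_SEMAPHORE_WAIT |             /* flags assumed; elided by the listing */
                MI_SEMAPHORE_GLOBAL_GTT |
                MI_SEMAPHORE_POLL |
                MI_SEMAPHORE_SAD_NEQ_SDD;
        *cs++ = 0;
        *cs++ = i915_ggtt_offset(ce->engine->status_page.vma) +
                offset_in_page(sema);           /* sema: shared slot (assumed name) */
        *cs++ = 0;
        *cs++ = MI_NOOP;

        *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
        *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8);
        *cs++ = lower_32_bits(b_after->node.start);
        *cs++ = upper_32_bits(b_after->node.start);

        intel_ring_advance(rq, cs);
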
1080 u32 dw, *cs, *hw; in load_context() local
1087 cs = i915_gem_object_pin_map_unlocked(batch->obj, I915_MAP_WC); in load_context()
1088 if (IS_ERR(cs)) { in load_context()
1090 return ERR_CAST(cs); in load_context()
1118 *cs++ = MI_LOAD_REGISTER_IMM(len); in load_context()
1120 *cs++ = hw[dw]; in load_context()
1121 *cs++ = poison; in load_context()
1127 *cs++ = MI_BATCH_BUFFER_END; in load_context()
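
load_context() is the write-side twin of store_context(): it overwrites the same registers with a poison value via LRI. A hypothetical helper for its inner loop, where regs[] again stands in for offsets harvested from the context image:

static u32 *emit_poison_lri(u32 *cs, const u32 *regs, u32 len, u32 poison)
{
        u32 n;

        *cs++ = MI_LOAD_REGISTER_IMM(len);
        for (n = 0; n < len; n++) {
                *cs++ = regs[n];        /* register offset */
                *cs++ = poison;         /* poison value to load */
        }

        return cs;
}

As in store_context(), the batch is terminated with MI_BATCH_BUFFER_END before being unmapped.
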
1141 u32 *cs; in poison_registers() local
1158 cs = intel_ring_begin(rq, 8); in poison_registers()
1159 if (IS_ERR(cs)) { in poison_registers()
1160 err = PTR_ERR(cs); in poison_registers()
1164 *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE; in poison_registers()
1165 *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8); in poison_registers()
1166 *cs++ = lower_32_bits(batch->node.start); in poison_registers()
1167 *cs++ = upper_32_bits(batch->node.start); in poison_registers()
1169 *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT; in poison_registers()
1170 *cs++ = i915_ggtt_offset(ce->engine->status_page.vma) + in poison_registers()
1172 *cs++ = 0; in poison_registers()
1173 *cs++ = 1; in poison_registers()
1175 intel_ring_advance(rq, cs); in poison_registers()
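
poison_registers() chains the poison batch with a semaphore signal, so the recording context wakes up and captures the "after" state only once the registers have been clobbered. Restated below with the same assumptions as the record_registers() sketch:

        cs = intel_ring_begin(rq, 8);
        if (IS_ERR(cs)) {
                err = PTR_ERR(cs);
                goto err_rq;                    /* hypothetical unwind label */
        }

        *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
        *cs++ = MI_BATCH_BUFFER_START_GEN8 | BIT(8);    /* run the poison batch */
        *cs++ = lower_32_bits(batch->node.start);
        *cs++ = upper_32_bits(batch->node.start);

        *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;
        *cs++ = i915_ggtt_offset(ce->engine->status_page.vma) +
                offset_in_page(sema);           /* sema: shared slot (assumed name) */
        *cs++ = 0;
        *cs++ = 1;                              /* release the MI_SEMAPHORE_WAIT above */

        intel_ring_advance(rq, cs);
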
1477 emit_indirect_ctx_bb_canary(const struct intel_context *ce, u32 *cs) in emit_indirect_ctx_bb_canary() argument
1479 *cs++ = MI_STORE_REGISTER_MEM_GEN8 | in emit_indirect_ctx_bb_canary()
1482 *cs++ = i915_mmio_reg_offset(RING_START(0)); in emit_indirect_ctx_bb_canary()
1483 *cs++ = i915_ggtt_offset(ce->state) + in emit_indirect_ctx_bb_canary()
1486 *cs++ = 0; in emit_indirect_ctx_bb_canary()
1488 return cs; in emit_indirect_ctx_bb_canary()
1494 u32 *cs = context_indirect_bb(ce); in indirect_ctx_bb_setup() local
1496 cs[CTX_BB_CANARY_INDEX] = 0xdeadf00d; in indirect_ctx_bb_setup()
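
The last two functions wire a canary into the per-context indirect (wa) batch buffer: the CPU pre-fills a spare dword with 0xdeadf00d, and the canary SRM overwrites it with RING_START if the indirect BB actually executed. In the sketch below, the SRM flag lines dropped by the filter and the helpers CTX_BB_CANARY_OFFSET/INDEX, context_wa_bb_offset() and setup_indirect_ctx_bb() are assumptions:

static u32 *
emit_indirect_ctx_bb_canary(const struct intel_context *ce, u32 *cs)
{
        *cs++ = MI_STORE_REGISTER_MEM_GEN8 |
                MI_SRM_LRM_GLOBAL_GTT |         /* flags assumed; elided by the listing */
                MI_LRI_LRM_CS_MMIO;
        *cs++ = i915_mmio_reg_offset(RING_START(0));    /* engine-relative with CS_MMIO */
        *cs++ = i915_ggtt_offset(ce->state) +
                context_wa_bb_offset(ce) +      /* assumed helper */
                CTX_BB_CANARY_OFFSET;
        *cs++ = 0;

        return cs;
}

static void indirect_ctx_bb_setup(struct intel_context *ce)
{
        u32 *cs = context_indirect_bb(ce);      /* CPU view of the wa_bb page */

        cs[CTX_BB_CANARY_INDEX] = 0xdeadf00d;   /* must be overwritten by the canary */

        setup_indirect_ctx_bb(ce, ce->engine, emit_indirect_ctx_bb_canary);
}
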