Lines matching refs: gt
31 struct intel_gt *gt; member
39 static int hang_init(struct hang *h, struct intel_gt *gt) in hang_init() argument
45 h->gt = gt; in hang_init()
47 h->ctx = kernel_context(gt->i915, NULL); in hang_init()
53 h->hws = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_init()
59 h->obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_init()
74 i915_coherent_map_type(gt->i915, h->obj, false)); in hang_init()
119 struct intel_gt *gt = h->gt; in hang_create_request() local
129 obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_create_request()
135 vaddr = i915_gem_object_pin_map_unlocked(obj, i915_coherent_map_type(gt->i915, obj, false)); in hang_create_request()
185 if (GRAPHICS_VER(gt->i915) >= 8) { in hang_create_request()
199 } else if (GRAPHICS_VER(gt->i915) >= 6) { in hang_create_request()
212 } else if (GRAPHICS_VER(gt->i915) >= 4) { in hang_create_request()
239 intel_gt_chipset_flush(engine->gt); in hang_create_request()
248 if (GRAPHICS_VER(gt->i915) <= 5) in hang_create_request()
274 intel_gt_chipset_flush(h->gt); in hang_fini()
284 igt_flush_test(h->gt->i915); in hang_fini()
299 struct intel_gt *gt = arg; in igt_hang_sanitycheck() local
308 err = hang_init(&h, gt); in igt_hang_sanitycheck()
312 for_each_engine(engine, gt, id) { in igt_hang_sanitycheck()
330 intel_gt_chipset_flush(engine->gt); in igt_hang_sanitycheck()
335 intel_wedge_on_timeout(&w, gt, HZ / 10 /* 100ms */) in igt_hang_sanitycheck()
338 if (intel_gt_is_wedged(gt)) in igt_hang_sanitycheck()
363 struct intel_gt *gt = arg; in igt_reset_nop() local
364 struct i915_gpu_error *global = &gt->i915->gpu_error; in igt_reset_nop()
376 for_each_engine(engine, gt, id) { in igt_reset_nop()
404 igt_global_reset_lock(gt); in igt_reset_nop()
405 intel_gt_reset(gt, ALL_ENGINES, NULL); in igt_reset_nop()
406 igt_global_reset_unlock(gt); in igt_reset_nop()
408 if (intel_gt_is_wedged(gt)) { in igt_reset_nop()
421 err = igt_flush_test(gt->i915); in igt_reset_nop()
429 if (igt_flush_test(gt->i915)) { in igt_reset_nop()
439 struct intel_gt *gt = arg; in igt_reset_nop_engine() local
440 struct i915_gpu_error *global = &gt->i915->gpu_error; in igt_reset_nop_engine()
446 if (!intel_has_reset_engine(gt)) in igt_reset_nop_engine()
449 for_each_engine(engine, gt, id) { in igt_reset_nop_engine()
474 set_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in igt_reset_nop_engine()
491 drm_info_printer(gt->i915->drm.dev); in igt_reset_nop_engine()
502 intel_gt_set_wedged(gt); in igt_reset_nop_engine()
531 clear_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in igt_reset_nop_engine()
537 if (igt_flush_test(gt->i915)) in igt_reset_nop_engine()
559 struct intel_gt *gt = arg; in igt_reset_fail_engine() local
565 if (!intel_has_reset_engine(gt)) in igt_reset_fail_engine()
568 for_each_engine(engine, gt, id) { in igt_reset_fail_engine()
585 set_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in igt_reset_fail_engine()
611 drm_info_printer(gt->i915->drm.dev); in igt_reset_fail_engine()
622 intel_gt_set_wedged(gt); in igt_reset_fail_engine()
661 drm_info_printer(gt->i915->drm.dev); in igt_reset_fail_engine()
682 clear_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in igt_reset_fail_engine()
686 if (igt_flush_test(gt->i915)) in igt_reset_fail_engine()
695 static int __igt_reset_engine(struct intel_gt *gt, bool active) in __igt_reset_engine() argument
697 struct i915_gpu_error *global = &gt->i915->gpu_error; in __igt_reset_engine()
705 if (!intel_has_reset_engine(gt)) in __igt_reset_engine()
709 err = hang_init(&h, gt); in __igt_reset_engine()
714 for_each_engine(engine, gt, id) { in __igt_reset_engine()
737 set_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in __igt_reset_engine()
764 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in __igt_reset_engine()
827 clear_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in __igt_reset_engine()
835 err = igt_flush_test(gt->i915); in __igt_reset_engine()
842 if (intel_gt_is_wedged(gt)) { in __igt_reset_engine()
889 intel_gt_set_wedged(rq->engine->gt); in active_request_put()
968 static int __igt_reset_engines(struct intel_gt *gt, in __igt_reset_engines() argument
972 struct i915_gpu_error *global = &gt->i915->gpu_error; in __igt_reset_engines()
982 if (!intel_has_reset_engine(gt)) in __igt_reset_engines()
986 err = hang_init(&h, gt); in __igt_reset_engines()
994 for_each_engine(engine, gt, id) { in __igt_reset_engines()
1015 for_each_engine(other, gt, tmp) { in __igt_reset_engines()
1045 set_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in __igt_reset_engines()
1071 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in __igt_reset_engines()
1115 intel_gt_set_wedged(gt); in __igt_reset_engines()
1122 drm_info_printer(gt->i915->drm.dev); in __igt_reset_engines()
1134 intel_gt_set_wedged(gt); in __igt_reset_engines()
1147 drm_info_printer(gt->i915->drm.dev); in __igt_reset_engines()
1168 clear_bit(I915_RESET_ENGINE + id, &gt->reset.flags); in __igt_reset_engines()
1187 for_each_engine(other, gt, tmp) { in __igt_reset_engines()
1227 err = igt_flush_test(gt->i915); in __igt_reset_engines()
1234 if (intel_gt_is_wedged(gt)) in __igt_reset_engines()
1263 struct intel_gt *gt = arg; in igt_reset_engines() local
1269 if (!(gt->i915->caps.scheduler & I915_SCHEDULER_CAP_PRIORITY)) in igt_reset_engines()
1281 static u32 fake_hangcheck(struct intel_gt *gt, intel_engine_mask_t mask) in fake_hangcheck() argument
1283 u32 count = i915_reset_count(&gt->i915->gpu_error); in fake_hangcheck()
1285 intel_gt_reset(gt, mask, NULL); in fake_hangcheck()
1292 struct intel_gt *gt = arg; in igt_reset_wait() local
1293 struct i915_gpu_error *global = &gt->i915->gpu_error; in igt_reset_wait()
1294 struct intel_engine_cs *engine = gt->engine[RCS0]; in igt_reset_wait()
1306 igt_global_reset_lock(gt); in igt_reset_wait()
1308 err = hang_init(&h, gt); in igt_reset_wait()
1325 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in igt_reset_wait()
1331 intel_gt_set_wedged(gt); in igt_reset_wait()
1337 reset_count = fake_hangcheck(gt, ALL_ENGINES); in igt_reset_wait()
1358 igt_global_reset_unlock(gt); in igt_reset_wait()
1360 if (intel_gt_is_wedged(gt)) in igt_reset_wait()
1419 static int __igt_reset_evict_vma(struct intel_gt *gt, in __igt_reset_evict_vma() argument
1424 struct intel_engine_cs *engine = gt->engine[RCS0]; in __igt_reset_evict_vma()
1433 if (!gt->ggtt->num_fences && flags & EXEC_OBJECT_NEEDS_FENCE) in __igt_reset_evict_vma()
1441 err = hang_init(&h, gt); in __igt_reset_evict_vma()
1447 obj = i915_gem_object_create_internal(gt->i915, SZ_1M); in __igt_reset_evict_vma()
1521 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in __igt_reset_evict_vma()
1527 intel_gt_set_wedged(gt); in __igt_reset_evict_vma()
1545 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in __igt_reset_evict_vma()
1550 intel_gt_set_wedged(gt); in __igt_reset_evict_vma()
1555 igt_global_reset_lock(gt); in __igt_reset_evict_vma()
1556 fake_hangcheck(gt, rq->engine->mask); in __igt_reset_evict_vma()
1557 igt_global_reset_unlock(gt); in __igt_reset_evict_vma()
1563 intel_wedge_on_timeout(&w, gt, HZ / 10 /* 100ms */) in __igt_reset_evict_vma()
1575 if (intel_gt_is_wedged(gt)) in __igt_reset_evict_vma()
1583 struct intel_gt *gt = arg; in igt_reset_evict_ggtt() local
1585 return __igt_reset_evict_vma(gt, &gt->ggtt->vm, in igt_reset_evict_ggtt()
1591 struct intel_gt *gt = arg; in igt_reset_evict_ppgtt() local
1596 if (INTEL_PPGTT(gt->i915) < INTEL_PPGTT_FULL) in igt_reset_evict_ppgtt()
1599 ppgtt = i915_ppgtt_create(gt, 0); in igt_reset_evict_ppgtt()
1603 err = __igt_reset_evict_vma(gt, &ppgtt->vm, in igt_reset_evict_ppgtt()
1612 struct intel_gt *gt = arg; in igt_reset_evict_fence() local
1614 return __igt_reset_evict_vma(gt, &gt->ggtt->vm, in igt_reset_evict_fence()
1618 static int wait_for_others(struct intel_gt *gt, in wait_for_others() argument
1624 for_each_engine(engine, gt, id) { in wait_for_others()
1637 struct intel_gt *gt = arg; in igt_reset_queue() local
1638 struct i915_gpu_error *global = &gt->i915->gpu_error; in igt_reset_queue()
1646 igt_global_reset_lock(gt); in igt_reset_queue()
1648 err = hang_init(&h, gt); in igt_reset_queue()
1652 for_each_engine(engine, gt, id) { in igt_reset_queue()
1706 err = wait_for_others(gt, engine); in igt_reset_queue()
1714 intel_gt_set_wedged(gt); in igt_reset_queue()
1719 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in igt_reset_queue()
1730 intel_gt_set_wedged(gt); in igt_reset_queue()
1736 reset_count = fake_hangcheck(gt, BIT(id)); in igt_reset_queue()
1772 intel_gt_chipset_flush(engine->gt); in igt_reset_queue()
1789 err = igt_flush_test(gt->i915); in igt_reset_queue()
1799 igt_global_reset_unlock(gt); in igt_reset_queue()
1801 if (intel_gt_is_wedged(gt)) in igt_reset_queue()
1809 struct intel_gt *gt = arg; in igt_handle_error() local
1810 struct i915_gpu_error *global = &gt->i915->gpu_error; in igt_handle_error()
1811 struct intel_engine_cs *engine = gt->engine[RCS0]; in igt_handle_error()
1819 if (!intel_has_reset_engine(gt)) in igt_handle_error()
1825 err = hang_init(&h, gt); in igt_handle_error()
1842 struct drm_printer p = drm_info_printer(gt->i915->drm.dev); in igt_handle_error()
1848 intel_gt_set_wedged(gt); in igt_handle_error()
1857 intel_gt_handle_error(gt, engine->mask, 0, NULL); in igt_handle_error()
1918 err = hang_init(&h, engine->gt); in igt_atomic_reset_engine()
1940 intel_gt_set_wedged(engine->gt); in igt_atomic_reset_engine()
1947 intel_wedge_on_timeout(&w, engine->gt, HZ / 20 /* 50ms */) in igt_atomic_reset_engine()
1949 if (intel_gt_is_wedged(engine->gt)) in igt_atomic_reset_engine()
1961 struct intel_gt *gt = arg; in igt_reset_engines_atomic() local
1967 if (!intel_has_reset_engine(gt)) in igt_reset_engines_atomic()
1970 if (intel_uc_uses_guc_submission(&gt->uc)) in igt_reset_engines_atomic()
1973 igt_global_reset_lock(gt); in igt_reset_engines_atomic()
1976 if (!igt_force_reset(gt)) in igt_reset_engines_atomic()
1983 for_each_engine(engine, gt, id) { in igt_reset_engines_atomic()
1992 igt_force_reset(gt); in igt_reset_engines_atomic()
1994 igt_global_reset_unlock(gt); in igt_reset_engines_atomic()
2017 struct intel_gt *gt = &i915->gt; in intel_hangcheck_live_selftests() local
2021 if (!intel_has_gpu_reset(gt)) in intel_hangcheck_live_selftests()
2024 if (intel_gt_is_wedged(gt)) in intel_hangcheck_live_selftests()
2027 wakeref = intel_runtime_pm_get(gt->uncore->rpm); in intel_hangcheck_live_selftests()
2029 err = intel_gt_live_subtests(tests, gt); in intel_hangcheck_live_selftests()
2031 intel_runtime_pm_put(gt->uncore->rpm, wakeref); in intel_hangcheck_live_selftests()
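
The set_bit()/clear_bit() references above (lines 474/531, 585/682, 737/827 and 1045/1168) all follow the same bracketing pattern: before a per-engine reset is exercised, I915_RESET_ENGINE + id is marked in gt->reset.flags, and it is cleared again once the test is done with that engine. A minimal sketch of that bracketing follows; the helper name is hypothetical, the actual reset and verification steps are elided, and the usual i915 gt headers are assumed:

	static void selftest_bracket_engine_reset(struct intel_gt *gt,
						  struct intel_engine_cs *engine,
						  enum intel_engine_id id)
	{
		/* Announce to the rest of the driver that this engine is being reset. */
		set_bit(I915_RESET_ENGINE + id, &gt->reset.flags);

		/* ... perform and verify the per-engine reset here ... */

		/* Allow normal hangcheck/reset handling to resume on this engine. */
		clear_bit(I915_RESET_ENGINE + id, &gt->reset.flags);
	}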
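
The final group of references (lines 2017-2031, intel_hangcheck_live_selftests()) shows the entry-point pattern for this selftest file: skip when the GT has no GPU reset support, refuse to run on an already-wedged GT, and hold a runtime-PM wakeref around intel_gt_live_subtests(). A minimal sketch of that pattern, with an illustrative subtest table, assumed early-return values, and headers omitted for brevity:

	int intel_hangcheck_live_selftests(struct drm_i915_private *i915)
	{
		static const struct i915_subtest tests[] = {
			SUBTEST(igt_hang_sanitycheck),
			/* ... remaining hang/reset subtests elided ... */
		};
		struct intel_gt *gt = &i915->gt;
		intel_wakeref_t wakeref;
		int err;

		if (!intel_has_gpu_reset(gt))
			return 0;	/* nothing to test without reset support */

		if (intel_gt_is_wedged(gt))
			return -EIO;	/* assumed: a wedged GT cannot be exercised */

		/* Keep the device awake for the whole run of subtests. */
		wakeref = intel_runtime_pm_get(gt->uncore->rpm);

		err = intel_gt_live_subtests(tests, gt);

		intel_runtime_pm_put(gt->uncore->rpm, wakeref);

		return err;
	}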