/linux/drivers/gpu/drm/i915/gt/

intel_gt_pm.c
    33 intel_gt_pm_get(gt); in user_forcewake()
    40 intel_gt_pm_put(gt); in user_forcewake()
    58 gt->stats.total = in runtime_end()
    67 struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref); in __gt_unpark() local
    70 GT_TRACE(gt, "\n"); in __gt_unpark()
    91 runtime_begin(gt); in __gt_unpark()
    98 struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref); in __gt_park() local
    102 GT_TRACE(gt, "\n"); in __gt_park()
    104 runtime_end(gt); in __gt_park()
    129 intel_wakeref_init(&gt->wakeref, gt->uncore->rpm, &wf_ops); in intel_gt_pm_init_early()
    [all …]
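
Note: taken together, the intel_gt_pm.c hits sketch the park/unpark flow — the GT wakeref recovers its intel_gt via container_of() and is bound to callbacks in intel_gt_pm_init_early(). A hedged reconstruction of that wiring follows; the intel_wakeref_ops layout and the callback bodies are assumptions drawn only from the hits above, not a verbatim copy of the file:

    /* Assumed shape of the wf_ops passed to intel_wakeref_init() at line 129. */
    static int __gt_unpark(struct intel_wakeref *wf)
    {
            struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref);

            GT_TRACE(gt, "\n");
            runtime_begin(gt);              /* start accumulating busy time */
            return 0;
    }

    static int __gt_park(struct intel_wakeref *wf)
    {
            struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref);

            GT_TRACE(gt, "\n");
            runtime_end(gt);                /* fold the interval into gt->stats.total */
            return 0;
    }

    static const struct intel_wakeref_ops wf_ops = {
            .get = __gt_unpark,             /* first intel_gt_pm_get() after idle */
            .put = __gt_park,               /* last intel_gt_pm_put() */
    };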

intel_gt.c
    28 gt->i915 = i915; in intel_gt_init_early()
    29 gt->uncore = &i915->uncore; in intel_gt_init_early()
    40 intel_gt_init_reset(gt); in intel_gt_init_early()
    85 gt->ggtt = ggtt; in intel_gt_init_hw_early()
    134 gt->info.mslice_mask = in intel_gt_init_mmio()
    148 gt->info.l3bank_mask = in intel_gt_init_mmio()
    220 init_unused_rings(gt); in intel_gt_init_hw()
    235 intel_mocs_init(gt); in intel_gt_init_hw()
    682 gt->vm = kernel_vm(gt); in intel_gt_init()
    683 if (!gt->vm) { in intel_gt_init()
    [all …]

intel_gt_irq.c
    191 if (HAS_ENGINE(gt, VCS4) || HAS_ENGINE(gt, VCS5)) in gen11_gt_irq_reset()
    193 if (HAS_ENGINE(gt, VCS6) || HAS_ENGINE(gt, VCS7)) in gen11_gt_irq_reset()
    196 if (HAS_ENGINE(gt, VECS2) || HAS_ENGINE(gt, VECS3)) in gen11_gt_irq_reset()
    234 if (HAS_ENGINE(gt, VCS4) || HAS_ENGINE(gt, VCS5)) in gen11_gt_irq_postinstall()
    236 if (HAS_ENGINE(gt, VCS6) || HAS_ENGINE(gt, VCS7)) in gen11_gt_irq_postinstall()
    239 if (HAS_ENGINE(gt, VECS2) || HAS_ENGINE(gt, VECS3)) in gen11_gt_irq_postinstall()
    246 gt->pm_imr = ~gt->pm_ier; in gen11_gt_irq_postinstall()
    272 gen5_gt_disable_irq(gt, GT_PARITY_ERROR(gt->i915)); in gen7_parity_error_irq_handler()
    380 gt->pm_imr = ~gt->pm_ier; in gen8_gt_irq_postinstall()
    401 intel_uncore_write(gt->uncore, GTIMR, gt->gt_imr); in gen5_gt_update_irq()
    [all …]

intel_reset.c
    289 GT_TRACE(gt, in gen6_hw_domain_reset()
    653 if (is_mock_gt(gt)) in intel_get_gpu_reset()
    797 revoke_mmaps(gt); in gt_revoke()
    1086 GT_TRACE(gt, "flags=%lx\n", gt->reset.flags); in intel_gt_reset()
    1095 gt_revoke(gt); in intel_gt_reset()
    1147 ret = resume(gt); in intel_gt_reset()
    1183 struct intel_gt *gt = engine->gt; in __intel_engine_reset_bh() local
    1340 intel_has_reset_engine(gt) && !intel_gt_is_wedged(gt)) { in intel_gt_handle_error()
    1472 i915_gem_shrinker_taints_mutex(gt->i915, &gt->reset.mutex); in intel_gt_init_reset()
    1498 w->gt = gt; in __intel_init_wedge()
    [all …]

intel_gt_pm.h
    16 return intel_wakeref_is_active(&gt->wakeref); in intel_gt_pm_is_awake()
    21 intel_wakeref_get(&gt->wakeref); in intel_gt_pm_get()
    26 __intel_wakeref_get(&gt->wakeref); in __intel_gt_pm_get()
    36 intel_wakeref_might_get(&gt->wakeref); in intel_gt_pm_might_get()
    41 intel_wakeref_put(&gt->wakeref); in intel_gt_pm_put()
    46 intel_wakeref_put_async(&gt->wakeref); in intel_gt_pm_put_async()
    51 intel_wakeref_might_put(&gt->wakeref); in intel_gt_pm_might_put()
    54 #define with_intel_gt_pm(gt, tmp) \ argument
    55 for (tmp = 1, intel_gt_pm_get(gt); tmp; \
    56 intel_gt_pm_put(gt), tmp = 0)
    [all …]
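
Note: the with_intel_gt_pm() macro above expands to an intel_gt_pm_get()/intel_gt_pm_put() pair around its body. A minimal hypothetical caller might look like this; the helper name and the register argument are illustrative and not part of the header:

    static u32 read_reg_with_gt_awake(struct intel_gt *gt, i915_reg_t reg)
    {
            u32 val = 0;
            int tmp;

            with_intel_gt_pm(gt, tmp)       /* GT held unparked for the body */
                    val = intel_uncore_read(gt->uncore, reg);

            return val;
    }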

selftest_reset.c
    51 igt_global_reset_lock(gt); in __igt_reset_stolen()
    245 intel_gt_set_wedged(gt); in igt_wedged_reset()
    264 intel_gt_pm_get(gt); in igt_atomic_reset()
    268 if (!igt_force_reset(gt)) in igt_atomic_reset()
    282 reset_finish(gt, awake); in igt_atomic_reset()
    291 igt_force_reset(gt); in igt_atomic_reset()
    295 intel_gt_pm_put(gt); in igt_atomic_reset()
    316 intel_gt_pm_get(gt); in igt_atomic_engine_reset()
    360 igt_force_reset(gt); in igt_atomic_engine_reset()
    364 intel_gt_pm_put(gt); in igt_atomic_engine_reset()
    [all …]

intel_gt_pm_irq.c
    16 u32 mask = gt->pm_imr; in write_pm_imr()
    39 lockdep_assert_held(&gt->irq_lock); in gen6_gt_pm_update_irq()
    41 new_val = gt->pm_imr; in gen6_gt_pm_update_irq()
    45 if (new_val != gt->pm_imr) { in gen6_gt_pm_update_irq()
    46 gt->pm_imr = new_val; in gen6_gt_pm_update_irq()
    47 write_pm_imr(gt); in gen6_gt_pm_update_irq()
    77 u32 mask = gt->pm_ier; in write_pm_ier()
    96 gt->pm_ier |= enable_mask; in gen6_gt_pm_enable_irq()
    97 write_pm_ier(gt); in gen6_gt_pm_enable_irq()
    105 gt->pm_ier &= ~disable_mask; in gen6_gt_pm_disable_irq()
    [all …]
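
Note: gen6_gt_pm_enable_irq()/gen6_gt_pm_disable_irq() edit the cached gt->pm_ier and write it back, and gen6_gt_pm_update_irq() asserts gt->irq_lock is held, so callers are expected to take that lock first (the intel_guc.c hits further down follow this pattern). A hedged sketch of such a caller; the function name is hypothetical:

    static void enable_pm_event_sketch(struct intel_gt *gt, u32 events)
    {
            spin_lock_irq(&gt->irq_lock);
            gen6_gt_pm_enable_irq(gt, events);      /* gt->pm_ier |= events; write_pm_ier(gt) */
            spin_unlock_irq(&gt->irq_lock);
    }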

intel_gt.h
    16 #define GT_TRACE(gt, fmt, ...) do { \ argument
    17 const struct intel_gt *gt__ __maybe_unused = (gt); \
    39 int intel_gt_probe_lmem(struct intel_gt *gt);
    40 int intel_gt_init_mmio(struct intel_gt *gt);
    42 int intel_gt_init(struct intel_gt *gt);
    46 void intel_gt_driver_remove(struct intel_gt *gt);
    47 void intel_gt_driver_release(struct intel_gt *gt);
    58 void intel_gt_chipset_flush(struct intel_gt *gt);
    63 return i915_ggtt_offset(gt->scratch) + field; in intel_gt_scratch_offset()
    75 !test_bit(I915_WEDGED, &gt->reset.flags)); in intel_gt_is_wedged()
    [all …]

selftest_gt_pm.c
    67 struct intel_gt *gt = arg; in live_gt_clocks() local
    80 if (GRAPHICS_VER(gt->i915) == 5) in live_gt_clocks()
    89 if (GRAPHICS_VER(gt->i915) == 4) in live_gt_clocks()
    99 intel_gt_pm_get(gt); in live_gt_clocks()
    136 intel_gt_pm_put(gt); in live_gt_clocks()
    143 struct intel_gt *gt = arg; in live_gt_resume() local
    149 intel_gt_suspend_prepare(gt); in live_gt_resume()
    150 intel_gt_suspend_late(gt); in live_gt_resume()
    152 if (gt->rc6.enabled) { in live_gt_resume()
    159 err = intel_gt_resume(gt); in live_gt_resume()
    [all …]

selftest_hangcheck.c
    31 struct intel_gt *gt; member
    45 h->gt = gt; in hang_init()
    119 struct intel_gt *gt = h->gt; in hang_create_request() local
    299 struct intel_gt *gt = arg; in igt_hang_sanitycheck() local
    308 err = hang_init(&h, gt); in igt_hang_sanitycheck()
    363 struct intel_gt *gt = arg; in igt_reset_nop() local
    1308 err = hang_init(&h, gt); in igt_reset_wait()
    1585 return __igt_reset_evict_vma(gt, &gt->ggtt->vm, in igt_reset_evict_ggtt()
    1614 return __igt_reset_evict_vma(gt, &gt->ggtt->vm, in igt_reset_evict_fence()
    1992 igt_force_reset(gt); in igt_reset_engines_atomic()
    [all …]

intel_reset.h
    21 void intel_gt_init_reset(struct intel_gt *gt);
    22 void intel_gt_fini_reset(struct intel_gt *gt);
    25 void intel_gt_handle_error(struct intel_gt *gt,
    31 void intel_gt_reset(struct intel_gt *gt,
    44 void intel_gt_set_wedged(struct intel_gt *gt);
    45 bool intel_gt_unset_wedged(struct intel_gt *gt);
    46 int intel_gt_terminally_wedged(struct intel_gt *gt);
    58 int intel_reset_guc(struct intel_gt *gt);
    62 struct intel_gt *gt; member
    67 struct intel_gt *gt,
    [all …]

intel_gt_clock_utils.c
    158 gt->clock_frequency = read_clock_frequency(gt->uncore); in intel_gt_init_clock_frequency()
    159 if (gt->clock_frequency) in intel_gt_init_clock_frequency()
    160 gt->clock_period_ns = intel_gt_clock_interval_to_ns(gt, 1); in intel_gt_init_clock_frequency()
    162 GT_TRACE(gt, in intel_gt_init_clock_frequency()
    164 gt->clock_frequency / 1000, in intel_gt_init_clock_frequency()
    165 gt->clock_period_ns, in intel_gt_init_clock_frequency()
    173 if (gt->clock_frequency != read_clock_frequency(gt->uncore)) { in intel_gt_check_clock_frequency()
    174 dev_err(gt->i915->drm.dev, in intel_gt_check_clock_frequency()
    176 gt->clock_frequency, in intel_gt_check_clock_frequency()
    177 read_clock_frequency(gt->uncore)); in intel_gt_check_clock_frequency()
    [all …]
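
Note: intel_gt_init_clock_frequency() caches the GT clock and derives gt->clock_period_ns from a one-tick interval. Assuming intel_gt_clock_interval_to_ns() is a plain linear scaling by the cached frequency (an assumption only; the real helper may round differently), the conversion amounts to:

    /* Illustrative only: GT clock ticks -> nanoseconds via the cached frequency. */
    static u64 gt_ticks_to_ns_sketch(const struct intel_gt *gt, u64 ticks)
    {
            return mul_u64_u32_div(ticks, NSEC_PER_SEC, gt->clock_frequency);
    }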

intel_gt_debugfs.c
    18 int ret = intel_gt_terminally_wedged(gt); in intel_gt_debugfs_reset_show()
    35 wait_event(gt->reset.queue, in intel_gt_debugfs_reset_store()
    38 intel_gt_handle_error(gt, val, I915_ERROR_CAPTURE, in intel_gt_debugfs_reset_store()
    69 void intel_gt_debugfs_register(struct intel_gt *gt) in intel_gt_debugfs_register() argument
    73 if (!gt->i915->drm.primary->debugfs_root) in intel_gt_debugfs_register()
    80 gt_debugfs_register(gt, root); in intel_gt_debugfs_register()
    82 intel_gt_engines_debugfs_register(gt, root); in intel_gt_debugfs_register()
    83 intel_gt_pm_debugfs_register(gt, root); in intel_gt_debugfs_register()
    84 intel_sseu_debugfs_register(gt, root); in intel_gt_debugfs_register()
    86 intel_uc_debugfs_register(&gt->uc, root); in intel_gt_debugfs_register()
    [all …]

selftest_slpc.c
    42 struct intel_gt *gt = &i915->gt; in live_slpc_clamp_min() local
    54 if (igt_spinner_init(&spin, gt)) in live_slpc_clamp_min()
    72 intel_gt_pm_wait_for_idle(gt); in live_slpc_clamp_min()
    73 intel_gt_pm_get(gt); in live_slpc_clamp_min()
    156 if (igt_flush_test(gt->i915)) in live_slpc_clamp_min()
    159 intel_gt_pm_put(gt); in live_slpc_clamp_min()
    169 struct intel_gt *gt = &i915->gt; in live_slpc_clamp_max() local
    178 slpc = &gt->uc.guc.slpc; in live_slpc_clamp_max()
    179 rps = &gt->rps; in live_slpc_clamp_max()
    203 intel_gt_pm_get(gt); in live_slpc_clamp_max()
    [all …]

intel_gt_pm_debugfs.c
    25 atomic_inc(&gt->user_wakeref); in intel_gt_pm_debugfs_forcewake_user_open()
    26 intel_gt_pm_get(gt); in intel_gt_pm_debugfs_forcewake_user_open()
    27 if (GRAPHICS_VER(gt->i915) >= 6) in intel_gt_pm_debugfs_forcewake_user_open()
    35 if (GRAPHICS_VER(gt->i915) >= 6) in intel_gt_pm_debugfs_forcewake_user_release()
    37 intel_gt_pm_put(gt); in intel_gt_pm_debugfs_forcewake_user_release()
    38 atomic_dec(&gt->user_wakeref); in intel_gt_pm_debugfs_forcewake_user_release()
    579 struct intel_gt *gt = data; in llc_eval() local
    581 return HAS_LLC(gt->i915); in llc_eval()
    609 yesno(gt->awake), in rps_boost_show()
    658 struct intel_gt *gt = data; in rps_eval() local
    [all …]

selftest_rps.c
    217 struct intel_gt *gt = arg; in live_rps_clock_interval() local
    235 intel_gt_pm_get(gt); in live_rps_clock_interval()
    353 intel_rps_enable(&gt->rps); in live_rps_clock_interval()
    354 intel_gt_pm_put(gt); in live_rps_clock_interval()
    369 struct intel_gt *gt = arg; in live_rps_control() local
    397 intel_gt_pm_get(gt); in live_rps_control()
    487 intel_gt_pm_put(gt); in live_rps_control()
    601 struct intel_gt *gt = arg; in live_rps_frequency_cs() local
    742 struct intel_gt *gt = arg; in live_rps_frequency_srm() local
    1037 intel_gt_pm_get(gt); in live_rps_interrupt()
    [all …]

intel_gt_requests.c
    43 if (!intel_gt_pm_is_awake(gt)) in flush_submission()
    46 for_each_engine(engine, gt, id) { in flush_submission()
    207 struct intel_gt *gt = in retire_work_handler() local
    212 intel_gt_retire_requests(gt); in retire_work_handler()
    215 void intel_gt_init_requests(struct intel_gt *gt) in intel_gt_init_requests() argument
    220 void intel_gt_park_requests(struct intel_gt *gt) in intel_gt_park_requests() argument
    222 cancel_delayed_work(&gt->requests.retire_work); in intel_gt_park_requests()
    231 void intel_gt_fini_requests(struct intel_gt *gt) in intel_gt_fini_requests() argument
    236 flush_work(&gt->watchdog.work); in intel_gt_fini_requests()
    241 struct intel_gt *gt = in intel_gt_watchdog_work() local
    [all …]

/linux/drivers/gpu/drm/i915/

Makefile
    80 gt-y += \
    83 gt/gen6_ppgtt.o \
    86 gt/gen8_ppgtt.o \
    97 gt/intel_gt.o \
    107 gt/intel_gtt.o \
    108 gt/intel_llc.o \
    109 gt/intel_lrc.o \
    113 gt/intel_rc6.o \
    119 gt/intel_rps.o \
    127 gt-y += \
    [all …]

i915_gpu_error.c
    736 gt->fault_data1, gt->fault_data0); in err_print_gt()
    775 if (gt->uc) in err_print_gt()
    834 if (error->gt) in __err_print_to_sgl()
    984 if (gt->uc) in cleanup_gt()
    987 kfree(gt); in cleanup_gt()
    996 struct intel_gt_coredump *gt = error->gt; in __i915_gpu_coredump_free() local
    998 error->gt = gt->next; in __i915_gpu_coredump_free()
    1719 for (gt = error->gt; gt; gt = gt->next) { in error_msg()
    1806 gc->_gt = gt; in intel_gt_coredump_alloc()
    1857 error->gt = intel_gt_coredump_alloc(gt, ALLOW_FAIL); in i915_gpu_coredump()
    [all …]
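
Note: the i915_gpu_error.c hits show that error->gt heads a singly linked chain of per-GT coredumps (gt->next), torn down one node at a time. A hedged sketch of that teardown loop, reusing the cleanup_gt() seen above (the wrapper name is illustrative):

    static void free_gt_coredumps_sketch(struct i915_gpu_coredump *error)
    {
            while (error->gt) {
                    struct intel_gt_coredump *gt = error->gt;

                    error->gt = gt->next;   /* unlink the head */
                    cleanup_gt(gt);         /* releases uC state, then kfree(gt) */
            }
    }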

/linux/drivers/gpu/drm/gma500/

gtt.c
    156 gt->npage = gt->gem.size / PAGE_SIZE; in psb_gtt_attach_pages()
    173 drm_gem_put_pages(&gt->gem, gt->pages, true, false); in psb_gtt_detach_pages()
    196 if (gt->in_gart == 0 && gt->stolen == 0) { in psb_gtt_pin()
    206 gt->pages, (gpu_base + gt->offset), in psb_gtt_pin()
    237 if (gt->in_gart == 0 && gt->stolen == 0) { in psb_gtt_unpin()
    239 (gpu_base + gt->offset), gt->npage, 0, 0); in psb_gtt_unpin()
    296 gt->offset = gt->resource.start - r->start; in psb_gtt_alloc_range()
    297 return gt; in psb_gtt_alloc_range()
    299 kfree(gt); in psb_gtt_alloc_range()
    318 WARN_ON(gt->in_gart && !gt->stolen); in psb_gtt_free_range()
    [all …]

/linux/drivers/gpu/drm/i915/selftests/

igt_reset.c
    14 void igt_global_reset_lock(struct intel_gt *gt) in igt_global_reset_lock() argument
    22 wait_event(gt->reset.queue, in igt_global_reset_lock()
    25 for_each_engine(engine, gt, id) { in igt_global_reset_lock()
    27 &gt->reset.flags)) in igt_global_reset_lock()
    33 void igt_global_reset_unlock(struct intel_gt *gt) in igt_global_reset_unlock() argument
    38 for_each_engine(engine, gt, id) in igt_global_reset_unlock()
    42 wake_up_all(&gt->reset.queue); in igt_global_reset_unlock()
    45 bool igt_force_reset(struct intel_gt *gt) in igt_force_reset() argument
    47 intel_gt_set_wedged(gt); in igt_force_reset()
    48 intel_gt_reset(gt, 0, NULL); in igt_force_reset()
    [all …]
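
Note: igt_force_reset() wedges the GT and then calls intel_gt_reset(gt, 0, NULL); the selftests above wrap it in igt_global_reset_lock()/igt_global_reset_unlock(). A hedged sketch of that calling pattern (the selftest itself is hypothetical, and treating a false return as a failed reset is an assumption):

    static int hypothetical_force_reset_selftest(struct intel_gt *gt)
    {
            int err = 0;

            igt_global_reset_lock(gt);      /* serialise against other reset users */

            if (!igt_force_reset(gt))       /* assumed: false means the GT stayed wedged */
                    err = -EIO;

            igt_global_reset_unlock(gt);
            return err;
    }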

/linux/drivers/gpu/drm/i915/pxp/

intel_pxp_irq.c
    22 struct intel_gt *gt = pxp_to_gt(pxp); in intel_pxp_irq_handler() local
    27 lockdep_assert_held(&gt->irq_lock); in intel_pxp_irq_handler()
    57 spin_lock_irq(&gt->irq_lock); in pxp_irq_reset()
    59 spin_unlock_irq(&gt->irq_lock); in pxp_irq_reset()
    66 spin_lock_irq(&gt->irq_lock); in intel_pxp_irq_enable()
    74 spin_unlock_irq(&gt->irq_lock); in intel_pxp_irq_enable()
    90 spin_lock_irq(&gt->irq_lock); in intel_pxp_irq_disable()
    93 __pxp_set_interrupts(gt, 0); in intel_pxp_irq_disable()
    95 spin_unlock_irq(&gt->irq_lock); in intel_pxp_irq_disable()
    96 intel_synchronize_irq(gt->i915); in intel_pxp_irq_disable()
    [all …]

/linux/drivers/media/radio/

radio-gemtek.c
    152 struct radio_isa_card *isa = &gt->isa; in gemtek_bu2614_transmit()
    155 mute = gt->muted ? GEMTEK_MT : 0x00; in gemtek_bu2614_transmit()
    182 struct gemtek *gt = kzalloc(sizeof(*gt), GFP_KERNEL); in gemtek_alloc() local
    184 if (gt) in gemtek_alloc()
    185 gt->muted = true; in gemtek_alloc()
    186 return gt ? &gt->isa : NULL; in gemtek_alloc()
    196 if (hardmute && gt->muted) in gemtek_s_frequency()
    199 gemtek_bu2614_set(gt, BU2614_PORT, 0); in gemtek_s_frequency()
    207 gemtek_bu2614_transmit(gt); in gemtek_s_frequency()
    219 gt->muted = mute; in gemtek_s_mute_volume()
    [all …]
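
Note: gemtek_alloc() embeds a struct radio_isa_card inside struct gemtek and hands the ISA radio core a pointer to that embedded member (&gt->isa). The driver presumably recovers the outer structure with container_of(), along these lines (the helper name is assumed for illustration):

    static struct gemtek *to_gemtek_sketch(struct radio_isa_card *isa)
    {
            return container_of(isa, struct gemtek, isa);
    }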

/linux/drivers/gpu/drm/i915/gt/uc/

intel_guc.c
    82 spin_lock_irq(&gt->irq_lock); in gen9_reset_guc_interrupts()
    83 gen6_gt_pm_reset_iir(gt, gt->pm_guc_events); in gen9_reset_guc_interrupts()
    93 spin_lock_irq(&gt->irq_lock); in gen9_enable_guc_interrupts()
    95 gt->pm_guc_events); in gen9_enable_guc_interrupts()
    96 gen6_gt_pm_enable_irq(gt, gt->pm_guc_events); in gen9_enable_guc_interrupts()
    106 spin_lock_irq(&gt->irq_lock); in gen9_disable_guc_interrupts()
    108 gen6_gt_pm_disable_irq(gt, gt->pm_guc_events); in gen9_disable_guc_interrupts()
    120 spin_lock_irq(&gt->irq_lock); in gen11_reset_guc_interrupts()
    130 spin_lock_irq(&gt->irq_lock); in gen11_enable_guc_interrupts()
    143 spin_lock_irq(&gt->irq_lock); in gen11_disable_guc_interrupts()
    [all …]

/linux/arch/riscv/kvm/

vcpu_timer.c
    20 return get_cycles64() + gt->time_delta; in kvm_riscv_current_cycles()
    24 struct kvm_guest_timer *gt, in kvm_riscv_delta_cycles2ns() argument
    31 cycles_now = kvm_riscv_current_cycles(gt); in kvm_riscv_delta_cycles2ns()
    36 delta_ns = (cycles_delta * gt->nsec_mult) >> gt->nsec_shift; in kvm_riscv_delta_cycles2ns()
    47 struct kvm_guest_timer *gt = &vcpu->kvm->arch.timer; in kvm_riscv_vcpu_hrtimer_expired() local
    112 reg_val = kvm_riscv_current_cycles(gt); in kvm_riscv_vcpu_get_reg_timer()
    156 gt->time_delta = reg_val - get_cycles64(); in kvm_riscv_vcpu_set_reg_timer()
    210 csr_write(CSR_HTIMEDELTA, gt->time_delta); in kvm_riscv_vcpu_timer_restore()
    219 struct kvm_guest_timer *gt = &kvm->arch.timer; in kvm_riscv_guest_timer_init() local
    221 riscv_cs_get_mult_shift(&gt->nsec_mult, &gt->nsec_shift); in kvm_riscv_guest_timer_init()
    [all …]
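
Note: line 36 above converts a guest cycle delta to nanoseconds using the clocksource mult/shift pair captured in kvm_riscv_guest_timer_init(). Pulled out as a standalone helper (the function name is illustrative, not part of the file), the conversion is:

    static u64 guest_cycles_to_ns_sketch(struct kvm_guest_timer *gt, u64 cycles_delta)
    {
            /* Same scaling as kvm_riscv_delta_cycles2ns(): cycles * mult >> shift. */
            return (cycles_delta * gt->nsec_mult) >> gt->nsec_shift;
    }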