Lines matching refs:ring (references to the identifier "ring" in amdgpu_fence.c)

56 	struct amdgpu_ring		*ring;  member
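The member hit above is the back-pointer each amdgpu fence keeps to the ring that emitted it; helpers such as to_amdgpu_fence() (used near the end of this listing) recover the containing fence from a plain struct dma_fence. A minimal userspace sketch of that container_of pattern, with stand-in types, not the kernel definitions:

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-ins for the kernel types; only the layout pattern matters here. */
    struct dma_fence { unsigned long long seqno; };
    struct amdgpu_ring { const char *name; };

    struct amdgpu_fence {
            struct dma_fence base;       /* embedded base fence */
            struct amdgpu_ring *ring;    /* back-pointer matched by this listing */
    };

    /* container_of: recover the wrapper from a pointer to its embedded member. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    static struct amdgpu_fence *to_amdgpu_fence(struct dma_fence *f)
    {
            return container_of(f, struct amdgpu_fence, base);
    }

    int main(void)
    {
            struct amdgpu_ring ring = { .name = "gfx" };
            struct amdgpu_fence af = { .base = { .seqno = 42 }, .ring = &ring };
            struct dma_fence *f = &af.base;  /* what dma_fence callbacks receive */

            printf("fence %llu on ring %s\n", f->seqno,
                   to_amdgpu_fence(f)->ring->name);
            return 0;
    }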
100 static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq) in amdgpu_fence_write() argument
102 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_write()
116 static u32 amdgpu_fence_read(struct amdgpu_ring *ring) in amdgpu_fence_read() argument
118 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_read()
140 int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **f, struct amdgpu_job *job, in amdgpu_fence_emit() argument
143 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_emit()
156 am_fence->ring = ring; in amdgpu_fence_emit()
162 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit()
169 &ring->fence_drv.lock, in amdgpu_fence_emit()
170 adev->fence_context + ring->idx, seq); in amdgpu_fence_emit()
173 &ring->fence_drv.lock, in amdgpu_fence_emit()
174 adev->fence_context + ring->idx, seq); in amdgpu_fence_emit()
177 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
180 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
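amdgpu_fence_emit() stores the new fence at fences[seq & num_fences_mask], so the ever-increasing sequence number wraps onto a fixed, power-of-two-sized slot table. A toy sketch of that mapping; the table size of num_hw_submission * 2 matches amdgpu_fence_driver_init_ring() further down, the concrete values are illustrative:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
            const unsigned num_hw_submission = 4;        /* must be a power of two */
            const uint64_t num_fences_mask = num_hw_submission * 2 - 1; /* 0x7 */
            uint64_t sync_seq = 0;

            /* Each emit bumps sync_seq; the low bits pick the reusable slot. */
            for (int i = 0; i < 10; i++) {
                    uint64_t seq = ++sync_seq;
                    printf("seq %2llu -> slot %llu\n",
                           (unsigned long long)seq,
                           (unsigned long long)(seq & num_fences_mask));
            }
            return 0;
    }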
217 int amdgpu_fence_emit_polling(struct amdgpu_ring *ring, uint32_t *s, in amdgpu_fence_emit_polling() argument
226 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit_polling()
227 r = amdgpu_fence_wait_polling(ring, in amdgpu_fence_emit_polling()
228 seq - ring->fence_drv.num_fences_mask, in amdgpu_fence_emit_polling()
233 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
248 static void amdgpu_fence_schedule_fallback(struct amdgpu_ring *ring) in amdgpu_fence_schedule_fallback() argument
250 	mod_timer(&ring->fence_drv.fallback_timer, jiffies + AMDGPU_FENCE_JIFFIES_TIMEOUT); in amdgpu_fence_schedule_fallback()
265 bool amdgpu_fence_process(struct amdgpu_ring *ring) in amdgpu_fence_process() argument
267 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_process()
268 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_process()
272 last_seq = atomic_read(&ring->fence_drv.last_seq); in amdgpu_fence_process()
273 seq = amdgpu_fence_read(ring); in amdgpu_fence_process()
277 if (del_timer(&ring->fence_drv.fallback_timer) && in amdgpu_fence_process()
278 seq != ring->fence_drv.sync_seq) in amdgpu_fence_process()
279 amdgpu_fence_schedule_fallback(ring); in amdgpu_fence_process()
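After latching the hardware seqno into last_seq, amdgpu_fence_process() walks every slot in the window (last_seq, seq], signaling and dropping each stored fence. A runnable toy model of that walk, with strings standing in for dma_fence pointers:

    #include <stdio.h>
    #include <stdint.h>

    #define NUM_FENCES_MASK 7u  /* num_hw_submission * 2 - 1, as in the driver */

    static const char *fences[NUM_FENCES_MASK + 1];

    /* Signal every slot in (last_seq, seq], mirroring the loop at the end of
     * amdgpu_fence_process(). */
    static void process(uint32_t last_seq, uint32_t seq)
    {
            if (seq == last_seq)    /* nothing new; the driver returns early */
                    return;
            last_seq &= NUM_FENCES_MASK;
            seq &= NUM_FENCES_MASK;
            do {
                    last_seq = (last_seq + 1) & NUM_FENCES_MASK;
                    if (fences[last_seq]) {
                            printf("signal %s (slot %u)\n",
                                   fences[last_seq], last_seq);
                            /* driver does dma_fence_signal + dma_fence_put */
                            fences[last_seq] = NULL;
                    }
            } while (last_seq != seq);
    }

    int main(void)
    {
            fences[3] = "job-A";
            fences[4] = "job-B";
            process(2, 4);          /* hardware moved from seqno 2 to 4 */
            return 0;
    }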
319 	struct amdgpu_ring *ring = from_timer(ring, t, fence_drv.fallback_timer); in amdgpu_fence_fallback() local
322 if (amdgpu_fence_process(ring)) in amdgpu_fence_fallback()
323 DRM_WARN("Fence fallback timer expired on ring %s\n", ring->name); in amdgpu_fence_fallback()
334 int amdgpu_fence_wait_empty(struct amdgpu_ring *ring) in amdgpu_fence_wait_empty() argument
336 uint64_t seq = READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_fence_wait_empty()
343 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
367 signed long amdgpu_fence_wait_polling(struct amdgpu_ring *ring, in amdgpu_fence_wait_polling() argument
374 seq = amdgpu_fence_read(ring); in amdgpu_fence_wait_polling()
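The polling wait compares sequence numbers with a signed 32-bit subtraction, so the test stays correct when the hardware counter wraps past 0xffffffff. A small demonstration of that wrap-safe comparison (the helper name is illustrative):

    #include <stdio.h>
    #include <stdint.h>

    /* Wrap-safe "have we reached wait_seq yet?": interpret the 32-bit
     * difference as signed, as amdgpu_fence_wait_polling()'s loop does. */
    static int seq_pending(uint32_t wait_seq, uint32_t hw_seq)
    {
            return (int32_t)(wait_seq - hw_seq) > 0;
    }

    int main(void)
    {
            /* A naive wait_seq > hw_seq comparison gets both of these wrong. */
            printf("%d\n", seq_pending(0x00000002u, 0xfffffffeu)); /* 1: waiting */
            printf("%d\n", seq_pending(0xfffffffeu, 0x00000002u)); /* 0: passed */
            return 0;
    }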
390 unsigned amdgpu_fence_count_emitted(struct amdgpu_ring *ring) in amdgpu_fence_count_emitted() argument
397 amdgpu_fence_process(ring); in amdgpu_fence_count_emitted()
399 emitted -= atomic_read(&ring->fence_drv.last_seq); in amdgpu_fence_count_emitted()
400 emitted += READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_fence_count_emitted()
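last_seq is a 32-bit value latched from the hardware while sync_seq is the 64-bit software counter; the driver seeds the count with a 2^32 bias (that line does not appear above because it never mentions ring) so the subtraction cannot underflow, then keeps the low 32 bits as the number of emitted-but-unsignaled fences. A worked model of the arithmetic:

    #include <stdio.h>
    #include <stdint.h>

    /* emitted = 2^32 - last_seq(32-bit) + sync_seq(64-bit), low 32 bits kept,
     * mirroring amdgpu_fence_count_emitted(). */
    static uint32_t count_emitted(uint32_t last_seq, uint64_t sync_seq)
    {
            uint64_t emitted = 0x100000000ull;
            emitted -= last_seq;
            emitted += sync_seq;
            return (uint32_t)emitted;
    }

    int main(void)
    {
            printf("%u\n", count_emitted(10, 14));  /* 4 fences in flight */
            /* Still right across the 32-bit wrap of the hardware counter: */
            printf("%u\n", count_emitted(0xfffffffeu, 0x100000001ull)); /* 3 */
            return 0;
    }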
417 int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring, in amdgpu_fence_driver_start_ring() argument
421 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_driver_start_ring()
424 if (ring->funcs->type != AMDGPU_RING_TYPE_UVD) { in amdgpu_fence_driver_start_ring()
425 ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs]; in amdgpu_fence_driver_start_ring()
426 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
430 ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index; in amdgpu_fence_driver_start_ring()
431 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
433 amdgpu_fence_write(ring, atomic_read(&ring->fence_drv.last_seq)); in amdgpu_fence_driver_start_ring()
435 ring->fence_drv.irq_src = irq_src; in amdgpu_fence_driver_start_ring()
436 ring->fence_drv.irq_type = irq_type; in amdgpu_fence_driver_start_ring()
437 ring->fence_drv.initialized = true; in amdgpu_fence_driver_start_ring()
440 ring->name, ring->fence_drv.gpu_addr); in amdgpu_fence_driver_start_ring()
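For non-UVD rings the fence lives in the device writeback area: cpu_addr is the 32-bit word the CPU reads, and gpu_addr is the same slot expressed as a GPU address, hence the * 4 when converting the dword index fence_offs into bytes. A small model of that dual-view addressing; all names and sizes here are illustrative:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
            uint32_t wb[256];                    /* stand-in for adev->wb.wb */
            uint64_t wb_gpu_base = 0x100000;     /* stand-in GPU address of wb[0] */
            unsigned fence_offs = 12;            /* dword index for this ring */

            volatile uint32_t *cpu_addr = &wb[fence_offs];    /* CPU view */
            uint64_t gpu_addr = wb_gpu_base + fence_offs * 4; /* GPU view, bytes */

            *cpu_addr = 0xdeadbeef;              /* "hardware" write lands here */
            printf("cpu reads 0x%08x, gpu targets 0x%llx\n",
                   *cpu_addr, (unsigned long long)gpu_addr);
            return 0;
    }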
455 int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring, in amdgpu_fence_driver_init_ring() argument
459 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_driver_init_ring()
469 ring->fence_drv.cpu_addr = NULL; in amdgpu_fence_driver_init_ring()
470 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
471 ring->fence_drv.sync_seq = 0; in amdgpu_fence_driver_init_ring()
472 atomic_set(&ring->fence_drv.last_seq, 0); in amdgpu_fence_driver_init_ring()
473 ring->fence_drv.initialized = false; in amdgpu_fence_driver_init_ring()
475 timer_setup(&ring->fence_drv.fallback_timer, amdgpu_fence_fallback, 0); in amdgpu_fence_driver_init_ring()
477 ring->fence_drv.num_fences_mask = num_hw_submission * 2 - 1; in amdgpu_fence_driver_init_ring()
478 spin_lock_init(&ring->fence_drv.lock); in amdgpu_fence_driver_init_ring()
479 ring->fence_drv.fences = kcalloc(num_hw_submission * 2, sizeof(void *), in amdgpu_fence_driver_init_ring()
481 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
485 if (ring->no_scheduler) in amdgpu_fence_driver_init_ring()
488 switch (ring->funcs->type) { in amdgpu_fence_driver_init_ring()
503 r = drm_sched_init(&ring->sched, &amdgpu_sched_ops, in amdgpu_fence_driver_init_ring()
505 timeout, NULL, sched_score, ring->name); in amdgpu_fence_driver_init_ring()
508 ring->name); in amdgpu_fence_driver_init_ring()
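The mask trick used throughout (seq & num_fences_mask) is only equivalent to seq % table_size when the table size is a power of two, which is why init derives the mask as num_hw_submission * 2 - 1 and the driver rejects a non-power-of-two num_hw_submission. A quick check of both facts:

    #include <stdio.h>
    #include <stdint.h>

    /* Same test the kernel's is_power_of_2() performs. */
    static int is_power_of_2(unsigned long n)
    {
            return n != 0 && (n & (n - 1)) == 0;
    }

    int main(void)
    {
            unsigned num_hw_submission = 4;
            uint64_t size = num_hw_submission * 2; /* fence table entries */
            uint64_t mask = size - 1;              /* num_fences_mask */

            printf("power of two: %d\n", is_power_of_2(num_hw_submission));
            for (uint64_t seq = 5; seq < 8; seq++)
                    printf("seq %llu: %% size = %llu, & mask = %llu\n",
                           (unsigned long long)seq,
                           (unsigned long long)(seq % size),
                           (unsigned long long)(seq & mask));
            return 0;
    }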
545 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_hw_fini() local
547 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_hw_fini()
552 r = amdgpu_fence_wait_empty(ring); in amdgpu_fence_driver_hw_fini()
557 amdgpu_fence_driver_force_completion(ring); in amdgpu_fence_driver_hw_fini()
559 if (ring->fence_drv.irq_src) in amdgpu_fence_driver_hw_fini()
560 amdgpu_irq_put(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_hw_fini()
561 ring->fence_drv.irq_type); in amdgpu_fence_driver_hw_fini()
563 del_timer_sync(&ring->fence_drv.fallback_timer); in amdgpu_fence_driver_hw_fini()
572 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_sw_fini() local
574 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_sw_fini()
577 if (!ring->no_scheduler) in amdgpu_fence_driver_sw_fini()
578 drm_sched_fini(&ring->sched); in amdgpu_fence_driver_sw_fini()
580 for (j = 0; j <= ring->fence_drv.num_fences_mask; ++j) in amdgpu_fence_driver_sw_fini()
581 dma_fence_put(ring->fence_drv.fences[j]); in amdgpu_fence_driver_sw_fini()
582 kfree(ring->fence_drv.fences); in amdgpu_fence_driver_sw_fini()
583 ring->fence_drv.fences = NULL; in amdgpu_fence_driver_sw_fini()
584 ring->fence_drv.initialized = false; in amdgpu_fence_driver_sw_fini()
605 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_hw_init() local
606 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_hw_init()
610 if (ring->fence_drv.irq_src) in amdgpu_fence_driver_hw_init()
611 amdgpu_irq_get(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_hw_init()
612 ring->fence_drv.irq_type); in amdgpu_fence_driver_hw_init()
622 void amdgpu_fence_driver_clear_job_fences(struct amdgpu_ring *ring) in amdgpu_fence_driver_clear_job_fences() argument
627 for (i = 0; i <= ring->fence_drv.num_fences_mask; i++) { in amdgpu_fence_driver_clear_job_fences()
628 ptr = &ring->fence_drv.fences[i]; in amdgpu_fence_driver_clear_job_fences()
641 void amdgpu_fence_driver_force_completion(struct amdgpu_ring *ring) in amdgpu_fence_driver_force_completion() argument
643 amdgpu_fence_write(ring, ring->fence_drv.sync_seq); in amdgpu_fence_driver_force_completion()
644 amdgpu_fence_process(ring); in amdgpu_fence_driver_force_completion()
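Forcing completion is just the two calls above: write the latest software sequence number into the fence location as if the hardware had produced it, then run the normal processing path so every outstanding fence signals. A toy model of the effect, with plain globals standing in for the driver state:

    #include <stdio.h>
    #include <stdint.h>

    static uint32_t hw_seq;        /* stand-in for the fence writeback slot */
    static uint32_t sync_seq = 7;  /* last sequence number software emitted */

    static void fence_write(uint32_t seq) { hw_seq = seq; }

    /* After this, processing sees hw_seq == sync_seq and treats every
     * emitted fence as signaled, even if the GPU never reached them. */
    static void force_completion(void)
    {
            fence_write(sync_seq);
            printf("hw_seq now %u, %u fences left pending\n",
                   hw_seq, sync_seq - hw_seq);
    }

    int main(void)
    {
            hw_seq = 3;            /* GPU hung after seqno 3 */
            force_completion();
            return 0;
    }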
658 return (const char *)to_amdgpu_fence(f)->ring->name; in amdgpu_fence_get_timeline_name()
678 if (!timer_pending(&to_amdgpu_fence(f)->ring->fence_drv.fallback_timer)) in amdgpu_fence_enable_signaling()
679 amdgpu_fence_schedule_fallback(to_amdgpu_fence(f)->ring); in amdgpu_fence_enable_signaling()
781 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_debugfs_fence_info_show() local
782 if (!ring || !ring->fence_drv.initialized) in amdgpu_debugfs_fence_info_show()
785 amdgpu_fence_process(ring); in amdgpu_debugfs_fence_info_show()
787 seq_printf(m, "--- ring %d (%s) ---\n", i, ring->name); in amdgpu_debugfs_fence_info_show()
789 atomic_read(&ring->fence_drv.last_seq)); in amdgpu_debugfs_fence_info_show()
791 ring->fence_drv.sync_seq); in amdgpu_debugfs_fence_info_show()
793 if (ring->funcs->type == AMDGPU_RING_TYPE_GFX || in amdgpu_debugfs_fence_info_show()
794 ring->funcs->type == AMDGPU_RING_TYPE_SDMA) { in amdgpu_debugfs_fence_info_show()
796 le32_to_cpu(*ring->trail_fence_cpu_addr)); in amdgpu_debugfs_fence_info_show()
798 ring->trail_seq); in amdgpu_debugfs_fence_info_show()
801 if (ring->funcs->type != AMDGPU_RING_TYPE_GFX) in amdgpu_debugfs_fence_info_show()
806 le32_to_cpu(*(ring->fence_drv.cpu_addr + 2))); in amdgpu_debugfs_fence_info_show()
809 le32_to_cpu(*(ring->fence_drv.cpu_addr + 4))); in amdgpu_debugfs_fence_info_show()
812 le32_to_cpu(*(ring->fence_drv.cpu_addr + 6))); in amdgpu_debugfs_fence_info_show()