Lines Matching refs:id_mgr

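All hits below are references to the per-VMHUB VMID manager in the amdgpu driver: id_mgr hands out hardware VM IDs to jobs, tracking them in a mutex-protected LRU list. Short sketches of the pattern behind each group of hits follow the group they illustrate.
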
202 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_idle() local
210 fences = kmalloc_array(id_mgr->num_ids, sizeof(void *), GFP_KERNEL); in amdgpu_vmid_grab_idle()
216 list_for_each_entry((*idle), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_idle()
228 if (&(*idle)->list == &id_mgr->ids_lru) { in amdgpu_vmid_grab_idle()
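
The grab_idle hits show the standard kernel LRU-scan pattern: allocate a scratch array with room for one fence per id, walk ids_lru front to back looking for an entry whose fences have signaled, and detect "nothing idle" by checking whether the cursor ran off the end of the list. A minimal sketch of that pattern, assuming the driver's types from amdgpu_ids.h (the function name is a placeholder, and fence collection is elided):

    #include <linux/dma-fence.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    /* Sketch only, not the driver's code: scan the LRU for an idle id. */
    static int vmid_grab_idle_sketch(struct amdgpu_vmid_mgr *id_mgr,
                                     struct amdgpu_vmid **idle)
    {
            struct dma_fence **fences;

            fences = kmalloc_array(id_mgr->num_ids, sizeof(void *), GFP_KERNEL);
            if (!fences)
                    return -ENOMEM;

            /* Least recently used ids sit at the front of the list. */
            list_for_each_entry((*idle), &id_mgr->ids_lru, list) {
                    /* ... stop at the first id whose fences have all signaled ... */
            }

            /* The cursor wrapping back to the list head means nothing was idle. */
            if (&(*idle)->list == &id_mgr->ids_lru)
                    *idle = NULL;   /* caller must wait on the gathered fences */

            kfree(fences);
            return 0;
    }
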
347 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab_used() local
355 list_for_each_entry_reverse((*id), &id_mgr->ids_lru, list) { in amdgpu_vmid_grab_used()
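
grab_used walks the same list in the opposite direction: the most recently used ids are checked first, since they are the ones most likely to still hold this VM's state and be reusable without a flush. The reverse-iteration counterpart to the sketch above, as a fragment:

    /* Most recently used ids sit at the tail, so walk backwards. */
    list_for_each_entry_reverse((*id), &id_mgr->ids_lru, list) {
            /* ... reuse the id if it still belongs to this VM ... */
    }
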
415 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_grab() local
420 mutex_lock(&id_mgr->lock); in amdgpu_vmid_grab()
450 list_move_tail(&id->list, &id_mgr->ids_lru); in amdgpu_vmid_grab()
460 job->vmid = id - id_mgr->ids; in amdgpu_vmid_grab()
465 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_grab()
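
grab ties the two paths together. Everything happens under id_mgr->lock; whichever id is chosen is moved to the tail of the LRU (marking it most recently used), and the hardware VMID handed to the job is just the id's index in the ids[] array, recovered by pointer arithmetic. The locking and bookkeeping skeleton these hits imply:

    mutex_lock(&id_mgr->lock);

    /* ... try a previously used id first, then fall back to an idle one ... */

    list_move_tail(&id->list, &id_mgr->ids_lru);  /* now most recently used */
    job->vmid = id - id_mgr->ids;                 /* array index == hardware VMID */

    mutex_unlock(&id_mgr->lock);
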
473 struct amdgpu_vmid_mgr *id_mgr; in amdgpu_vmid_alloc_reserved() local
477 id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_alloc_reserved()
478 mutex_lock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
481 if (atomic_inc_return(&id_mgr->reserved_vmid_num) > in amdgpu_vmid_alloc_reserved()
484 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_alloc_reserved()
489 idle = list_first_entry(&id_mgr->ids_lru, struct amdgpu_vmid, list); in amdgpu_vmid_alloc_reserved()
492 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
496 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_alloc_reserved()
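
Reserving an id for exclusive use is counter-limited: atomic_inc_return bumps reserved_vmid_num and compares the new value against a per-hub limit, undoing the increment with atomic_dec if the limit would be exceeded (hence the two unlock sites at lines 492 and 496, one per outcome). On success the least recently used id, taken from the head of the LRU, becomes the reservation. A sketch of that shape; RESERVED_VMID_CAP is a placeholder name, not the driver's constant:

    mutex_lock(&id_mgr->lock);

    /* Optimistically claim a reservation slot, roll back if over the cap.
     * RESERVED_VMID_CAP is a placeholder for the driver's actual limit. */
    if (atomic_inc_return(&id_mgr->reserved_vmid_num) > RESERVED_VMID_CAP) {
            atomic_dec(&id_mgr->reserved_vmid_num);
            mutex_unlock(&id_mgr->lock);
            return -EINVAL;
    }

    /* The head of the LRU is the id least likely to be missed. */
    idle = list_first_entry(&id_mgr->ids_lru, struct amdgpu_vmid, list);
    /* ... unlink it from the LRU so normal grabbing never sees it ... */

    mutex_unlock(&id_mgr->lock);
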
504 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_free_reserved() local
506 mutex_lock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
509 &id_mgr->ids_lru); in amdgpu_vmid_free_reserved()
511 atomic_dec(&id_mgr->reserved_vmid_num); in amdgpu_vmid_free_reserved()
513 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_free_reserved()
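
Freeing a reservation is the mirror image: under the same lock, the id's list node goes back onto the LRU (line 509 above is the continuation of that list-add call) and the reservation counter is decremented. In sketch form; the call's first argument does not appear in this listing, so id here is a stand-in for the reserved id being returned:

    mutex_lock(&id_mgr->lock);
    /* 'id' is a stand-in: the listing only shows the list head argument. */
    list_add(&id->list, &id_mgr->ids_lru);  /* back into normal rotation */
    atomic_dec(&id_mgr->reserved_vmid_num);
    mutex_unlock(&id_mgr->lock);
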
528 struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub]; in amdgpu_vmid_reset() local
529 struct amdgpu_vmid *id = &id_mgr->ids[vmid]; in amdgpu_vmid_reset()
531 mutex_lock(&id_mgr->lock); in amdgpu_vmid_reset()
539 mutex_unlock(&id_mgr->lock); in amdgpu_vmid_reset()
554 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_reset_all() local
555 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_reset_all()
557 for (j = 1; j < id_mgr->num_ids; ++j) in amdgpu_vmid_reset_all()
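
reset clears a single id's remembered state under the manager lock so the next grab treats it as clean; reset_all applies that to every hub. Note that both here and in init the per-id loop starts at 1: VMID 0 is reserved for the kernel and never enters the LRU. A sketch of the outer loop, assuming the usual AMDGPU_MAX_VMHUBS bound and unsigned loop counters:

    for (i = 0; i < AMDGPU_MAX_VMHUBS; ++i) {
            struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[i];

            for (j = 1; j < id_mgr->num_ids; ++j)
                    amdgpu_vmid_reset(adev, i, j);  /* per-id reset shown above */
    }
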
574 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_init() local
575 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_init()
577 mutex_init(&id_mgr->lock); in amdgpu_vmid_mgr_init()
578 INIT_LIST_HEAD(&id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
579 atomic_set(&id_mgr->reserved_vmid_num, 0); in amdgpu_vmid_mgr_init()
582 id_mgr->num_ids = adev->vm_manager.first_kfd_vmid; in amdgpu_vmid_mgr_init()
585 for (j = 1; j < id_mgr->num_ids; ++j) { in amdgpu_vmid_mgr_init()
587 amdgpu_sync_create(&id_mgr->ids[j].active); in amdgpu_vmid_mgr_init()
588 list_add_tail(&id_mgr->ids[j].list, &id_mgr->ids_lru); in amdgpu_vmid_mgr_init()
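
Initialization wires each hub's manager up from scratch: lock, empty LRU, zeroed reservation counter. num_ids is capped at first_kfd_vmid, which by its name splits the hardware VMID space between graphics (below) and KFD compute (at and above); ids 1 through num_ids - 1 each get a sync object and are appended to the LRU. Reassembled from the hits above, with the same AMDGPU_MAX_VMHUBS assumption as before:

    for (i = 0; i < AMDGPU_MAX_VMHUBS; ++i) {
            struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[i];

            mutex_init(&id_mgr->lock);
            INIT_LIST_HEAD(&id_mgr->ids_lru);
            atomic_set(&id_mgr->reserved_vmid_num, 0);

            /* Graphics may only use VMIDs below the KFD range. */
            id_mgr->num_ids = adev->vm_manager.first_kfd_vmid;

            /* Skip id 0: VMID 0 belongs to the kernel. */
            for (j = 1; j < id_mgr->num_ids; ++j) {
                    amdgpu_sync_create(&id_mgr->ids[j].active);
                    list_add_tail(&id_mgr->ids[j].list, &id_mgr->ids_lru);
            }
    }
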
605 struct amdgpu_vmid_mgr *id_mgr = in amdgpu_vmid_mgr_fini() local
606 &adev->vm_manager.id_mgr[i]; in amdgpu_vmid_mgr_fini()
608 mutex_destroy(&id_mgr->lock); in amdgpu_vmid_mgr_fini()
610 struct amdgpu_vmid *id = &id_mgr->ids[j]; in amdgpu_vmid_mgr_fini()
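
Taken together, the hits pin down the manager's shape. A reconstruction from this listing alone; field order, the array bound's name, and any fields with no reference here are guesses, so treat amdgpu_ids.h as authoritative:

    struct amdgpu_vmid_mgr {
            struct mutex       lock;               /* guards ids and ids_lru      */
            unsigned           num_ids;            /* ids usable by graphics      */
            struct list_head   ids_lru;            /* least recently used first   */
            struct amdgpu_vmid ids[AMDGPU_NUM_VMID]; /* bound name assumed        */
            atomic_t           reserved_vmid_num;  /* ids reserved out of the LRU */
    };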