Lines matching refs:bo in drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c. Each entry shows the source line number, the matching code, and the enclosing function; "argument" marks a match in a function parameter and "local" a match in a local variable declaration.

210 void amdgpu_amdkfd_release_notify(struct amdgpu_bo *bo)  in amdgpu_amdkfd_release_notify()  argument
212 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_amdkfd_release_notify()
213 u32 domain = bo->preferred_domains; in amdgpu_amdkfd_release_notify()
214 bool sg = (bo->preferred_domains == AMDGPU_GEM_DOMAIN_CPU); in amdgpu_amdkfd_release_notify()
216 if (bo->flags & AMDGPU_AMDKFD_CREATE_USERPTR_BO) { in amdgpu_amdkfd_release_notify()
221 unreserve_mem_limit(adev, amdgpu_bo_size(bo), domain, sg); in amdgpu_amdkfd_release_notify()
223 kfree(bo->kfd_bo); in amdgpu_amdkfd_release_notify()
236 static int amdgpu_amdkfd_remove_eviction_fence(struct amdgpu_bo *bo, in amdgpu_amdkfd_remove_eviction_fence() argument
239 struct dma_resv *resv = bo->tbo.base.resv; in amdgpu_amdkfd_remove_eviction_fence()
289 int amdgpu_amdkfd_remove_fence_on_pt_pd_bos(struct amdgpu_bo *bo) in amdgpu_amdkfd_remove_fence_on_pt_pd_bos() argument
291 struct amdgpu_bo *root = bo; in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
317 BUG_ON(!dma_resv_trylock(bo->tbo.base.resv)); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
318 ret = amdgpu_amdkfd_remove_eviction_fence(bo, ef); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
319 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
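
The three lines at 317-319 show how the KFD eviction fence is dropped from an idle page-table/page-directory BO: the reservation object is taken with a trylock that is asserted to succeed (the BO is expected to be idle at this point), the fence is filtered out, and the lock is released. A minimal sketch reassembled from those lines; the amdgpu_amdkfd_fence parameter type is an assumption based on the helper's name.

static int kfd_remove_ef_locked_sketch(struct amdgpu_bo *bo,
				       struct amdgpu_amdkfd_fence *ef)
{
	int ret;

	/* The root PD is expected to be idle, so the trylock must succeed. */
	BUG_ON(!dma_resv_trylock(bo->tbo.base.resv));
	ret = amdgpu_amdkfd_remove_eviction_fence(bo, ef);
	dma_resv_unlock(bo->tbo.base.resv);

	return ret;
}
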
325 static int amdgpu_amdkfd_bo_validate(struct amdgpu_bo *bo, uint32_t domain, in amdgpu_amdkfd_bo_validate() argument
331 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
335 amdgpu_bo_placement_from_domain(bo, domain); in amdgpu_amdkfd_bo_validate()
337 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_amdkfd_bo_validate()
341 amdgpu_bo_sync_wait(bo, AMDGPU_FENCE_OWNER_KFD, false); in amdgpu_amdkfd_bo_validate()
347 static int amdgpu_amdkfd_validate_vm_bo(void *_unused, struct amdgpu_bo *bo) in amdgpu_amdkfd_validate_vm_bo() argument
349 return amdgpu_amdkfd_bo_validate(bo, bo->allowed_domains, false); in amdgpu_amdkfd_validate_vm_bo()
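
Lines 325-349 outline the common validation helper: userptr BOs are rejected (they are validated through the user-pages path instead), the placement is derived from the requested domain, the BO is validated through TTM, and callers that asked for it wait for the move on behalf of the KFD fence owner. A hedged reconstruction of that flow from the calls listed above; the error handling and the exact `wait` semantics are assumptions.

static int kfd_bo_validate_sketch(struct amdgpu_bo *bo, uint32_t domain,
				  bool wait)
{
	struct ttm_operation_ctx ctx = { false, false };
	int ret;

	/* Userptr BOs must not go through plain validation. */
	if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),
		 "Called with userptr BO"))
		return -EINVAL;

	/* Pick placements for the requested domain and let TTM move the BO. */
	amdgpu_bo_placement_from_domain(bo, domain);
	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
	if (ret)
		return ret;

	/* Optionally wait for the move to finish on behalf of KFD. */
	if (wait)
		amdgpu_bo_sync_wait(bo, AMDGPU_FENCE_OWNER_KFD, false);

	return 0;
}

Line 349 shows that page-table/page-directory BOs are validated this way with bo->allowed_domains, i.e. wherever the BO is allowed to live.
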
361 struct amdgpu_bo *pd = vm->root.bo; in vm_validate_pt_pd_bos()
377 vm->pd_phys_addr = amdgpu_gmc_pd_addr(vm->root.bo); in vm_validate_pt_pd_bos()
392 struct amdgpu_bo *pd = vm->root.bo; in vm_update_pds()
405 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in get_pte_flags()
475 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_userptr() local
477 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
478 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr()
503 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT); in kfd_mem_dmamap_userptr()
504 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmamap_userptr()
525 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_dmabuf() local
527 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT); in kfd_mem_dmamap_dmabuf()
528 return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmamap_dmabuf()
556 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_userptr() local
558 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmaunmap_userptr()
563 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in kfd_mem_dmaunmap_userptr()
564 ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmaunmap_userptr()
576 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_dmabuf() local
578 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in kfd_mem_dmaunmap_dmabuf()
579 ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in kfd_mem_dmaunmap_dmabuf()
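
Lines 475-579 show the dmamap/dmaunmap pairs for userptr and DMA-buf attachments: mapping validates the attachment BO into GTT (which DMA-maps it for the importing device), and unmapping validates it back into the CPU domain so the DMA mapping is torn down. A condensed, hedged sketch of the two halves using the calls visible above; the userptr map additionally builds an SG table from the original BO's user pages, which is omitted here, and the kfd_mem_attachment type name is taken from the driver headers rather than the listing.

/* Map: move the per-GPU attachment BO into GTT so it gets DMA-mapped. */
static int kfd_attachment_map_sketch(struct kfd_mem_attachment *attachment)
{
	struct amdgpu_bo *bo = attachment->bo_va->base.bo;
	struct ttm_operation_ctx ctx = { .interruptible = true };

	amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
	return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
}

/* Unmap: move it back to the CPU domain, which releases the DMA mapping. */
static void kfd_attachment_unmap_sketch(struct kfd_mem_attachment *attachment)
{
	struct amdgpu_bo *bo = attachment->bo_va->base.bo;
	struct ttm_operation_ctx ctx = { .interruptible = false };

	amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU);
	ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
}
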
602 struct amdgpu_bo **bo) in kfd_mem_attach_userptr() argument
604 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach_userptr()
608 ret = amdgpu_bo_reserve(mem->bo, false); in kfd_mem_attach_userptr()
615 ttm_bo_type_sg, mem->bo->tbo.base.resv, in kfd_mem_attach_userptr()
617 amdgpu_bo_unreserve(mem->bo); in kfd_mem_attach_userptr()
621 *bo = gem_to_amdgpu_bo(gobj); in kfd_mem_attach_userptr()
622 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_userptr()
629 struct amdgpu_bo **bo) in kfd_mem_attach_dmabuf() argument
635 mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base, in kfd_mem_attach_dmabuf()
649 *bo = gem_to_amdgpu_bo(gobj); in kfd_mem_attach_dmabuf()
650 (*bo)->flags |= AMDGPU_GEM_CREATE_PREEMPTIBLE; in kfd_mem_attach_dmabuf()
651 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_dmabuf()
672 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in kfd_mem_attach()
673 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach()
676 struct amdgpu_bo *bo[2] = {NULL, NULL}; in kfd_mem_attach() local
700 bo[i] = mem->bo; in kfd_mem_attach()
701 drm_gem_object_get(&bo[i]->tbo.base); in kfd_mem_attach()
705 bo[i] = bo[0]; in kfd_mem_attach()
706 drm_gem_object_get(&bo[i]->tbo.base); in kfd_mem_attach()
707 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
710 ret = kfd_mem_attach_userptr(adev, mem, &bo[i]); in kfd_mem_attach()
714 mem->bo->tbo.type != ttm_bo_type_sg) { in kfd_mem_attach()
720 ret = kfd_mem_attach_dmabuf(adev, mem, &bo[i]); in kfd_mem_attach()
728 bo[i] = mem->bo; in kfd_mem_attach()
729 drm_gem_object_get(&bo[i]->tbo.base); in kfd_mem_attach()
733 ret = amdgpu_bo_reserve(bo[i], false); in kfd_mem_attach()
738 attachment[i]->bo_va = amdgpu_vm_bo_add(adev, vm, bo[i]); in kfd_mem_attach()
739 amdgpu_bo_unreserve(bo[i]); in kfd_mem_attach()
761 amdgpu_bo_reserve(bo[i], true); in kfd_mem_attach()
763 amdgpu_bo_unreserve(bo[i]); in kfd_mem_attach()
766 if (bo[i]) in kfd_mem_attach()
767 drm_gem_object_put(&bo[i]->tbo.base); in kfd_mem_attach()
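
Lines 602-767 cover kfd_mem_attach() and its helpers: depending on whether the BO is attached on the same GPU, is a userptr, or has to be shared through DMA-buf, either the original BO is reused (taking a GEM reference) or a per-GPU proxy BO is created; in every case the chosen BO is then reserved, an amdgpu_bo_va is added to the target VM, and the BO is unreserved. The unwind path reserves/unreserves the BOs again and drops the GEM references taken earlier. A hedged sketch of the per-attachment tail only (lines 733-739); the helper name and parameters are invented for illustration.

static int kfd_attach_bo_va_sketch(struct amdgpu_device *adev,
				   struct amdgpu_vm *vm,
				   struct amdgpu_bo *bo,
				   struct kfd_mem_attachment *attachment)
{
	int ret;

	/* The bo_va must be created under the BO's reservation lock. */
	ret = amdgpu_bo_reserve(bo, false);
	if (ret) {
		pr_debug("Unable to reserve BO during memory attach\n");
		return ret;
	}
	attachment->bo_va = amdgpu_vm_bo_add(adev, vm, bo);
	amdgpu_bo_unreserve(bo);

	return 0;
}
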
775 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_detach() local
780 drm_gem_object_put(&bo->tbo.base); in kfd_mem_detach()
790 struct amdgpu_bo *bo = mem->bo; in add_kgd_mem_to_kfd_bo_list() local
794 entry->bo = &bo->tbo; in add_kgd_mem_to_kfd_bo_list()
829 struct amdgpu_bo *bo = mem->bo; in init_user_pages() local
835 ret = amdgpu_ttm_tt_set_userptr(&bo->tbo, user_addr, 0); in init_user_pages()
841 ret = amdgpu_mn_register(bo, user_addr); in init_user_pages()
848 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages); in init_user_pages()
854 ret = amdgpu_bo_reserve(bo, true); in init_user_pages()
859 amdgpu_bo_placement_from_domain(bo, mem->domain); in init_user_pages()
860 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in init_user_pages()
863 amdgpu_bo_unreserve(bo); in init_user_pages()
866 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm); in init_user_pages()
869 amdgpu_mn_unregister(bo); in init_user_pages()
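
Lines 829-869 show the userptr initialisation sequence: the user address is attached to the TTM object, an MMU-notifier range is registered, the user pages backing the range are looked up, and the BO is validated into its target domain under reservation before the user-pages transaction is marked done. A hedged sketch of that ordering; the unwind labels are abbreviated and the function name is invented.

static int init_user_pages_sketch(struct kgd_mem *mem, uint64_t user_addr)
{
	struct ttm_operation_ctx ctx = { true, false };
	struct amdgpu_bo *bo = mem->bo;
	int ret;

	/* Tell TTM this BO is backed by user memory at user_addr. */
	ret = amdgpu_ttm_tt_set_userptr(&bo->tbo, user_addr, 0);
	if (ret)
		return ret;

	/* Register an MMU notifier so invalidations can evict the BO. */
	ret = amdgpu_mn_register(bo, user_addr);
	if (ret)
		return ret;

	/* Look up the user pages backing the range. */
	ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
	if (ret)
		goto unregister;

	/* Validate into the target domain while holding the reservation. */
	ret = amdgpu_bo_reserve(bo, true);
	if (ret)
		goto release_pages;
	amdgpu_bo_placement_from_domain(bo, mem->domain);
	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
	amdgpu_bo_unreserve(bo);

release_pages:
	/* End the user-pages transaction started above. */
	amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
unregister:
	if (ret)
		amdgpu_mn_unregister(bo);
	return ret;
}
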
906 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_vm() local
923 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_vm()
956 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_cond_vms() local
986 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_cond_vms()
1095 amdgpu_bo_size(entry->bo_va->base.bo), in map_bo_to_gpuvm()
1160 struct amdgpu_bo *pd = peer_vm->root.bo; in process_sync_pds_resv()
1227 ret = amdgpu_bo_reserve(vm->root.bo, true); in init_kfd_vm()
1235 ret = amdgpu_bo_sync_wait(vm->root.bo, in init_kfd_vm()
1239 ret = dma_resv_reserve_shared(vm->root.bo->tbo.base.resv, 1); in init_kfd_vm()
1242 amdgpu_bo_fence(vm->root.bo, in init_kfd_vm()
1244 amdgpu_bo_unreserve(vm->root.bo); in init_kfd_vm()
1258 amdgpu_bo_unreserve(vm->root.bo); in init_kfd_vm()
1324 struct amdgpu_bo *pd = vm->root.bo; in amdgpu_amdkfd_gpuvm_destroy_cb()
1380 struct amdgpu_bo *pd = avm->root.bo; in amdgpu_amdkfd_gpuvm_get_process_page_dir()
1398 struct amdgpu_bo *bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() local
1478 bo = gem_to_amdgpu_bo(gobj); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1480 bo->tbo.sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1481 bo->tbo.ttm->sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1483 bo->kfd_bo = *mem; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1484 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1486 bo->flags |= AMDGPU_AMDKFD_CREATE_USERPTR_BO; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1501 *offset = amdgpu_bo_mmap_offset(bo); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1532 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1561 amdgpu_mn_unregister(mem->bo); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1571 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1588 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1589 sg_free_table(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1590 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1597 if ((mem->bo->preferred_domains == AMDGPU_GEM_DOMAIN_VRAM) && in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1605 drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1614 drm_gem_object_put(&mem->bo->tbo.base); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1626 struct amdgpu_bo *bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu() local
1633 bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1634 if (!bo) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1649 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1658 bo_size = bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1680 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1681 bo->tbo.resource->mem_type == TTM_PL_SYSTEM) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1689 !amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1694 ret = amdgpu_amdkfd_bo_validate(bo, domain, true); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1727 if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1728 amdgpu_bo_fence(bo, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1754 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1798 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1799 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1800 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
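
Lines 1626-1800 show how mapping and unmapping interact with the per-process eviction fence: after a successful map the BO is fenced with the process eviction fence (unless it is a userptr or pinned BO) so the KFD eviction machinery runs before the BO can be moved, and once the last GPU mapping is gone the fence is removed again. A hedged sketch of just those two conditions; the amdkfd_process_info type and the mapped_to_gpu_memory counter are not visible in the listing and are assumptions about the surrounding structures.

/* After a successful map (lines 1727-1728): attach the eviction fence. */
static void kfd_mem_attach_ef_sketch(struct kgd_mem *mem,
				     struct amdkfd_process_info *process_info)
{
	struct amdgpu_bo *bo = mem->bo;

	/* Userptr and pinned BOs are not fenced here. */
	if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count)
		amdgpu_bo_fence(bo, &process_info->eviction_fence->base, true);
}

/* After the last unmap (lines 1798-1800): drop the eviction fence again. */
static void kfd_mem_detach_ef_sketch(struct kgd_mem *mem,
				     struct amdkfd_process_info *process_info)
{
	if (mem->mapped_to_gpu_memory == 0 &&
	    !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) &&
	    !mem->bo->tbo.pin_count)
		amdgpu_amdkfd_remove_eviction_fence(mem->bo,
						    process_info->eviction_fence);
}
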
1831 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel() local
1833 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1843 ret = amdgpu_bo_reserve(bo, true); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1849 ret = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1855 ret = amdgpu_bo_kmap(bo, kptr); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1862 bo, mem->process_info->eviction_fence); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1866 *size = amdgpu_bo_size(bo); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1868 amdgpu_bo_unreserve(bo); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1874 amdgpu_bo_unpin(bo); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1876 amdgpu_bo_unreserve(bo); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1885 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel() local
1887 amdgpu_bo_reserve(bo, true); in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel()
1888 amdgpu_bo_kunmap(bo); in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel()
1889 amdgpu_bo_unpin(bo); in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel()
1890 amdgpu_bo_unreserve(bo); in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel()
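
Lines 1831-1890 show the kernel-mapping pair: userptr BOs are rejected, the BO is reserved and pinned into GTT, kmapped to obtain a CPU pointer, the eviction fence is removed (a pinned BO can no longer be evicted), and the reservation is dropped. A hedged sketch of the map side; the unwind labels are assumptions. The unmap side (1885-1890) is simply reserve, kunmap, unpin, unreserve.

static int kfd_map_gtt_bo_sketch(struct kgd_mem *mem, void **kptr,
				 uint64_t *size)
{
	struct amdgpu_bo *bo = mem->bo;
	int ret;

	/* Userptr memory cannot be mapped into the kernel this way. */
	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
		return -EINVAL;

	ret = amdgpu_bo_reserve(bo, true);
	if (ret)
		return ret;

	/* Pin in GTT so the kernel mapping stays valid. */
	ret = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
	if (ret)
		goto out_unreserve;

	ret = amdgpu_bo_kmap(bo, kptr);
	if (ret)
		goto out_unpin;

	/* A pinned BO can no longer be evicted; drop the eviction fence. */
	amdgpu_amdkfd_remove_eviction_fence(bo,
					    mem->process_info->eviction_fence);

	if (size)
		*size = amdgpu_bo_size(bo);
	amdgpu_bo_unreserve(bo);
	return 0;

out_unpin:
	amdgpu_bo_unpin(bo);
out_unreserve:
	amdgpu_bo_unreserve(bo);
	return ret;
}
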
1916 struct amdgpu_bo *bo; in amdgpu_amdkfd_gpuvm_import_dmabuf() local
1928 bo = gem_to_amdgpu_bo(obj); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1929 if (!(bo->preferred_domains & (AMDGPU_GEM_DOMAIN_VRAM | in amdgpu_amdkfd_gpuvm_import_dmabuf()
1945 *size = amdgpu_bo_size(bo); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1948 *mmap_offset = amdgpu_bo_mmap_offset(bo); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1954 ((bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ? in amdgpu_amdkfd_gpuvm_import_dmabuf()
1959 drm_gem_object_get(&bo->tbo.base); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1960 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1962 (*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ? in amdgpu_amdkfd_gpuvm_import_dmabuf()
2015 struct amdgpu_bo *bo; in update_invalid_user_pages() local
2028 bo = mem->bo; in update_invalid_user_pages()
2030 if (amdgpu_bo_reserve(bo, true)) in update_invalid_user_pages()
2032 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in update_invalid_user_pages()
2033 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in update_invalid_user_pages()
2034 amdgpu_bo_unreserve(bo); in update_invalid_user_pages()
2058 bo = mem->bo; in update_invalid_user_pages()
2061 ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages); in update_invalid_user_pages()
2080 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm); in update_invalid_user_pages()
2108 struct amdgpu_bo *bo; in validate_invalid_user_pages() local
2134 mem->resv_list.bo = mem->validate_list.bo; in validate_invalid_user_pages()
2156 bo = mem->bo; in validate_invalid_user_pages()
2159 if (bo->tbo.ttm->pages[0]) { in validate_invalid_user_pages()
2160 amdgpu_bo_placement_from_domain(bo, mem->domain); in validate_invalid_user_pages()
2161 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in validate_invalid_user_pages()
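
Lines 2015-2161 show the two halves of userptr restoration after an MMU-notifier invalidation: update_invalid_user_pages() first kicks the BO out to the CPU domain (so stale DMA addresses are dropped) and re-reads the user pages, then validate_invalid_user_pages() validates the BO back into its target domain once pages are present. A hedged, per-BO sketch of those steps without the queue-eviction and list bookkeeping; the real code holds the reservations through a validate list rather than the plain reserve/unreserve used here.

static int kfd_refresh_userptr_sketch(struct kgd_mem *mem)
{
	struct ttm_operation_ctx ctx = { false, false };
	struct amdgpu_bo *bo = mem->bo;
	int ret;

	/* Step 1: invalidate - move the BO to the CPU domain (2030-2034). */
	ret = amdgpu_bo_reserve(bo, true);
	if (ret)
		return ret;
	amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU);
	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
	amdgpu_bo_unreserve(bo);
	if (ret)
		return ret;

	/* Step 2: pick up the new user pages (2061). */
	ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
	if (ret)
		return ret;

	/* Step 3: revalidate into the target domain once pages exist (2159-2161). */
	if (bo->tbo.ttm->pages[0]) {
		ret = amdgpu_bo_reserve(bo, true);
		if (ret)
			goto done;
		amdgpu_bo_placement_from_domain(bo, mem->domain);
		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
		amdgpu_bo_unreserve(bo);
	}

done:
	/* The real code also checks the result to decide whether to retry. */
	amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
	return ret;
}
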
2333 mem->resv_list.bo = mem->validate_list.bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2361 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_restore_process_bos() local
2365 total_size += amdgpu_bo_size(bo); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2367 ret = amdgpu_amdkfd_bo_validate(bo, domain, false); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2370 failed_size += amdgpu_bo_size(bo); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2371 ret = amdgpu_amdkfd_bo_validate(bo, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2378 ret = amdgpu_sync_fence(&sync_obj, bo->tbo.moving); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2429 amdgpu_bo_fence(mem->bo, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2435 struct amdgpu_bo *bo = peer_vm->root.bo; in amdgpu_amdkfd_gpuvm_restore_process_bos() local
2437 amdgpu_bo_fence(bo, &process_info->eviction_fence->base, true); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2464 (*mem)->bo = amdgpu_bo_ref(gws_bo); in amdgpu_amdkfd_add_gws_to_process()
2516 struct amdgpu_bo *gws_bo = kgd_mem->bo; in amdgpu_amdkfd_remove_gws_from_process()