
Searched refs:tbo (Results 1 – 25 of 59) sorted by relevance


/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
62 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
75 amdgpu_bo_destroy(tbo); in amdgpu_bo_user_destroy()
92 amdgpu_bo_destroy(tbo); in amdgpu_bo_vm_destroy()
383 ttm_resource_free(&(*bo_ptr)->tbo, &(*bo_ptr)->tbo.resource); in amdgpu_bo_create_kernel_at()
772 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.resource->num_pages, &bo->kmap); in amdgpu_bo_kmap()
824 ttm_bo_get(&bo->tbo); in amdgpu_bo_ref()
841 tbo = &((*bo)->tbo); in amdgpu_bo_unref()
842 ttm_bo_put(tbo); in amdgpu_bo_unref()
944 ttm_bo_pin(&bo->tbo); in amdgpu_bo_pin_restricted()
1222 if (abo->tbo.base.dma_buf && !abo->tbo.base.import_attach && in amdgpu_bo_move_notify()
[all …]
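
The amdgpu_object.c hits above (lines 824 and 841–842) show the reference-counting convention used throughout these drivers: the driver object is never refcounted directly; ref/unref simply forward to ttm_bo_get()/ttm_bo_put() on the embedded tbo, and the final put ends up in amdgpu_bo_destroy() (line 62). A hedged, kernel-context sketch of that pattern follows; the function bodies are reconstructions from the hit lines, not the complete kernel implementations, and the include is assumed from the file list above.

#include "amdgpu_object.h"           /* struct amdgpu_bo with its embedded tbo */

struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo)
{
	if (bo == NULL)
		return NULL;

	ttm_bo_get(&bo->tbo);        /* take a reference on the embedded TTM BO */
	return bo;
}

void amdgpu_bo_unref(struct amdgpu_bo **bo)
{
	struct ttm_buffer_object *tbo;

	if ((*bo) == NULL)
		return;

	tbo = &((*bo)->tbo);         /* hit at line 841 */
	ttm_bo_put(tbo);             /* last put ends up in amdgpu_bo_destroy() */
	*bo = NULL;
}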
amdgpu_dma_buf.c
115 if (bo->tbo.moving) { in amdgpu_dma_buf_pin()
116 r = dma_fence_wait(bo->tbo.moving, true); in amdgpu_dma_buf_pin()
163 if (!bo->tbo.pin_count) { in amdgpu_dma_buf_map()
183 switch (bo->tbo.resource->mem_type) { in amdgpu_dma_buf_map()
186 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
187 bo->tbo.ttm->num_pages); in amdgpu_dma_buf_map()
198 bo->tbo.base.size, attach->dev, in amdgpu_dma_buf_map()
268 if (!bo->tbo.pin_count && in amdgpu_dma_buf_begin_cpu_access()
308 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
387 if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM) in amdgpu_dma_buf_move_notify()
[all …]
amdgpu_object.h
100 struct ttm_buffer_object tbo; member
132 return container_of(tbo, struct amdgpu_bo, tbo); in ttm_to_amdgpu_bo()
173 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_reserve()
176 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in amdgpu_bo_reserve()
187 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
192 return bo->tbo.base.size; in amdgpu_bo_size()
197 return bo->tbo.base.size / AMDGPU_GPU_PAGE_SIZE; in amdgpu_bo_ngpu_pages()
213 return drm_vma_node_offset_addr(&bo->tbo.base.vma_node); in amdgpu_bo_mmap_offset()
221 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_in_cpu_visible_vram()
224 if (bo->tbo.resource->mem_type != TTM_PL_VRAM) in amdgpu_bo_in_cpu_visible_vram()
[all …]
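
The amdgpu_object.h hits make the structural relationship explicit: struct amdgpu_bo embeds a struct ttm_buffer_object as its tbo member (line 100), and ttm_to_amdgpu_bo() recovers the containing object with container_of() (line 132); the same idiom appears in qxl_drv.h and radeon_object.c below. A minimal, self-contained userspace sketch of that embedding idiom follows; the names (fake_tbo, my_bo, tbo_to_my_bo) and the local container_of definition are illustrative stand-ins, not kernel code.

#include <stdio.h>
#include <stddef.h>

/* simplified re-definition of the kernel's container_of() */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct fake_tbo {                    /* stands in for struct ttm_buffer_object */
	unsigned long size;
};

struct my_bo {                       /* stands in for struct amdgpu_bo */
	int domain;
	struct fake_tbo tbo;         /* embedded, as in "struct ttm_buffer_object tbo; member" */
};

/* mirrors ttm_to_amdgpu_bo(): member pointer -> containing driver object */
static struct my_bo *tbo_to_my_bo(struct fake_tbo *t)
{
	return container_of(t, struct my_bo, tbo);
}

int main(void)
{
	struct my_bo bo = { .domain = 4, .tbo = { .size = 4096 } };
	struct fake_tbo *t = &bo.tbo;        /* TTM only ever sees this pointer */

	printf("domain=%d size=%lu\n", tbo_to_my_bo(t)->domain, t->size);
	return 0;
}

Because TTM only sees the embedded member, every helper in the listing passes &bo->tbo downward and converts back with container_of() when it needs driver-side state.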
amdgpu_amdkfd_gpuvm.c
794 entry->bo = &bo->tbo; in add_kgd_mem_to_kfd_bo_list()
923 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_vm()
986 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_cond_vms()
1480 bo->tbo.sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1481 bo->tbo.ttm->sg = sg; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1588 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1590 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1658 bo_size = bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1727 if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1799 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
[all …]
amdgpu_gem.c
123 *obj = &bo->tbo.base; in amdgpu_gem_object_create()
168 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm); in amdgpu_gem_object_open()
173 abo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_gem_object_open()
209 tv.bo = &bo->tbo; in amdgpu_gem_object_close()
229 fence = dma_resv_excl_fence(bo->tbo.base.resv); in amdgpu_gem_object_close()
253 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) in amdgpu_gem_object_mmap()
336 resv = vm->root.bo->tbo.base.resv; in amdgpu_gem_create_ioctl()
472 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) || in amdgpu_mode_dumb_mmap()
737 tv.bo = &abo->tbo; in amdgpu_gem_va_ioctl()
828 info.bo_size = robj->tbo.base.size; in amdgpu_gem_op_ioctl()
[all …]
amdgpu_gtt_mgr.c
30 struct ttm_buffer_object *tbo; member
126 struct ttm_buffer_object *tbo, in amdgpu_gtt_mgr_new() argument
131 uint32_t num_pages = PFN_UP(tbo->base.size); in amdgpu_gtt_mgr_new()
147 node->tbo = tbo; in amdgpu_gtt_mgr_new()
148 ttm_resource_init(tbo, place, &node->base.base); in amdgpu_gtt_mgr_new()
154 num_pages, tbo->page_alignment, in amdgpu_gtt_mgr_new()
239 r = amdgpu_ttm_recover_gart(node->tbo); in amdgpu_gtt_mgr_recover()
amdgpu_vm.c
375 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
665 if (abo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_del_from_lru_notify()
701 ttm_bo_move_to_lru_tail(&bo->tbo, bo->tbo.resource, in amdgpu_vm_move_to_lru_tail()
705 shadow->tbo.resource, in amdgpu_vm_move_to_lru_tail()
1898 resv = bo->tbo.base.resv; in amdgpu_vm_bo_update()
1907 mem = bo->tbo.resource; in amdgpu_vm_bo_update()
1967 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_update()
2309 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_insert_map()
2671 if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_rmv()
2765 if (evicted && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_invalidate()
[all …]
amdgpu_cs.c
56 p->uf_entry.tv.bo = &bo->tbo; in amdgpu_cs_user_fence_chunk()
68 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) { in amdgpu_cs_user_fence_chunk()
406 .resv = bo->tbo.base.resv in amdgpu_cs_bo_validate()
411 if (bo->tbo.pin_count) in amdgpu_cs_bo_validate()
418 (!bo->tbo.base.dma_buf || in amdgpu_cs_bo_validate()
469 if (amdgpu_ttm_tt_is_userptr(bo->tbo.ttm) && in amdgpu_cs_list_validate()
477 amdgpu_ttm_tt_set_user_pages(bo->tbo.ttm, in amdgpu_cs_list_validate()
558 for (i = 0; i < bo->tbo.ttm->num_pages; i++) { in amdgpu_cs_parser_bos()
632 r = amdgpu_ttm_alloc_gart(&uf->tbo); in amdgpu_cs_parser_bos()
656 struct dma_resv *resv = bo->tbo.base.resv; in amdgpu_cs_sync_rings()
[all …]
amdgpu_vram_mgr.c
221 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vram_mgr_bo_visible_size()
222 struct ttm_resource *res = bo->tbo.resource; in amdgpu_vram_mgr_bo_visible_size()
369 struct ttm_buffer_object *tbo, in amdgpu_vram_mgr_new() argument
388 if (tbo->type != ttm_bo_type_kernel) in amdgpu_vram_mgr_new()
392 mem_bytes = tbo->base.size; in amdgpu_vram_mgr_new()
409 tbo->page_alignment); in amdgpu_vram_mgr_new()
420 ttm_resource_init(tbo, place, &node->base); in amdgpu_vram_mgr_new()
434 uint32_t alignment = tbo->page_alignment; in amdgpu_vram_mgr_new()
amdgpu_mn.c
68 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_invalidate_gfx()
78 r = dma_resv_wait_timeout(bo->tbo.base.resv, true, false, in amdgpu_mn_invalidate_gfx()
105 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_mn_invalidate_hsa()
amdgpu_ttm.c
482 if (WARN_ON_ONCE(abo->tbo.pin_count > 0)) in amdgpu_bo_move()
663 struct ttm_tt *ttm = bo->tbo.ttm; in amdgpu_ttm_tt_get_user_pages()
833 struct ttm_buffer_object *tbo, in amdgpu_ttm_gart_bind() argument
836 struct amdgpu_bo *abo = ttm_to_amdgpu_bo(tbo); in amdgpu_ttm_gart_bind()
837 struct ttm_tt *ttm = tbo->ttm; in amdgpu_ttm_gart_bind()
1023 if (!tbo->ttm) in amdgpu_ttm_recover_gart()
1026 flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, tbo->resource); in amdgpu_ttm_recover_gart()
1027 r = amdgpu_ttm_gart_bind(adev, tbo, flags); in amdgpu_ttm_recover_gart()
1999 if (bo->tbo.resource->mem_type == TTM_PL_TT) { in amdgpu_fill_buffer()
2000 r = amdgpu_ttm_alloc_gart(&bo->tbo); in amdgpu_fill_buffer()
[all …]
amdgpu_vm_cpu.c
79 if (vmbo->bo.tbo.moving) { in amdgpu_vm_cpu_update()
80 r = dma_fence_wait(vmbo->bo.tbo.moving, true); in amdgpu_vm_cpu_update()
amdgpu_vm_sdma.c
40 r = amdgpu_ttm_alloc_gart(&table->bo.tbo); in amdgpu_vm_sdma_map_table()
45 r = amdgpu_ttm_alloc_gart(&table->shadow->tbo); in amdgpu_vm_sdma_map_table()
212 r = amdgpu_sync_fence(&p->job->sync, bo->tbo.moving); in amdgpu_vm_sdma_update()
/linux/drivers/gpu/drm/qxl/
qxl_object.c
40 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
41 qdev = to_qxl(bo->tbo.base.dev); in qxl_ttm_bo_destroy()
65 if (qbo->tbo.base.size <= PAGE_SIZE) in qxl_ttm_placement_from_domain()
143 bo->tbo.priority = priority; in qxl_bo_create()
155 ttm_bo_pin(&bo->tbo); in qxl_bo_create()
156 ttm_bo_unreserve(&bo->tbo); in qxl_bo_create()
300 if (bo->tbo.pin_count) { in __qxl_bo_pin()
301 ttm_bo_pin(&bo->tbo); in __qxl_bo_pin()
307 ttm_bo_pin(&bo->tbo); in __qxl_bo_pin()
315 ttm_bo_unpin(&bo->tbo); in __qxl_bo_unpin()
[all …]
qxl_gem.c
35 struct ttm_buffer_object *tbo; in qxl_gem_object_free() local
41 tbo = &qobj->tbo; in qxl_gem_object_free()
42 ttm_bo_put(tbo); in qxl_gem_object_free()
66 *obj = &qbo->tbo.base; in qxl_gem_object_create()
qxl_object.h
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL); in qxl_bo_reserve()
37 struct drm_device *ddev = bo->tbo.base.dev; in qxl_bo_reserve()
48 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
53 return bo->tbo.base.size; in qxl_bo_size()
qxl_debugfs.c
64 fobj = dma_resv_shared_list(bo->tbo.base.resv); in qxl_debugfs_buffers_info()
69 (unsigned long)bo->tbo.base.size, in qxl_debugfs_buffers_info()
70 bo->tbo.pin_count, rel); in qxl_debugfs_buffers_info()
qxl_release.c
176 if (entry->tv.bo == &bo->tbo) in qxl_release_list_add()
185 entry->tv.bo = &bo->tbo; in qxl_release_list_add()
196 if (!bo->tbo.pin_count) { in qxl_release_validate_bo()
198 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in qxl_release_validate_bo()
203 ret = dma_resv_reserve_shared(bo->tbo.base.resv, 1); in qxl_release_validate_bo()
208 ret = qxl_bo_check_id(to_qxl(bo->tbo.base.dev), bo); in qxl_release_validate_bo()
qxl_drv.h
77 struct ttm_buffer_object tbo; member
98 #define gem_to_qxl_bo(gobj) container_of((gobj), struct qxl_bo, tbo.base)
99 #define to_qxl_bo(tobj) container_of((tobj), struct qxl_bo, tbo)
295 (bo->tbo.resource->mem_type == TTM_PL_VRAM) in qxl_bo_physical_address()
300 return slot->high_bits | ((bo->tbo.resource->start << PAGE_SHIFT) + offset); in qxl_bo_physical_address()
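
The last qxl_drv.h hit (line 300) shows how qxl_bo_physical_address() composes an address for a VRAM object: the memory slot's high bits are OR-ed with the BO's placement, i.e. its resource start page shifted by PAGE_SHIFT plus the byte offset. A tiny standalone sketch of just that arithmetic follows; the 4 KiB PAGE_SHIFT, the function name and the sample values are assumptions for illustration only.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12   /* assumed 4 KiB pages for the demo */

/* mirrors the expression in the qxl_bo_physical_address() hit above */
static uint64_t bo_physical_address(uint64_t slot_high_bits,
                                    uint64_t resource_start_page,
                                    uint64_t offset)
{
	return slot_high_bits | ((resource_start_page << PAGE_SHIFT) + offset);
}

int main(void)
{
	/* hypothetical slot tag and placement: start page 0x120, offset 0x40 */
	uint64_t addr = bo_physical_address(0xfe00000000000000ull, 0x120, 0x40);

	printf("physical address: 0x%llx\n", (unsigned long long)addr);
	return 0;
}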
/linux/drivers/gpu/drm/radeon/
radeon_object.c
77 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
85 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in radeon_ttm_bo_destroy()
251 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.resource->num_pages, &bo->kmap); in radeon_bo_kmap()
277 ttm_bo_get(&bo->tbo); in radeon_bo_ref()
287 tbo = &((*bo)->tbo); in radeon_bo_unref()
288 ttm_bo_put(tbo); in radeon_bo_unref()
301 if (bo->tbo.pin_count) { in radeon_bo_pin_restricted()
302 ttm_bo_pin(&bo->tbo); in radeon_bo_pin_restricted()
338 ttm_bo_pin(&bo->tbo); in radeon_bo_pin_restricted()
358 ttm_bo_unpin(&bo->tbo); in radeon_bo_unpin()
[all …]
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
96 rdev = radeon_get_rdev(bo->tbo.bdev); in radeon_bo_gpu_offset()
98 switch (bo->tbo.resource->mem_type) { in radeon_bo_gpu_offset()
107 return (bo->tbo.resource->start << PAGE_SHIFT) + start; in radeon_bo_gpu_offset()
112 return bo->tbo.base.size; in radeon_bo_size()
117 return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE; in radeon_bo_ngpu_pages()
122 return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE; in radeon_bo_gpu_page_alignment()
133 return drm_vma_node_offset_addr(&bo->tbo.base.vma_node); in radeon_bo_mmap_offset()
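
radeon_object.h wraps the TTM locking primitives the same way amdgpu_object.h does: radeon_bo_reserve() calls ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL) and radeon_bo_unreserve() releases the BO (lines 68 and 79). A hedged kernel-context sketch of the resulting usage pattern follows; my_bo_access and the placeholder comment are hypothetical, and only the ttm_bo_reserve()/ttm_bo_unreserve() calls and their arguments are taken from the hits.

#include "radeon_object.h"           /* struct radeon_bo with its embedded tbo */

/* hypothetical caller: reserve the embedded tbo, touch BO state, release */
static int my_bo_access(struct radeon_bo *bo, bool no_intr)
{
	int r;

	r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
	if (r != 0)
		return r;            /* e.g. interrupted by a signal */

	/* ... operate on the reserved BO here (placeholder) ... */

	ttm_bo_unreserve(&bo->tbo);
	return 0;
}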
radeon_prime.c
39 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, in radeon_gem_prime_get_sg_table()
40 bo->tbo.ttm->num_pages); in radeon_gem_prime_get_sg_table()
59 bo->tbo.base.funcs = &radeon_gem_object_funcs; in radeon_gem_prime_import_sg_table()
66 return &bo->tbo.base; in radeon_gem_prime_import_sg_table()
83 if (bo->tbo.moving) { in radeon_gem_prime_pin()
84 ret = dma_fence_wait(bo->tbo.moving, false); in radeon_gem_prime_pin()
117 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_mn.c
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
69 r = dma_resv_wait_timeout(bo->tbo.base.resv, true, false, in radeon_mn_invalidate()
75 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_mn_invalidate()
radeon_gem.c
132 *obj = &robj->tbo.base; in radeon_gem_object_create()
268 struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev); in radeon_gem_object_mmap()
270 if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm)) in radeon_gem_object_mmap()
424 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_gem_userptr_ioctl()
494 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
526 r = dma_resv_test_signaled(robj->tbo.base.resv, true); in radeon_gem_busy_ioctl()
532 cur_placement = READ_ONCE(robj->tbo.resource->mem_type); in radeon_gem_busy_ioctl()
562 cur_placement = READ_ONCE(robj->tbo.resource->mem_type); in radeon_gem_wait_idle_ioctl()
633 tv.bo = &bo_va->bo->tbo; in radeon_gem_va_update_vm()
659 r = radeon_vm_bo_update(rdev, bo_va, bo_va->bo->tbo.resource); in radeon_gem_va_update_vm()
[all …]
radeon_cs.c
162 if (radeon_ttm_tt_has_userptr(p->rdev, p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
185 p->relocs[i].tv.bo = &p->relocs[i].robj->tbo; in radeon_cs_parser_relocs()
259 resv = reloc->robj->tbo.base.resv; in radeon_cs_sync_rings()
403 return (int)la->robj->tbo.resource->num_pages - in cmp_size_smaller_first()
404 (int)lb->robj->tbo.resource->num_pages; in cmp_size_smaller_first()
447 drm_gem_object_put(&bo->tbo.base); in radeon_cs_parser_fini()
519 rdev->ring_tmp_bo.bo->tbo.resource); in radeon_bo_vm_update_pte()
533 r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource); in radeon_bo_vm_update_pte()
