
Searched refs:bo (Results 1 – 25 of 271) sorted by relevance

/linux/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
70 memset(bo, 0, sizeof(*bo)); in __bo_init()
270 bo->pgnr = bo->pgnr - pgnr; in __bo_break_up()
287 if (!bo->prev && !bo->next) { in __bo_take_off_handling()
294 } else if (!bo->prev && bo->next) { in __bo_take_off_handling()
304 } else if (bo->prev && !bo->next) { in __bo_take_off_handling()
312 } else if (bo->prev && bo->next) { in __bo_take_off_handling()
313 bo->next->prev = bo->prev; in __bo_take_off_handling()
314 bo->prev->next = bo->next; in __bo_take_off_handling()
496 bo = __bo_merge(prev_bo, bo); in hmm_bo_release()
503 bo = __bo_merge(bo, next_bo); in hmm_bo_release()
[all …]
hmm.c
78 hmm_bo_type_string[bo->type], bo->pgnr); in bo_show()
80 total[bo->type] += bo->pgnr; in bo_show()
247 if (!bo) { in hmm_alloc()
295 if (!bo) { in hmm_free()
311 if (!bo) { in hmm_check_bo()
594 if (!bo) { in hmm_virt_to_phys()
612 if (!bo) { in hmm_mmap()
629 if (!bo) { in hmm_vmap()
649 if (!bo) { in hmm_flush_vmap()
664 if (!bo) { in hmm_vunmap()
[all …]
/linux/drivers/gpu/drm/ttm/
ttm_bo.c
61 bo, bo->resource->num_pages, bo->base.size >> 10, in ttm_bo_mem_space_debug()
236 bo->bdev->funcs->delete_mem_notify(bo); in ttm_bo_cleanup_memtype_use()
239 ttm_resource_free(bo, &bo->resource); in ttm_bo_cleanup_memtype_use()
246 if (bo->base.resv == &bo->base._resv) in ttm_bo_individualize_resv()
262 bo->base.resv = &bo->base._resv; in ttm_bo_individualize_resv()
424 bo->bdev->funcs->release_notify(bo); in ttm_bo_release()
470 bo->destroy(bo); in ttm_bo_release()
984 bo->base.resv = &bo->base._resv; in ttm_bo_init_reserved()
1164 bo->bdev->funcs->swap_notify(bo); in ttm_bo_swapout()
1185 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_bo_tt_destroy()
[all …]
ttm_bo_util.c
224 ttm_bo_get(bo); in ttm_buffer_object_transfer()
225 fbo->bo = bo; in ttm_buffer_object_transfer()
344 map->bo = bo; in ttm_bo_kmap()
350 ret = ttm_mem_io_reserve(bo->bdev, bo->resource); in ttm_bo_kmap()
382 ttm_mem_io_free(map->bo->bdev, map->bo->resource); in ttm_bo_kunmap()
460 ttm_mem_io_free(bo->bdev, bo->resource); in ttm_bo_vunmap()
474 ttm_resource_free(bo, &bo->resource); in ttm_bo_wait_free_node()
538 ttm_resource_free(bo, &bo->resource); in ttm_bo_move_pipeline_evict()
604 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_bo_pipeline_gutting()
608 ttm_resource_free(bo, &bo->resource); in ttm_bo_pipeline_gutting()
[all …]
ttm_bo_vm.c
71 ttm_bo_get(bo); in ttm_bo_vm_fault_idle()
75 ttm_bo_put(bo); in ttm_bo_vm_fault_idle()
91 bo->moving = NULL; in ttm_bo_vm_fault_idle()
146 ttm_bo_get(bo); in ttm_bo_vm_reserve()
151 ttm_bo_put(bo); in ttm_bo_vm_reserve()
165 if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) { in ttm_bo_vm_reserve()
231 prot = ttm_io_prot(bo, bo->resource, prot); in ttm_bo_vm_fault_reserved()
239 ttm = bo->ttm; in ttm_bo_vm_fault_reserved()
362 ttm_bo_get(bo); in ttm_bo_vm_open()
370 ttm_bo_put(bo); in ttm_bo_vm_close()
[all …]
/linux/drivers/gpu/drm/qxl/
qxl_object.c
49 kfree(bo); in qxl_ttm_bo_destroy()
130 kfree(bo); in qxl_bo_create()
171 r = ttm_bo_vmap(&bo->tbo, &bo->map); in qxl_bo_vmap_locked()
178 bo->kptr = (void *)bo->map.vaddr_iomem; in qxl_bo_vmap_locked()
180 bo->kptr = bo->map.vaddr; in qxl_bo_vmap_locked()
249 ttm_bo_vunmap(&bo->tbo, &bo->map); in qxl_bo_vunmap_locked()
285 *bo = NULL; in qxl_bo_unref()
291 return bo; in qxl_bo_ref()
304 qxl_ttm_placement_from_domain(bo, bo->type); in __qxl_bo_pin()
305 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in __qxl_bo_pin()
[all …]
qxl_release.c
128 bo = to_qxl_bo(entry->tv.bo); in qxl_release_free_list()
176 if (entry->tv.bo == &bo->tbo) in qxl_release_list_add()
184 qxl_bo_ref(bo); in qxl_release_list_add()
185 entry->tv.bo = &bo->tbo; in qxl_release_list_add()
197 qxl_ttm_placement_from_domain(bo, bo->type); in qxl_release_validate_bo()
198 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in qxl_release_validate_bo()
208 ret = qxl_bo_check_id(to_qxl(bo->tbo.base.dev), bo); in qxl_release_validate_bo()
230 struct qxl_bo *bo = to_qxl_bo(entry->tv.bo); in qxl_release_reserve_list() local
342 *rbo = bo; in qxl_alloc_release_reserved()
418 bdev = bo->bdev; in qxl_release_fence_buffer_objects()
[all …]
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo) in qxl_bo_reserve() argument
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL); in qxl_bo_reserve()
37 struct drm_device *ddev = bo->tbo.base.dev; in qxl_bo_reserve()
39 dev_err(ddev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
48 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
53 return bo->tbo.base.size; in qxl_bo_size()
64 int qxl_bo_vunmap(struct qxl_bo *bo);
65 void qxl_bo_vunmap_locked(struct qxl_bo *bo);
69 extern void qxl_bo_unref(struct qxl_bo **bo);
70 extern int qxl_bo_pin(struct qxl_bo *bo);
[all …]
/linux/drivers/gpu/drm/radeon/
radeon_object.c
85 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in radeon_ttm_bo_destroy()
87 kfree(bo); in radeon_ttm_bo_destroy()
251 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.resource->num_pages, &bo->kmap); in radeon_bo_kmap()
278 return bo; in radeon_bo_ref()
342 bo->rdev->vram_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
344 bo->rdev->gart_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
346 dev_err(bo->rdev->dev, "%p pin failed\n", bo); in radeon_bo_pin_restricted()
361 bo->rdev->vram_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
363 bo->rdev->gart_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
396 &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size, in radeon_bo_force_delete()
[all …]
radeon_prime.c
49 struct radeon_bo *bo; in radeon_gem_prime_import_sg_table() local
65 bo->prime_shared_count = 1; in radeon_gem_prime_import_sg_table()
66 return &bo->tbo.base; in radeon_gem_prime_import_sg_table()
83 if (bo->tbo.moving) { in radeon_gem_prime_pin()
86 radeon_bo_unpin(bo); in radeon_gem_prime_pin()
91 bo->prime_shared_count++; in radeon_gem_prime_pin()
93 radeon_bo_unreserve(bo); in radeon_gem_prime_pin()
106 radeon_bo_unpin(bo); in radeon_gem_prime_unpin()
108 bo->prime_shared_count--; in radeon_gem_prime_unpin()
109 radeon_bo_unreserve(bo); in radeon_gem_prime_unpin()
[all …]
radeon_object.h
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo); in radeon_bo_reserve()
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
96 rdev = radeon_get_rdev(bo->tbo.bdev); in radeon_bo_gpu_offset()
98 switch (bo->tbo.resource->mem_type) { in radeon_bo_gpu_offset()
107 return (bo->tbo.resource->start << PAGE_SHIFT) + start; in radeon_bo_gpu_offset()
112 return bo->tbo.base.size; in radeon_bo_size()
117 return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE; in radeon_bo_ngpu_pages()
143 extern void radeon_bo_kunmap(struct radeon_bo *bo);
145 extern void radeon_bo_unref(struct radeon_bo **bo);
[all …]
radeon_mn.c
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
63 r = radeon_bo_reserve(bo, true); in radeon_mn_invalidate()
69 r = dma_resv_wait_timeout(bo->tbo.base.resv, true, false, in radeon_mn_invalidate()
75 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_mn_invalidate()
79 radeon_bo_unreserve(bo); in radeon_mn_invalidate()
101 radeon_bo_size(bo), &radeon_mn_ops); in radeon_mn_register()
111 mmu_interval_read_begin(&bo->notifier); in radeon_mn_register()
122 void radeon_mn_unregister(struct radeon_bo *bo) in radeon_mn_unregister() argument
124 if (!bo->notifier.mm) in radeon_mn_unregister()
126 mmu_interval_notifier_remove(&bo->notifier); in radeon_mn_unregister()
[all …]
/linux/drivers/gpu/drm/tegra/
gem.c
191 bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL); in tegra_bo_iommu_map()
198 bo->mm, bo->gem.size, PAGE_SIZE, 0, 0); in tegra_bo_iommu_map()
205 bo->iova = bo->mm->start; in tegra_bo_iommu_map()
207 bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot); in tegra_bo_iommu_map()
232 iommu_unmap(tegra->domain, bo->iova, bo->size); in tegra_bo_iommu_unmap()
253 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in tegra_bo_alloc_object()
254 if (!bo) in tegra_bo_alloc_object()
287 dma_free_wc(drm->dev, bo->gem.size, bo->vaddr, bo->iova); in tegra_bo_free()
295 bo->pages = drm_gem_get_pages(&bo->gem); in tegra_bo_get_pages()
299 bo->num_pages = bo->gem.size >> PAGE_SHIFT; in tegra_bo_get_pages()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
62 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
65 kvfree(bo); in amdgpu_bo_destroy()
567 bo->allowed_domains = bo->preferred_domains; in amdgpu_bo_create()
618 *bo_ptr = bo; in amdgpu_bo_create()
772 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.resource->num_pages, &bo->kmap); in amdgpu_bo_kmap()
806 if (bo->kmap.bo) in amdgpu_bo_kunmap()
825 return bo; in amdgpu_bo_ref()
843 *bo = NULL; in amdgpu_bo_unref()
938 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_bo_pin_restricted()
1281 && bo->base.resv != &bo->base._resv); in amdgpu_bo_release_notify()
[all …]
amdgpu_object.h
115 struct amdgpu_bo bo; member
124 struct amdgpu_bo bo; member
187 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
192 return bo->tbo.base.size; in amdgpu_bo_size()
227 amdgpu_res_first(bo->tbo.resource, 0, amdgpu_bo_size(bo), &cursor); in amdgpu_bo_in_cpu_visible_vram()
267 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_bo_shadowed()
268 return to_amdgpu_bo_vm(bo)->shadow; in amdgpu_bo_shadowed()
299 void *amdgpu_bo_kptr(struct amdgpu_bo *bo);
300 void amdgpu_bo_kunmap(struct amdgpu_bo *bo);
302 void amdgpu_bo_unref(struct amdgpu_bo **bo);
[all …]
amdgpu_dma_buf.c
115 if (bo->tbo.moving) { in amdgpu_dma_buf_pin()
118 amdgpu_bo_unpin(bo); in amdgpu_dma_buf_pin()
137 amdgpu_bo_unpin(bo); in amdgpu_dma_buf_unpin()
163 if (!bo->tbo.pin_count) { in amdgpu_dma_buf_map()
174 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
186 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
268 if (!bo->tbo.pin_count && in amdgpu_dma_buf_begin_cpu_access()
271 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_begin_cpu_access()
274 amdgpu_bo_unreserve(bo); in amdgpu_dma_buf_begin_cpu_access()
337 struct amdgpu_bo *bo; in amdgpu_dma_buf_create_obj() local
[all …]
amdgpu_amdkfd_gpuvm.c
700 bo[i] = mem->bo; in kfd_mem_attach()
705 bo[i] = bo[0]; in kfd_mem_attach()
728 bo[i] = mem->bo; in kfd_mem_attach()
794 entry->bo = &bo->tbo; in add_kgd_mem_to_kfd_bo_list()
923 ctx->kfd_bo.tv.bo = &bo->tbo; in reserve_bo_and_vm()
1484 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1633 bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1960 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_import_dmabuf()
2028 bo = mem->bo; in update_invalid_user_pages()
2058 bo = mem->bo; in update_invalid_user_pages()
[all …]
/linux/drivers/gpu/drm/vc4/
vc4_bo.c
373 if (bo) in vc4_bo_get_from_cache()
376 return bo; in vc4_bo_get_from_cache()
392 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in vc4_create_object()
393 if (!bo) in vc4_create_object()
423 if (bo) { in vc4_bo_create()
425 memset(bo->base.vaddr, 0, bo->base.base.size); in vc4_bo_create()
426 return bo; in vc4_bo_create()
473 return bo; in vc4_bo_create()
491 if (IS_ERR(bo)) in vc4_dumb_create()
776 if (IS_ERR(bo)) in vc4_create_bo_ioctl()
[all …]
/linux/include/drm/ttm/
ttm_bo_driver.h
143 success = dma_resv_trylock(bo->base.resv); in ttm_bo_reserve()
177 dma_resv_lock_slow(bo->base.resv, ticket); in ttm_bo_reserve_slowpath()
184 spin_lock(&bo->bdev->lru_lock); in ttm_bo_move_to_lru_tail_unlocked()
185 ttm_bo_move_to_lru_tail(bo, bo->resource, NULL); in ttm_bo_move_to_lru_tail_unlocked()
186 spin_unlock(&bo->bdev->lru_lock); in ttm_bo_move_to_lru_tail_unlocked()
192 WARN_ON(bo->resource); in ttm_bo_assign_mem()
193 bo->resource = new_mem; in ttm_bo_assign_mem()
206 ttm_resource_free(bo, &bo->resource); in ttm_bo_move_null()
207 ttm_bo_assign_mem(bo, new_mem); in ttm_bo_move_null()
219 ttm_bo_move_to_lru_tail_unlocked(bo); in ttm_bo_unreserve()
[all …]
ttm_bo_api.h
190 struct ttm_buffer_object *bo; member
225 kref_get(&bo->kref); in ttm_bo_get()
241 if (!kref_get_unless_zero(&bo->kref)) in ttm_bo_get_unless_zero()
243 return bo; in ttm_bo_get_unless_zero()
383 struct ttm_buffer_object *bo,
552 dma_resv_assert_held(bo->base.resv); in ttm_bo_pin()
553 WARN_ON_ONCE(!kref_read(&bo->kref)); in ttm_bo_pin()
554 ++bo->pin_count; in ttm_bo_pin()
565 dma_resv_assert_held(bo->base.resv); in ttm_bo_unpin()
567 if (bo->pin_count) in ttm_bo_unpin()
[all …]
/linux/drivers/gpu/drm/lima/
lima_gem.c
26 size_t new_size = bo->heap_size ? bo->heap_size * 2 : in lima_heap_alloc()
31 if (bo->heap_size >= bo->base.base.size) in lima_heap_alloc()
38 if (bo->base.pages) { in lima_heap_alloc()
71 if (bo->base.sgt) { in lima_heap_alloc()
75 bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL); in lima_heap_alloc()
109 struct lima_bo *bo; in lima_gem_create_handle() local
179 if (bo->heap_size) in lima_gem_pin()
189 if (bo->heap_size) in lima_gem_vmap()
222 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in lima_gem_create_object()
223 if (!bo) in lima_gem_create_object()
[all …]
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_bo.c
113 ttm_bo_unreserve(bo); in vmw_bo_pin_in_placement()
161 ttm_bo_unreserve(bo); in vmw_bo_pin_in_vram_or_gmr()
228 bo->resource->start < bo->resource->num_pages && in vmw_bo_pin_in_start_of_vram()
338 ttm_bo_pin(bo); in vmw_bo_pin_reserved()
340 ttm_bo_unpin(bo); in vmw_bo_pin_reserved()
369 ret = ttm_bo_kmap(bo, 0, bo->resource->num_pages, &vbo->map); in vmw_bo_map_and_cache()
479 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in vmw_bo_create_kernel()
480 if (unlikely(!bo)) in vmw_bo_create_kernel()
502 ttm_bo_pin(bo); in vmw_bo_create_kernel()
504 *p_bo = bo; in vmw_bo_create_kernel()
[all …]
/linux/drivers/gpu/drm/panfrost/
panfrost_gem.c
38 if (bo->sgts) { in panfrost_gem_free_object()
43 if (bo->sgts[i].sgl) { in panfrost_gem_free_object()
46 sg_free_table(&bo->sgts[i]); in panfrost_gem_free_object()
49 kvfree(bo->sgts); in panfrost_gem_free_object()
61 mutex_lock(&bo->mappings.lock); in panfrost_gem_mapping_get()
131 mapping->obj = bo; in panfrost_gem_open()
139 if (!bo->noexec) in panfrost_gem_open()
152 if (!bo->is_heap) { in panfrost_gem_open()
266 return bo; in panfrost_gem_create_with_handle()
281 bo = to_panfrost_bo(obj); in panfrost_gem_prime_import_sg_table()
[all …]
/linux/drivers/gpu/drm/v3d/
v3d_bo.c
36 v3d_mmu_remove_ptes(bo); in v3d_free_object()
69 struct v3d_bo *bo; in v3d_create_object() local
75 bo = kzalloc(sizeof(*bo), GFP_KERNEL); in v3d_create_object()
76 if (!bo) in v3d_create_object()
78 obj = &bo->base.base; in v3d_create_object()
84 return &bo->base.base; in v3d_create_object()
129 struct v3d_bo *bo; in v3d_bo_create() local
141 return bo; in v3d_bo_create()
182 if (IS_ERR(bo)) in v3d_create_bo_ioctl()
183 return PTR_ERR(bo); in v3d_create_bo_ioctl()
[all …]
/linux/drivers/gpu/drm/virtio/
virtgpu_object.c
69 if (virtio_gpu_is_shmem(bo)) { in virtio_gpu_cleanup_object()
82 drm_gem_shmem_unpin(&bo->base.base); in virtio_gpu_cleanup_object()
86 } else if (virtio_gpu_is_vram(bo)) { in virtio_gpu_cleanup_object()
106 if (bo->created) { in virtio_gpu_free_object()
112 virtio_gpu_cleanup_object(bo); in virtio_gpu_free_object()
151 struct virtio_gpu_object *bo, in virtio_gpu_object_shmem_init() argument
172 drm_gem_shmem_unpin(&bo->base.base); in virtio_gpu_object_shmem_init()
218 struct virtio_gpu_object *bo; in virtio_gpu_object_create() local
235 bo->dumb = params->dumb; in virtio_gpu_object_create()
258 bo->guest_blob = true; in virtio_gpu_object_create()
[all …]

Completed in 87 milliseconds