Lines matching refs: vma
384 struct i915_vma *vma; in close_object_list() local
386 vma = i915_vma_instance(obj, vm, NULL); in close_object_list()
387 if (!IS_ERR(vma)) in close_object_list()
388 ignored = i915_vma_unbind(vma); in close_object_list()
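
The close_object_list() hits show the tolerant-teardown idiom: look up the object's VMA in the given address space and, if the lookup succeeded, unbind it while deliberately discarding the result. A minimal sketch assembled from the hits above (the helper name detach_object is hypothetical, and like all sketches below this only builds inside the i915 tree):

static void detach_object(struct drm_i915_gem_object *obj,
                          struct i915_address_space *vm)
{
        struct i915_vma *vma;
        int ignored;

        /* Look up the (obj, vm) VMA; may return an ERR_PTR. */
        vma = i915_vma_instance(obj, vm, NULL);
        if (!IS_ERR(vma))
                ignored = i915_vma_unbind(vma); /* best effort on cleanup */
}
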
405 struct i915_vma *vma; in fill_hole() local
443 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
444 if (IS_ERR(vma)) in fill_hole()
453 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
460 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
461 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
463 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
469 i915_vma_unpin(vma); in fill_hole()
480 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
481 if (IS_ERR(vma)) in fill_hole()
490 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
491 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
493 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
499 err = i915_vma_unbind(vma); in fill_hole()
502 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
516 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
517 if (IS_ERR(vma)) in fill_hole()
526 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
533 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
534 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
536 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
542 i915_vma_unpin(vma); in fill_hole()
553 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
554 if (IS_ERR(vma)) in fill_hole()
563 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
564 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
566 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
572 err = i915_vma_unbind(vma); in fill_hole()
575 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
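
fill_hole() runs the canonical pin/verify/release cycle in both directions across a hole: pin at an explicit offset, confirm the node is allocated and not misplaced, unpin, and later unbind. A condensed sketch of one cycle, assuming flags carries PIN_OFFSET_FIXED so the low bits of the u64 hold the offset (exercise_offset is a hypothetical name; the selftest's error reporting is omitted):

static int exercise_offset(struct drm_i915_gem_object *obj,
                           struct i915_address_space *vm,
                           u64 offset, u64 flags)
{
        struct i915_vma *vma;
        int err;

        vma = i915_vma_instance(obj, vm, NULL);
        if (IS_ERR(vma))
                return PTR_ERR(vma);

        /* Bind at the requested offset. */
        err = i915_vma_pin(vma, 0, 0, offset | flags);
        if (err)
                return err;

        /* The node must exist and sit exactly where we asked. */
        if (!drm_mm_node_allocated(&vma->node) ||
            i915_vma_misplaced(vma, 0, 0, offset | flags)) {
                i915_vma_unpin(vma);
                return -EINVAL;
        }

        i915_vma_unpin(vma);

        /* Drop the binding again for the next pass. */
        return i915_vma_unbind(vma);
}
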
624 struct i915_vma *vma; in walk_hole() local
632 vma = i915_vma_instance(obj, vm, NULL); in walk_hole()
633 if (IS_ERR(vma)) { in walk_hole()
634 err = PTR_ERR(vma); in walk_hole()
641 err = i915_vma_pin(vma, 0, 0, addr | flags); in walk_hole()
644 __func__, addr, vma->size, in walk_hole()
648 i915_vma_unpin(vma); in walk_hole()
650 if (!drm_mm_node_allocated(&vma->node) || in walk_hole()
651 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in walk_hole()
653 __func__, addr, vma->size); in walk_hole()
658 err = i915_vma_unbind(vma); in walk_hole()
661 __func__, addr, vma->size, err); in walk_hole()
665 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in walk_hole()
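
walk_hole() marches the same object across every address in the hole; unlike fill_hole() it unbinds after each step and asserts the node is really gone. One step, sketched under the same assumptions (walk_step is hypothetical):

static int walk_step(struct i915_vma *vma, u64 addr, u64 flags)
{
        int err;

        err = i915_vma_pin(vma, 0, 0, addr | flags);
        if (err)
                return err;
        i915_vma_unpin(vma);    /* checked after unpin, as in the hits above */

        if (!drm_mm_node_allocated(&vma->node) ||
            i915_vma_misplaced(vma, 0, 0, addr | flags))
                return -EINVAL;

        err = i915_vma_unbind(vma);
        if (err)
                return err;

        /* The node must really be gone before the next address. */
        GEM_BUG_ON(drm_mm_node_allocated(&vma->node));
        return 0;
}
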
691 struct i915_vma *vma; in pot_hole() local
704 vma = i915_vma_instance(obj, vm, NULL); in pot_hole()
705 if (IS_ERR(vma)) { in pot_hole()
706 err = PTR_ERR(vma); in pot_hole()
720 err = i915_vma_pin(vma, 0, 0, addr | flags); in pot_hole()
730 if (!drm_mm_node_allocated(&vma->node) || in pot_hole()
731 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in pot_hole()
733 __func__, addr, vma->size); in pot_hole()
734 i915_vma_unpin(vma); in pot_hole()
735 err = i915_vma_unbind(vma); in pot_hole()
740 i915_vma_unpin(vma); in pot_hole()
741 err = i915_vma_unbind(vma); in pot_hole()
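
pot_hole() repeats the cycle at power-of-two spaced addresses; note from the hits above that it unpins before unbinding on both the failure and the success paths. A sketch of that release discipline (pot_step is hypothetical):

static int pot_step(struct i915_vma *vma, u64 addr, u64 flags)
{
        int err;

        err = i915_vma_pin(vma, 0, 0, addr | flags);
        if (err)
                return err;

        if (!drm_mm_node_allocated(&vma->node) ||
            i915_vma_misplaced(vma, 0, 0, addr | flags)) {
                /* Misplaced: release (best effort) and fail. */
                i915_vma_unpin(vma);
                i915_vma_unbind(vma);
                return -EINVAL;
        }

        /* Placed correctly: release for the next address. */
        i915_vma_unpin(vma);
        return i915_vma_unbind(vma);
}
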
774 struct i915_vma *vma; in drunk_hole() local
809 vma = i915_vma_instance(obj, vm, NULL); in drunk_hole()
810 if (IS_ERR(vma)) { in drunk_hole()
811 err = PTR_ERR(vma); in drunk_hole()
815 GEM_BUG_ON(vma->size != BIT_ULL(size)); in drunk_hole()
820 err = i915_vma_pin(vma, 0, 0, addr | flags); in drunk_hole()
830 if (!drm_mm_node_allocated(&vma->node) || in drunk_hole()
831 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in drunk_hole()
834 i915_vma_unpin(vma); in drunk_hole()
835 err = i915_vma_unbind(vma); in drunk_hole()
840 i915_vma_unpin(vma); in drunk_hole()
841 err = i915_vma_unbind(vma); in drunk_hole()
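
drunk_hole() performs the same pinning in randomized address order, and additionally asserts that the VMA spans exactly the power-of-two size the test object was created with. A sketch of that setup (bind_random is hypothetical):

static int bind_random(struct drm_i915_gem_object *obj,
                       struct i915_address_space *vm,
                       u64 addr, unsigned int size, u64 flags)
{
        struct i915_vma *vma = i915_vma_instance(obj, vm, NULL);

        if (IS_ERR(vma))
                return PTR_ERR(vma);

        /* The VMA must cover exactly the 2^size bytes of the object. */
        GEM_BUG_ON(vma->size != BIT_ULL(size));

        return i915_vma_pin(vma, 0, 0, addr | flags);
}
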
877 struct i915_vma *vma; in __shrink_hole() local
889 vma = i915_vma_instance(obj, vm, NULL); in __shrink_hole()
890 if (IS_ERR(vma)) { in __shrink_hole()
891 err = PTR_ERR(vma); in __shrink_hole()
895 GEM_BUG_ON(vma->size != size); in __shrink_hole()
897 err = i915_vma_pin(vma, 0, 0, addr | flags); in __shrink_hole()
904 if (!drm_mm_node_allocated(&vma->node) || in __shrink_hole()
905 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in __shrink_hole()
908 i915_vma_unpin(vma); in __shrink_hole()
909 err = i915_vma_unbind(vma); in __shrink_hole()
914 i915_vma_unpin(vma); in __shrink_hole()
922 err = i915_vma_sync(vma); in __shrink_hole()
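
__shrink_hole() differs from the other hole walkers in that it keeps the binding: after verifying placement it unpins but does not unbind, and instead waits for the (possibly asynchronous) bind with i915_vma_sync(). A sketch of one step (shrink_step is hypothetical):

static int shrink_step(struct i915_vma *vma, u64 addr, u64 size, u64 flags)
{
        int err;

        /* Unlike the other hole tests, the object size varies per step. */
        GEM_BUG_ON(vma->size != size);

        err = i915_vma_pin(vma, 0, 0, addr | flags);
        if (err)
                return err;

        if (!drm_mm_node_allocated(&vma->node) ||
            i915_vma_misplaced(vma, 0, 0, addr | flags)) {
                i915_vma_unpin(vma);
                i915_vma_unbind(vma);
                return -EINVAL;
        }

        i915_vma_unpin(vma);

        /* Keep the binding, but flush the async bind before moving on. */
        return i915_vma_sync(vma);
}
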
981 struct i915_vma *vma; in shrink_boom() local
987 vma = i915_vma_instance(purge, vm, NULL); in shrink_boom()
988 if (IS_ERR(vma)) { in shrink_boom()
989 err = PTR_ERR(vma); in shrink_boom()
993 err = i915_vma_pin(vma, 0, 0, flags); in shrink_boom()
998 i915_vma_unpin(vma); in shrink_boom()
1010 vma = i915_vma_instance(explode, vm, NULL); in shrink_boom()
1011 if (IS_ERR(vma)) { in shrink_boom()
1012 err = PTR_ERR(vma); in shrink_boom()
1016 err = i915_vma_pin(vma, 0, 0, flags | size); in shrink_boom()
1020 i915_vma_unpin(vma); in shrink_boom()
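
shrink_boom() binds a victim object, unpins it so it is ripe for purging, then pins a second object whose fixed offset is OR'ed into the flags word, pressuring the shrinker. A sketch, assuming the PIN_OFFSET_FIXED convention for flags (provoke_shrinker is hypothetical, and reading "flags | size" as "place at offset size" is an inference from that convention):

static int provoke_shrinker(struct drm_i915_gem_object *purge,
                            struct drm_i915_gem_object *explode,
                            struct i915_address_space *vm,
                            u64 flags, u64 size)
{
        struct i915_vma *vma;
        int err;

        /* Bind the victim, then unpin it so the shrinker may purge it. */
        vma = i915_vma_instance(purge, vm, NULL);
        if (IS_ERR(vma))
                return PTR_ERR(vma);
        err = i915_vma_pin(vma, 0, 0, flags);
        if (err)
                return err;
        i915_vma_unpin(vma);

        /* With PIN_OFFSET_FIXED assumed in flags, OR-ing in 'size'
         * places the second object at offset 'size', forcing pressure.
         */
        vma = i915_vma_instance(explode, vm, NULL);
        if (IS_ERR(vma))
                return PTR_ERR(vma);
        err = i915_vma_pin(vma, 0, 0, flags | size);
        if (err)
                return err;
        i915_vma_unpin(vma);
        return 0;
}
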
1272 static void track_vma_bind(struct i915_vma *vma) in track_vma_bind() argument
1274 struct drm_i915_gem_object *obj = vma->obj; in track_vma_bind()
1278 GEM_BUG_ON(vma->pages); in track_vma_bind()
1279 atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE); in track_vma_bind()
1281 vma->pages = obj->mm.pages; in track_vma_bind()
1283 mutex_lock(&vma->vm->mutex); in track_vma_bind()
1284 list_add_tail(&vma->vm_link, &vma->vm->bound_list); in track_vma_bind()
1285 mutex_unlock(&vma->vm->mutex); in track_vma_bind()
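
Assembled from the hits above, track_vma_bind() mocks the bookkeeping a real bind would perform so that the reserve/insert tests below can skip the hardware path. A commented reconstruction; lines of the function that do not mention vma (such as the object page pinning) are absent from this listing and are therefore omitted here too:

static void track_vma_bind(struct i915_vma *vma)
{
        struct drm_i915_gem_object *obj = vma->obj;

        /* The VMA must not already own pages... */
        GEM_BUG_ON(vma->pages);
        /* ...then mark it as if a bind had populated them. */
        atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE);
        vma->pages = obj->mm.pages;

        /* Publish the mock binding on the VM's bound list. */
        mutex_lock(&vma->vm->mutex);
        list_add_tail(&vma->vm_link, &vma->vm->bound_list);
        mutex_unlock(&vma->vm->mutex);
}
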
1357 struct i915_vma *vma; in igt_gtt_reserve() local
1374 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1375 if (IS_ERR(vma)) { in igt_gtt_reserve()
1376 err = PTR_ERR(vma); in igt_gtt_reserve()
1381 err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, in igt_gtt_reserve()
1392 track_vma_bind(vma); in igt_gtt_reserve()
1394 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1395 if (vma->node.start != total || in igt_gtt_reserve()
1396 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1398 vma->node.start, vma->node.size, in igt_gtt_reserve()
1409 struct i915_vma *vma; in igt_gtt_reserve() local
1426 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1427 if (IS_ERR(vma)) { in igt_gtt_reserve()
1428 err = PTR_ERR(vma); in igt_gtt_reserve()
1433 err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, in igt_gtt_reserve()
1444 track_vma_bind(vma); in igt_gtt_reserve()
1446 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1447 if (vma->node.start != total || in igt_gtt_reserve()
1448 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1450 vma->node.start, vma->node.size, in igt_gtt_reserve()
1459 struct i915_vma *vma; in igt_gtt_reserve() local
1462 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1463 if (IS_ERR(vma)) { in igt_gtt_reserve()
1464 err = PTR_ERR(vma); in igt_gtt_reserve()
1468 err = i915_vma_unbind(vma); in igt_gtt_reserve()
1480 err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, in igt_gtt_reserve()
1491 track_vma_bind(vma); in igt_gtt_reserve()
1493 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1494 if (vma->node.start != offset || in igt_gtt_reserve()
1495 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1497 vma->node.start, vma->node.size, in igt_gtt_reserve()
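
igt_gtt_reserve() drives i915_gem_gtt_reserve() directly: reserve the VMA's node at an exact offset, mock the bind with track_vma_bind(), then verify node.start and node.size. The call's trailing arguments are truncated in the listing, so the size and offset below follow the visible checks while the color and flags values are illustrative (reserve_at is hypothetical):

static int reserve_at(struct i915_ggtt *ggtt,
                      struct drm_i915_gem_object *obj, u64 total)
{
        struct i915_vma *vma;
        int err;

        vma = i915_vma_instance(obj, &ggtt->vm, NULL);
        if (IS_ERR(vma))
                return PTR_ERR(vma);

        /* Ask for an exact placement in the global GTT. */
        err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node,
                                   2 * I915_GTT_PAGE_SIZE, /* size */
                                   total,                  /* offset */
                                   0, 0);       /* color, flags: illustrative */
        if (err)
                return err;

        track_vma_bind(vma);    /* mock the bind bookkeeping */

        GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
        if (vma->node.start != total ||
            vma->node.size != 2 * I915_GTT_PAGE_SIZE)
                return -EINVAL; /* reserved node not where we asked */

        return 0;
}
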
1573 struct i915_vma *vma; in igt_gtt_insert() local
1590 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1591 if (IS_ERR(vma)) { in igt_gtt_insert()
1592 err = PTR_ERR(vma); in igt_gtt_insert()
1597 err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, in igt_gtt_insert()
1612 track_vma_bind(vma); in igt_gtt_insert()
1613 __i915_vma_pin(vma); in igt_gtt_insert()
1615 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1619 struct i915_vma *vma; in igt_gtt_insert() local
1621 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1622 if (IS_ERR(vma)) { in igt_gtt_insert()
1623 err = PTR_ERR(vma); in igt_gtt_insert()
1627 if (!drm_mm_node_allocated(&vma->node)) { in igt_gtt_insert()
1633 __i915_vma_unpin(vma); in igt_gtt_insert()
1638 struct i915_vma *vma; in igt_gtt_insert() local
1641 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1642 if (IS_ERR(vma)) { in igt_gtt_insert()
1643 err = PTR_ERR(vma); in igt_gtt_insert()
1647 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1648 offset = vma->node.start; in igt_gtt_insert()
1650 err = i915_vma_unbind(vma); in igt_gtt_insert()
1657 err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, in igt_gtt_insert()
1667 track_vma_bind(vma); in igt_gtt_insert()
1669 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1670 if (vma->node.start != offset) { in igt_gtt_insert()
1672 offset, vma->node.start); in igt_gtt_insert()
1682 struct i915_vma *vma; in igt_gtt_insert() local
1699 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1700 if (IS_ERR(vma)) { in igt_gtt_insert()
1701 err = PTR_ERR(vma); in igt_gtt_insert()
1706 err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, in igt_gtt_insert()
1716 track_vma_bind(vma); in igt_gtt_insert()
1718 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
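
igt_gtt_insert() uses the searching counterpart, i915_gem_gtt_insert(), and the hits above check that an unbound node reinserted into an otherwise full GTT lands back in the hole it vacated. A sketch of that check; the search arguments (alignment, color, range, flags) are truncated in the listing and the values below are illustrative (reinsert_at_same_offset is hypothetical):

static int reinsert_at_same_offset(struct i915_ggtt *ggtt,
                                   struct i915_vma *vma, u64 obj_size)
{
        u64 offset;
        int err;

        GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
        offset = vma->node.start;       /* remember the old slot */

        err = i915_vma_unbind(vma);
        if (err)
                return err;

        /* Search for a free slot rather than demanding a fixed one. */
        err = i915_gem_gtt_insert(&ggtt->vm, &vma->node,
                                  obj_size, 0, 0,
                                  0, ggtt->vm.total, 0);
        if (err)
                return err;

        track_vma_bind(vma);

        GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
        if (vma->node.start != offset)
                return -EINVAL; /* expected to land in the same hole */

        return 0;
}
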
1827 struct i915_vma *vma; in igt_cs_tlb() local
1911 vma = i915_vma_instance(out, vm, NULL); in igt_cs_tlb()
1912 if (IS_ERR(vma)) { in igt_cs_tlb()
1913 err = PTR_ERR(vma); in igt_cs_tlb()
1917 err = i915_vma_pin(vma, 0, 0, in igt_cs_tlb()
1923 GEM_BUG_ON(vma->node.start != vm->total - PAGE_SIZE); in igt_cs_tlb()
1950 vma = i915_vma_instance(bbe, vm, NULL); in igt_cs_tlb()
1951 if (IS_ERR(vma)) { in igt_cs_tlb()
1952 err = PTR_ERR(vma); in igt_cs_tlb()
1956 err = vma->ops->set_pages(vma); in igt_cs_tlb()
1986 vma->node.start = offset + i * PAGE_SIZE; in igt_cs_tlb()
1987 vm->insert_entries(vm, vma, I915_CACHE_NONE, 0); in igt_cs_tlb()
1989 rq = submit_batch(ce, vma->node.start); in igt_cs_tlb()
1997 vma->ops->clear_pages(vma); in igt_cs_tlb()
2006 vma = i915_vma_instance(act, vm, NULL); in igt_cs_tlb()
2007 if (IS_ERR(vma)) { in igt_cs_tlb()
2008 err = PTR_ERR(vma); in igt_cs_tlb()
2012 err = vma->ops->set_pages(vma); in igt_cs_tlb()
2022 vma->node.start = offset + i * PAGE_SIZE; in igt_cs_tlb()
2023 vm->insert_entries(vm, vma, I915_CACHE_NONE, 0); in igt_cs_tlb()
2025 addr = vma->node.start + i * 64; in igt_cs_tlb()
2050 vma->ops->clear_pages(vma); in igt_cs_tlb()
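
igt_cs_tlb() sidesteps i915_vma_pin() entirely: it populates vma->pages via vma->ops->set_pages(), places the node by hand, writes the PTEs with vm->insert_entries(), and runs a batch from that address before clear_pages() drops the mapping. A sketch under those assumptions (run_from_raw_pte is hypothetical; submit_batch() is the selftest's own helper, and the i915_request_put() is an assumed cleanup):

static int run_from_raw_pte(struct intel_context *ce,
                            struct i915_address_space *vm,
                            struct i915_vma *vma, u64 addr)
{
        struct i915_request *rq;
        int err;

        /* Populate vma->pages without going through i915_vma_pin(). */
        err = vma->ops->set_pages(vma);
        if (err)
                return err;

        /* Place the node by hand and write the PTEs directly. */
        vma->node.start = addr;
        vm->insert_entries(vm, vma, I915_CACHE_NONE, 0);

        /* Run a batch from the freshly inserted address. */
        rq = submit_batch(ce, vma->node.start);
        if (IS_ERR(rq)) {
                vma->ops->clear_pages(vma);
                return PTR_ERR(rq);
        }
        i915_request_put(rq);   /* assumed: drop our reference when done */

        vma->ops->clear_pages(vma);
        return 0;
}
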