Lines matching refs: vma — cross-reference hits for vma in the i915 GEM GTT selftests (drivers/gpu/drm/i915/selftests/i915_gem_gtt.c); each excerpt is prefixed by its source line number and suffixed with its containing function.
397 struct i915_vma *vma; in close_object_list() local
399 vma = i915_vma_instance(obj, vm, NULL); in close_object_list()
400 if (!IS_ERR(vma)) in close_object_list()
401 ignored = i915_vma_unbind_unlocked(vma); in close_object_list()
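A note on the idiom in close_object_list(): i915_vma_instance() returns either a valid vma or an ERR_PTR, never NULL, so !IS_ERR() is the complete guard. A condensed sketch of the helper, with the list handling recalled from selftest conventions rather than the excerpts (st_link and the object put are assumptions):

static void close_object_list(struct list_head *objects,
			      struct i915_address_space *vm)
{
	struct drm_i915_gem_object *obj, *on;

	list_for_each_entry_safe(obj, on, objects, st_link) {
		struct i915_vma *vma;
		int ignored;

		vma = i915_vma_instance(obj, vm, NULL);
		if (!IS_ERR(vma))
			/* Best-effort unbind on teardown; failures are
			 * deliberately ignored. */
			ignored = i915_vma_unbind_unlocked(vma);

		list_del(&obj->st_link);
		i915_gem_object_put(obj);
	}
}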
420 struct i915_vma *vma; in fill_hole() local
461 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
462 if (IS_ERR(vma)) in fill_hole()
471 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
478 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
479 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
481 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
487 i915_vma_unpin(vma); in fill_hole()
501 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
502 if (IS_ERR(vma)) in fill_hole()
511 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
512 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
514 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
520 err = i915_vma_unbind_unlocked(vma); in fill_hole()
523 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
540 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
541 if (IS_ERR(vma)) in fill_hole()
550 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
557 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
558 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
560 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
566 i915_vma_unpin(vma); in fill_hole()
580 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
581 if (IS_ERR(vma)) in fill_hole()
590 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
591 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
593 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
599 err = i915_vma_unbind_unlocked(vma); in fill_hole()
602 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
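The fill_hole() excerpts are four instances of one probe, run across forward and backward passes. A single iteration, condensed (the pr_err() text is paraphrased from the excerpted argument lists; the error-label name is a placeholder):

	/* 'offset | flags' packs the page-aligned target address into
	 * the same u64 as the PIN_* mode bits, so a fixed-offset pin
	 * needs only one argument. */
	vma = i915_vma_instance(obj, vm, NULL);
	if (IS_ERR(vma))
		continue;

	err = i915_vma_pin(vma, 0, 0, offset | flags);
	if (err)
		goto err;

	if (!drm_mm_node_allocated(&vma->node) ||
	    i915_vma_misplaced(vma, 0, 0, offset | flags)) {
		pr_err("%s(%s) insert failed: found (%llx + %llx), allocated? %d\n",
		       __func__, p->name, vma->node.start, vma->node.size,
		       drm_mm_node_allocated(&vma->node));
		err = -EINVAL;
		goto err;
	}

	i915_vma_unpin(vma);

	/* A later pass re-checks placement, then releases the range: */
	err = i915_vma_unbind_unlocked(vma);
	if (err) {
		pr_err("%s(%s) unbind of vma.node=%llx + %llx failed, err=%d\n",
		       __func__, p->name, vma->node.start, vma->node.size, err);
		goto err;
	}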
654 struct i915_vma *vma; in walk_hole() local
662 vma = i915_vma_instance(obj, vm, NULL); in walk_hole()
663 if (IS_ERR(vma)) { in walk_hole()
664 err = PTR_ERR(vma); in walk_hole()
671 err = i915_vma_pin(vma, 0, 0, addr | flags); in walk_hole()
674 __func__, addr, vma->size, in walk_hole()
678 i915_vma_unpin(vma); in walk_hole()
680 if (!drm_mm_node_allocated(&vma->node) || in walk_hole()
681 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in walk_hole()
683 __func__, addr, vma->size); in walk_hole()
688 err = i915_vma_unbind_unlocked(vma); in walk_hole()
691 __func__, addr, vma->size, err); in walk_hole()
695 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in walk_hole()
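walk_hole() slides a single object across every offset in the hole, asserting after each unbind that the drm_mm node really was released. One loop iteration, condensed (messages paraphrased, label name a placeholder):

	for (addr = hole_start; addr + obj->base.size < hole_end;
	     addr += obj->base.size) {
		err = i915_vma_pin(vma, 0, 0, addr | flags);
		if (err) {
			pr_err("%s bind failed at %llx + %llx, err=%d\n",
			       __func__, addr, vma->size, err);
			goto err_put;
		}
		i915_vma_unpin(vma);

		if (!drm_mm_node_allocated(&vma->node) ||
		    i915_vma_misplaced(vma, 0, 0, addr | flags)) {
			pr_err("%s incorrect at %llx + %llx\n",
			       __func__, addr, vma->size);
			err = -EINVAL;
			goto err_put;
		}

		err = i915_vma_unbind_unlocked(vma);
		if (err) {
			pr_err("%s unbind failed at %llx + %llx, err=%d\n",
			       __func__, addr, vma->size, err);
			goto err_put;
		}

		/* The node must be gone, or the next pin would collide. */
		GEM_BUG_ON(drm_mm_node_allocated(&vma->node));
	}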
721 struct i915_vma *vma; in pot_hole() local
737 vma = i915_vma_instance(obj, vm, NULL); in pot_hole()
738 if (IS_ERR(vma)) { in pot_hole()
739 err = PTR_ERR(vma); in pot_hole()
753 err = i915_vma_pin(vma, 0, 0, addr | flags); in pot_hole()
763 if (!drm_mm_node_allocated(&vma->node) || in pot_hole()
764 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in pot_hole()
766 __func__, addr, vma->size); in pot_hole()
767 i915_vma_unpin(vma); in pot_hole()
768 err = i915_vma_unbind_unlocked(vma); in pot_hole()
773 i915_vma_unpin(vma); in pot_hole()
774 err = i915_vma_unbind_unlocked(vma); in pot_hole()
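In pot_hole() both outcomes converge on unpin-then-unbind; the failure branch additionally overwrites err with -EINVAL so the misplacement, not the unbind result, is what gets reported. Condensed:

	err = i915_vma_pin(vma, 0, 0, addr | flags);
	if (err)
		goto err_obj;

	if (!drm_mm_node_allocated(&vma->node) ||
	    i915_vma_misplaced(vma, 0, 0, addr | flags)) {
		pr_err("%s incorrect at %llx + %llx\n",
		       __func__, addr, vma->size);
		i915_vma_unpin(vma);
		err = i915_vma_unbind_unlocked(vma);
		err = -EINVAL;		/* report the misplacement */
		goto err_obj;
	}

	i915_vma_unpin(vma);
	err = i915_vma_unbind_unlocked(vma);
	GEM_BUG_ON(err);	/* an unpinned, idle vma must unbind cleanly */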
810 struct i915_vma *vma; in drunk_hole() local
846 vma = i915_vma_instance(obj, vm, NULL); in drunk_hole()
847 if (IS_ERR(vma)) { in drunk_hole()
848 err = PTR_ERR(vma); in drunk_hole()
852 GEM_BUG_ON(vma->size != BIT_ULL(size)); in drunk_hole()
857 err = i915_vma_pin(vma, 0, 0, addr | flags); in drunk_hole()
867 if (!drm_mm_node_allocated(&vma->node) || in drunk_hole()
868 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in drunk_hole()
871 i915_vma_unpin(vma); in drunk_hole()
872 err = i915_vma_unbind_unlocked(vma); in drunk_hole()
877 i915_vma_unpin(vma); in drunk_hole()
878 err = i915_vma_unbind_unlocked(vma); in drunk_hole()
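drunk_hole() repeats the same probe at pseudo-randomly ordered offsets; the GEM_BUG_ON() asserts the freshly created object is exactly the BIT_ULL(size) bytes requested before any address arithmetic relies on it. A sketch of the randomized walk, assuming the i915_random_order() helper from the selftest library; the slot arithmetic is recalled from context, not from the excerpts:

	/* One slot per 2^size bytes of hole, visited in random order. */
	count = (hole_end - hole_start) >> size;
	order = i915_random_order(count, &prng);
	if (!order)
		return -ENOMEM;

	GEM_BUG_ON(vma->size != BIT_ULL(size));

	for (n = 0; n < count; n++) {
		u64 addr = hole_start + order[n] * BIT_ULL(size);

		err = i915_vma_pin(vma, 0, 0, addr | flags);
		/* ... verify, unpin and unbind as in pot_hole() ... */
	}

	kfree(order);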
917 struct i915_vma *vma; in __shrink_hole() local
929 vma = i915_vma_instance(obj, vm, NULL); in __shrink_hole()
930 if (IS_ERR(vma)) { in __shrink_hole()
931 err = PTR_ERR(vma); in __shrink_hole()
935 GEM_BUG_ON(vma->size != size); in __shrink_hole()
937 err = i915_vma_pin(vma, 0, 0, addr | flags); in __shrink_hole()
944 if (!drm_mm_node_allocated(&vma->node) || in __shrink_hole()
945 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in __shrink_hole()
948 i915_vma_unpin(vma); in __shrink_hole()
949 err = i915_vma_unbind_unlocked(vma); in __shrink_hole()
954 i915_vma_unpin(vma); in __shrink_hole()
962 err = i915_vma_sync(vma); in __shrink_hole()
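__shrink_hole() departs from the other hole-walkers in two ways visible above: on success it only unpins, leaving the vma bound so the hole stays occupied, and it then waits with i915_vma_sync() so the shrinker is not provoked against an in-flight bind. Condensed:

	err = i915_vma_pin(vma, 0, 0, addr | flags);
	if (err)
		break;

	if (!drm_mm_node_allocated(&vma->node) ||
	    i915_vma_misplaced(vma, 0, 0, addr | flags)) {
		pr_err("%s incorrect at %llx + %llx\n",
		       __func__, addr, size);
		i915_vma_unpin(vma);
		err = i915_vma_unbind_unlocked(vma);
		err = -EINVAL;
		break;
	}

	i915_vma_unpin(vma);	/* stays bound: the point of the test */
	addr += size;

	err = i915_vma_sync(vma);	/* wait for the bind to complete */
	if (err)
		break;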
1021 struct i915_vma *vma; in shrink_boom() local
1027 vma = i915_vma_instance(purge, vm, NULL); in shrink_boom()
1028 if (IS_ERR(vma)) { in shrink_boom()
1029 err = PTR_ERR(vma); in shrink_boom()
1033 err = i915_vma_pin(vma, 0, 0, flags); in shrink_boom()
1038 i915_vma_unpin(vma); in shrink_boom()
1050 vma = i915_vma_instance(explode, vm, NULL); in shrink_boom()
1051 if (IS_ERR(vma)) { in shrink_boom()
1052 err = PTR_ERR(vma); in shrink_boom()
1056 err = i915_vma_pin(vma, 0, 0, flags | size); in shrink_boom()
1060 i915_vma_unpin(vma); in shrink_boom()
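shrink_boom() stages a shrinker collision: pin and immediately unpin a "purge" object so it becomes reclaimable, then pin an "explode" object whose placement can only be satisfied by purging the first. Note 'flags | size' on the second pin, the same pack-the-offset-into-flags trick as the hole tests; that flags carries PIN_OFFSET_FIXED here is an assumption. Condensed, with the steps between the two objects elided:

	vma = i915_vma_instance(purge, vm, NULL);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	err = i915_vma_pin(vma, 0, 0, flags);
	if (err)
		return err;
	i915_vma_unpin(vma);	/* now ripe for purging */

	/* ... mark 'purge' reclaimable, create 'explode' (elided) ... */

	vma = i915_vma_instance(explode, vm, NULL);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/* Assumption: 'size' acts as the fixed offset for 'explode'. */
	err = i915_vma_pin(vma, 0, 0, flags | size);
	if (err)
		return err;
	i915_vma_unpin(vma);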
1083 struct i915_vma *vma; in misaligned_case() local
1097 vma = i915_vma_instance(obj, vm, NULL); in misaligned_case()
1098 if (IS_ERR(vma)) { in misaligned_case()
1099 err = PTR_ERR(vma); in misaligned_case()
1103 err = i915_vma_pin(vma, 0, 0, addr | flags); in misaligned_case()
1106 i915_vma_unpin(vma); in misaligned_case()
1108 if (!drm_mm_node_allocated(&vma->node)) { in misaligned_case()
1113 if (i915_vma_misplaced(vma, 0, 0, addr | flags)) { in misaligned_case()
1118 expected_vma_size = round_up(size, 1 << (ffs(vma->resource->page_sizes_gtt) - 1)); in misaligned_case()
1126 if (vma->size != expected_vma_size || vma->node.size != expected_node_size) { in misaligned_case()
1127 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1132 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1136 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in misaligned_case()
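The size check in misaligned_case() is the subtle part: the GTT may back a binding with larger pages than requested, so the expected vma size is the requested size rounded up to the smallest page size actually used, recovered as the lowest set bit of page_sizes_gtt. A worked sketch (expected_node_size follows the same idea; its derivation and the -EBADSLT choice are recalled from context, and err is knowingly overwritten after the tidy-up unbind):

	/* page_sizes_gtt is a bitmask of page sizes backing the binding;
	 * ffs() - 1 is the log2 of the smallest, e.g. 0x10000 -> 64K. */
	expected_vma_size = round_up(size,
				     1 << (ffs(vma->resource->page_sizes_gtt) - 1));

	if (vma->size != expected_vma_size ||
	    vma->node.size != expected_node_size) {
		err = i915_vma_unbind_unlocked(vma);	/* tidy up first */
		err = -EBADSLT;
		goto err_put;
	}

	err = i915_vma_unbind_unlocked(vma);
	if (err)
		goto err_put;

	GEM_BUG_ON(drm_mm_node_allocated(&vma->node));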
1436 static void track_vma_bind(struct i915_vma *vma) in track_vma_bind() argument
1438 struct drm_i915_gem_object *obj = vma->obj; in track_vma_bind()
1442 GEM_BUG_ON(atomic_read(&vma->pages_count)); in track_vma_bind()
1443 atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE); in track_vma_bind()
1445 vma->pages = obj->mm.pages; in track_vma_bind()
1446 vma->resource->bi.pages = vma->pages; in track_vma_bind()
1448 mutex_lock(&vma->vm->mutex); in track_vma_bind()
1449 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in track_vma_bind()
1450 mutex_unlock(&vma->vm->mutex); in track_vma_bind()
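The excerpts cover nearly all of track_vma_bind(), which fakes a bind for the raw drm_mm tests: the vma borrows the object's pages, the resource is pointed at them, and the vma is queued on the vm's bound_list under the vm mutex so eviction can see it, all without running the real bind path. Reconstructed shape (the elided lines pin the object's pages; that detail is an assumption):

static void track_vma_bind(struct i915_vma *vma)
{
	struct drm_i915_gem_object *obj = vma->obj;

	/* ... object page pinning elided (not in the excerpts) ... */

	GEM_BUG_ON(atomic_read(&vma->pages_count));
	atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE);
	vma->pages = obj->mm.pages;		/* borrowed, not copied */
	vma->resource->bi.pages = vma->pages;

	mutex_lock(&vma->vm->mutex);
	list_move_tail(&vma->vm_link, &vma->vm->bound_list);
	mutex_unlock(&vma->vm->mutex);
}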
1504 static int reserve_gtt_with_resource(struct i915_vma *vma, u64 offset) in reserve_gtt_with_resource() argument
1506 struct i915_address_space *vm = vma->vm; in reserve_gtt_with_resource()
1508 struct drm_i915_gem_object *obj = vma->obj; in reserve_gtt_with_resource()
1516 err = i915_gem_gtt_reserve(vm, NULL, &vma->node, obj->base.size, in reserve_gtt_with_resource()
1521 i915_vma_resource_init_from_vma(vma_res, vma); in reserve_gtt_with_resource()
1522 vma->resource = vma_res; in reserve_gtt_with_resource()
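Reconstructed shape of reserve_gtt_with_resource(): allocate a vma resource up front, take the vm mutex, reserve the exact range, and attach the resource only on success. The i915_gem_gtt_reserve() continuation line (color and flags) and the failure-path kfree() are not in the excerpts and are filled in as assumptions:

static int reserve_gtt_with_resource(struct i915_vma *vma, u64 offset)
{
	struct i915_address_space *vm = vma->vm;
	struct i915_vma_resource *vma_res;
	struct drm_i915_gem_object *obj = vma->obj;
	int err;

	vma_res = i915_vma_resource_alloc();
	if (IS_ERR(vma_res))
		return PTR_ERR(vma_res);

	mutex_lock(&vm->mutex);
	/* Assumption: the color argument is the object's cache level
	 * (pat_index on newer kernels); it is not in the excerpts. */
	err = i915_gem_gtt_reserve(vm, NULL, &vma->node, obj->base.size,
				   offset, obj->cache_level, 0);
	if (!err) {
		i915_vma_resource_init_from_vma(vma_res, vma);
		vma->resource = vma_res;
	} else {
		kfree(vma_res);
	}
	mutex_unlock(&vm->mutex);

	return err;
}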
1549 struct i915_vma *vma; in igt_gtt_reserve() local
1565 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1566 if (IS_ERR(vma)) { in igt_gtt_reserve()
1567 err = PTR_ERR(vma); in igt_gtt_reserve()
1571 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1577 track_vma_bind(vma); in igt_gtt_reserve()
1579 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1580 if (vma->node.start != total || in igt_gtt_reserve()
1581 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1583 vma->node.start, vma->node.size, in igt_gtt_reserve()
1594 struct i915_vma *vma; in igt_gtt_reserve() local
1611 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1612 if (IS_ERR(vma)) { in igt_gtt_reserve()
1613 err = PTR_ERR(vma); in igt_gtt_reserve()
1617 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1623 track_vma_bind(vma); in igt_gtt_reserve()
1625 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1626 if (vma->node.start != total || in igt_gtt_reserve()
1627 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1629 vma->node.start, vma->node.size, in igt_gtt_reserve()
1638 struct i915_vma *vma; in igt_gtt_reserve() local
1641 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1642 if (IS_ERR(vma)) { in igt_gtt_reserve()
1643 err = PTR_ERR(vma); in igt_gtt_reserve()
1647 err = i915_vma_unbind_unlocked(vma); in igt_gtt_reserve()
1658 err = reserve_gtt_with_resource(vma, offset); in igt_gtt_reserve()
1664 track_vma_bind(vma); in igt_gtt_reserve()
1666 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1667 if (vma->node.start != offset || in igt_gtt_reserve()
1668 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1670 vma->node.start, vma->node.size, in igt_gtt_reserve()
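All three igt_gtt_reserve() phases — packing pairs from the start of the GGTT, filling the second half of each pair, then unbinding and re-reserving at arbitrary offsets — funnel into the same assertion, condensed here. 'expected_offset' stands in for the running 'total' or the chosen 'offset'; the message text is paraphrased:

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	if (vma->node.start != expected_offset ||
	    vma->node.size != 2 * I915_GTT_PAGE_SIZE) {
		pr_err("i915_gem_gtt_reserve: node at (%llx + %llx), expected (%llx + %llx)\n",
		       vma->node.start, vma->node.size,
		       expected_offset, 2 * (u64)I915_GTT_PAGE_SIZE);
		err = -EINVAL;
		goto out;
	}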
1685 static int insert_gtt_with_resource(struct i915_vma *vma) in insert_gtt_with_resource() argument
1687 struct i915_address_space *vm = vma->vm; in insert_gtt_with_resource()
1689 struct drm_i915_gem_object *obj = vma->obj; in insert_gtt_with_resource()
1697 err = i915_gem_gtt_insert(vm, NULL, &vma->node, obj->base.size, 0, in insert_gtt_with_resource()
1700 i915_vma_resource_init_from_vma(vma_res, vma); in insert_gtt_with_resource()
1701 vma->resource = vma_res; in insert_gtt_with_resource()
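The companion helper: i915_gem_gtt_insert() chooses the offset itself rather than taking one. The call's continuation arguments (color, search range, flags) are not in the excerpts; the whole-vm range below is an assumption:

static int insert_gtt_with_resource(struct i915_vma *vma)
{
	struct i915_address_space *vm = vma->vm;
	struct i915_vma_resource *vma_res;
	struct drm_i915_gem_object *obj = vma->obj;
	int err;

	vma_res = i915_vma_resource_alloc();
	if (IS_ERR(vma_res))
		return PTR_ERR(vma_res);

	mutex_lock(&vm->mutex);
	/* Assumption: search the whole vm with no alignment constraint. */
	err = i915_gem_gtt_insert(vm, NULL, &vma->node, obj->base.size, 0,
				  obj->cache_level, 0, vm->total, 0);
	if (!err) {
		i915_vma_resource_init_from_vma(vma_res, vma);
		vma->resource = vma_res;
	} else {
		kfree(vma_res);
	}
	mutex_unlock(&vm->mutex);

	return err;
}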
1771 struct i915_vma *vma; in igt_gtt_insert() local
1788 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1789 if (IS_ERR(vma)) { in igt_gtt_insert()
1790 err = PTR_ERR(vma); in igt_gtt_insert()
1794 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1805 track_vma_bind(vma); in igt_gtt_insert()
1806 __i915_vma_pin(vma); in igt_gtt_insert()
1808 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
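The first igt_gtt_insert() phase fills the GGTT until the allocator reports ENOSPC, pinning each vma so nothing can be evicted underneath the test; the next loop in the excerpts then re-looks-up each object, confirms it is still bound, and drops that pin. Condensed (the ENOSPC handling is recalled from context):

	err = insert_gtt_with_resource(vma);
	if (err == -ENOSPC) {
		/* GGTT exhausted: expected, stop filling. */
		i915_gem_object_put(obj);
		break;
	}
	if (err)
		goto out;

	track_vma_bind(vma);
	__i915_vma_pin(vma);	/* hold the node while the GTT fills */

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));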
1812 struct i915_vma *vma; in igt_gtt_insert() local
1814 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1815 if (IS_ERR(vma)) { in igt_gtt_insert()
1816 err = PTR_ERR(vma); in igt_gtt_insert()
1820 if (!drm_mm_node_allocated(&vma->node)) { in igt_gtt_insert()
1826 __i915_vma_unpin(vma); in igt_gtt_insert()
1831 struct i915_vma *vma; in igt_gtt_insert() local
1834 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1835 if (IS_ERR(vma)) { in igt_gtt_insert()
1836 err = PTR_ERR(vma); in igt_gtt_insert()
1840 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1841 offset = vma->node.start; in igt_gtt_insert()
1843 err = i915_vma_unbind_unlocked(vma); in igt_gtt_insert()
1849 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1855 track_vma_bind(vma); in igt_gtt_insert()
1857 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1858 if (vma->node.start != offset) { in igt_gtt_insert()
1860 offset, vma->node.start); in igt_gtt_insert()
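The telling phase: record where a vma landed, unbind it, insert again, and require the allocator to return it to the same offset, i.e. deterministic reuse of the only hole in an otherwise full GTT. Condensed, message text paraphrased:

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	offset = vma->node.start;

	err = i915_vma_unbind_unlocked(vma);
	if (err)
		goto out;

	err = insert_gtt_with_resource(vma);
	if (err)
		goto out;

	track_vma_bind(vma);

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	if (vma->node.start != offset) {
		pr_err("i915_gem_gtt_insert did not reuse the previous hole: expected %llx, found %llx\n",
		       offset, vma->node.start);
		err = -EINVAL;
		goto out;
	}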
1870 struct i915_vma *vma; in igt_gtt_insert() local
1887 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1888 if (IS_ERR(vma)) { in igt_gtt_insert()
1889 err = PTR_ERR(vma); in igt_gtt_insert()
1893 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1899 track_vma_bind(vma); in igt_gtt_insert()
1901 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()