Lines matching refs: vma in drivers/gpu/drm/i915/i915_vma.c (the leading number on each hit is its line number in that file; hits tagged 'argument' or 'local' are where vma is declared as a function parameter or a local variable)

44 static inline void assert_vma_held_evict(const struct i915_vma *vma)  in assert_vma_held_evict()  argument
51 if (kref_read(&vma->vm->ref)) in assert_vma_held_evict()
52 assert_object_held_shared(vma->obj); in assert_vma_held_evict()
62 static void i915_vma_free(struct i915_vma *vma) in i915_vma_free() argument
64 return kmem_cache_free(slab_vmas, vma); in i915_vma_free()
71 static void vma_print_allocator(struct i915_vma *vma, const char *reason) in vma_print_allocator() argument
75 if (!vma->node.stack) { in vma_print_allocator()
77 vma->node.start, vma->node.size, reason); in vma_print_allocator()
81 stack_depot_snprint(vma->node.stack, buf, sizeof(buf), 0); in vma_print_allocator()
83 vma->node.start, vma->node.size, reason, buf); in vma_print_allocator()
88 static void vma_print_allocator(struct i915_vma *vma, const char *reason) in vma_print_allocator() argument
115 struct i915_vma *vma; in vma_create() local
122 vma = i915_vma_alloc(); in vma_create()
123 if (vma == NULL) in vma_create()
126 vma->ops = &vm->vma_ops; in vma_create()
127 vma->obj = obj; in vma_create()
128 vma->size = obj->base.size; in vma_create()
129 vma->display_alignment = I915_GTT_MIN_ALIGNMENT; in vma_create()
131 i915_active_init(&vma->active, __i915_vma_active, __i915_vma_retire, 0); in vma_create()
136 might_lock(&vma->active.mutex); in vma_create()
140 INIT_LIST_HEAD(&vma->closed_link); in vma_create()
141 INIT_LIST_HEAD(&vma->obj_link); in vma_create()
142 RB_CLEAR_NODE(&vma->obj_node); in vma_create()
145 vma->gtt_view = *view; in vma_create()
151 vma->size = view->partial.size; in vma_create()
152 vma->size <<= PAGE_SHIFT; in vma_create()
153 GEM_BUG_ON(vma->size > obj->base.size); in vma_create()
155 vma->size = intel_rotation_info_size(&view->rotated); in vma_create()
156 vma->size <<= PAGE_SHIFT; in vma_create()
158 vma->size = intel_remapped_info_size(&view->remapped); in vma_create()
159 vma->size <<= PAGE_SHIFT; in vma_create()
163 if (unlikely(vma->size > vm->total)) in vma_create()
166 GEM_BUG_ON(!IS_ALIGNED(vma->size, I915_GTT_PAGE_SIZE)); in vma_create()
174 vma->vm = vm; in vma_create()
175 list_add_tail(&vma->vm_link, &vm->unbound_list); in vma_create()
177 spin_lock(&obj->vma.lock); in vma_create()
179 if (unlikely(overflows_type(vma->size, u32))) in vma_create()
182 vma->fence_size = i915_gem_fence_size(vm->i915, vma->size, in vma_create()
185 if (unlikely(vma->fence_size < vma->size || /* overflow */ in vma_create()
186 vma->fence_size > vm->total)) in vma_create()
189 GEM_BUG_ON(!IS_ALIGNED(vma->fence_size, I915_GTT_MIN_ALIGNMENT)); in vma_create()
191 vma->fence_alignment = i915_gem_fence_alignment(vm->i915, vma->size, in vma_create()
194 GEM_BUG_ON(!is_power_of_2(vma->fence_alignment)); in vma_create()
196 __set_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma)); in vma_create()
200 p = &obj->vma.tree.rb_node; in vma_create()
220 rb_link_node(&vma->obj_node, rb, p); in vma_create()
221 rb_insert_color(&vma->obj_node, &obj->vma.tree); in vma_create()
223 if (i915_vma_is_ggtt(vma)) in vma_create()
230 list_add(&vma->obj_link, &obj->vma.list); in vma_create()
232 list_add_tail(&vma->obj_link, &obj->vma.list); in vma_create()
234 spin_unlock(&obj->vma.lock); in vma_create()
237 return vma; in vma_create()
240 spin_unlock(&obj->vma.lock); in vma_create()
241 list_del_init(&vma->vm_link); in vma_create()
244 i915_vma_free(vma); in vma_create()
255 rb = obj->vma.tree.rb_node; in i915_vma_lookup()
257 struct i915_vma *vma = rb_entry(rb, struct i915_vma, obj_node); in i915_vma_lookup() local
260 cmp = i915_vma_compare(vma, vm, view); in i915_vma_lookup()
262 return vma; in i915_vma_lookup()
291 struct i915_vma *vma; in i915_vma_instance() local
296 spin_lock(&obj->vma.lock); in i915_vma_instance()
297 vma = i915_vma_lookup(obj, vm, view); in i915_vma_instance()
298 spin_unlock(&obj->vma.lock); in i915_vma_instance()
301 if (unlikely(!vma)) in i915_vma_instance()
302 vma = vma_create(obj, vm, view); in i915_vma_instance()
304 GEM_BUG_ON(!IS_ERR(vma) && i915_vma_compare(vma, vm, view)); in i915_vma_instance()
305 return vma; in i915_vma_instance()
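
The i915_vma_instance() hits above are the usual entry point: look the VMA up in obj->vma.tree, or fall back to vma_create() if it does not exist yet. A minimal caller-side sketch, assuming obj and vm are an existing drm_i915_gem_object and i915_address_space (the error handling shape is illustrative, not taken from this file):

        struct i915_vma *vma;
        int err;

        /* Look up, or lazily create, the VMA binding obj into vm. */
        vma = i915_vma_instance(obj, vm, NULL /* default view */);
        if (IS_ERR(vma))
                return PTR_ERR(vma);

        /* Reserve address space and bind, so vma->node.start is usable. */
        err = i915_vma_pin(vma, 0, 0, PIN_USER);
        if (err)
                return err;

        /* ... build and submit work that references the binding ... */

        i915_vma_unpin(vma);
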
369 int i915_vma_wait_for_bind(struct i915_vma *vma) in i915_vma_wait_for_bind() argument
373 if (rcu_access_pointer(vma->active.excl.fence)) { in i915_vma_wait_for_bind()
377 fence = dma_fence_get_rcu_safe(&vma->active.excl.fence); in i915_vma_wait_for_bind()
389 static int i915_vma_verify_bind_complete(struct i915_vma *vma) in i915_vma_verify_bind_complete() argument
391 struct dma_fence *fence = i915_active_fence_get(&vma->active.excl); in i915_vma_verify_bind_complete()
412 struct i915_vma *vma) in i915_vma_resource_init_from_vma() argument
414 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_resource_init_from_vma()
416 i915_vma_resource_init(vma_res, vma->vm, vma->pages, &vma->page_sizes, in i915_vma_resource_init_from_vma()
419 vma->ops, vma->private, vma->node.start, in i915_vma_resource_init_from_vma()
420 vma->node.size, vma->size); in i915_vma_resource_init_from_vma()
436 int i915_vma_bind(struct i915_vma *vma, in i915_vma_bind() argument
446 lockdep_assert_held(&vma->vm->mutex); in i915_vma_bind()
447 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_bind()
448 GEM_BUG_ON(vma->size > vma->node.size); in i915_vma_bind()
450 if (GEM_DEBUG_WARN_ON(range_overflows(vma->node.start, in i915_vma_bind()
451 vma->node.size, in i915_vma_bind()
452 vma->vm->total))) { in i915_vma_bind()
465 vma_flags = atomic_read(&vma->flags); in i915_vma_bind()
474 GEM_BUG_ON(!atomic_read(&vma->pages_count)); in i915_vma_bind()
477 if (work && bind_flags & vma->vm->bind_async_flags) in i915_vma_bind()
478 ret = i915_vma_resource_bind_dep_await(vma->vm, in i915_vma_bind()
480 vma->node.start, in i915_vma_bind()
481 vma->node.size, in i915_vma_bind()
487 ret = i915_vma_resource_bind_dep_sync(vma->vm, vma->node.start, in i915_vma_bind()
488 vma->node.size, true); in i915_vma_bind()
494 if (vma->resource || !vma_res) { in i915_vma_bind()
499 i915_vma_resource_init_from_vma(vma_res, vma); in i915_vma_bind()
500 vma->resource = vma_res; in i915_vma_bind()
502 trace_i915_vma_bind(vma, bind_flags); in i915_vma_bind()
503 if (work && bind_flags & vma->vm->bind_async_flags) { in i915_vma_bind()
506 work->vma_res = i915_vma_resource_get(vma->resource); in i915_vma_bind()
519 prev = i915_active_set_exclusive(&vma->active, &work->base.dma); in i915_vma_bind()
528 work->obj = i915_gem_object_get(vma->obj); in i915_vma_bind()
530 ret = i915_gem_object_wait_moving_fence(vma->obj, true); in i915_vma_bind()
532 i915_vma_resource_free(vma->resource); in i915_vma_bind()
533 vma->resource = NULL; in i915_vma_bind()
537 vma->ops->bind_vma(vma->vm, NULL, vma->resource, cache_level, in i915_vma_bind()
541 atomic_or(bind_flags, &vma->flags); in i915_vma_bind()
545 void __iomem *i915_vma_pin_iomap(struct i915_vma *vma) in i915_vma_pin_iomap() argument
550 if (WARN_ON_ONCE(vma->obj->flags & I915_BO_ALLOC_GPU_ONLY)) in i915_vma_pin_iomap()
553 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in i915_vma_pin_iomap()
554 GEM_BUG_ON(!i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)); in i915_vma_pin_iomap()
555 GEM_BUG_ON(i915_vma_verify_bind_complete(vma)); in i915_vma_pin_iomap()
557 ptr = READ_ONCE(vma->iomap); in i915_vma_pin_iomap()
565 if (i915_gem_object_is_lmem(vma->obj)) { in i915_vma_pin_iomap()
566 ptr = i915_gem_object_lmem_io_map(vma->obj, 0, in i915_vma_pin_iomap()
567 vma->obj->base.size); in i915_vma_pin_iomap()
568 } else if (i915_vma_is_map_and_fenceable(vma)) { in i915_vma_pin_iomap()
569 ptr = io_mapping_map_wc(&i915_vm_to_ggtt(vma->vm)->iomap, in i915_vma_pin_iomap()
570 vma->node.start, in i915_vma_pin_iomap()
571 vma->node.size); in i915_vma_pin_iomap()
574 i915_gem_object_pin_map(vma->obj, I915_MAP_WC); in i915_vma_pin_iomap()
587 if (unlikely(cmpxchg(&vma->iomap, NULL, ptr))) { in i915_vma_pin_iomap()
589 __i915_gem_object_release_map(vma->obj); in i915_vma_pin_iomap()
592 ptr = vma->iomap; in i915_vma_pin_iomap()
596 __i915_vma_pin(vma); in i915_vma_pin_iomap()
598 err = i915_vma_pin_fence(vma); in i915_vma_pin_iomap()
602 i915_vma_set_ggtt_write(vma); in i915_vma_pin_iomap()
608 __i915_vma_unpin(vma); in i915_vma_pin_iomap()
613 void i915_vma_flush_writes(struct i915_vma *vma) in i915_vma_flush_writes() argument
615 if (i915_vma_unset_ggtt_write(vma)) in i915_vma_flush_writes()
616 intel_gt_flush_ggtt_writes(vma->vm->gt); in i915_vma_flush_writes()
619 void i915_vma_unpin_iomap(struct i915_vma *vma) in i915_vma_unpin_iomap() argument
621 GEM_BUG_ON(vma->iomap == NULL); in i915_vma_unpin_iomap()
625 i915_vma_flush_writes(vma); in i915_vma_unpin_iomap()
627 i915_vma_unpin_fence(vma); in i915_vma_unpin_iomap()
628 i915_vma_unpin(vma); in i915_vma_unpin_iomap()
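
i915_vma_pin_iomap() and i915_vma_unpin_iomap() above bracket CPU access to a bound VMA, via the GGTT aperture, an lmem mapping or a WC pin_map fallback. A rough usage sketch, assuming vma is a GGTT VMA of a mappable object; the pin flags are only an example:

        void __iomem *ptr;
        int err;

        /* A global (GGTT) binding must exist before the iomap is taken. */
        err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL | PIN_MAPPABLE);
        if (err)
                return err;

        ptr = i915_vma_pin_iomap(vma);  /* may also grab a fence register */
        if (IS_ERR(ptr)) {
                i915_vma_unpin(vma);
                return PTR_ERR(ptr);
        }

        writel(0, ptr);                 /* CPU writes land through the mapping */

        i915_vma_unpin_iomap(vma);      /* flushes GGTT writes, drops fence + pin */
        i915_vma_unpin(vma);
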
633 struct i915_vma *vma; in i915_vma_unpin_and_release() local
636 vma = fetch_and_zero(p_vma); in i915_vma_unpin_and_release()
637 if (!vma) in i915_vma_unpin_and_release()
640 obj = vma->obj; in i915_vma_unpin_and_release()
643 i915_vma_unpin(vma); in i915_vma_unpin_and_release()
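
i915_vma_unpin_and_release() is the teardown helper for a long-lived pinned VMA: it clears the caller's pointer, unpins, and drops the reference on the backing object. A tiny sketch; the pointer name is made up:

        struct i915_vma *cached_vma;    /* hypothetical long-lived, pinned VMA */

        /* Unpins, puts the backing object and NULLs cached_vma. */
        i915_vma_unpin_and_release(&cached_vma, 0);
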
651 bool i915_vma_misplaced(const struct i915_vma *vma, in i915_vma_misplaced() argument
654 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_misplaced()
657 if (test_bit(I915_VMA_ERROR_BIT, __i915_vma_flags(vma))) in i915_vma_misplaced()
660 if (vma->node.size < size) in i915_vma_misplaced()
664 if (alignment && !IS_ALIGNED(vma->node.start, alignment)) in i915_vma_misplaced()
667 if (flags & PIN_MAPPABLE && !i915_vma_is_map_and_fenceable(vma)) in i915_vma_misplaced()
671 vma->node.start < (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
675 vma->node.start != (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
681 void __i915_vma_set_map_and_fenceable(struct i915_vma *vma) in __i915_vma_set_map_and_fenceable() argument
685 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in __i915_vma_set_map_and_fenceable()
686 GEM_BUG_ON(!vma->fence_size); in __i915_vma_set_map_and_fenceable()
688 fenceable = (vma->node.size >= vma->fence_size && in __i915_vma_set_map_and_fenceable()
689 IS_ALIGNED(vma->node.start, vma->fence_alignment)); in __i915_vma_set_map_and_fenceable()
691 mappable = vma->node.start + vma->fence_size <= i915_vm_to_ggtt(vma->vm)->mappable_end; in __i915_vma_set_map_and_fenceable()
694 set_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_set_map_and_fenceable()
696 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_set_map_and_fenceable()
699 bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long color) in i915_gem_valid_gtt_space() argument
701 struct drm_mm_node *node = &vma->node; in i915_gem_valid_gtt_space()
711 if (!i915_vm_has_cache_coloring(vma->vm)) in i915_gem_valid_gtt_space()
746 i915_vma_insert(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_vma_insert() argument
753 GEM_BUG_ON(i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND)); in i915_vma_insert()
754 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
756 size = max(size, vma->size); in i915_vma_insert()
757 alignment = max(alignment, vma->display_alignment); in i915_vma_insert()
759 size = max_t(typeof(size), size, vma->fence_size); in i915_vma_insert()
761 alignment, vma->fence_alignment); in i915_vma_insert()
771 end = vma->vm->total; in i915_vma_insert()
773 end = min_t(u64, end, i915_vm_to_ggtt(vma->vm)->mappable_end); in i915_vma_insert()
778 alignment = max(alignment, i915_vm_obj_min_alignment(vma->vm, vma->obj)); in i915_vma_insert()
783 if (NEEDS_COMPACT_PT(vma->vm->i915)) in i915_vma_insert()
799 if (i915_vm_has_cache_coloring(vma->vm)) in i915_vma_insert()
800 color = vma->obj->cache_level; in i915_vma_insert()
808 ret = i915_gem_gtt_reserve(vma->vm, ww, &vma->node, in i915_vma_insert()
823 vma->page_sizes.sg > I915_GTT_PAGE_SIZE) { in i915_vma_insert()
831 rounddown_pow_of_two(vma->page_sizes.sg | in i915_vma_insert()
839 GEM_BUG_ON(i915_vma_is_ggtt(vma)); in i915_vma_insert()
843 if (vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K) in i915_vma_insert()
847 ret = i915_gem_gtt_insert(vma->vm, ww, &vma->node, in i915_vma_insert()
853 GEM_BUG_ON(vma->node.start < start); in i915_vma_insert()
854 GEM_BUG_ON(vma->node.start + vma->node.size > end); in i915_vma_insert()
856 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
857 GEM_BUG_ON(!i915_gem_valid_gtt_space(vma, color)); in i915_vma_insert()
859 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in i915_vma_insert()
865 i915_vma_detach(struct i915_vma *vma) in i915_vma_detach() argument
867 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_detach()
868 GEM_BUG_ON(i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND)); in i915_vma_detach()
875 list_move_tail(&vma->vm_link, &vma->vm->unbound_list); in i915_vma_detach()
878 static bool try_qad_pin(struct i915_vma *vma, unsigned int flags) in try_qad_pin() argument
882 bound = atomic_read(&vma->flags); in try_qad_pin()
900 } while (!atomic_try_cmpxchg(&vma->flags, &bound, bound + 1)); in try_qad_pin()
1238 __i915_vma_get_pages(struct i915_vma *vma) in __i915_vma_get_pages() argument
1248 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(vma->obj)); in __i915_vma_get_pages()
1250 switch (vma->gtt_view.type) { in __i915_vma_get_pages()
1252 GEM_BUG_ON(vma->gtt_view.type); in __i915_vma_get_pages()
1255 pages = vma->obj->mm.pages; in __i915_vma_get_pages()
1260 intel_rotate_pages(&vma->gtt_view.rotated, vma->obj); in __i915_vma_get_pages()
1265 intel_remap_pages(&vma->gtt_view.remapped, vma->obj); in __i915_vma_get_pages()
1269 pages = intel_partial_pages(&vma->gtt_view, vma->obj); in __i915_vma_get_pages()
1274 drm_err(&vma->vm->i915->drm, in __i915_vma_get_pages()
1276 vma->gtt_view.type, PTR_ERR(pages)); in __i915_vma_get_pages()
1280 vma->pages = pages; in __i915_vma_get_pages()
1285 I915_SELFTEST_EXPORT int i915_vma_get_pages(struct i915_vma *vma) in i915_vma_get_pages() argument
1289 if (atomic_add_unless(&vma->pages_count, 1, 0)) in i915_vma_get_pages()
1292 err = i915_gem_object_pin_pages(vma->obj); in i915_vma_get_pages()
1296 err = __i915_vma_get_pages(vma); in i915_vma_get_pages()
1300 vma->page_sizes = vma->obj->mm.page_sizes; in i915_vma_get_pages()
1301 atomic_inc(&vma->pages_count); in i915_vma_get_pages()
1306 __i915_gem_object_unpin_pages(vma->obj); in i915_vma_get_pages()
1324 static void __vma_put_pages(struct i915_vma *vma, unsigned int count) in __vma_put_pages() argument
1327 GEM_BUG_ON(atomic_read(&vma->pages_count) < count); in __vma_put_pages()
1329 if (atomic_sub_return(count, &vma->pages_count) == 0) { in __vma_put_pages()
1330 if (vma->pages != vma->obj->mm.pages) { in __vma_put_pages()
1331 sg_free_table(vma->pages); in __vma_put_pages()
1332 kfree(vma->pages); in __vma_put_pages()
1334 vma->pages = NULL; in __vma_put_pages()
1336 i915_gem_object_unpin_pages(vma->obj); in __vma_put_pages()
1340 I915_SELFTEST_EXPORT void i915_vma_put_pages(struct i915_vma *vma) in i915_vma_put_pages() argument
1342 if (atomic_add_unless(&vma->pages_count, -1, 1)) in i915_vma_put_pages()
1345 __vma_put_pages(vma, 1); in i915_vma_put_pages()
1348 static void vma_unbind_pages(struct i915_vma *vma) in vma_unbind_pages() argument
1352 lockdep_assert_held(&vma->vm->mutex); in vma_unbind_pages()
1355 count = atomic_read(&vma->pages_count); in vma_unbind_pages()
1359 __vma_put_pages(vma, count | count << I915_VMA_PAGES_BIAS); in vma_unbind_pages()
1362 int i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_vma_pin_ww() argument
1372 assert_vma_held(vma); in i915_vma_pin_ww()
1381 if (try_qad_pin(vma, flags)) in i915_vma_pin_ww()
1384 err = i915_vma_get_pages(vma); in i915_vma_pin_ww()
1389 wakeref = intel_runtime_pm_get(&vma->vm->i915->runtime_pm); in i915_vma_pin_ww()
1391 if (flags & vma->vm->bind_async_flags) { in i915_vma_pin_ww()
1393 err = i915_vm_lock_objects(vma->vm, ww); in i915_vma_pin_ww()
1403 work->vm = vma->vm; in i915_vma_pin_ww()
1405 err = i915_gem_object_get_moving_fence(vma->obj, &moving); in i915_vma_pin_ww()
1412 if (vma->vm->allocate_va_range) { in i915_vma_pin_ww()
1413 err = i915_vm_alloc_pt_stash(vma->vm, in i915_vma_pin_ww()
1415 vma->size); in i915_vma_pin_ww()
1419 err = i915_vm_map_pt_stash(vma->vm, &work->stash); in i915_vma_pin_ww()
1448 err = mutex_lock_interruptible_nested(&vma->vm->mutex, in i915_vma_pin_ww()
1455 if (unlikely(i915_vma_is_closed(vma))) { in i915_vma_pin_ww()
1460 bound = atomic_read(&vma->flags); in i915_vma_pin_ww()
1473 __i915_vma_pin(vma); in i915_vma_pin_ww()
1477 err = i915_active_acquire(&vma->active); in i915_vma_pin_ww()
1482 err = i915_vma_insert(vma, ww, size, alignment, flags); in i915_vma_pin_ww()
1486 if (i915_is_ggtt(vma->vm)) in i915_vma_pin_ww()
1487 __i915_vma_set_map_and_fenceable(vma); in i915_vma_pin_ww()
1490 GEM_BUG_ON(!vma->pages); in i915_vma_pin_ww()
1491 err = i915_vma_bind(vma, in i915_vma_pin_ww()
1492 vma->obj->cache_level, in i915_vma_pin_ww()
1500 atomic_add(I915_VMA_PAGES_ACTIVE, &vma->pages_count); in i915_vma_pin_ww()
1501 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in i915_vma_pin_ww()
1504 __i915_vma_pin(vma); in i915_vma_pin_ww()
1505 GEM_BUG_ON(!i915_vma_is_pinned(vma)); in i915_vma_pin_ww()
1507 GEM_BUG_ON(!i915_vma_is_bound(vma, flags)); in i915_vma_pin_ww()
1508 GEM_BUG_ON(i915_vma_misplaced(vma, size, alignment, flags)); in i915_vma_pin_ww()
1511 if (!i915_vma_is_bound(vma, I915_VMA_BIND_MASK)) { in i915_vma_pin_ww()
1512 i915_vma_detach(vma); in i915_vma_pin_ww()
1513 drm_mm_remove_node(&vma->node); in i915_vma_pin_ww()
1516 i915_active_release(&vma->active); in i915_vma_pin_ww()
1518 mutex_unlock(&vma->vm->mutex); in i915_vma_pin_ww()
1526 intel_runtime_pm_put(&vma->vm->i915->runtime_pm, wakeref); in i915_vma_pin_ww()
1531 i915_vma_put_pages(vma); in i915_vma_pin_ww()
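
i915_vma_pin_ww() is the ww-locked core that the plain pin helpers funnel into: as the hits show, it grabs the pages, takes vm->mutex, inserts the node and binds. A hedged sketch of driving it directly from a ww transaction; for_i915_gem_ww() and i915_gem_object_lock() are the usual i915 ww idiom, and everything except the vma is assumed:

        struct i915_gem_ww_ctx ww;
        int err;

        for_i915_gem_ww(&ww, err, true) {
                err = i915_gem_object_lock(vma->obj, &ww);
                if (err)
                        continue;       /* -EDEADLK backoff handled by the loop */

                err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
        }
        if (err)
                return err;

        /* ... use the binding ... */
        i915_vma_unpin(vma);
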
1546 static int __i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in __i915_ggtt_pin() argument
1549 struct i915_address_space *vm = vma->vm; in __i915_ggtt_pin()
1553 err = i915_vma_pin_ww(vma, ww, 0, align, flags | PIN_GLOBAL); in __i915_ggtt_pin()
1557 err = i915_vma_wait_for_bind(vma); in __i915_ggtt_pin()
1559 i915_vma_unpin(vma); in __i915_ggtt_pin()
1578 int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_ggtt_pin() argument
1584 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in i915_ggtt_pin()
1587 return __i915_ggtt_pin(vma, ww, align, flags); in i915_ggtt_pin()
1589 lockdep_assert_not_held(&vma->obj->base.resv->lock.base); in i915_ggtt_pin()
1592 err = i915_gem_object_lock(vma->obj, &_ww); in i915_ggtt_pin()
1594 err = __i915_ggtt_pin(vma, &_ww, align, flags); in i915_ggtt_pin()
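
i915_ggtt_pin() above is the GGTT-only convenience wrapper: it forces PIN_GLOBAL, waits for the bind to complete, and opens its own ww transaction when no context is passed. A small sketch under those assumptions; the PIN_HIGH flag is only an example:

        int err;

        /* ww == NULL: the helper runs and retries its own ww transaction. */
        err = i915_ggtt_pin(vma, NULL, 0, PIN_HIGH);
        if (err)
                return err;

        /* The bind has completed here; drop the pin when done. */
        i915_vma_unpin(vma);
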
1600 static void __vma_close(struct i915_vma *vma, struct intel_gt *gt) in __vma_close() argument
1614 GEM_BUG_ON(i915_vma_is_closed(vma)); in __vma_close()
1615 list_add(&vma->closed_link, &gt->closed_vma); in __vma_close()
1618 void i915_vma_close(struct i915_vma *vma) in i915_vma_close() argument
1620 struct intel_gt *gt = vma->vm->gt; in i915_vma_close()
1623 if (i915_vma_is_ggtt(vma)) in i915_vma_close()
1626 GEM_BUG_ON(!atomic_read(&vma->open_count)); in i915_vma_close()
1627 if (atomic_dec_and_lock_irqsave(&vma->open_count, in i915_vma_close()
1630 __vma_close(vma, gt); in i915_vma_close()
1635 static void __i915_vma_remove_closed(struct i915_vma *vma) in __i915_vma_remove_closed() argument
1637 list_del_init(&vma->closed_link); in __i915_vma_remove_closed()
1640 void i915_vma_reopen(struct i915_vma *vma) in i915_vma_reopen() argument
1642 struct intel_gt *gt = vma->vm->gt; in i915_vma_reopen()
1645 if (i915_vma_is_closed(vma)) in i915_vma_reopen()
1646 __i915_vma_remove_closed(vma); in i915_vma_reopen()
1650 static void force_unbind(struct i915_vma *vma) in force_unbind() argument
1652 if (!drm_mm_node_allocated(&vma->node)) in force_unbind()
1655 atomic_and(~I915_VMA_PIN_MASK, &vma->flags); in force_unbind()
1656 WARN_ON(__i915_vma_unbind(vma)); in force_unbind()
1657 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in force_unbind()
1660 static void release_references(struct i915_vma *vma, struct intel_gt *gt, in release_references() argument
1663 struct drm_i915_gem_object *obj = vma->obj; in release_references()
1665 GEM_BUG_ON(i915_vma_is_active(vma)); in release_references()
1667 spin_lock(&obj->vma.lock); in release_references()
1668 list_del(&vma->obj_link); in release_references()
1669 if (!RB_EMPTY_NODE(&vma->obj_node)) in release_references()
1670 rb_erase(&vma->obj_node, &obj->vma.tree); in release_references()
1672 spin_unlock(&obj->vma.lock); in release_references()
1675 __i915_vma_remove_closed(vma); in release_references()
1679 i915_vm_resv_put(vma->vm); in release_references()
1681 i915_active_fini(&vma->active); in release_references()
1682 GEM_WARN_ON(vma->resource); in release_references()
1683 i915_vma_free(vma); in release_references()
1712 void i915_vma_destroy_locked(struct i915_vma *vma) in i915_vma_destroy_locked() argument
1714 lockdep_assert_held(&vma->vm->mutex); in i915_vma_destroy_locked()
1716 force_unbind(vma); in i915_vma_destroy_locked()
1717 list_del_init(&vma->vm_link); in i915_vma_destroy_locked()
1718 release_references(vma, vma->vm->gt, false); in i915_vma_destroy_locked()
1721 void i915_vma_destroy(struct i915_vma *vma) in i915_vma_destroy() argument
1726 mutex_lock(&vma->vm->mutex); in i915_vma_destroy()
1727 force_unbind(vma); in i915_vma_destroy()
1728 list_del_init(&vma->vm_link); in i915_vma_destroy()
1729 vm_ddestroy = vma->vm_ddestroy; in i915_vma_destroy()
1730 vma->vm_ddestroy = false; in i915_vma_destroy()
1733 gt = vma->vm->gt; in i915_vma_destroy()
1734 mutex_unlock(&vma->vm->mutex); in i915_vma_destroy()
1735 release_references(vma, gt, vm_ddestroy); in i915_vma_destroy()
1740 struct i915_vma *vma, *next; in i915_vma_parked() local
1744 list_for_each_entry_safe(vma, next, &gt->closed_vma, closed_link) { in i915_vma_parked()
1745 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_parked()
1746 struct i915_address_space *vm = vma->vm; in i915_vma_parked()
1758 list_move(&vma->closed_link, &closed); in i915_vma_parked()
1763 list_for_each_entry_safe(vma, next, &closed, closed_link) { in i915_vma_parked()
1764 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_parked()
1765 struct i915_address_space *vm = vma->vm; in i915_vma_parked()
1768 INIT_LIST_HEAD(&vma->closed_link); in i915_vma_parked()
1769 i915_vma_destroy(vma); in i915_vma_parked()
1774 list_add(&vma->closed_link, &gt->closed_vma); in i915_vma_parked()
1783 static void __i915_vma_iounmap(struct i915_vma *vma) in __i915_vma_iounmap() argument
1785 GEM_BUG_ON(i915_vma_is_pinned(vma)); in __i915_vma_iounmap()
1787 if (vma->iomap == NULL) in __i915_vma_iounmap()
1790 if (page_unmask_bits(vma->iomap)) in __i915_vma_iounmap()
1791 __i915_gem_object_release_map(vma->obj); in __i915_vma_iounmap()
1793 io_mapping_unmap(vma->iomap); in __i915_vma_iounmap()
1794 vma->iomap = NULL; in __i915_vma_iounmap()
1797 void i915_vma_revoke_mmap(struct i915_vma *vma) in i915_vma_revoke_mmap() argument
1802 if (!i915_vma_has_userfault(vma)) in i915_vma_revoke_mmap()
1805 GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma)); in i915_vma_revoke_mmap()
1806 GEM_BUG_ON(!vma->obj->userfault_count); in i915_vma_revoke_mmap()
1808 node = &vma->mmo->vma_node; in i915_vma_revoke_mmap()
1809 vma_offset = vma->gtt_view.partial.offset << PAGE_SHIFT; in i915_vma_revoke_mmap()
1810 unmap_mapping_range(vma->vm->i915->drm.anon_inode->i_mapping, in i915_vma_revoke_mmap()
1812 vma->size, in i915_vma_revoke_mmap()
1815 i915_vma_unset_userfault(vma); in i915_vma_revoke_mmap()
1816 if (!--vma->obj->userfault_count) in i915_vma_revoke_mmap()
1817 list_del(&vma->obj->userfault_link); in i915_vma_revoke_mmap()
1821 __i915_request_await_bind(struct i915_request *rq, struct i915_vma *vma) in __i915_request_await_bind() argument
1823 return __i915_request_await_exclusive(rq, &vma->active); in __i915_request_await_bind()
1826 static int __i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq) in __i915_vma_move_to_active() argument
1831 err = __i915_request_await_bind(rq, vma); in __i915_vma_move_to_active()
1835 return i915_active_add_request(&vma->active, rq); in __i915_vma_move_to_active()
1838 int _i915_vma_move_to_active(struct i915_vma *vma, in _i915_vma_move_to_active() argument
1843 struct drm_i915_gem_object *obj = vma->obj; in _i915_vma_move_to_active()
1848 GEM_BUG_ON(!vma->pages); in _i915_vma_move_to_active()
1850 err = __i915_vma_move_to_active(vma, rq); in _i915_vma_move_to_active()
1864 err = dma_resv_reserve_fences(vma->obj->base.resv, idx); in _i915_vma_move_to_active()
1895 dma_resv_add_fence(vma->obj->base.resv, curr, usage); in _i915_vma_move_to_active()
1898 if (flags & EXEC_OBJECT_NEEDS_FENCE && vma->fence) in _i915_vma_move_to_active()
1899 i915_active_add_request(&vma->fence->active, rq); in _i915_vma_move_to_active()
1904 GEM_BUG_ON(!i915_vma_is_active(vma)); in _i915_vma_move_to_active()
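
_i915_vma_move_to_active() is what ties the binding's lifetime to a request: it awaits the bind, adds the request to vma->active and publishes fences on the object's reservation. Callers normally use the i915_vma_move_to_active() wrapper while holding the object lock; a sketch, with rq an assumed already-built i915_request:

        /* Object lock held: fences are added to vma->obj->base.resv. */
        err = i915_vma_move_to_active(vma, rq, EXEC_OBJECT_WRITE);
        if (err)
                return err;

        /* vma->active now tracks rq; unbind/evict will wait for it to retire. */
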
1908 struct dma_fence *__i915_vma_evict(struct i915_vma *vma, bool async) in __i915_vma_evict() argument
1910 struct i915_vma_resource *vma_res = vma->resource; in __i915_vma_evict()
1913 GEM_BUG_ON(i915_vma_is_pinned(vma)); in __i915_vma_evict()
1914 assert_vma_held_evict(vma); in __i915_vma_evict()
1916 if (i915_vma_is_map_and_fenceable(vma)) { in __i915_vma_evict()
1918 i915_vma_revoke_mmap(vma); in __i915_vma_evict()
1933 i915_vma_flush_writes(vma); in __i915_vma_evict()
1936 i915_vma_revoke_fence(vma); in __i915_vma_evict()
1938 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_evict()
1941 __i915_vma_iounmap(vma); in __i915_vma_evict()
1943 GEM_BUG_ON(vma->fence); in __i915_vma_evict()
1944 GEM_BUG_ON(i915_vma_has_userfault(vma)); in __i915_vma_evict()
1947 GEM_WARN_ON(async && !vma->resource->bi.pages_rsgt); in __i915_vma_evict()
1950 vma_res->needs_wakeref = i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND) && in __i915_vma_evict()
1951 kref_read(&vma->vm->ref); in __i915_vma_evict()
1952 vma_res->skip_pte_rewrite = !kref_read(&vma->vm->ref) || in __i915_vma_evict()
1953 vma->vm->skip_pte_rewrite; in __i915_vma_evict()
1954 trace_i915_vma_unbind(vma); in __i915_vma_evict()
1958 &vma->obj->mm.tlb); in __i915_vma_evict()
1962 vma->resource = NULL; in __i915_vma_evict()
1965 &vma->flags); in __i915_vma_evict()
1967 i915_vma_detach(vma); in __i915_vma_evict()
1975 vma_invalidate_tlb(vma->vm, &vma->obj->mm.tlb); in __i915_vma_evict()
1984 vma_unbind_pages(vma); in __i915_vma_evict()
1988 int __i915_vma_unbind(struct i915_vma *vma) in __i915_vma_unbind() argument
1992 lockdep_assert_held(&vma->vm->mutex); in __i915_vma_unbind()
1993 assert_vma_held_evict(vma); in __i915_vma_unbind()
1995 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind()
1998 if (i915_vma_is_pinned(vma)) { in __i915_vma_unbind()
1999 vma_print_allocator(vma, "is pinned"); in __i915_vma_unbind()
2008 ret = i915_vma_sync(vma); in __i915_vma_unbind()
2012 GEM_BUG_ON(i915_vma_is_active(vma)); in __i915_vma_unbind()
2013 __i915_vma_evict(vma, false); in __i915_vma_unbind()
2015 drm_mm_remove_node(&vma->node); /* pairs with i915_vma_release() */ in __i915_vma_unbind()
2019 static struct dma_fence *__i915_vma_unbind_async(struct i915_vma *vma) in __i915_vma_unbind_async() argument
2023 lockdep_assert_held(&vma->vm->mutex); in __i915_vma_unbind_async()
2025 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind_async()
2028 if (i915_vma_is_pinned(vma) || in __i915_vma_unbind_async()
2029 &vma->obj->mm.rsgt->table != vma->resource->bi.pages) in __i915_vma_unbind_async()
2041 if (i915_sw_fence_await_active(&vma->resource->chain, &vma->active, in __i915_vma_unbind_async()
2047 fence = __i915_vma_evict(vma, true); in __i915_vma_unbind_async()
2049 drm_mm_remove_node(&vma->node); /* pairs with i915_vma_release() */ in __i915_vma_unbind_async()
2054 int i915_vma_unbind(struct i915_vma *vma) in i915_vma_unbind() argument
2056 struct i915_address_space *vm = vma->vm; in i915_vma_unbind()
2060 assert_object_held_shared(vma->obj); in i915_vma_unbind()
2063 err = i915_vma_sync(vma); in i915_vma_unbind()
2067 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_unbind()
2070 if (i915_vma_is_pinned(vma)) { in i915_vma_unbind()
2071 vma_print_allocator(vma, "is pinned"); in i915_vma_unbind()
2075 if (i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)) in i915_vma_unbind()
2079 err = mutex_lock_interruptible_nested(&vma->vm->mutex, !wakeref); in i915_vma_unbind()
2083 err = __i915_vma_unbind(vma); in i915_vma_unbind()
2092 int i915_vma_unbind_async(struct i915_vma *vma, bool trylock_vm) in i915_vma_unbind_async() argument
2094 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_unbind_async()
2095 struct i915_address_space *vm = vma->vm; in i915_vma_unbind_async()
2106 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_unbind_async()
2109 if (i915_vma_is_pinned(vma)) { in i915_vma_unbind_async()
2110 vma_print_allocator(vma, "is pinned"); in i915_vma_unbind_async()
2126 if (i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)) in i915_vma_unbind_async()
2138 fence = __i915_vma_unbind_async(vma); in i915_vma_unbind_async()
2154 int i915_vma_unbind_unlocked(struct i915_vma *vma) in i915_vma_unbind_unlocked() argument
2158 i915_gem_object_lock(vma->obj, NULL); in i915_vma_unbind_unlocked()
2159 err = i915_vma_unbind(vma); in i915_vma_unbind_unlocked()
2160 i915_gem_object_unlock(vma->obj); in i915_vma_unbind_unlocked()
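
The unbind entry points differ mainly in what the caller already holds: __i915_vma_unbind() expects vm->mutex, i915_vma_unbind() takes it, and i915_vma_unbind_unlocked() additionally takes the object lock itself, as the hits above show. The simplest caller-side form is therefore:

        int err;

        /* Takes the object lock, idles the VMA, removes its GTT node. */
        err = i915_vma_unbind_unlocked(vma);
        if (err)        /* e.g. still pinned, or interrupted while waiting */
                return err;
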
2165 struct i915_vma *i915_vma_make_unshrinkable(struct i915_vma *vma) in i915_vma_make_unshrinkable() argument
2167 i915_gem_object_make_unshrinkable(vma->obj); in i915_vma_make_unshrinkable()
2168 return vma; in i915_vma_make_unshrinkable()
2171 void i915_vma_make_shrinkable(struct i915_vma *vma) in i915_vma_make_shrinkable() argument
2173 i915_gem_object_make_shrinkable(vma->obj); in i915_vma_make_shrinkable()
2176 void i915_vma_make_purgeable(struct i915_vma *vma) in i915_vma_make_purgeable() argument
2178 i915_gem_object_make_purgeable(vma->obj); in i915_vma_make_purgeable()