Lines matching refs: prange (struct svm_range *), a cross-reference over the AMD KFD shared virtual memory code, drivers/gpu/drm/amd/amdkfd/kfd_svm.c. Each entry shows the source line number, the matching line, and the enclosing function.
81 static void svm_range_unlink(struct svm_range *prange) in svm_range_unlink() argument
83 pr_debug("svms 0x%p prange 0x%p [0x%lx 0x%lx]\n", prange->svms, in svm_range_unlink()
84 prange, prange->start, prange->last); in svm_range_unlink()
86 if (prange->svm_bo) { in svm_range_unlink()
87 spin_lock(&prange->svm_bo->list_lock); in svm_range_unlink()
88 list_del(&prange->svm_bo_list); in svm_range_unlink()
89 spin_unlock(&prange->svm_bo->list_lock); in svm_range_unlink()
92 list_del(&prange->list); in svm_range_unlink()
93 if (prange->it_node.start != 0 && prange->it_node.last != 0) in svm_range_unlink()
94 interval_tree_remove(&prange->it_node, &prange->svms->objects); in svm_range_unlink()
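Note: svm_range_unlink() detaches a range from every structure that can still reach it: the owning svm_bo's range list (under the bo's list_lock spinlock), the per-process svms->list, and the svms->objects interval tree. The start/last != 0 test is a sentinel for "this it_node was actually inserted into the tree".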
98 svm_range_add_notifier_locked(struct mm_struct *mm, struct svm_range *prange) in svm_range_add_notifier_locked() argument
100 pr_debug("svms 0x%p prange 0x%p [0x%lx 0x%lx]\n", prange->svms, in svm_range_add_notifier_locked()
101 prange, prange->start, prange->last); in svm_range_add_notifier_locked()
103 mmu_interval_notifier_insert_locked(&prange->notifier, mm, in svm_range_add_notifier_locked()
104 prange->start << PAGE_SHIFT, in svm_range_add_notifier_locked()
105 prange->npages << PAGE_SHIFT, in svm_range_add_notifier_locked()
117 static void svm_range_add_to_svms(struct svm_range *prange) in svm_range_add_to_svms() argument
119 pr_debug("svms 0x%p prange 0x%p [0x%lx 0x%lx]\n", prange->svms, in svm_range_add_to_svms()
120 prange, prange->start, prange->last); in svm_range_add_to_svms()
122 list_move_tail(&prange->list, &prange->svms->list); in svm_range_add_to_svms()
123 prange->it_node.start = prange->start; in svm_range_add_to_svms()
124 prange->it_node.last = prange->last; in svm_range_add_to_svms()
125 interval_tree_insert(&prange->it_node, &prange->svms->objects); in svm_range_add_to_svms()
128 static void svm_range_remove_notifier(struct svm_range *prange) in svm_range_remove_notifier() argument
131 prange->svms, prange, in svm_range_remove_notifier()
132 prange->notifier.interval_tree.start >> PAGE_SHIFT, in svm_range_remove_notifier()
133 prange->notifier.interval_tree.last >> PAGE_SHIFT); in svm_range_remove_notifier()
135 if (prange->notifier.interval_tree.start != 0 && in svm_range_remove_notifier()
136 prange->notifier.interval_tree.last != 0) in svm_range_remove_notifier()
137 mmu_interval_notifier_remove(&prange->notifier); in svm_range_remove_notifier()
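Note: svm_range_add_notifier_locked() and svm_range_remove_notifier() bridge two units: the mmu_interval_notifier works in byte addresses while prange->start/last are page numbers, hence the << PAGE_SHIFT on insert and >> PAGE_SHIFT in the debug print. The nonzero interval check avoids removing a notifier that was never inserted.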
148 svm_range_dma_map_dev(struct amdgpu_device *adev, struct svm_range *prange, in svm_range_dma_map_dev() argument
153 dma_addr_t *addr = prange->dma_addr[gpuidx]; in svm_range_dma_map_dev()
159 addr = kvcalloc(prange->npages, sizeof(*addr), GFP_KERNEL); in svm_range_dma_map_dev()
162 prange->dma_addr[gpuidx] = addr; in svm_range_dma_map_dev()
173 amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_dma_map_dev()
195 svm_range_dma_map(struct svm_range *prange, unsigned long *bitmap, in svm_range_dma_map() argument
203 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_dma_map()
215 r = svm_range_dma_map_dev(pdd->dev->adev, prange, offset, npages, in svm_range_dma_map()
242 void svm_range_free_dma_mappings(struct svm_range *prange) in svm_range_free_dma_mappings() argument
250 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_free_dma_mappings()
253 dma_addr = prange->dma_addr[gpuidx]; in svm_range_free_dma_mappings()
263 svm_range_dma_unmap(dev, dma_addr, 0, prange->npages); in svm_range_free_dma_mappings()
265 prange->dma_addr[gpuidx] = NULL; in svm_range_free_dma_mappings()
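Note: DMA addresses are kept as one dma_addr_t table per GPU index, prange->npages entries each, allocated lazily on first map (the kvcalloc above) and torn down per GPU in svm_range_free_dma_mappings(). A minimal userspace sketch of that lazy per-device table; the struct and names are illustrative, not the kernel's:

#include <stdlib.h>

#define MAX_GPU_INSTANCE 64

struct dma_tables {
        unsigned long npages;
        unsigned long long *addr[MAX_GPU_INSTANCE]; /* one table per GPU */
};

static unsigned long long *dma_table_get(struct dma_tables *t, int gpuidx)
{
        if (!t->addr[gpuidx])   /* allocated lazily on first use */
                t->addr[gpuidx] = calloc(t->npages,
                                         sizeof(*t->addr[gpuidx]));
        return t->addr[gpuidx];
}

static void dma_tables_free(struct dma_tables *t)
{
        for (int i = 0; i < MAX_GPU_INSTANCE; i++) {
                free(t->addr[i]);       /* real code would dma_unmap first */
                t->addr[i] = NULL;
        }
}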
269 static void svm_range_free(struct svm_range *prange, bool update_mem_usage) in svm_range_free() argument
271 uint64_t size = (prange->last - prange->start + 1) << PAGE_SHIFT; in svm_range_free()
272 struct kfd_process *p = container_of(prange->svms, struct kfd_process, svms); in svm_range_free()
274 pr_debug("svms 0x%p prange 0x%p [0x%lx 0x%lx]\n", prange->svms, prange, in svm_range_free()
275 prange->start, prange->last); in svm_range_free()
277 svm_range_vram_node_free(prange); in svm_range_free()
278 svm_range_free_dma_mappings(prange); in svm_range_free()
281 pr_debug("unreserve prange 0x%p size: 0x%llx\n", prange, size); in svm_range_free()
285 mutex_destroy(&prange->lock); in svm_range_free()
286 mutex_destroy(&prange->migrate_mutex); in svm_range_free()
287 kfree(prange); in svm_range_free()
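Note: teardown order in svm_range_free(): release the VRAM BO (svm_range_vram_node_free), free the per-GPU DMA tables, optionally unreserve the range size from memory accounting (the update_mem_usage flag), then destroy both mutexes and kfree the range.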
306 struct svm_range *prange; in svm_range_new() local
309 prange = kzalloc(sizeof(*prange), GFP_KERNEL); in svm_range_new()
310 if (!prange) in svm_range_new()
318 kfree(prange); in svm_range_new()
321 prange->npages = size; in svm_range_new()
322 prange->svms = svms; in svm_range_new()
323 prange->start = start; in svm_range_new()
324 prange->last = last; in svm_range_new()
325 INIT_LIST_HEAD(&prange->list); in svm_range_new()
326 INIT_LIST_HEAD(&prange->update_list); in svm_range_new()
327 INIT_LIST_HEAD(&prange->svm_bo_list); in svm_range_new()
328 INIT_LIST_HEAD(&prange->deferred_list); in svm_range_new()
329 INIT_LIST_HEAD(&prange->child_list); in svm_range_new()
330 atomic_set(&prange->invalid, 0); in svm_range_new()
331 prange->validate_timestamp = 0; in svm_range_new()
332 mutex_init(&prange->migrate_mutex); in svm_range_new()
333 mutex_init(&prange->lock); in svm_range_new()
336 bitmap_copy(prange->bitmap_access, svms->bitmap_supported, in svm_range_new()
339 svm_range_set_default_attributes(&prange->preferred_loc, in svm_range_new()
340 &prange->prefetch_loc, in svm_range_new()
341 &prange->granularity, &prange->flags); in svm_range_new()
345 return prange; in svm_range_new()
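Note: svm_range_new() is the constructor: zeroed allocation, interval fields (start/last are an inclusive page interval, so npages = last - start + 1), list-head and mutex init, then default attributes. A reduced userspace sketch of the same idiom, with an illustrative struct:

#include <stdlib.h>
#include <pthread.h>

struct range {
        unsigned long start, last;      /* inclusive page interval */
        unsigned long npages;
        pthread_mutex_t lock;
};

static struct range *range_new(unsigned long start, unsigned long last)
{
        struct range *r = calloc(1, sizeof(*r));        /* kzalloc analogue */

        if (!r)
                return NULL;
        r->start = start;
        r->last = last;
        r->npages = last - start + 1;
        pthread_mutex_init(&r->lock, NULL);
        return r;
}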
365 struct svm_range *prange = in svm_range_bo_release() local
371 list_del_init(&prange->svm_bo_list); in svm_range_bo_release()
374 pr_debug("svms 0x%p [0x%lx 0x%lx]\n", prange->svms, in svm_range_bo_release()
375 prange->start, prange->last); in svm_range_bo_release()
376 mutex_lock(&prange->lock); in svm_range_bo_release()
377 prange->svm_bo = NULL; in svm_range_bo_release()
378 mutex_unlock(&prange->lock); in svm_range_bo_release()
426 svm_range_validate_svm_bo(struct amdgpu_device *adev, struct svm_range *prange) in svm_range_validate_svm_bo() argument
430 mutex_lock(&prange->lock); in svm_range_validate_svm_bo()
431 if (!prange->svm_bo) { in svm_range_validate_svm_bo()
432 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
435 if (prange->ttm_res) { in svm_range_validate_svm_bo()
437 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
440 if (svm_bo_ref_unless_zero(prange->svm_bo)) { in svm_range_validate_svm_bo()
446 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_validate_svm_bo()
448 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
450 spin_lock(&prange->svm_bo->list_lock); in svm_range_validate_svm_bo()
451 list_del_init(&prange->svm_bo_list); in svm_range_validate_svm_bo()
452 spin_unlock(&prange->svm_bo->list_lock); in svm_range_validate_svm_bo()
454 svm_range_bo_unref(prange->svm_bo); in svm_range_validate_svm_bo()
457 if (READ_ONCE(prange->svm_bo->evicting)) { in svm_range_validate_svm_bo()
463 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
464 svm_bo = prange->svm_bo; in svm_range_validate_svm_bo()
466 svm_range_bo_unref(prange->svm_bo); in svm_range_validate_svm_bo()
476 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
478 prange->svms, prange->start, prange->last); in svm_range_validate_svm_bo()
480 prange->ttm_res = prange->svm_bo->bo->tbo.resource; in svm_range_validate_svm_bo()
485 mutex_unlock(&prange->lock); in svm_range_validate_svm_bo()
493 while (!list_empty_careful(&prange->svm_bo_list)) in svm_range_validate_svm_bo()
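Note: svm_range_validate_svm_bo() reuses an existing VRAM BO only when it can still take a reference (svm_bo_ref_unless_zero) and the BO is not under eviction (READ_ONCE(->evicting)); a stale or evicting svm_bo is unreferenced and dropped instead, and the list_empty_careful() loop at the end waits for this range to leave the old BO's range list before a fresh BO is allocated.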
515 svm_range_vram_node_new(struct amdgpu_device *adev, struct svm_range *prange, in svm_range_vram_node_new() argument
526 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_vram_node_new()
527 pr_debug("pasid: %x svms 0x%p [0x%lx 0x%lx]\n", p->pasid, prange->svms, in svm_range_vram_node_new()
528 prange->start, prange->last); in svm_range_vram_node_new()
530 if (svm_range_validate_svm_bo(adev, prange)) in svm_range_vram_node_new()
552 bp.size = prange->npages * PAGE_SIZE; in svm_range_vram_node_new()
593 prange->svm_bo = svm_bo; in svm_range_vram_node_new()
594 prange->ttm_res = bo->tbo.resource; in svm_range_vram_node_new()
595 prange->offset = 0; in svm_range_vram_node_new()
598 list_add(&prange->svm_bo_list, &svm_bo->range_list); in svm_range_vram_node_new()
608 prange->ttm_res = NULL; in svm_range_vram_node_new()
613 void svm_range_vram_node_free(struct svm_range *prange) in svm_range_vram_node_free() argument
615 svm_range_bo_unref(prange->svm_bo); in svm_range_vram_node_free()
616 prange->ttm_res = NULL; in svm_range_vram_node_free()
620 svm_range_get_adev_by_id(struct svm_range *prange, uint32_t gpu_id) in svm_range_get_adev_by_id() argument
626 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_get_adev_by_id()
643 svm_range_get_pdd_by_adev(struct svm_range *prange, struct amdgpu_device *adev) in svm_range_get_pdd_by_adev() argument
649 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_get_pdd_by_adev()
719 svm_range_apply_attrs(struct kfd_process *p, struct svm_range *prange, in svm_range_apply_attrs() argument
729 prange->preferred_loc = attrs[i].value; in svm_range_apply_attrs()
732 prange->prefetch_loc = attrs[i].value; in svm_range_apply_attrs()
741 bitmap_clear(prange->bitmap_access, gpuidx, 1); in svm_range_apply_attrs()
742 bitmap_clear(prange->bitmap_aip, gpuidx, 1); in svm_range_apply_attrs()
744 bitmap_set(prange->bitmap_access, gpuidx, 1); in svm_range_apply_attrs()
745 bitmap_clear(prange->bitmap_aip, gpuidx, 1); in svm_range_apply_attrs()
747 bitmap_clear(prange->bitmap_access, gpuidx, 1); in svm_range_apply_attrs()
748 bitmap_set(prange->bitmap_aip, gpuidx, 1); in svm_range_apply_attrs()
753 prange->flags |= attrs[i].value; in svm_range_apply_attrs()
757 prange->flags &= ~attrs[i].value; in svm_range_apply_attrs()
760 prange->granularity = attrs[i].value; in svm_range_apply_attrs()
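Note: per-GPU access in svm_range_apply_attrs() is a tri-state spread over two bitmaps, bitmap_access and bitmap_aip (access in place): access set means normal access, aip set means the GPU maps device memory in place, neither means no access. A sketch of the same encoding using plain 64-bit masks instead of the kernel bitmap API; all names are illustrative:

#include <stdint.h>

enum gpu_access_mode { NO_ACCESS, ACCESS, ACCESS_IN_PLACE };

static void set_gpu_access(uint64_t *access, uint64_t *aip,
                           unsigned int gpuidx, enum gpu_access_mode mode)
{
        uint64_t bit = 1ULL << gpuidx;

        *access &= ~bit;                /* start from "no access" */
        *aip &= ~bit;
        if (mode == ACCESS)
                *access |= bit;
        else if (mode == ACCESS_IN_PLACE)
                *aip |= bit;
}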
769 svm_range_is_same_attrs(struct kfd_process *p, struct svm_range *prange, in svm_range_is_same_attrs() argument
778 if (prange->preferred_loc != attrs[i].value) in svm_range_is_same_attrs()
792 if (test_bit(gpuidx, prange->bitmap_access) || in svm_range_is_same_attrs()
793 test_bit(gpuidx, prange->bitmap_aip)) in svm_range_is_same_attrs()
796 if (!test_bit(gpuidx, prange->bitmap_access)) in svm_range_is_same_attrs()
799 if (!test_bit(gpuidx, prange->bitmap_aip)) in svm_range_is_same_attrs()
804 if ((prange->flags & attrs[i].value) != attrs[i].value) in svm_range_is_same_attrs()
808 if ((prange->flags & attrs[i].value) != 0) in svm_range_is_same_attrs()
812 if (prange->granularity != attrs[i].value) in svm_range_is_same_attrs()
835 struct svm_range *prange; in svm_range_debug_dump() local
840 list_for_each_entry(prange, &svms->list, list) { in svm_range_debug_dump()
842 prange, prange->start, prange->npages, in svm_range_debug_dump()
843 prange->start + prange->npages - 1, in svm_range_debug_dump()
844 prange->actual_loc); in svm_range_debug_dump()
851 prange = container_of(node, struct svm_range, it_node); in svm_range_debug_dump()
853 prange, prange->start, prange->npages, in svm_range_debug_dump()
854 prange->start + prange->npages - 1, in svm_range_debug_dump()
855 prange->actual_loc); in svm_range_debug_dump()
1017 svm_range_split(struct svm_range *prange, uint64_t start, uint64_t last, in svm_range_split() argument
1020 uint64_t old_start = prange->start; in svm_range_split()
1021 uint64_t old_last = prange->last; in svm_range_split()
1025 pr_debug("svms 0x%p [0x%llx 0x%llx] to [0x%llx 0x%llx]\n", prange->svms, in svm_range_split()
1033 svms = prange->svms; in svm_range_split()
1041 r = svm_range_split_adjust(*new, prange, start, last); in svm_range_split()
1053 svm_range_split_tail(struct svm_range *prange, in svm_range_split_tail() argument
1057 int r = svm_range_split(prange, prange->start, new_last, &tail); in svm_range_split_tail()
1065 svm_range_split_head(struct svm_range *prange, in svm_range_split_head() argument
1069 int r = svm_range_split(prange, new_start, prange->last, &head); in svm_range_split_head()
1077 svm_range_add_child(struct svm_range *prange, struct mm_struct *mm, in svm_range_add_child() argument
1081 pchild, pchild->start, pchild->last, prange, op); in svm_range_add_child()
1085 list_add_tail(&pchild->child_list, &prange->child_list); in svm_range_add_child()
1108 struct svm_range *prange) in svm_range_split_by_granularity() argument
1118 size = 1UL << prange->granularity; in svm_range_split_by_granularity()
1123 prange->svms, prange->start, prange->last, start, last, size); in svm_range_split_by_granularity()
1125 if (start > prange->start) { in svm_range_split_by_granularity()
1126 r = svm_range_split(prange, start, prange->last, &head); in svm_range_split_by_granularity()
1132 if (last < prange->last) { in svm_range_split_by_granularity()
1133 r = svm_range_split(prange, prange->start, last, &tail); in svm_range_split_by_granularity()
1140 if (p->xnack_enabled && prange->work_item.op == SVM_OP_ADD_RANGE) { in svm_range_split_by_granularity()
1141 prange->work_item.op = SVM_OP_ADD_RANGE_AND_MAP; in svm_range_split_by_granularity()
1143 prange, prange->start, prange->last, in svm_range_split_by_granularity()
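Note: svm_range_split_by_granularity() keeps only the window of 1UL << prange->granularity pages around the faulting address and splits the remainder off as head/tail child ranges. A sketch of the window arithmetic this implies; the alignment details are assumed, simplified from the excerpt:

static void granularity_window(unsigned long addr, unsigned int granularity,
                               unsigned long range_start,
                               unsigned long range_last,
                               unsigned long *start, unsigned long *last)
{
        unsigned long size = 1UL << granularity;        /* window in pages */

        *start = addr & ~(size - 1);                    /* align down */
        *last = *start + size - 1;
        if (*start < range_start)                       /* clamp to range */
                *start = range_start;
        if (*last > range_last)
                *last = range_last;
}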
1150 svm_range_get_pte_flags(struct amdgpu_device *adev, struct svm_range *prange, in svm_range_get_pte_flags() argument
1154 uint32_t flags = prange->flags; in svm_range_get_pte_flags()
1161 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_get_pte_flags()
1233 svm_range_unmap_from_gpus(struct svm_range *prange, unsigned long start, in svm_range_unmap_from_gpus() argument
1243 if (!prange->mapped_to_gpu) { in svm_range_unmap_from_gpus()
1245 prange, prange->start, prange->last); in svm_range_unmap_from_gpus()
1249 if (prange->start == start && prange->last == last) { in svm_range_unmap_from_gpus()
1250 pr_debug("unmap svms 0x%p prange 0x%p\n", prange->svms, prange); in svm_range_unmap_from_gpus()
1251 prange->mapped_to_gpu = false; in svm_range_unmap_from_gpus()
1254 bitmap_or(bitmap, prange->bitmap_access, prange->bitmap_aip, in svm_range_unmap_from_gpus()
1256 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_unmap_from_gpus()
1289 svm_range_map_to_gpu(struct kfd_process_device *pdd, struct svm_range *prange, in svm_range_map_to_gpu() argument
1302 last_start = prange->start + offset; in svm_range_map_to_gpu()
1304 pr_debug("svms 0x%p [0x%lx 0x%lx] readonly %d\n", prange->svms, in svm_range_map_to_gpu()
1319 last_start, prange->start + i, last_domain ? "GPU" : "CPU"); in svm_range_map_to_gpu()
1321 pte_flags = svm_range_get_pte_flags(adev, prange, last_domain); in svm_range_map_to_gpu()
1326 prange->svms, last_start, prange->start + i, in svm_range_map_to_gpu()
1331 last_start, prange->start + i, in svm_range_map_to_gpu()
1333 (last_start - prange->start) << PAGE_SHIFT, in svm_range_map_to_gpu()
1337 for (j = last_start - prange->start; j <= i; j++) in svm_range_map_to_gpu()
1341 pr_debug("failed %d to map to gpu 0x%lx\n", r, prange->start); in svm_range_map_to_gpu()
1344 last_start = prange->start + i + 1; in svm_range_map_to_gpu()
1350 prange->start); in svm_range_map_to_gpu()
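Note: svm_range_map_to_gpu() batches GPU page-table updates: pages are walked and committed in maximal runs that stay in one domain (VRAM vs system memory), with last_start marking the start of the current run. A minimal sketch of that coalescing loop; the callback and names are illustrative:

#include <stdbool.h>
#include <stddef.h>

static void map_in_runs(const bool *page_is_vram, size_t npages,
                        void (*update)(size_t first, size_t last, bool vram))
{
        size_t run_start = 0;

        for (size_t i = 0; i < npages; i++) {
                bool run_ends = (i + 1 == npages) ||
                                (page_is_vram[i + 1] != page_is_vram[i]);

                if (!run_ends)
                        continue;
                update(run_start, i, page_is_vram[i]); /* one update per run */
                run_start = i + 1;
        }
}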
1362 svm_range_map_to_gpus(struct svm_range *prange, unsigned long offset, in svm_range_map_to_gpus() argument
1373 if (prange->svm_bo && prange->ttm_res) in svm_range_map_to_gpus()
1374 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_map_to_gpus()
1378 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_map_to_gpus()
1397 r = svm_range_map_to_gpu(pdd, prange, offset, npages, readonly, in svm_range_map_to_gpus()
1398 prange->dma_addr[gpuidx], in svm_range_map_to_gpus()
1422 struct svm_range *prange; member
1521 struct svm_range *prange, int32_t gpuidx, in svm_range_validate_and_map() argument
1531 ctx.process = container_of(prange->svms, struct kfd_process, svms); in svm_range_validate_and_map()
1532 ctx.prange = prange; in svm_range_validate_and_map()
1539 bitmap_copy(ctx.bitmap, prange->bitmap_aip, MAX_GPU_INSTANCE); in svm_range_validate_and_map()
1545 if (prange->actual_loc) { in svm_range_validate_and_map()
1547 prange->actual_loc); in svm_range_validate_and_map()
1550 prange->actual_loc); in svm_range_validate_and_map()
1553 if (test_bit(gpuidx, prange->bitmap_access)) in svm_range_validate_and_map()
1557 bitmap_or(ctx.bitmap, prange->bitmap_access, in svm_range_validate_and_map()
1558 prange->bitmap_aip, MAX_GPU_INSTANCE); in svm_range_validate_and_map()
1562 if (!prange->mapped_to_gpu) in svm_range_validate_and_map()
1565 bitmap_copy(ctx.bitmap, prange->bitmap_access, MAX_GPU_INSTANCE); in svm_range_validate_and_map()
1568 if (prange->actual_loc && !prange->ttm_res) { in svm_range_validate_and_map()
1578 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_validate_and_map()
1588 start = prange->start << PAGE_SHIFT; in svm_range_validate_and_map()
1589 end = (prange->last + 1) << PAGE_SHIFT; in svm_range_validate_and_map()
1608 r = amdgpu_hmm_range_get_pages(&prange->notifier, mm, NULL, in svm_range_validate_and_map()
1618 r = svm_range_dma_map(prange, ctx.bitmap, offset, npages, in svm_range_validate_and_map()
1625 svm_range_lock(prange); in svm_range_validate_and_map()
1631 if (!list_empty(&prange->child_list)) { in svm_range_validate_and_map()
1637 r = svm_range_map_to_gpus(prange, offset, npages, readonly, in svm_range_validate_and_map()
1641 svm_range_unlock(prange); in svm_range_validate_and_map()
1647 prange->validated_once = true; in svm_range_validate_and_map()
1648 prange->mapped_to_gpu = true; in svm_range_validate_and_map()
1655 prange->validate_timestamp = ktime_get_boottime(); in svm_range_validate_and_map()
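Note: the shape of svm_range_validate_and_map(): choose the target GPU set (bitmap_aip for a specific gpuidx on an access-in-place fault, bitmap_access | bitmap_aip when updating everything), fetch the CPU pages through amdgpu_hmm_range_get_pages() on the range's notifier, DMA-map them per GPU, then take svm_range_lock() and map; a non-empty child_list aborts the pass, and validated_once, mapped_to_gpu and validate_timestamp are only updated on success.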
1689 struct svm_range *prange; in svm_range_restore_work() local
1719 list_for_each_entry(prange, &svms->list, list) { in svm_range_restore_work()
1720 invalid = atomic_read(&prange->invalid); in svm_range_restore_work()
1725 prange->svms, prange, prange->start, prange->last, in svm_range_restore_work()
1731 mutex_lock(&prange->migrate_mutex); in svm_range_restore_work()
1733 r = svm_range_validate_and_map(mm, prange, MAX_GPU_INSTANCE, in svm_range_restore_work()
1737 prange->start); in svm_range_restore_work()
1739 mutex_unlock(&prange->migrate_mutex); in svm_range_restore_work()
1743 if (atomic_cmpxchg(&prange->invalid, invalid, 0) != invalid) in svm_range_restore_work()
1795 svm_range_evict(struct svm_range *prange, struct mm_struct *mm, in svm_range_evict() argument
1799 struct svm_range_list *svms = prange->svms; in svm_range_evict()
1807 svms, prange->start, prange->last, start, last); in svm_range_evict()
1810 (prange->flags & KFD_IOCTL_SVM_FLAG_GPU_ALWAYS_MAPPED)) { in svm_range_evict()
1812 bool mapped = prange->mapped_to_gpu; in svm_range_evict()
1814 list_for_each_entry(pchild, &prange->child_list, child_list) { in svm_range_evict()
1830 if (prange->start <= last && prange->last >= start) in svm_range_evict()
1831 atomic_inc(&prange->invalid); in svm_range_evict()
1838 prange->svms, prange->start, prange->last); in svm_range_evict()
1858 prange->svms, start, last); in svm_range_evict()
1859 list_for_each_entry(pchild, &prange->child_list, child_list) { in svm_range_evict()
1867 s = max(start, prange->start); in svm_range_evict()
1868 l = min(last, prange->last); in svm_range_evict()
1870 svm_range_unmap_from_gpus(prange, s, l, trigger); in svm_range_evict()
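Note: svm_range_evict() has two paths. With XNACK enabled (and no KFD_IOCTL_SVM_FLAG_GPU_ALWAYS_MAPPED), overlapping parent and child ranges are only marked invalid (atomic_inc(&prange->invalid)) and a delayed restore worker revalidates them later. Otherwise user queues must be evicted and the overlap, clamped to [max(start, prange->start), min(last, prange->last)], is unmapped from the GPUs immediately.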
1924 struct svm_range *prange; in svm_range_split_new() local
1933 prange = svm_range_new(svms, start, l, true); in svm_range_split_new()
1934 if (!prange) in svm_range_split_new()
1936 list_add(&prange->list, insert_list); in svm_range_split_new()
1937 list_add(&prange->update_list, update_list); in svm_range_split_new()
1982 struct svm_range *prange; in svm_range_add() local
2002 prange = container_of(node, struct svm_range, it_node); in svm_range_add()
2006 if (svm_range_is_same_attrs(p, prange, nattr, attrs)) { in svm_range_add()
2013 struct svm_range *old = prange; in svm_range_add()
2015 prange = svm_range_clone(old); in svm_range_add()
2016 if (!prange) { in svm_range_add()
2022 list_add(&prange->list, insert_list); in svm_range_add()
2023 list_add(&prange->update_list, update_list); in svm_range_add()
2027 r = svm_range_split_head(prange, start, in svm_range_add()
2034 r = svm_range_split_tail(prange, last, in svm_range_add()
2043 list_add(&prange->update_list, update_list); in svm_range_add()
2067 list_for_each_entry_safe(prange, tmp, insert_list, list) in svm_range_add()
2068 svm_range_free(prange, false); in svm_range_add()
2069 list_for_each_entry_safe(prange, tmp, &new_list, list) in svm_range_add()
2070 svm_range_free(prange, true); in svm_range_add()
2080 struct svm_range *prange) in svm_range_update_notifier_and_interval_tree() argument
2085 start = prange->notifier.interval_tree.start >> PAGE_SHIFT; in svm_range_update_notifier_and_interval_tree()
2086 last = prange->notifier.interval_tree.last >> PAGE_SHIFT; in svm_range_update_notifier_and_interval_tree()
2088 if (prange->start == start && prange->last == last) in svm_range_update_notifier_and_interval_tree()
2092 prange->svms, prange, start, last, prange->start, in svm_range_update_notifier_and_interval_tree()
2093 prange->last); in svm_range_update_notifier_and_interval_tree()
2096 interval_tree_remove(&prange->it_node, &prange->svms->objects); in svm_range_update_notifier_and_interval_tree()
2097 svm_range_remove_notifier(prange); in svm_range_update_notifier_and_interval_tree()
2099 prange->it_node.start = prange->start; in svm_range_update_notifier_and_interval_tree()
2100 prange->it_node.last = prange->last; in svm_range_update_notifier_and_interval_tree()
2102 interval_tree_insert(&prange->it_node, &prange->svms->objects); in svm_range_update_notifier_and_interval_tree()
2103 svm_range_add_notifier_locked(mm, prange); in svm_range_update_notifier_and_interval_tree()
2107 svm_range_handle_list_op(struct svm_range_list *svms, struct svm_range *prange, in svm_range_handle_list_op() argument
2110 switch (prange->work_item.op) { in svm_range_handle_list_op()
2113 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
2117 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
2118 svm_range_unlink(prange); in svm_range_handle_list_op()
2119 svm_range_remove_notifier(prange); in svm_range_handle_list_op()
2120 svm_range_free(prange, true); in svm_range_handle_list_op()
2124 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
2125 svm_range_update_notifier_and_interval_tree(mm, prange); in svm_range_handle_list_op()
2129 svms, prange, prange->start, prange->last); in svm_range_handle_list_op()
2130 svm_range_update_notifier_and_interval_tree(mm, prange); in svm_range_handle_list_op()
2134 pr_debug("add 0x%p prange 0x%p [0x%lx 0x%lx]\n", svms, prange, in svm_range_handle_list_op()
2135 prange->start, prange->last); in svm_range_handle_list_op()
2136 svm_range_add_to_svms(prange); in svm_range_handle_list_op()
2137 svm_range_add_notifier_locked(mm, prange); in svm_range_handle_list_op()
2141 prange, prange->start, prange->last); in svm_range_handle_list_op()
2142 svm_range_add_to_svms(prange); in svm_range_handle_list_op()
2143 svm_range_add_notifier_locked(mm, prange); in svm_range_handle_list_op()
2147 WARN_ONCE(1, "Unknown prange 0x%p work op %d\n", prange, in svm_range_handle_list_op()
2148 prange->work_item.op); in svm_range_handle_list_op()
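Note: the deferred work ops dispatched above: SVM_OP_UNMAP_RANGE unlinks the range, removes its notifier and frees it; SVM_OP_UPDATE_RANGE_NOTIFIER and SVM_OP_UPDATE_RANGE_NOTIFIER_AND_MAP re-sync the interval tree and notifier after a split; SVM_OP_ADD_RANGE and SVM_OP_ADD_RANGE_AND_MAP insert a new (child) range into svms and register its notifier. Unknown ops trip the WARN_ONCE.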
2184 struct svm_range *prange; in svm_range_deferred_list_work() local
2192 prange = list_first_entry(&svms->deferred_range_list, in svm_range_deferred_list_work()
2196 pr_debug("prange 0x%p [0x%lx 0x%lx] op %d\n", prange, in svm_range_deferred_list_work()
2197 prange->start, prange->last, prange->work_item.op); in svm_range_deferred_list_work()
2199 mm = prange->work_item.mm; in svm_range_deferred_list_work()
2221 list_del_init(&prange->deferred_list); in svm_range_deferred_list_work()
2225 mutex_lock(&prange->migrate_mutex); in svm_range_deferred_list_work()
2226 while (!list_empty(&prange->child_list)) { in svm_range_deferred_list_work()
2229 pchild = list_first_entry(&prange->child_list, in svm_range_deferred_list_work()
2236 mutex_unlock(&prange->migrate_mutex); in svm_range_deferred_list_work()
2238 svm_range_handle_list_op(svms, prange, mm); in svm_range_deferred_list_work()
2252 svm_range_add_list_work(struct svm_range_list *svms, struct svm_range *prange, in svm_range_add_list_work() argument
2257 if (!list_empty(&prange->deferred_list)) { in svm_range_add_list_work()
2258 pr_debug("update exist prange 0x%p work op %d\n", prange, op); in svm_range_add_list_work()
2259 WARN_ONCE(prange->work_item.mm != mm, "unmatch mm\n"); in svm_range_add_list_work()
2261 prange->work_item.op != SVM_OP_UNMAP_RANGE) in svm_range_add_list_work()
2262 prange->work_item.op = op; in svm_range_add_list_work()
2264 prange->work_item.op = op; in svm_range_add_list_work()
2268 prange->work_item.mm = mm; in svm_range_add_list_work()
2269 list_add_tail(&prange->deferred_list, in svm_range_add_list_work()
2270 &prange->svms->deferred_range_list); in svm_range_add_list_work()
2272 prange, prange->start, prange->last, op); in svm_range_add_list_work()
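Note: when the range is already queued, the pending op is only overwritten while it is not SVM_OP_UNMAP_RANGE: an unmap is sticky, since the range will be freed and nothing may downgrade that. A sketch of the merge rule with illustrative names:

enum work_op { WORK_NONE, WORK_UPDATE, WORK_UNMAP };

static void merge_work_op(enum work_op *pending, enum work_op op)
{
        if (*pending != WORK_UNMAP)     /* unmap, once queued, always wins */
                *pending = op;
}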
2287 struct svm_range *prange, unsigned long start, in svm_range_unmap_split() argument
2293 if (prange->work_item.op == SVM_OP_UNMAP_RANGE) { in svm_range_unmap_split()
2294 pr_debug("prange 0x%p [0x%lx 0x%lx] is already freed\n", prange, in svm_range_unmap_split()
2295 prange->start, prange->last); in svm_range_unmap_split()
2298 if (start > prange->last || last < prange->start) in svm_range_unmap_split()
2301 head = tail = prange; in svm_range_unmap_split()
2302 if (start > prange->start) in svm_range_unmap_split()
2303 svm_range_split(prange, prange->start, start - 1, &tail); in svm_range_unmap_split()
2307 if (head != prange && tail != prange) { in svm_range_unmap_split()
2310 } else if (tail != prange) { in svm_range_unmap_split()
2312 } else if (head != prange) { in svm_range_unmap_split()
2314 } else if (parent != prange) { in svm_range_unmap_split()
2315 prange->work_item.op = SVM_OP_UNMAP_RANGE; in svm_range_unmap_split()
2320 svm_range_unmap_from_cpu(struct mm_struct *mm, struct svm_range *prange, in svm_range_unmap_from_cpu() argument
2336 prange, prange->start, prange->last, start, last); in svm_range_unmap_from_cpu()
2344 unmap_parent = start <= prange->start && last >= prange->last; in svm_range_unmap_from_cpu()
2346 list_for_each_entry(pchild, &prange->child_list, child_list) { in svm_range_unmap_from_cpu()
2352 svm_range_unmap_split(mm, prange, pchild, start, last); in svm_range_unmap_from_cpu()
2355 s = max(start, prange->start); in svm_range_unmap_from_cpu()
2356 l = min(last, prange->last); in svm_range_unmap_from_cpu()
2358 svm_range_unmap_from_gpus(prange, s, l, trigger); in svm_range_unmap_from_cpu()
2359 svm_range_unmap_split(mm, prange, prange, start, last); in svm_range_unmap_from_cpu()
2362 svm_range_add_list_work(svms, prange, mm, SVM_OP_UNMAP_RANGE); in svm_range_unmap_from_cpu()
2364 svm_range_add_list_work(svms, prange, mm, in svm_range_unmap_from_cpu()
2396 struct svm_range *prange; in svm_range_cpu_invalidate_pagetables() local
2415 prange = container_of(mni, struct svm_range, notifier); in svm_range_cpu_invalidate_pagetables()
2417 svm_range_lock(prange); in svm_range_cpu_invalidate_pagetables()
2422 svm_range_unmap_from_cpu(mni->mm, prange, start, last); in svm_range_cpu_invalidate_pagetables()
2425 svm_range_evict(prange, mni->mm, start, last, range->event); in svm_range_cpu_invalidate_pagetables()
2429 svm_range_unlock(prange); in svm_range_cpu_invalidate_pagetables()
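Note: svm_range_cpu_invalidate_pagetables() is the mmu_interval_notifier callback: an MMU_NOTIFY_UNMAP event (for example munmap) goes through svm_range_unmap_from_cpu(), every other CPU page-table invalidation through svm_range_evict(), both with the range locked and clamped to the invalidated [start, last].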
2450 struct svm_range *prange; in svm_range_from_addr() local
2457 prange = container_of(node, struct svm_range, it_node); in svm_range_from_addr()
2459 addr, prange->start, prange->last, node->start, node->last); in svm_range_from_addr()
2461 if (addr >= prange->start && addr <= prange->last) { in svm_range_from_addr()
2463 *parent = prange; in svm_range_from_addr()
2464 return prange; in svm_range_from_addr()
2466 list_for_each_entry(pchild, &prange->child_list, child_list) in svm_range_from_addr()
2471 *parent = prange; in svm_range_from_addr()
2500 svm_range_best_restore_location(struct svm_range *prange, in svm_range_best_restore_location() argument
2509 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_best_restore_location()
2517 if (prange->preferred_loc == gpuid || in svm_range_best_restore_location()
2518 prange->preferred_loc == KFD_IOCTL_SVM_LOCATION_SYSMEM) { in svm_range_best_restore_location()
2519 return prange->preferred_loc; in svm_range_best_restore_location()
2520 } else if (prange->preferred_loc != KFD_IOCTL_SVM_LOCATION_UNDEFINED) { in svm_range_best_restore_location()
2521 preferred_adev = svm_range_get_adev_by_id(prange, in svm_range_best_restore_location()
2522 prange->preferred_loc); in svm_range_best_restore_location()
2524 return prange->preferred_loc; in svm_range_best_restore_location()
2528 if (test_bit(*gpuidx, prange->bitmap_access)) in svm_range_best_restore_location()
2531 if (test_bit(*gpuidx, prange->bitmap_aip)) { in svm_range_best_restore_location()
2532 if (!prange->actual_loc) in svm_range_best_restore_location()
2535 bo_adev = svm_range_get_adev_by_id(prange, prange->actual_loc); in svm_range_best_restore_location()
2537 return prange->actual_loc; in svm_range_best_restore_location()
2658 struct svm_range *prange = NULL; in svm_range_create_unregistered_range() local
2683 prange = svm_range_new(&p->svms, start, last, true); in svm_range_create_unregistered_range()
2684 if (!prange) { in svm_range_create_unregistered_range()
2690 svm_range_free(prange, true); in svm_range_create_unregistered_range()
2695 prange->preferred_loc = KFD_IOCTL_SVM_LOCATION_SYSMEM; in svm_range_create_unregistered_range()
2697 svm_range_add_to_svms(prange); in svm_range_create_unregistered_range()
2698 svm_range_add_notifier_locked(mm, prange); in svm_range_create_unregistered_range()
2700 return prange; in svm_range_create_unregistered_range()
2715 static bool svm_range_skip_recover(struct svm_range *prange) in svm_range_skip_recover() argument
2717 struct svm_range_list *svms = prange->svms; in svm_range_skip_recover()
2720 if (list_empty(&prange->deferred_list) && in svm_range_skip_recover()
2721 list_empty(&prange->child_list)) { in svm_range_skip_recover()
2727 if (prange->work_item.op == SVM_OP_UNMAP_RANGE) { in svm_range_skip_recover()
2729 svms, prange, prange->start, prange->last); in svm_range_skip_recover()
2732 if (prange->work_item.op == SVM_OP_ADD_RANGE_AND_MAP || in svm_range_skip_recover()
2733 prange->work_item.op == SVM_OP_ADD_RANGE) { in svm_range_skip_recover()
2735 svms, prange, prange->start, prange->last); in svm_range_skip_recover()
2787 struct svm_range *prange; in svm_range_restore_pages() local
2836 prange = svm_range_from_addr(svms, addr, NULL); in svm_range_restore_pages()
2837 if (!prange) { in svm_range_restore_pages()
2851 prange = svm_range_create_unregistered_range(adev, p, mm, addr); in svm_range_restore_pages()
2852 if (!prange) { in svm_range_restore_pages()
2863 mutex_lock(&prange->migrate_mutex); in svm_range_restore_pages()
2865 if (svm_range_skip_recover(prange)) { in svm_range_restore_pages()
2872 if (ktime_before(timestamp, ktime_add_ns(prange->validate_timestamp, in svm_range_restore_pages()
2875 svms, prange->start, prange->last); in svm_range_restore_pages()
2897 best_loc = svm_range_best_restore_location(prange, adev, &gpuidx); in svm_range_restore_pages()
2900 svms, prange->start, prange->last); in svm_range_restore_pages()
2906 svms, prange->start, prange->last, best_loc, in svm_range_restore_pages()
2907 prange->actual_loc); in svm_range_restore_pages()
2912 if (prange->actual_loc != best_loc) { in svm_range_restore_pages()
2915 r = svm_migrate_to_vram(prange, best_loc, mm, in svm_range_restore_pages()
2923 if (prange->actual_loc) in svm_range_restore_pages()
2924 r = svm_migrate_vram_to_ram(prange, mm, in svm_range_restore_pages()
2931 r = svm_migrate_vram_to_ram(prange, mm, in svm_range_restore_pages()
2937 r, svms, prange->start, prange->last); in svm_range_restore_pages()
2942 r = svm_range_validate_and_map(mm, prange, gpuidx, false, false, false); in svm_range_restore_pages()
2945 r, svms, prange->start, prange->last); in svm_range_restore_pages()
2951 mutex_unlock(&prange->migrate_mutex); in svm_range_restore_pages()
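Note: GPU fault recovery in svm_range_restore_pages(): look up the faulting address (creating an unregistered range when the VMA exists but was never registered), bail out early if the range is about to change (svm_range_skip_recover) or was validated within the debounce interval, pick the best restore location, migrate between RAM and VRAM as needed (falling back to system memory when a VRAM migration fails), then validate and map for the faulting GPU, all under migrate_mutex.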
2973 struct svm_range *prange, *pchild; in svm_range_switch_xnack_reserve_mem() local
2982 list_for_each_entry(prange, &p->svms.list, list) { in svm_range_switch_xnack_reserve_mem()
2983 svm_range_lock(prange); in svm_range_switch_xnack_reserve_mem()
2984 list_for_each_entry(pchild, &prange->child_list, child_list) { in svm_range_switch_xnack_reserve_mem()
2998 size = (prange->last - prange->start + 1) << PAGE_SHIFT; in svm_range_switch_xnack_reserve_mem()
3010 svm_range_unlock(prange); in svm_range_switch_xnack_reserve_mem()
3030 struct svm_range *prange; in svm_range_list_fini() local
3047 list_for_each_entry_safe(prange, next, &p->svms.list, list) { in svm_range_list_fini()
3048 svm_range_unlink(prange); in svm_range_list_fini()
3049 svm_range_remove_notifier(prange); in svm_range_list_fini()
3050 svm_range_free(prange, true); in svm_range_list_fini()
3202 svm_range_best_prefetch_location(struct svm_range *prange) in svm_range_best_prefetch_location() argument
3205 uint32_t best_loc = prange->prefetch_loc; in svm_range_best_prefetch_location()
3211 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_best_prefetch_location()
3216 bo_adev = svm_range_get_adev_by_id(prange, best_loc); in svm_range_best_prefetch_location()
3224 bitmap_copy(bitmap, prange->bitmap_aip, MAX_GPU_INSTANCE); in svm_range_best_prefetch_location()
3226 bitmap_or(bitmap, prange->bitmap_access, prange->bitmap_aip, in svm_range_best_prefetch_location()
3247 p->xnack_enabled, &p->svms, prange->start, prange->last, in svm_range_best_prefetch_location()
3278 svm_range_trigger_migration(struct mm_struct *mm, struct svm_range *prange, in svm_range_trigger_migration() argument
3285 best_loc = svm_range_best_prefetch_location(prange); in svm_range_trigger_migration()
3288 best_loc == prange->actual_loc) in svm_range_trigger_migration()
3292 r = svm_migrate_vram_to_ram(prange, mm, in svm_range_trigger_migration()
3298 r = svm_migrate_to_vram(prange, best_loc, mm, KFD_MIGRATE_TRIGGER_PREFETCH); in svm_range_trigger_migration()
3340 struct svm_range *prange = in svm_range_evict_svm_bo_worker() local
3345 list_del_init(&prange->svm_bo_list); in svm_range_evict_svm_bo_worker()
3348 pr_debug("svms 0x%p [0x%lx 0x%lx]\n", prange->svms, in svm_range_evict_svm_bo_worker()
3349 prange->start, prange->last); in svm_range_evict_svm_bo_worker()
3351 mutex_lock(&prange->migrate_mutex); in svm_range_evict_svm_bo_worker()
3353 r = svm_migrate_vram_to_ram(prange, mm, in svm_range_evict_svm_bo_worker()
3355 } while (!r && prange->actual_loc && --retries); in svm_range_evict_svm_bo_worker()
3357 if (!r && prange->actual_loc) in svm_range_evict_svm_bo_worker()
3360 if (!prange->actual_loc) { in svm_range_evict_svm_bo_worker()
3361 mutex_lock(&prange->lock); in svm_range_evict_svm_bo_worker()
3362 prange->svm_bo = NULL; in svm_range_evict_svm_bo_worker()
3363 mutex_unlock(&prange->lock); in svm_range_evict_svm_bo_worker()
3365 mutex_unlock(&prange->migrate_mutex); in svm_range_evict_svm_bo_worker()
3392 struct svm_range *prange; in svm_range_set_attr() local
3429 list_for_each_entry_safe(prange, next, &insert_list, list) { in svm_range_set_attr()
3430 svm_range_add_to_svms(prange); in svm_range_set_attr()
3431 svm_range_add_notifier_locked(mm, prange); in svm_range_set_attr()
3433 list_for_each_entry(prange, &update_list, update_list) { in svm_range_set_attr()
3434 svm_range_apply_attrs(p, prange, nattr, attrs, &update_mapping); in svm_range_set_attr()
3437 list_for_each_entry_safe(prange, next, &remove_list, update_list) { in svm_range_set_attr()
3439 prange->svms, prange, prange->start, in svm_range_set_attr()
3440 prange->last); in svm_range_set_attr()
3441 svm_range_unlink(prange); in svm_range_set_attr()
3442 svm_range_remove_notifier(prange); in svm_range_set_attr()
3443 svm_range_free(prange, false); in svm_range_set_attr()
3452 list_for_each_entry(prange, &update_list, update_list) { in svm_range_set_attr()
3455 mutex_lock(&prange->migrate_mutex); in svm_range_set_attr()
3457 r = svm_range_trigger_migration(mm, prange, &migrated); in svm_range_set_attr()
3462 (prange->flags & KFD_IOCTL_SVM_FLAG_GPU_ALWAYS_MAPPED)) && in svm_range_set_attr()
3463 prange->mapped_to_gpu) { in svm_range_set_attr()
3465 mutex_unlock(&prange->migrate_mutex); in svm_range_set_attr()
3470 mutex_unlock(&prange->migrate_mutex); in svm_range_set_attr()
3474 flush_tlb = !migrated && update_mapping && prange->mapped_to_gpu; in svm_range_set_attr()
3476 r = svm_range_validate_and_map(mm, prange, MAX_GPU_INSTANCE, in svm_range_set_attr()
3482 mutex_unlock(&prange->migrate_mutex); in svm_range_set_attr()
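Note: svm_range_set_attr() flow: svm_range_add() builds insert/update/remove lists (cloning and splitting overlapped ranges), new ranges are linked into svms and get notifiers, obsolete clones are unlinked and freed, then every updated range is optionally migrated by svm_range_trigger_migration() and revalidated/mapped, flushing TLBs only when an existing mapping changed without a migration.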
3516 struct svm_range *prange; in svm_range_get_attr() local
3595 prange = container_of(node, struct svm_range, it_node); in svm_range_get_attr()
3599 if (prange->preferred_loc == in svm_range_get_attr()
3602 location != prange->preferred_loc)) { in svm_range_get_attr()
3606 location = prange->preferred_loc; in svm_range_get_attr()
3610 if (prange->prefetch_loc == in svm_range_get_attr()
3613 prefetch_loc != prange->prefetch_loc)) { in svm_range_get_attr()
3617 prefetch_loc = prange->prefetch_loc; in svm_range_get_attr()
3622 prange->bitmap_access, MAX_GPU_INSTANCE); in svm_range_get_attr()
3624 prange->bitmap_aip, MAX_GPU_INSTANCE); in svm_range_get_attr()
3627 flags_and &= prange->flags; in svm_range_get_attr()
3628 flags_or |= prange->flags; in svm_range_get_attr()
3631 if (get_granularity && prange->granularity < granularity) in svm_range_get_attr()
3632 granularity = prange->granularity; in svm_range_get_attr()
3840 struct svm_range *prange; in svm_range_get_info() local
3850 list_for_each_entry(prange, &svms->list, list) { in svm_range_get_info()
3852 prange, prange->start, prange->npages, in svm_range_get_info()
3853 prange->start + prange->npages - 1); in svm_range_get_info()
3902 struct svm_range *prange; in kfd_criu_checkpoint_svm() local
3946 list_for_each_entry(prange, &svms->list, list) { in kfd_criu_checkpoint_svm()
3949 svm_priv->start_addr = prange->start; in kfd_criu_checkpoint_svm()
3950 svm_priv->size = prange->npages; in kfd_criu_checkpoint_svm()
3953 prange, prange->start, prange->npages, in kfd_criu_checkpoint_svm()
3954 prange->start + prange->npages - 1, in kfd_criu_checkpoint_svm()
3955 prange->npages * PAGE_SIZE); in kfd_criu_checkpoint_svm()