Lines Matching refs:bo_va

1007 static void amdgpu_vm_bo_get_memory(struct amdgpu_bo_va *bo_va,  in amdgpu_vm_bo_get_memory()  argument
1010 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_get_memory()
1011 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_get_memory()
1032 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
1035 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) in amdgpu_vm_get_memory()
1036 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1038 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) in amdgpu_vm_get_memory()
1039 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1041 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) in amdgpu_vm_get_memory()
1042 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1044 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) in amdgpu_vm_get_memory()
1045 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1047 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) in amdgpu_vm_get_memory()
1048 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
1050 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) in amdgpu_vm_get_memory()
1051 amdgpu_vm_bo_get_memory(bo_va, stats); in amdgpu_vm_get_memory()
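
The amdgpu_vm_get_memory() hits above all follow one pattern: walk every per-VM status list (idle, evicted, relocated, moved, invalidated, done) and let amdgpu_vm_bo_get_memory() accumulate the stats of each bo_va found there. A minimal sketch of that shape, assuming a struct amdgpu_mem_stats type for the stats parameter (its type is not visible in the hits) and leaving out the status-list locking the real function needs:

static void vm_get_memory_sketch(struct amdgpu_vm *vm,
                                 struct amdgpu_mem_stats *stats)
{
        struct amdgpu_bo_va *bo_va, *tmp;

        /* the _safe iterator matches the hits above: it tolerates the
         * current entry being taken off the list while we walk it */
        list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
        list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
        list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
        list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
        list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
        list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status)
                amdgpu_vm_bo_get_memory(bo_va, stats);
}
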
1067 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1070 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1071 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1089 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1123 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1125 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1127 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1129 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1130 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1133 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1167 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1169 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1171 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1174 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1175 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1176 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1179 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
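
The amdgpu_vm_bo_update() hits show the two-list bookkeeping behind per-BO page-table updates: mappings that still need work sit on bo_va->invalids, mappings already reflected in the page tables sit on bo_va->valids. A sketch of just that bookkeeping; the helper name and its reduced signature are invented for illustration, and the PTE writes, flag computation and error paths are left as comments:

static void bo_va_update_lists_sketch(struct amdgpu_bo_va *bo_va, bool clear)
{
        struct amdgpu_bo_va_mapping *mapping;

        if (!clear && bo_va->base.moved) {
                /* the backing BO moved, so previously valid mappings
                 * have to be rewritten as well */
                list_splice_init(&bo_va->valids, &bo_va->invalids);
        } else if (bo_va->cleared != clear) {
                /* switching between mapped and cleared PTEs likewise
                 * invalidates the whole valid set */
                list_splice_init(&bo_va->valids, &bo_va->invalids);
        }

        list_for_each_entry(mapping, &bo_va->invalids, list) {
                /* ... write the page-table entries for this mapping ... */
        }

        /* on success the just-updated mappings become the new valid set */
        list_splice_init(&bo_va->invalids, &bo_va->valids);
        bo_va->cleared = clear;
        bo_va->base.moved = false;
}
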
1383 struct amdgpu_bo_va *bo_va; in amdgpu_vm_handle_moved() local
1390 bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1395 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
1402 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1404 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1414 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
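
In amdgpu_vm_handle_moved() the hits show bo_va entries being popped off vm->moved and vm->invalidated and pushed through amdgpu_vm_bo_update(). A rough sketch of those two loops; the status-list locking and the reservation try-lock that decides clear are assumptions summarized in comments, and the loops terminate because amdgpu_vm_bo_update() moves each entry to another status list:

static int handle_moved_sketch(struct amdgpu_device *adev,
                               struct amdgpu_vm *vm)
{
        struct amdgpu_bo_va *bo_va;
        int r;

        /* per-VM BOs share the VM's reservation, so their mappings can
         * always be updated for real (clear == false) */
        while (!list_empty(&vm->moved)) {
                bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va,
                                         base.vm_status);
                r = amdgpu_vm_bo_update(adev, bo_va, false);
                if (r)
                        return r;
        }

        /* invalidated BOs carry their own reservation object; when it
         * cannot be locked the mappings are cleared instead of updated */
        while (!list_empty(&vm->invalidated)) {
                bool clear = false;     /* assumed: result of the resv try-lock */

                bo_va = list_first_entry(&vm->invalidated,
                                         struct amdgpu_bo_va, base.vm_status);
                r = amdgpu_vm_bo_update(adev, bo_va, clear);
                if (r)
                        return r;
        }

        return 0;
}
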
1446 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
1448 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
1449 if (bo_va == NULL) { in amdgpu_vm_bo_add()
1452 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
1454 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
1455 bo_va->last_pt_update = dma_fence_get_stub(); in amdgpu_vm_bo_add()
1456 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
1457 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
1460 return bo_va; in amdgpu_vm_bo_add()
1464 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
1469 return bo_va; in amdgpu_vm_bo_add()
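
The amdgpu_vm_bo_add() hits show how a bo_va starts life: allocate it, hook its embedded base into the VM/BO pair, and begin with a reference count of one, a stub fence and empty valids/invalids lists. A sketch assembled from those lines, with the error path and the XGMI detection reduced to comments (the sketch's reduced signature is an assumption):

static struct amdgpu_bo_va *bo_add_sketch(struct amdgpu_vm *vm,
                                          struct amdgpu_bo *bo)
{
        struct amdgpu_bo_va *bo_va;

        bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
        if (bo_va == NULL)
                return NULL;    /* allocation failed */

        /* link the embedded base into both the VM and (if any) the BO */
        amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

        bo_va->ref_count = 1;
        bo_va->last_pt_update = dma_fence_get_stub();
        INIT_LIST_HEAD(&bo_va->valids);
        INIT_LIST_HEAD(&bo_va->invalids);

        /* per the is_xgmi hit, the real function also marks the bo_va
         * when the BO lives on another device in the same XGMI hive */
        return bo_va;
}
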
1483 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
1486 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
1487 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
1489 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
1490 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
1497 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
1498 amdgpu_vm_bo_moved(&bo_va->base); in amdgpu_vm_bo_insert_map()
1500 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
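
amdgpu_vm_bo_insert_map() is where a new mapping gets attached: it points the mapping back at its bo_va, queues it on bo_va->invalids so the next update picks it up, and, for a per-VM BO that is not already flagged, moves the bo_va into the moved state. A compact sketch; the interval-tree insert is a comment and bo_uses_vm_resv is a stand-in flag for the reservation check, of which the hits only show the tail:

static void bo_insert_map_sketch(struct amdgpu_bo_va *bo_va,
                                 struct amdgpu_bo_va_mapping *mapping,
                                 bool bo_uses_vm_resv)
{
        mapping->bo_va = bo_va;
        list_add(&mapping->list, &bo_va->invalids);
        /* ... the real code also inserts the mapping into the VM's VA
         * interval tree here ... */

        /* a per-VM BO not yet on a dirty list is marked moved so the
         * next update rewrites its mappings */
        if (bo_uses_vm_resv && !bo_va->base.moved)
                amdgpu_vm_bo_moved(&bo_va->base);

        trace_amdgpu_vm_bo_map(bo_va, mapping);
}
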
1521 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
1526 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
1527 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
1563 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
1587 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
1592 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
1613 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
1627 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
1647 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
1651 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
1656 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
1661 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
1664 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
1669 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
1675 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
1676 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
1682 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
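
amdgpu_vm_bo_unmap() looks a mapping up by its start address, first among the valid mappings and then among the invalid ones, before detaching it. The search-and-detach skeleton below assumes the mapping->start == saddr comparison from the real function (the hits only show the loops and the end-of-list tests) and reduces the freed-list handling to comments:

static int bo_unmap_sketch(struct amdgpu_bo_va *bo_va, uint64_t saddr)
{
        struct amdgpu_bo_va_mapping *mapping;
        bool valid = true;

        list_for_each_entry(mapping, &bo_va->valids, list) {
                if (mapping->start == saddr)
                        break;
        }
        if (&mapping->list == &bo_va->valids) {
                /* not among the valid mappings, try the invalid ones */
                valid = false;
                list_for_each_entry(mapping, &bo_va->invalids, list) {
                        if (mapping->start == saddr)
                                break;
                }
                if (&mapping->list == &bo_va->invalids)
                        return -ENOENT;
        }

        list_del(&mapping->list);
        mapping->bo_va = NULL;
        trace_amdgpu_vm_bo_unmap(bo_va, mapping);

        if (valid) {
                /* PTEs exist: the real code queues the mapping on the
                 * VM's freed list together with bo_va->last_pt_update */
        } else {
                /* never written to the page tables: freed right away */
        }

        return 0;
}
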
1734 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1735 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1745 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1746 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1765 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
1772 struct amdgpu_bo *bo = before->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings()
1779 !before->bo_va->base.moved) in amdgpu_vm_bo_clear_mappings()
1780 amdgpu_vm_bo_moved(&before->bo_va->base); in amdgpu_vm_bo_clear_mappings()
1787 struct amdgpu_bo *bo = after->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings()
1794 !after->bo_va->base.moved) in amdgpu_vm_bo_clear_mappings()
1795 amdgpu_vm_bo_moved(&after->bo_va->base); in amdgpu_vm_bo_clear_mappings()
1838 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
1841 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
1862 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_del() argument
1865 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_del()
1866 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_del()
1876 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_del()
1878 if (*base != &bo_va->base) in amdgpu_vm_bo_del()
1881 *base = bo_va->base.next; in amdgpu_vm_bo_del()
1887 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_del()
1890 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_del()
1893 mapping->bo_va = NULL; in amdgpu_vm_bo_del()
1894 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_del()
1897 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_del()
1901 bo_va->last_pt_update); in amdgpu_vm_bo_del()
1904 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_del()
1906 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_del()
1909 kfree(bo_va); in amdgpu_vm_bo_del()
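
amdgpu_vm_bo_del() undoes all of the above: unhook the bo_va from the BO's singly linked vm_bo chain, drop it from whatever status list it is on, detach every remaining mapping, then release the fence and free the structure. A condensed sketch following the hits; locking, interval-tree removal, the freed-list handling and the XGMI teardown are reduced to comments:

static void bo_del_sketch(struct amdgpu_bo_va *bo_va)
{
        struct amdgpu_bo_va_mapping *mapping, *next;
        struct amdgpu_bo *bo = bo_va->base.bo;
        struct amdgpu_vm_bo_base **base;

        if (bo) {
                /* walk the BO's singly linked vm_bo chain and unhook
                 * exactly this bo_va->base from it */
                for (base = &bo->vm_bo; *base; base = &(*base)->next) {
                        if (*base != &bo_va->base)
                                continue;
                        *base = bo_va->base.next;
                        break;
                }
        }

        /* remove the bo_va from its current per-VM status list */
        list_del(&bo_va->base.vm_status);

        list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
                list_del(&mapping->list);
                mapping->bo_va = NULL;
                trace_amdgpu_vm_bo_unmap(bo_va, mapping);
                /* the real code queues these on the VM's freed list so
                 * their PTEs are cleared on the next update */
        }
        list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
                list_del(&mapping->list);
                /* invalid mappings never reached the page tables and are
                 * freed directly, passing along bo_va->last_pt_update */
        }

        dma_fence_put(bo_va->last_pt_update);
        kfree(bo_va);
}
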
2652 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_debugfs_vm_bo_info() local
2669 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2670 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2672 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2678 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2679 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2681 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2687 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2688 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2690 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2696 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2697 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2699 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2705 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2706 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2708 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2714 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2715 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2717 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()