Lines Matching refs:mgr (references to the identifier mgr in the amdgpu VRAM manager, drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c)

48 to_amdgpu_device(struct amdgpu_vram_mgr *mgr)  in to_amdgpu_device()  argument
50 return container_of(mgr, struct amdgpu_device, mman.vram_mgr); in to_amdgpu_device()
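
The helper matched at lines 48-50 is a plain container_of() back-pointer. A reconstruction from the two matched lines; only the static inline qualifier and the missing return-type line are filled in from context:

static inline struct amdgpu_device *
to_amdgpu_device(struct amdgpu_vram_mgr *mgr)
{
	/* mgr is embedded in the device at mman.vram_mgr, so container_of()
	 * recovers the owning amdgpu_device */
	return container_of(mgr, struct amdgpu_device, mman.vram_mgr);
}
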
271 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_do_reserve() local
272 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_do_reserve()
273 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_do_reserve()
278 list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) { in amdgpu_vram_mgr_do_reserve()
292 atomic64_add(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_do_reserve()
296 list_move(&rsv->blocks, &mgr->reserved_pages); in amdgpu_vram_mgr_do_reserve()
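
A hedged sketch of the reservation loop these matches outline: pending reservations are walked with the manager locked, each one is carved out of the buddy allocator as an exact range, and successful ones have their CPU-visible usage accounted and move to the reserved list. The drm_buddy call details and the rsv->start/rsv->size fields are assumptions, and the visible-size helper name below is hypothetical:

	list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) {
		/* try to claim exactly [start, start + size) from the buddy
		 * allocator; skip and retry later if it is still in use */
		if (drm_buddy_alloc_blocks(mm, rsv->start, rsv->start + rsv->size,
					   rsv->size, mm->chunk_size,
					   &rsv->allocated,
					   DRM_BUDDY_RANGE_ALLOCATION))
			continue;

		/* hypothetical helper: how much of the range is CPU-visible */
		vis_usage = amdgpu_vram_mgr_blocks_vis_size(adev, &rsv->allocated);
		atomic64_add(vis_usage, &mgr->vis_usage);

		/* reservation applied, park it on the reserved list */
		list_move(&rsv->blocks, &mgr->reserved_pages);
	}
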
309 int amdgpu_vram_mgr_reserve_range(struct amdgpu_vram_mgr *mgr, in amdgpu_vram_mgr_reserve_range() argument
324 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_reserve_range()
325 list_add_tail(&rsv->blocks, &mgr->reservations_pending); in amdgpu_vram_mgr_reserve_range()
326 amdgpu_vram_mgr_do_reserve(&mgr->manager); in amdgpu_vram_mgr_reserve_range()
327 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_reserve_range()
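
Caller side, amdgpu_vram_mgr_reserve_range() only queues the range on reservations_pending and then lets amdgpu_vram_mgr_do_reserve() try to apply it straight away, all under mgr->lock. A hedged usage example; the start/size variables and the error message are illustrative only:

	/* illustrative only: carve a firmware-owned region out of VRAM so the
	 * buddy allocator never hands it out to buffer objects */
	r = amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr,
					  reserved_start, reserved_size);
	if (r)
		dev_err(adev->dev, "failed to reserve VRAM range: %d\n", r);
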
343 int amdgpu_vram_mgr_query_page_status(struct amdgpu_vram_mgr *mgr, in amdgpu_vram_mgr_query_page_status() argument
349 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_query_page_status()
351 list_for_each_entry(rsv, &mgr->reservations_pending, blocks) { in amdgpu_vram_mgr_query_page_status()
359 list_for_each_entry(rsv, &mgr->reserved_pages, blocks) { in amdgpu_vram_mgr_query_page_status()
369 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_query_page_status()
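
amdgpu_vram_mgr_query_page_status() checks both lists under mgr->lock. A hedged usage sketch; the assumed return convention is -EBUSY while the address is still on reservations_pending, 0 once it sits on reserved_pages, and -ENOENT if it was never requested:

	/* sketch only: poll the state of one previously requested address */
	ret = amdgpu_vram_mgr_query_page_status(&adev->mman.vram_mgr, addr);
	switch (ret) {
	case 0:		/* reservation has been applied */
		break;
	case -EBUSY:	/* still pending, try again later */
		break;
	default:	/* -ENOENT: address was never reserved */
		break;
	}
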
389 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_new() local
390 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_new()
393 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_new()
438 if (fpfn || lpfn != mgr->mm.size) in amdgpu_vram_mgr_new()
444 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_new()
449 min_block_size = mgr->default_page_size; in amdgpu_vram_mgr_new()
492 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_new()
518 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_new()
522 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_new()
553 atomic64_add(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_new()
559 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_new()
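
The amdgpu_vram_mgr_new() matches outline the allocation hook: range constraints are dropped for full-VRAM requests (line 438), the minimum block size defaults to mgr->default_page_size (line 449), every touch of the buddy state happens under mgr->lock, and the CPU-visible usage is added at the end (line 553). A heavily hedged skeleton; vres (the new amdgpu_vram_mgr_resource), start/end/size, min_block_size and flags are placeholders:

	mutex_lock(&mgr->lock);
	/* placement window, size and allocation flags are derived from the
	 * ttm_place and the BO flags; all of that is elided in this sketch */
	r = drm_buddy_alloc_blocks(mm, start, end, size,
				   min_block_size, &vres->blocks, flags);
	mutex_unlock(&mgr->lock);
	if (r)
		return r;

	/* account how much of the new allocation lies in CPU-visible VRAM */
	atomic64_add(vis_usage, &mgr->vis_usage);
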
579 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_del() local
580 struct amdgpu_device *adev = to_amdgpu_device(mgr); in amdgpu_vram_mgr_del()
581 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_del()
585 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_del()
592 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_del()
594 atomic64_sub(vis_usage, &mgr->vis_usage); in amdgpu_vram_mgr_del()
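
The matching free path in amdgpu_vram_mgr_del(): the blocks go back to the buddy allocator with the manager locked, then the visible-usage counter is decremented. A hedged sketch; computing vis_usage and the final resource teardown are omitted:

	mutex_lock(&mgr->lock);
	/* vis_usage would be summed over the resource's blocks here */
	drm_buddy_free_list(mm, &vres->blocks);
	mutex_unlock(&mgr->lock);

	atomic64_sub(vis_usage, &mgr->vis_usage);
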
718 uint64_t amdgpu_vram_mgr_vis_usage(struct amdgpu_vram_mgr *mgr) in amdgpu_vram_mgr_vis_usage() argument
720 return atomic64_read(&mgr->vis_usage); in amdgpu_vram_mgr_vis_usage()
738 struct amdgpu_vram_mgr_resource *mgr = to_amdgpu_vram_mgr_resource(res); in amdgpu_vram_mgr_intersects() local
742 list_for_each_entry(block, &mgr->blocks, link) { in amdgpu_vram_mgr_intersects()
771 struct amdgpu_vram_mgr_resource *mgr = to_amdgpu_vram_mgr_resource(res); in amdgpu_vram_mgr_compatible() local
775 list_for_each_entry(block, &mgr->blocks, link) { in amdgpu_vram_mgr_compatible()
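
Note that in amdgpu_vram_mgr_intersects() and amdgpu_vram_mgr_compatible() the local named mgr is an amdgpu_vram_mgr_resource, not the manager: both loops walk the buddy blocks backing a single resource. A hedged sketch of the per-block overlap test; the block start/size helpers and the exact comparison are assumptions:

	/* sketch only: walk every buddy block backing the resource and test
	 * it against the requested placement window */
	list_for_each_entry(block, &mgr->blocks, link) {
		unsigned long fpfn = amdgpu_vram_mgr_block_start(block) >> PAGE_SHIFT;
		unsigned long lpfn = fpfn +
			(amdgpu_vram_mgr_block_size(block) >> PAGE_SHIFT);

		if (place->fpfn < lpfn && place->lpfn > fpfn)
			return true;	/* this block overlaps the window */
	}
	return false;
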
800 struct amdgpu_vram_mgr *mgr = to_vram_mgr(man); in amdgpu_vram_mgr_debug() local
801 struct drm_buddy *mm = &mgr->mm; in amdgpu_vram_mgr_debug()
805 amdgpu_vram_mgr_vis_usage(mgr)); in amdgpu_vram_mgr_debug()
807 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_debug()
809 mgr->default_page_size >> 10); in amdgpu_vram_mgr_debug()
814 list_for_each_entry(block, &mgr->reserved_pages, link) in amdgpu_vram_mgr_debug()
816 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_debug()
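
Taken together, new/del/intersects/compatible/debug are the TTM resource-manager callbacks for the VRAM domain. A sketch of the presumable wiring; the table name is an assumption, while the members are the standard struct ttm_resource_manager_func hooks:

static const struct ttm_resource_manager_func amdgpu_vram_mgr_func = {
	.alloc		= amdgpu_vram_mgr_new,
	.free		= amdgpu_vram_mgr_del,
	.intersects	= amdgpu_vram_mgr_intersects,
	.compatible	= amdgpu_vram_mgr_compatible,
	.debug		= amdgpu_vram_mgr_debug,
};
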
836 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_init() local
837 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_vram_mgr_init()
845 err = drm_buddy_init(&mgr->mm, man->size, PAGE_SIZE); in amdgpu_vram_mgr_init()
849 mutex_init(&mgr->lock); in amdgpu_vram_mgr_init()
850 INIT_LIST_HEAD(&mgr->reservations_pending); in amdgpu_vram_mgr_init()
851 INIT_LIST_HEAD(&mgr->reserved_pages); in amdgpu_vram_mgr_init()
852 mgr->default_page_size = PAGE_SIZE; in amdgpu_vram_mgr_init()
854 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager); in amdgpu_vram_mgr_init()
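
The init matches give nearly the whole setup sequence. A reconstruction in listing order, hedged only where the error handling is filled in from context:

	/* one buddy allocator spans the managed VRAM size; PAGE_SIZE is both
	 * the buddy chunk size and the default allocation granularity */
	err = drm_buddy_init(&mgr->mm, man->size, PAGE_SIZE);
	if (err)
		return err;

	mutex_init(&mgr->lock);
	INIT_LIST_HEAD(&mgr->reservations_pending);
	INIT_LIST_HEAD(&mgr->reserved_pages);
	mgr->default_page_size = PAGE_SIZE;

	/* register this manager as the TTM backend for the VRAM placement */
	ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);
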
869 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_fini() local
870 struct ttm_resource_manager *man = &mgr->manager; in amdgpu_vram_mgr_fini()
880 mutex_lock(&mgr->lock); in amdgpu_vram_mgr_fini()
881 list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks) in amdgpu_vram_mgr_fini()
884 list_for_each_entry_safe(rsv, temp, &mgr->reserved_pages, blocks) { in amdgpu_vram_mgr_fini()
885 drm_buddy_free_list(&mgr->mm, &rsv->allocated); in amdgpu_vram_mgr_fini()
888 drm_buddy_fini(&mgr->mm); in amdgpu_vram_mgr_fini()
889 mutex_unlock(&mgr->lock); in amdgpu_vram_mgr_fini()
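
And the matching teardown: pending reservations are dropped, applied ones give their blocks back to the buddy allocator, and the allocator itself is torn down, all with mgr->lock held. A reconstruction from the fini matches; freeing each reservation with kfree() is an assumption:

	mutex_lock(&mgr->lock);
	/* reservations that never got applied can simply be freed */
	list_for_each_entry_safe(rsv, temp, &mgr->reservations_pending, blocks)
		kfree(rsv);

	/* applied reservations return their blocks first */
	list_for_each_entry_safe(rsv, temp, &mgr->reserved_pages, blocks) {
		drm_buddy_free_list(&mgr->mm, &rsv->allocated);
		kfree(rsv);
	}

	drm_buddy_fini(&mgr->mm);
	mutex_unlock(&mgr->lock);
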