Lines matching refs:dmem in drivers/gpu/drm/nouveau/nouveau_dmem.c
113 struct nouveau_dmem *dmem = chunk->drm->dmem; in nouveau_dmem_page_free() local
115 spin_lock(&dmem->lock); in nouveau_dmem_page_free()
116 page->zone_device_data = dmem->free_pages; in nouveau_dmem_page_free()
117 dmem->free_pages = page; in nouveau_dmem_page_free()
125 spin_unlock(&dmem->lock); in nouveau_dmem_page_free()
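
The matches at lines 113-125 are the device-private page free path: a freed page is pushed onto a per-device free list threaded through page->zone_device_data, under dmem->lock. A minimal sketch of that push, using only the fields visible above (the function name is hypothetical):

	static void dmem_free_page_sketch(struct nouveau_dmem *dmem,
					  struct page *page)
	{
		/* The free list needs no extra allocation: the link pointer
		 * lives in the page itself, in the zone_device_data slot. */
		spin_lock(&dmem->lock);
		page->zone_device_data = dmem->free_pages;	/* link old head */
		dmem->free_pages = page;			/* page is new head */
		spin_unlock(&dmem->lock);
	}
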
152 if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr, in nouveau_dmem_copy_one()
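
Line 152 shows the shape of a copy-hook call: one page, destination aperture NOUVEAU_APER_HOST, destination a DMA address. A hedged reconstruction of such a call; the VRAM source aperture, src_paddr, and the error value are assumptions, since the match only shows the host-side half:

	/* Sketch: copy a single page out of device memory into a
	 * DMA-mapped host page via the migrate copy hook. */
	if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr,
					 NOUVEAU_APER_VRAM, src_paddr))
		return -EIO;	/* src_paddr and -EIO are assumptions */
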
164 struct nouveau_dmem *dmem = drm->dmem; in nouveau_dmem_migrate_to_ram() local
212 nouveau_fence_new(&fence, dmem->migrate.chan); in nouveau_dmem_migrate_to_ram()
274 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
275 list_add(&chunk->list, &drm->dmem->chunks); in nouveau_dmem_chunk_alloc()
276 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
280 spin_lock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
282 page->zone_device_data = drm->dmem->free_pages; in nouveau_dmem_chunk_alloc()
283 drm->dmem->free_pages = page; in nouveau_dmem_chunk_alloc()
287 spin_unlock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
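
Lines 274-287 show chunk allocation touching two independent locks: the chunk list is guarded by dmem->mutex, the page free list by dmem->lock. A sketch of registering a chunk and seeding the free list from its pages; the function name and the page iteration (first_page, npages) are assumptions, only the list manipulation comes from the matches:

	static void dmem_seed_free_list_sketch(struct nouveau_drm *drm,
					       struct nouveau_dmem_chunk *chunk,
					       struct page *first_page,
					       unsigned long npages)
	{
		struct page *page = first_page;
		unsigned long i;

		mutex_lock(&drm->dmem->mutex);
		list_add(&chunk->list, &drm->dmem->chunks);
		mutex_unlock(&drm->dmem->mutex);

		/* Thread every page of the new chunk onto the free list. */
		spin_lock(&drm->dmem->lock);
		for (i = 0; i < npages; i++, page++) {
			page->zone_device_data = drm->dmem->free_pages;
			drm->dmem->free_pages = page;
		}
		spin_unlock(&drm->dmem->lock);
	}
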
313 spin_lock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
314 if (drm->dmem->free_pages) { in nouveau_dmem_page_alloc_locked()
315 page = drm->dmem->free_pages; in nouveau_dmem_page_alloc_locked()
316 drm->dmem->free_pages = page->zone_device_data; in nouveau_dmem_page_alloc_locked()
319 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
321 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
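
Lines 313-321 are the matching pop, with two spin_unlock sites because both branches of the free-list check must drop the lock before returning or falling back. A sketch with a hypothetical name and the slow path left as a placeholder:

	static struct page *dmem_pop_free_page_sketch(struct nouveau_drm *drm)
	{
		struct page *page = NULL;

		spin_lock(&drm->dmem->lock);
		if (drm->dmem->free_pages) {
			page = drm->dmem->free_pages;
			drm->dmem->free_pages = page->zone_device_data;	/* unlink head */
			spin_unlock(&drm->dmem->lock);
		} else {
			spin_unlock(&drm->dmem->lock);
			/* Slow path: grow the pool, e.g. allocate a fresh
			 * chunk (placeholder; must run without the lock). */
		}
		return page;
	}
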
344 if (drm->dmem == NULL) in nouveau_dmem_resume()
347 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_resume()
348 list_for_each_entry(chunk, &drm->dmem->chunks, list) { in nouveau_dmem_resume()
353 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_resume()
361 if (drm->dmem == NULL) in nouveau_dmem_suspend()
364 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_suspend()
365 list_for_each_entry(chunk, &drm->dmem->chunks, list) in nouveau_dmem_suspend()
367 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_suspend()
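
nouveau_dmem_resume() (lines 344-353) and nouveau_dmem_suspend() (lines 361-367) share one pattern: bail out if dmem was never set up, then walk the chunk list under dmem->mutex. A sketch of that common walk; do_one_chunk() is a placeholder, not a nouveau symbol:

	static void dmem_walk_chunks_sketch(struct nouveau_drm *drm)
	{
		struct nouveau_dmem_chunk *chunk;

		if (drm->dmem == NULL)	/* dmem is optional; may never init */
			return;

		mutex_lock(&drm->dmem->mutex);
		list_for_each_entry(chunk, &drm->dmem->chunks, list)
			do_one_chunk(chunk);	/* placeholder per-chunk work */
		mutex_unlock(&drm->dmem->mutex);
	}
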
405 nouveau_fence_new(&fence, chunk->drm->dmem->migrate.chan); in nouveau_dmem_evict_chunk()
421 if (drm->dmem == NULL) in nouveau_dmem_fini()
424 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_fini()
426 list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) { in nouveau_dmem_fini()
438 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_fini()
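
Teardown at lines 421-438 uses list_for_each_entry_safe() because each chunk is unlinked while the list is being walked; line 405 shows the eviction fence that flushes a chunk's pages back beforehand. A sketch of the loop with the eviction and unmap steps elided:

	static void dmem_fini_sketch(struct nouveau_drm *drm)
	{
		struct nouveau_dmem_chunk *chunk, *tmp;

		mutex_lock(&drm->dmem->mutex);
		list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) {
			/* Evict resident pages and release the backing VRAM
			 * (elided); 'tmp' keeps the walk valid across removal. */
			list_del(&chunk->list);
			kfree(chunk);
		}
		mutex_unlock(&drm->dmem->mutex);
	}
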
446 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_copy()
519 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_clear()
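
Lines 446 and 519 show that the copy and the clear blit both start from the same place: the push buffer of the migrate channel. A sketch of that preamble; the reserved size and the elided method emission are assumptions:

	struct nvif_push *push = drm->dmem->migrate.chan->chan.push;
	int ret;

	ret = PUSH_WAIT(push, 13);	/* reserve space; 13 is illustrative */
	if (ret)
		return ret;
	/* PUSH_MTHD() emission of the copy-engine methods follows here. */
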
579 drm->dmem->migrate.copy_func = nvc0b5_migrate_copy; in nouveau_dmem_migrate_init()
580 drm->dmem->migrate.clear_func = nvc0b5_migrate_clear; in nouveau_dmem_migrate_init()
581 drm->dmem->migrate.chan = drm->ttm.chan; in nouveau_dmem_migrate_init()
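
Lines 579-581 wire migration up as two function pointers plus a channel shared with TTM. From the call sites at lines 152, 637 and 642, the hook types look roughly like the sketch below; the struct name, parameter names, and the aperture enum are assumptions inferred from those calls:

	struct nouveau_dmem_migrate_sketch {
		/* copy npages pages between apertures, cf. lines 152/637 */
		int (*copy_func)(struct nouveau_drm *drm, u64 npages,
				 enum nouveau_aper dst_aper, u64 dst_addr,
				 enum nouveau_aper src_aper, u64 src_addr);
		/* clear 'length' bytes of device memory, cf. line 642 */
		int (*clear_func)(struct nouveau_drm *drm, u32 length,
				  enum nouveau_aper dst_aper, u64 dst_addr);
		struct nouveau_channel *chan;	/* = drm->ttm.chan, line 581 */
	};
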
598 if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL))) in nouveau_dmem_init()
601 drm->dmem->drm = drm; in nouveau_dmem_init()
602 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
603 INIT_LIST_HEAD(&drm->dmem->chunks); in nouveau_dmem_init()
604 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
605 spin_lock_init(&drm->dmem->lock); in nouveau_dmem_init()
610 kfree(drm->dmem); in nouveau_dmem_init()
611 drm->dmem = NULL; in nouveau_dmem_init()
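
The init sequence at lines 598-611 allocates the dmem state, initializes its locks and chunk list, and, on any later failure, frees the state and resets drm->dmem to NULL so the guards at lines 344, 361, 421 and 710 stay valid. (Note that lines 602 and 604 initialize dmem->mutex twice; redundant but harmless.) A sketch with the device-memory setup elided behind a placeholder:

	void dmem_init_sketch(struct nouveau_drm *drm)	/* hypothetical name */
	{
		if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL)))
			return;

		drm->dmem->drm = drm;
		mutex_init(&drm->dmem->mutex);
		INIT_LIST_HEAD(&drm->dmem->chunks);
		spin_lock_init(&drm->dmem->lock);

		if (setup_device_memory(drm))	/* placeholder for elided setup */
			goto out_free;
		return;

	out_free:
		kfree(drm->dmem);
		drm->dmem = NULL;	/* keep the NULL guards elsewhere working */
	}
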
637 if (drm->dmem->migrate.copy_func(drm, 1, in nouveau_dmem_migrate_copy_one()
642 if (drm->dmem->migrate.clear_func(drm, page_size(dpage), in nouveau_dmem_migrate_copy_one()
678 nouveau_fence_new(&fence, drm->dmem->migrate.chan); in nouveau_dmem_migrate_chunk()
710 if (drm->dmem == NULL) in nouveau_dmem_migrate_vma()
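
The migrate path (lines 637-710) follows the same batching shape as nouveau_dmem_migrate_to_ram() at line 212: per-page copy or clear calls are queued on the migrate channel, then a single nouveau_fence_new() covers the whole batch, and nouveau_dmem_migrate_vma() itself opens with the usual drm->dmem NULL guard. A condensed sketch; the loop, helper name, and return values are placeholders:

	static int dmem_migrate_batch_sketch(struct nouveau_drm *drm,
					     unsigned long npages)
	{
		struct nouveau_fence *fence = NULL;
		unsigned long i;

		if (drm->dmem == NULL)		/* guard, as at line 710 */
			return 0;		/* return value is an assumption */

		for (i = 0; i < npages; i++)
			queue_copy_or_clear(drm, i);	/* placeholder helper */

		/* One fence covers the whole queued batch, as at line 678. */
		nouveau_fence_new(&fence, drm->dmem->migrate.chan);
		/* emit/wait on the fence, then finalize the migration (elided) */
		return 0;
	}
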