Lines Matching refs:dmem (drivers/gpu/drm/nouveau/nouveau_dmem.c)

114 struct nouveau_dmem *dmem = chunk->drm->dmem; in nouveau_dmem_page_free() local
116 spin_lock(&dmem->lock); in nouveau_dmem_page_free()
117 page->zone_device_data = dmem->free_pages; in nouveau_dmem_page_free()
118 dmem->free_pages = page; in nouveau_dmem_page_free()
126 spin_unlock(&dmem->lock); in nouveau_dmem_page_free()
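
The nouveau_dmem_page_free() hits above (lines 114-126) are the release half of a simple free-list scheme: a dying device-private page is pushed onto a singly linked list threaded through page->zone_device_data, under the dmem->lock spinlock. A minimal sketch of that push, with the struct reconstructed from the fields visible in this listing (the migrate state is elided here; see the sketch after line 582) and a hypothetical helper name, since the real work sits inline in nouveau_dmem_page_free():

#include <linux/list.h>
#include <linux/mm_types.h>
#include <linux/mutex.h>
#include <linux/spinlock.h>

struct nouveau_dmem {
        struct nouveau_drm *drm;
        struct list_head chunks;        /* chunk list, under mutex */
        struct mutex mutex;
        struct page *free_pages;        /* singly linked via zone_device_data */
        spinlock_t lock;                /* guards free_pages */
        /* migrate hooks + channel elided; see lines 580-582 */
};

/* Push a freed device-private page onto the per-device free list. */
static void dmem_free_page_push(struct nouveau_dmem *dmem, struct page *page)
{
        spin_lock(&dmem->lock);
        page->zone_device_data = dmem->free_pages;      /* chain to old head */
        dmem->free_pages = page;                        /* page becomes new head */
        spin_unlock(&dmem->lock);
}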
153 if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr, in nouveau_dmem_copy_one()
165 struct nouveau_dmem *dmem = drm->dmem; in nouveau_dmem_migrate_to_ram() local
213 nouveau_fence_new(dmem->migrate.chan, false, &fence); in nouveau_dmem_migrate_to_ram()
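
nouveau_dmem_migrate_to_ram() ends the same way as nouveau_dmem_evict_chunk() (line 406) and nouveau_dmem_migrate_chunk() (line 679): after the DMA copies are queued on the migration channel, a fence is created and waited on, so the pages are only handed over once the hardware is done. A hedged sketch of that completion step; the listing only shows the three-argument nouveau_fence_new(), and the wait/unref pairing below is the usual nouveau idiom, assumed rather than visible here:

        struct nouveau_fence *fence = NULL;

        /* All copies for this migration are in the push buffer; fence them. */
        nouveau_fence_new(dmem->migrate.chan, false, &fence);
        if (fence) {
                nouveau_fence_wait(fence, true, false); /* lazy poll, non-interruptible */
                nouveau_fence_unref(&fence);
        }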
275 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
276 list_add(&chunk->list, &drm->dmem->chunks); in nouveau_dmem_chunk_alloc()
277 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
281 spin_lock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
283 page->zone_device_data = drm->dmem->free_pages; in nouveau_dmem_chunk_alloc()
284 drm->dmem->free_pages = page; in nouveau_dmem_chunk_alloc()
288 spin_unlock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
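
nouveau_dmem_chunk_alloc() touches two structures with two different locks: the chunk list takes the sleepable dmem->mutex, while the page free list takes the dmem->lock spinlock, because the free list is also manipulated from nouveau_dmem_page_free(), which cannot sleep. A sketch of the registration step; the chunk layout, first_page and npages are stand-ins for illustration:

struct nouveau_dmem_chunk {
        struct list_head list;          /* on dmem->chunks, under dmem->mutex */
        struct nouveau_drm *drm;
        /* backing buffer object, pagemap, etc. elided */
};

        /* Publish the new chunk, then seed the free list with its pages. */
        mutex_lock(&drm->dmem->mutex);
        list_add(&chunk->list, &drm->dmem->chunks);
        mutex_unlock(&drm->dmem->mutex);

        spin_lock(&drm->dmem->lock);
        for (page = first_page; page < first_page + npages; page++) {
                page->zone_device_data = drm->dmem->free_pages;
                drm->dmem->free_pages = page;
        }
        spin_unlock(&drm->dmem->lock);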
314 spin_lock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
315 if (drm->dmem->free_pages) { in nouveau_dmem_page_alloc_locked()
316 page = drm->dmem->free_pages; in nouveau_dmem_page_alloc_locked()
317 drm->dmem->free_pages = page->zone_device_data; in nouveau_dmem_page_alloc_locked()
320 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
322 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
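
nouveau_dmem_page_alloc_locked() (lines 314-322) is the pop counterpart: take the head of free_pages if there is one, otherwise drop the lock and fall back to allocating a fresh chunk. A sketch of the fast path under a hypothetical helper name:

static struct page *dmem_free_page_pop(struct nouveau_drm *drm)
{
        struct page *page;

        spin_lock(&drm->dmem->lock);
        if (drm->dmem->free_pages) {
                page = drm->dmem->free_pages;
                drm->dmem->free_pages = page->zone_device_data; /* new head */
                spin_unlock(&drm->dmem->lock);
                return page;
        }
        spin_unlock(&drm->dmem->lock);
        return NULL;    /* caller falls back to nouveau_dmem_chunk_alloc() */
}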
345 if (drm->dmem == NULL) in nouveau_dmem_resume()
348 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_resume()
349 list_for_each_entry(chunk, &drm->dmem->chunks, list) { in nouveau_dmem_resume()
354 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_resume()
362 if (drm->dmem == NULL) in nouveau_dmem_suspend()
365 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_suspend()
366 list_for_each_entry(chunk, &drm->dmem->chunks, list) in nouveau_dmem_suspend()
368 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_suspend()
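
Suspend and resume are mirror images: both bail out early when dmem was never set up, then walk the chunk list under the mutex and evict or restore each chunk's VRAM backing. The shape, with a hypothetical per-chunk helper standing in for the real buffer-object work:

        struct nouveau_dmem_chunk *chunk;

        if (drm->dmem == NULL)  /* dmem is optional; see nouveau_dmem_init() */
                return;

        mutex_lock(&drm->dmem->mutex);
        list_for_each_entry(chunk, &drm->dmem->chunks, list)
                dmem_chunk_suspend(chunk);      /* hypothetical per-chunk op */
        mutex_unlock(&drm->dmem->mutex);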
406 nouveau_fence_new(chunk->drm->dmem->migrate.chan, false, &fence); in nouveau_dmem_evict_chunk()
422 if (drm->dmem == NULL) in nouveau_dmem_fini()
425 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_fini()
427 list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) { in nouveau_dmem_fini()
439 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_fini()
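
Teardown at line 427 switches to list_for_each_entry_safe() because each chunk is unlinked and freed while the list is being walked. A sketch, with the per-chunk cleanup elided:

        struct nouveau_dmem_chunk *chunk, *tmp;

        mutex_lock(&drm->dmem->mutex);
        list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) {
                /* evict resident pages, release the backing bo (elided) */
                list_del(&chunk->list);
                kfree(chunk);
        }
        mutex_unlock(&drm->dmem->mutex);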
447 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_copy()
520 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_clear()
580 drm->dmem->migrate.copy_func = nvc0b5_migrate_copy; in nouveau_dmem_migrate_init()
581 drm->dmem->migrate.clear_func = nvc0b5_migrate_clear; in nouveau_dmem_migrate_init()
582 drm->dmem->migrate.chan = drm->ttm.chan; in nouveau_dmem_migrate_init()
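
nouveau_dmem_migrate_init() wires the engine-specific copy and clear routines in as function pointers, so the rest of the file stays engine-agnostic; here they land on the Pascal-class (0xc0b5) copy-engine paths and reuse the TTM transfer channel. The pointer signatures below are inferred from the call sites at lines 153, 638 and 643, not taken from a header:

struct nouveau_dmem_migrate {
        int (*copy_func)(struct nouveau_drm *drm, u64 npages,
                         enum nouveau_aper dst_aper, u64 dst_addr,
                         enum nouveau_aper src_aper, u64 src_addr);
        int (*clear_func)(struct nouveau_drm *drm, u32 length,
                          enum nouveau_aper dst_aper, u64 dst_addr);
        struct nouveau_channel *chan;
};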
599 if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL))) in nouveau_dmem_init()
602 drm->dmem->drm = drm; in nouveau_dmem_init()
603 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
604 INIT_LIST_HEAD(&drm->dmem->chunks); in nouveau_dmem_init()
605 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
606 spin_lock_init(&drm->dmem->lock); in nouveau_dmem_init()
611 kfree(drm->dmem); in nouveau_dmem_init()
612 drm->dmem = NULL; in nouveau_dmem_init()
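
nouveau_dmem_init() carries the whole lifecycle guard: dmem is optional, so on failure the pointer is freed and reset to NULL, which is exactly what the drm->dmem == NULL checks in suspend, resume, fini and migrate_vma test for. The listing also shows mutex_init() appearing twice (lines 603 and 605); one initialization is sufficient, and the condensed sketch below keeps a single call. What sits between lines 606 and 611 is a guess; only the kfree/NULL unwind itself is visible in the listing:

        if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL)))
                return;

        drm->dmem->drm = drm;
        mutex_init(&drm->dmem->mutex);
        INIT_LIST_HEAD(&drm->dmem->chunks);
        spin_lock_init(&drm->dmem->lock);

        if (nouveau_dmem_migrate_init(drm)) {
                kfree(drm->dmem);
                drm->dmem = NULL;       /* later entry points see "not available" */
        }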
638 if (drm->dmem->migrate.copy_func(drm, 1, in nouveau_dmem_migrate_copy_one()
643 if (drm->dmem->migrate.clear_func(drm, page_size(dpage), in nouveau_dmem_migrate_copy_one()
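
nouveau_dmem_migrate_copy_one() dispatches per page between the two hooks: a backed source page is DMA-copied into VRAM, while an unbacked one is satisfied by clear_func, sized with page_size(dpage) so a compound destination page is cleared over its full extent. A sketch of that dispatch; spage, paddr and the error label are stand-ins:

        if (spage) {
                if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_VRAM,
                                                 paddr, NOUVEAU_APER_HOST,
                                                 *dma_addr))
                        goto out_err;   /* hypothetical label */
        } else {
                if (drm->dmem->migrate.clear_func(drm, page_size(dpage),
                                                  NOUVEAU_APER_VRAM, paddr))
                        goto out_err;
        }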
679 nouveau_fence_new(drm->dmem->migrate.chan, false, &fence); in nouveau_dmem_migrate_chunk()
711 if (drm->dmem == NULL) in nouveau_dmem_migrate_vma()