Lines Matching refs:dmem (drivers/gpu/drm/nouveau/nouveau_dmem.c)

112 struct nouveau_dmem *dmem = chunk->drm->dmem; in nouveau_dmem_page_free() local
114 spin_lock(&dmem->lock); in nouveau_dmem_page_free()
115 page->zone_device_data = dmem->free_pages; in nouveau_dmem_page_free()
116 dmem->free_pages = page; in nouveau_dmem_page_free()
124 spin_unlock(&dmem->lock); in nouveau_dmem_page_free()
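
Lines 112-124 are nouveau_dmem_page_free(): a freed device-private page is pushed onto an intrusive, singly linked free list, with page->zone_device_data doubling as the "next" pointer and dmem->lock guarding the list. A sketch of the whole callback, reconstructed from the matched lines plus the surrounding file (the chunk->callocated accounting and the nouveau_page_to_chunk() helper are not part of the match output):

    static void nouveau_dmem_page_free(struct page *page)
    {
        struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);
        struct nouveau_dmem *dmem = chunk->drm->dmem;

        spin_lock(&dmem->lock);
        /* Push onto the free list: zone_device_data is the link field. */
        page->zone_device_data = dmem->free_pages;
        dmem->free_pages = page;

        WARN_ON(!chunk->callocated);
        chunk->callocated--;
        spin_unlock(&dmem->lock);
    }
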
164 if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr, in nouveau_dmem_fault_copy_one()
183 struct nouveau_dmem *dmem = drm->dmem; in nouveau_dmem_migrate_to_ram() local
212 nouveau_fence_new(dmem->migrate.chan, false, &fence); in nouveau_dmem_migrate_to_ram()
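
Line 164 (nouveau_dmem_fault_copy_one) and lines 183-212 (nouveau_dmem_migrate_to_ram) form the CPU-fault path: a single page is copied from VRAM back to host memory, then a fence on the migration channel lets the fault handler wait for the DMA to finish. A condensed sketch, assuming the surrounding error handling and the nouveau_dmem_page_addr()/nouveau_dmem_fence_done() helpers from the same file:

    /* In nouveau_dmem_fault_copy_one(): one page, VRAM -> host. */
    if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr,
                                     NOUVEAU_APER_VRAM,
                                     nouveau_dmem_page_addr(spage)))
        return -EIO;

    /* In nouveau_dmem_migrate_to_ram(): fence the queued copy, commit
     * the migration, then wait on the fence. */
    nouveau_fence_new(dmem->migrate.chan, false, &fence);
    migrate_vma_pages(&args);
    nouveau_dmem_fence_done(&fence);
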
274 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
275 list_add(&chunk->list, &drm->dmem->chunks); in nouveau_dmem_chunk_alloc()
276 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_chunk_alloc()
280 spin_lock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
282 page->zone_device_data = drm->dmem->free_pages; in nouveau_dmem_chunk_alloc()
283 drm->dmem->free_pages = page; in nouveau_dmem_chunk_alloc()
287 spin_unlock(&drm->dmem->lock); in nouveau_dmem_chunk_alloc()
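
Lines 274-287 are the tail of nouveau_dmem_chunk_alloc(). Note the two separate locks: dmem->mutex protects the list of chunks, while dmem->lock protects the page free list that the new chunk's pages are threaded onto. A sketch of that tail, assuming the DMEM_CHUNK_NPAGES constant and the pagemap range set up earlier in the function:

    mutex_lock(&drm->dmem->mutex);
    list_add(&chunk->list, &drm->dmem->chunks);
    mutex_unlock(&drm->dmem->mutex);

    pfn_first = chunk->pagemap.range.start >> PAGE_SHIFT;
    page = pfn_to_page(pfn_first);
    spin_lock(&drm->dmem->lock);
    for (i = 0; i < DMEM_CHUNK_NPAGES - 1; ++i, ++page) {
        page->zone_device_data = drm->dmem->free_pages;
        drm->dmem->free_pages = page;
    }
    *ppage = page;       /* the last page goes straight to the caller */
    chunk->callocated++;
    spin_unlock(&drm->dmem->lock);
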
313 spin_lock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
314 if (drm->dmem->free_pages) { in nouveau_dmem_page_alloc_locked()
315 page = drm->dmem->free_pages; in nouveau_dmem_page_alloc_locked()
316 drm->dmem->free_pages = page->zone_device_data; in nouveau_dmem_page_alloc_locked()
319 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
321 spin_unlock(&drm->dmem->lock); in nouveau_dmem_page_alloc_locked()
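
Lines 313-321 are the fast path of nouveau_dmem_page_alloc_locked(): pop the head of the free list under the spinlock; if the list is empty, drop the lock and fall back to carving out a new VRAM chunk. A sketch, assuming nouveau_dmem_chunk_alloc() hands back one page from the freshly added chunk as shown above:

    spin_lock(&drm->dmem->lock);
    if (drm->dmem->free_pages) {
        /* Pop the list head; the link lives in zone_device_data. */
        page = drm->dmem->free_pages;
        drm->dmem->free_pages = page->zone_device_data;
        chunk = nouveau_page_to_chunk(page);
        chunk->callocated++;
        spin_unlock(&drm->dmem->lock);
    } else {
        spin_unlock(&drm->dmem->lock);
        if (nouveau_dmem_chunk_alloc(drm, &page))
            return NULL;
    }
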
345 if (drm->dmem == NULL) in nouveau_dmem_resume()
348 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_resume()
349 list_for_each_entry(chunk, &drm->dmem->chunks, list) { in nouveau_dmem_resume()
354 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_resume()
362 if (drm->dmem == NULL) in nouveau_dmem_suspend()
365 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_suspend()
366 list_for_each_entry(chunk, &drm->dmem->chunks, list) in nouveau_dmem_suspend()
368 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_suspend()
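
Lines 345-368 show that resume and suspend are mirror images: both bail out if dmem was never initialized, then take dmem->mutex and walk the chunk list, resume re-pinning each chunk's VRAM buffer object and suspend releasing it. The loop bodies below are reconstructed and kernel-version dependent (newer trees also evict resident pages to host memory on suspend):

    /* Resume: re-pin every chunk's backing buffer object. */
    mutex_lock(&drm->dmem->mutex);
    list_for_each_entry(chunk, &drm->dmem->chunks, list) {
        ret = nouveau_bo_pin(chunk->bo, NOUVEAU_GEM_DOMAIN_VRAM, false);
        /* FIXME handle pin failure */
        WARN_ON(ret);
    }
    mutex_unlock(&drm->dmem->mutex);

    /* Suspend: unpin each chunk's buffer object. */
    mutex_lock(&drm->dmem->mutex);
    list_for_each_entry(chunk, &drm->dmem->chunks, list)
        nouveau_bo_unpin(chunk->bo);
    mutex_unlock(&drm->dmem->mutex);
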
376 if (drm->dmem == NULL) in nouveau_dmem_fini()
379 mutex_lock(&drm->dmem->mutex); in nouveau_dmem_fini()
381 list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) { in nouveau_dmem_fini()
391 mutex_unlock(&drm->dmem->mutex); in nouveau_dmem_fini()
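
Lines 376-391 are nouveau_dmem_fini(): the _safe list walk is needed because every chunk is unlinked and freed while the iteration is still in flight. A sketch of the loop body, reconstructed from the surrounding file and version dependent in its details (the memunmap_pages()/release_mem_region() teardown mirrors how chunk_alloc created the pagemap):

    mutex_lock(&drm->dmem->mutex);
    list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) {
        nouveau_bo_unpin(chunk->bo);
        nouveau_bo_ref(NULL, &chunk->bo);
        list_del(&chunk->list);
        memunmap_pages(&chunk->pagemap);
        release_mem_region(chunk->pagemap.range.start,
                           range_len(&chunk->pagemap.range));
        kfree(chunk);
    }
    mutex_unlock(&drm->dmem->mutex);
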
399 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_copy()
472 struct nvif_push *push = drm->dmem->migrate.chan->chan.push; in nvc0b5_migrate_clear()
532 drm->dmem->migrate.copy_func = nvc0b5_migrate_copy; in nouveau_dmem_migrate_init()
533 drm->dmem->migrate.clear_func = nvc0b5_migrate_clear; in nouveau_dmem_migrate_init()
534 drm->dmem->migrate.chan = drm->ttm.chan; in nouveau_dmem_migrate_init()
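
Lines 532-534 are the heart of nouveau_dmem_migrate_init(): the copy/clear hooks are wired up only when the TTM copy-engine class is one the nvc0b5 routines (lines 399 and 472 above, which push methods through drm->dmem->migrate.chan->chan.push) know how to drive, and the migration channel is simply reused from TTM. A sketch; the exact list of supported DMA-copy classes varies by kernel version:

    switch (drm->ttm.copy.oclass) {
    case PASCAL_DMA_COPY_A:
    case PASCAL_DMA_COPY_B:
    case  VOLTA_DMA_COPY_A:
    case TURING_DMA_COPY_A:
        drm->dmem->migrate.copy_func = nvc0b5_migrate_copy;
        drm->dmem->migrate.clear_func = nvc0b5_migrate_clear;
        drm->dmem->migrate.chan = drm->ttm.chan;
        return 0;
    default:
        break;
    }
    return -ENODEV;
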
551 if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL))) in nouveau_dmem_init()
554 drm->dmem->drm = drm; in nouveau_dmem_init()
555 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
556 INIT_LIST_HEAD(&drm->dmem->chunks); in nouveau_dmem_init()
557 mutex_init(&drm->dmem->mutex); in nouveau_dmem_init()
558 spin_lock_init(&drm->dmem->lock); in nouveau_dmem_init()
563 kfree(drm->dmem); in nouveau_dmem_init()
564 drm->dmem = NULL; in nouveau_dmem_init()
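
Lines 551-564 are nouveau_dmem_init(), as far as its dmem references go: allocate the structure, initialize its locks and chunk list, then unwind if the migration helpers cannot be set up. Note that the source initializes dmem->mutex twice (lines 555 and 557); this is harmless but one call would do. A sketch with the intervening non-matching lines filled in from context:

    if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL)))
        return;

    drm->dmem->drm = drm;
    mutex_init(&drm->dmem->mutex);
    INIT_LIST_HEAD(&drm->dmem->chunks);
    mutex_init(&drm->dmem->mutex);      /* duplicate of line 555 */
    spin_lock_init(&drm->dmem->lock);

    /* Initialize migration DMA helpers before registering memory. */
    if (nouveau_dmem_migrate_init(drm)) {
        kfree(drm->dmem);
        drm->dmem = NULL;
    }
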
590 if (drm->dmem->migrate.copy_func(drm, 1, in nouveau_dmem_migrate_copy_one()
595 if (drm->dmem->migrate.clear_func(drm, page_size(dpage), in nouveau_dmem_migrate_copy_one()
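
Lines 590-595 are the per-page decision in nouveau_dmem_migrate_copy_one(): if the migrating range has a backing source page it is DMA-copied into VRAM, otherwise the destination VRAM page is cleared (page_size(dpage) bytes) so no stale data leaks to userspace. A sketch, assuming spage/dpage/paddr/dma_addr were set up just above as in the full function:

    if (spage) {
        /* Real data to move: host -> VRAM. */
        if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_VRAM, paddr,
                                         NOUVEAU_APER_HOST, *dma_addr))
            goto out_dma_unmap;
    } else {
        /* Anonymous hole: just clear the VRAM page. */
        *dma_addr = DMA_MAPPING_ERROR;
        if (drm->dmem->migrate.clear_func(drm, page_size(dpage),
                                          NOUVEAU_APER_VRAM, paddr))
            goto out_free_page;
    }
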
631 nouveau_fence_new(drm->dmem->migrate.chan, false, &fence); in nouveau_dmem_migrate_chunk()
663 if (drm->dmem == NULL) in nouveau_dmem_migrate_vma()
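
Line 631 shows that nouveau_dmem_migrate_chunk() batches its queued copies/clears and covers them with a single fence, mirroring the fault path above; line 663 is the usual guard in nouveau_dmem_migrate_vma() for GPUs where dmem was never set up. Sketches of both, with the surrounding context reconstructed:

    /* One fence covers the whole batch of queued copies/clears. */
    nouveau_fence_new(drm->dmem->migrate.chan, false, &fence);
    migrate_vma_pages(args);
    nouveau_dmem_fence_done(&fence);

    /* Nothing to do on pre-Pascal GPUs or after a failed init. */
    if (drm->dmem == NULL)
        return -ENODEV;
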