
Searched refs:mman (Results 1 – 25 of 27) sorted by relevance


/drivers/gpu/drm/qxl/
qxl_ttm.c:41 struct qxl_mman *mman; in qxl_get_qdev() local
44 mman = container_of(bdev, struct qxl_mman, bdev); in qxl_get_qdev()
45 qdev = container_of(mman, struct qxl_device, mman); in qxl_get_qdev()
64 qdev->mman.mem_global_referenced = false; in qxl_ttm_global_init()
65 global_ref = &qdev->mman.mem_global_ref; in qxl_ttm_global_init()
78 qdev->mman.bo_global_ref.mem_glob = in qxl_ttm_global_init()
79 qdev->mman.mem_global_ref.object; in qxl_ttm_global_init()
80 global_ref = &qdev->mman.bo_global_ref.ref; in qxl_ttm_global_init()
88 drm_global_item_unref(&qdev->mman.mem_global_ref); in qxl_ttm_global_init()
92 qdev->mman.mem_global_referenced = true; in qxl_ttm_global_init()
[all …]
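
The qxl_get_qdev() hits above show TTM's standard back-pointer recovery: the ttm_bo_device is embedded in struct qxl_mman, which is itself embedded in struct qxl_device, so two container_of() steps walk back to the owning device. A minimal reconstruction from the snippet (the return statement is assumed):

    static struct qxl_device *qxl_get_qdev(struct ttm_bo_device *bdev)
    {
        struct qxl_mman *mman;
        struct qxl_device *qdev;

        /* bdev lives inside qxl_mman, which lives inside qxl_device */
        mman = container_of(bdev, struct qxl_mman, bdev);
        qdev = container_of(mman, struct qxl_device, mman);
        return qdev;
    }
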
qxl_object.c:111 r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type, in qxl_bo_create()
351 return ttm_bo_evict_mm(&qdev->mman.bdev, TTM_PL_PRIV); in qxl_surf_evict()
356 return ttm_bo_evict_mm(&qdev->mman.bdev, TTM_PL_VRAM); in qxl_vram_evict()
qxl_release.c:451 qdev = container_of(bdev, struct qxl_device, mman.bdev); in qxl_release_fence_buffer_objects()
qxl_drv.h:256 struct qxl_mman mman; member
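
Combining the qxl_drv.h member hit with the fields touched in qxl_ttm.c (bo_global_ref, mem_global_ref, mem_global_referenced, bdev), the embedded mman looks roughly like the sketch below; field order and exact types are inferred from the search hits, not taken from the header:

    struct qxl_mman {
        struct ttm_bo_global_ref    bo_global_ref;
        struct drm_global_reference mem_global_ref;
        bool                        mem_global_referenced;
        struct ttm_bo_device        bdev;
    };
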
/drivers/gpu/drm/virtio/
virtgpu_ttm.c:45 struct virtio_gpu_mman *mman; in virtio_gpu_get_vgdev() local
48 mman = container_of(bdev, struct virtio_gpu_mman, bdev); in virtio_gpu_get_vgdev()
49 vgdev = container_of(mman, struct virtio_gpu_device, mman); in virtio_gpu_get_vgdev()
68 vgdev->mman.mem_global_referenced = false; in virtio_gpu_ttm_global_init()
69 global_ref = &vgdev->mman.mem_global_ref; in virtio_gpu_ttm_global_init()
82 vgdev->mman.bo_global_ref.mem_glob = in virtio_gpu_ttm_global_init()
83 vgdev->mman.mem_global_ref.object; in virtio_gpu_ttm_global_init()
84 global_ref = &vgdev->mman.bo_global_ref.ref; in virtio_gpu_ttm_global_init()
92 drm_global_item_unref(&vgdev->mman.mem_global_ref); in virtio_gpu_ttm_global_init()
96 vgdev->mman.mem_global_referenced = true; in virtio_gpu_ttm_global_init()
[all …]
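
virtio_gpu_ttm_global_init() repeats the two-stage drm_global reference pattern seen in the qxl results: reference the TTM memory global first, wire its object into the BO global ref, reference that too, and unwind the first reference on failure. A hedged sketch of the second stage; the global_type/size/init/release setup is cut from the search output and assumed here to use the stock TTM helpers:

    vgdev->mman.bo_global_ref.mem_glob =
        vgdev->mman.mem_global_ref.object;
    global_ref = &vgdev->mman.bo_global_ref.ref;
    global_ref->global_type = DRM_GLOBAL_TTM_BO;
    global_ref->size = sizeof(struct ttm_bo_global);
    global_ref->init = &ttm_bo_global_init;
    global_ref->release = &ttm_bo_global_release;
    r = drm_global_item_ref(global_ref);
    if (r != 0) {
        /* drop the memory-global reference taken just before */
        drm_global_item_unref(&vgdev->mman.mem_global_ref);
        return r;
    }
    vgdev->mman.mem_global_referenced = true;
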
virtgpu_object.c:76 acc_size = ttm_bo_dma_acc_size(&vgdev->mman.bdev, size, in virtio_gpu_object_create()
91 ret = ttm_bo_init(&vgdev->mman.bdev, &bo->tbo, size, type, in virtio_gpu_object_create()
virtgpu_drv.h:172 struct virtio_gpu_mman mman; member
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c:80 adev->mman.mem_global_referenced = false; in amdgpu_ttm_global_init()
81 global_ref = &adev->mman.mem_global_ref; in amdgpu_ttm_global_init()
93 adev->mman.bo_global_ref.mem_glob = in amdgpu_ttm_global_init()
94 adev->mman.mem_global_ref.object; in amdgpu_ttm_global_init()
95 global_ref = &adev->mman.bo_global_ref.ref; in amdgpu_ttm_global_init()
106 mutex_init(&adev->mman.gtt_window_lock); in amdgpu_ttm_global_init()
108 ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_global_init()
110 r = amd_sched_entity_init(&ring->sched, &adev->mman.entity, in amdgpu_ttm_global_init()
117 adev->mman.mem_global_referenced = true; in amdgpu_ttm_global_init()
122 drm_global_item_unref(&adev->mman.bo_global_ref.ref); in amdgpu_ttm_global_init()
[all …]
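
The amdgpu variant does the same global setup but additionally attaches a GPU-scheduler entity to the buffer-move ring (lines 106-117), so TTM-driven copies are queued like any other job. A sketch of that extra step; the run-queue choice and the amd_sched_entity_init() argument list are assumptions, not lifted from the truncated output:

    mutex_init(&adev->mman.gtt_window_lock);

    ring = adev->mman.buffer_funcs_ring;
    rq = &ring->sched.sched_rq[AMD_SCHED_PRIORITY_KERNEL];
    r = amd_sched_entity_init(&ring->sched, &adev->mman.entity,
                              rq, amdgpu_sched_jobs);
    if (r)
        return r;    /* the real code unwinds the global refs first */

    adev->mman.mem_global_referenced = true;
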
amdgpu_benchmark.c:41 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_benchmark_do_move()
110 if (adev->mman.buffer_funcs) { in amdgpu_benchmark_move()
amdgpu_kms.c:423 ui64 = amdgpu_vram_mgr_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_info_ioctl()
426 ui64 = amdgpu_vram_mgr_vis_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_info_ioctl()
429 ui64 = amdgpu_gtt_mgr_usage(&adev->mman.bdev.man[TTM_PL_TT]); in amdgpu_info_ioctl()
452 vram_gtt.gtt_size = adev->mman.bdev.man[TTM_PL_TT].size; in amdgpu_info_ioctl()
466 amdgpu_vram_mgr_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_info_ioctl()
475 amdgpu_vram_mgr_vis_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_info_ioctl()
479 mem.gtt.total_heap_size = adev->mman.bdev.man[TTM_PL_TT].size; in amdgpu_info_ioctl()
484 amdgpu_gtt_mgr_usage(&adev->mman.bdev.man[TTM_PL_TT]); in amdgpu_info_ioctl()
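
The amdgpu_info_ioctl() hits read memory statistics straight off the TTM memory managers embedded in mman.bdev. A condensed sketch of the pattern (variable names are illustrative, not from the source):

    struct ttm_mem_type_manager *vram_man = &adev->mman.bdev.man[TTM_PL_VRAM];
    struct ttm_mem_type_manager *gtt_man = &adev->mman.bdev.man[TTM_PL_TT];

    uint64_t vram_used = amdgpu_vram_mgr_usage(vram_man);
    uint64_t vram_vis_used = amdgpu_vram_mgr_vis_usage(vram_man);
    uint64_t gtt_used = amdgpu_gtt_mgr_usage(gtt_man);
    uint64_t gtt_total = gtt_man->size;    /* TTM counts this in pages */
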
amdgpu_test.c:33 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_do_test_moves()
239 if (adev->mman.buffer_funcs) in amdgpu_test_moves()
si_dma.c:136 if (adev->mman.buffer_funcs_ring == ring) in si_dma_stop()
199 if (adev->mman.buffer_funcs_ring == ring) in si_dma_start()
883 if (adev->mman.buffer_funcs == NULL) { in si_dma_set_buffer_funcs()
884 adev->mman.buffer_funcs = &si_dma_buffer_funcs; in si_dma_set_buffer_funcs()
885 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in si_dma_set_buffer_funcs()
cik_sdma.c:318 if ((adev->mman.buffer_funcs_ring == sdma0) || in cik_sdma_gfx_stop()
319 (adev->mman.buffer_funcs_ring == sdma1)) in cik_sdma_gfx_stop()
519 if (adev->mman.buffer_funcs_ring == ring) in cik_sdma_gfx_resume()
1383 if (adev->mman.buffer_funcs == NULL) { in cik_sdma_set_buffer_funcs()
1384 adev->mman.buffer_funcs = &cik_sdma_buffer_funcs; in cik_sdma_set_buffer_funcs()
1385 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in cik_sdma_set_buffer_funcs()
sdma_v2_4.c:349 if ((adev->mman.buffer_funcs_ring == sdma0) || in sdma_v2_4_gfx_stop()
350 (adev->mman.buffer_funcs_ring == sdma1)) in sdma_v2_4_gfx_stop()
495 if (adev->mman.buffer_funcs_ring == ring) in sdma_v2_4_gfx_resume()
1320 if (adev->mman.buffer_funcs == NULL) { in sdma_v2_4_set_buffer_funcs()
1321 adev->mman.buffer_funcs = &sdma_v2_4_buffer_funcs; in sdma_v2_4_set_buffer_funcs()
1322 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v2_4_set_buffer_funcs()
sdma_v4_0.c:458 if ((adev->mman.buffer_funcs_ring == sdma0) || in sdma_v4_0_gfx_stop()
459 (adev->mman.buffer_funcs_ring == sdma1)) in sdma_v4_0_gfx_stop()
703 if (adev->mman.buffer_funcs_ring == ring) in sdma_v4_0_gfx_resume()
1710 if (adev->mman.buffer_funcs == NULL) { in sdma_v4_0_set_buffer_funcs()
1711 adev->mman.buffer_funcs = &sdma_v4_0_buffer_funcs; in sdma_v4_0_set_buffer_funcs()
1712 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v4_0_set_buffer_funcs()
sdma_v3_0.c:516 if ((adev->mman.buffer_funcs_ring == sdma0) || in sdma_v3_0_gfx_stop()
517 (adev->mman.buffer_funcs_ring == sdma1)) in sdma_v3_0_gfx_stop()
738 if (adev->mman.buffer_funcs_ring == ring) in sdma_v3_0_gfx_resume()
1727 if (adev->mman.buffer_funcs == NULL) { in sdma_v3_0_set_buffer_funcs()
1728 adev->mman.buffer_funcs = &sdma_v3_0_buffer_funcs; in sdma_v3_0_set_buffer_funcs()
1729 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
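
Every SDMA generation above (si_dma, cik_sdma, sdma_v2_4, sdma_v3_0, sdma_v4_0) registers itself with the memory manager the same way: the callbacks are installed once, and buffer moves are routed to SDMA instance 0. The sdma_v3_0 variant, reconstructed from lines 1727-1729:

    static void sdma_v3_0_set_buffer_funcs(struct amdgpu_device *adev)
    {
        /* install copy/fill callbacks once; route TTM buffer
         * moves to the first SDMA instance's ring */
        if (adev->mman.buffer_funcs == NULL) {
            adev->mman.buffer_funcs = &sdma_v3_0_buffer_funcs;
            adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
        }
    }
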
amdgpu_object.c:336 acc_size = ttm_bo_dma_acc_size(&adev->mman.bdev, size, in amdgpu_bo_create_restricted()
393 r = ttm_bo_init_reserved(&adev->mman.bdev, &bo->tbo, size, type, in amdgpu_bo_create_restricted()
808 return ttm_bo_evict_mm(&adev->mman.bdev, TTM_PL_VRAM); in amdgpu_bo_evict_vram()
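
amdgpu_bo_create_restricted() first asks TTM how much kernel memory the BO's metadata will cost, then initializes the object against the shared mman.bdev; amdgpu_bo_evict_vram() later uses the same bdev to flush VRAM wholesale. A sketch of the accounting step, with the third argument assumed to be the driver's BO struct size:

    size_t acc_size;

    /* kernel-memory charge that TTM will account against this BO */
    acc_size = ttm_bo_dma_acc_size(&adev->mman.bdev, size,
                                   sizeof(struct amdgpu_bo));
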
amdgpu_device.c:1976 adev->mman.buffer_funcs = NULL; in amdgpu_device_init()
1977 adev->mman.buffer_funcs_ring = NULL; in amdgpu_device_init()
2658 resched = ttm_bo_lock_delayed_workqueue(&adev->mman.bdev); in amdgpu_sriov_gpu_reset()
2717 ring = adev->mman.buffer_funcs_ring; in amdgpu_sriov_gpu_reset()
2758 ttm_bo_unlock_delayed_workqueue(&adev->mman.bdev, resched); in amdgpu_sriov_gpu_reset()
2793 resched = ttm_bo_lock_delayed_workqueue(&adev->mman.bdev); in amdgpu_gpu_reset()
2864 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_gpu_reset()
2913 ttm_bo_unlock_delayed_workqueue(&adev->mman.bdev, resched); in amdgpu_gpu_reset()
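
Both reset paths in amdgpu_device.c bracket the actual reset with TTM's delayed-workqueue lock so that deferred BO destruction cannot run while the rings are down; the radeon results further below show the identical bracket. The pattern, extracted from lines 2793 and 2913:

    int resched;

    /* quiesce TTM's delayed-delete worker for the duration of the reset */
    resched = ttm_bo_lock_delayed_workqueue(&adev->mman.bdev);

    /* ... bring the GPU down, reset it, re-init the rings ... */

    ttm_bo_unlock_delayed_workqueue(&adev->mman.bdev, resched);
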
amdgpu.h:1484 struct amdgpu_mman mman; member
1595 return container_of(bdev, struct amdgpu_device, mman.bdev); in amdgpu_ttm_adev()
1772 #define amdgpu_emit_copy_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_copy_buffer((ib)…
1773 #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib)…
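
The two amdgpu.h macros are truncated by the search output at the "…"; their visible halves still show the dispatch pattern, an indirect call through the per-ASIC buffer_funcs table installed by the set_buffer_funcs() helpers above. A hedged completion of the copy variant, with the trailing arguments guessed from the parameter names:

    #define amdgpu_emit_copy_buffer(adev, ib, s, d, b) \
        (adev)->mman.buffer_funcs->emit_copy_buffer((ib), (s), (d), (b))
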
amdgpu_cs.c:256 used_vram = amdgpu_vram_mgr_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_cs_get_threshold_for_moves()
303 amdgpu_vram_mgr_vis_usage(&adev->mman.bdev.man[TTM_PL_VRAM]); in amdgpu_cs_get_threshold_for_moves()
/drivers/gpu/drm/radeon/
radeon_ttm.c:55 struct radeon_mman *mman; in radeon_get_rdev() local
58 mman = container_of(bdev, struct radeon_mman, bdev); in radeon_get_rdev()
59 rdev = container_of(mman, struct radeon_device, mman); in radeon_get_rdev()
82 rdev->mman.mem_global_referenced = false; in radeon_ttm_global_init()
83 global_ref = &rdev->mman.mem_global_ref; in radeon_ttm_global_init()
95 rdev->mman.bo_global_ref.mem_glob = in radeon_ttm_global_init()
96 rdev->mman.mem_global_ref.object; in radeon_ttm_global_init()
97 global_ref = &rdev->mman.bo_global_ref.ref; in radeon_ttm_global_init()
105 drm_global_item_unref(&rdev->mman.mem_global_ref); in radeon_ttm_global_init()
109 rdev->mman.mem_global_referenced = true; in radeon_ttm_global_init()
[all …]
radeon_object.c:203 acc_size = ttm_bo_dma_acc_size(&rdev->mman.bdev, size, in radeon_bo_create()
263 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type, in radeon_bo_create()
432 return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM); in radeon_bo_evict_vram()
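
radeon_bo_evict_vram(), like qxl_vram_evict() and amdgpu_bo_evict_vram() above, is a thin wrapper over ttm_bo_evict_mm(), which evicts every BO currently placed in the given memory type (useful before suspend or reset). Reconstructed from line 432, with the wrapper signature assumed:

    int radeon_bo_evict_vram(struct radeon_device *rdev)
    {
        /* push every VRAM-resident BO out to its fallback placement */
        return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM);
    }
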
radeon_device.c:1833 resched = ttm_bo_lock_delayed_workqueue(&rdev->mman.bdev); in radeon_gpu_reset()
1892 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched); in radeon_gpu_reset()
radeon_gem.c:223 man = &rdev->mman.bdev.man[TTM_PL_VRAM]; in radeon_gem_info_ioctl()
radeon_pm.c:1800 resched = ttm_bo_lock_delayed_workqueue(&rdev->mman.bdev); in radeon_dynpm_idle_work_handler()
1851 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched); in radeon_dynpm_idle_work_handler()
