/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vm.c |
      106 return adev->vm_manager.max_pfn >> in amdgpu_vm_num_entries()
      107 (adev->vm_manager.block_size * in amdgpu_vm_num_entries()
      108 adev->vm_manager.num_level); in amdgpu_vm_num_entries()
      109 else if (level == adev->vm_manager.num_level) in amdgpu_vm_num_entries()
      114 return 1 << adev->vm_manager.block_size; in amdgpu_vm_num_entries()
      250 unsigned shift = (adev->vm_manager.num_level - level) * in amdgpu_vm_alloc_levels()
      251 adev->vm_manager.block_size; in amdgpu_vm_alloc_levels()
      291 if (level != adev->vm_manager.num_level - 1) in amdgpu_vm_alloc_levels()
      328 if (level < adev->vm_manager.num_level) { in amdgpu_vm_alloc_levels()
      365 if (last_pfn >= adev->vm_manager.max_pfn) { in amdgpu_vm_alloc_pts()
      [all …]
|
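The amdgpu_vm_num_entries() lines above show how the page-table geometry falls out of vm_manager.max_pfn, block_size and num_level. A minimal sketch of that calculation, pieced together from the excerpts (not a verbatim copy of the kernel function):

    /* Sketch: entries needed by a page-table block at a given level,
     * reconstructed from the amdgpu_vm.c excerpts above. */
    static unsigned amdgpu_vm_num_entries_sketch(struct amdgpu_device *adev,
                                                 unsigned level)
    {
        if (level == 0)
            /* root directory spans the whole VM address space */
            return adev->vm_manager.max_pfn >>
                   (adev->vm_manager.block_size *
                    adev->vm_manager.num_level);
        else if (level == adev->vm_manager.num_level)
            /* leaf tables hold the PTEs themselves */
            return AMDGPU_VM_PTE_COUNT(adev);
        else
            /* everything in between is one block wide */
            return 1 << adev->vm_manager.block_size;
    }
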
D | gmc_v9_0.c |
      366 addr = adev->vm_manager.vram_base_offset + addr - adev->mc.vram_start; in gmc_v9_0_get_vm_pde()
      437 adev->vm_manager.vram_base_offset = gfxhub_v1_0_get_mc_fb_offset(adev); in gmc_v9_0_vram_gtt_location()
      439 adev->vm_manager.vram_base_offset = 0; in gmc_v9_0_vram_gtt_location()
      567 adev->vm_manager.vm_size = 1U << 18; in gmc_v9_0_sw_init()
      568 adev->vm_manager.block_size = 9; in gmc_v9_0_sw_init()
      569 adev->vm_manager.num_level = 3; in gmc_v9_0_sw_init()
      574 adev->vm_manager.num_level = 1; in gmc_v9_0_sw_init()
      585 adev->vm_manager.vm_size = 1U << 18; in gmc_v9_0_sw_init()
      586 adev->vm_manager.block_size = 9; in gmc_v9_0_sw_init()
      587 adev->vm_manager.num_level = 3; in gmc_v9_0_sw_init()
      [all …]
|
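The gmc_v9_0_sw_init() lines set the Vega geometry: vm_size = 1U << 18, block_size = 9, num_level = 3. Assuming vm_size is counted in GiB, as the vm_size << 18 page conversion in the gmc_v6/7/8 entries below suggests, the numbers line up with a 48-bit address space:

    /* Back-of-the-envelope check (assumes vm_size in GiB and 4 KiB GPU pages):
     *   2^18 GiB        = 2^48 bytes           -> 48-bit VA space
     *   max_pfn         = 2^48 / 2^12          = 2^36 pages
     *   block_size = 9  -> 2^9 = 512 entries per table level
     *   root entries    = max_pfn >> (block_size * num_level)
     *                   = 2^36 >> 27           = 512
     */
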
D | gfxhub_v1_0.c |
      45 + adev->vm_manager.vram_base_offset; in gfxhub_v1_0_init_gart_pt_regs()
      88 + adev->vm_manager.vram_base_offset; in gfxhub_v1_0_init_system_aperture_regs()
      193 adev->vm_manager.num_level); in gfxhub_v1_0_setup_vmid_config()
      210 adev->vm_manager.block_size - 9); in gfxhub_v1_0_setup_vmid_config()
      218 lower_32_bits(adev->vm_manager.max_pfn - 1)); in gfxhub_v1_0_setup_vmid_config()
      220 upper_32_bits(adev->vm_manager.max_pfn - 1)); in gfxhub_v1_0_setup_vmid_config()
|
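Both gfxhub_v1_0_setup_vmid_config() and its mmhub counterpart (and the gmc_v6/7/8 gart_enable() paths below) program block_size - 9 into the per-VMID block-size field. One reading of that, offered as an interpretation rather than a statement of the register spec, is that the hardware counts bits above the minimum 9-bit (512-entry) block:

    /* Interpretation only, not taken from the register documentation:
     *   block_size = 9   ->  field value 0  (512-entry page-table blocks)
     *   block_size = 12  ->  field value 3  (4096-entry blocks)
     */
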
D | gmc_v6_0.c |
      456 uint32_t high = adev->vm_manager.max_pfn; in gmc_v6_0_set_prt()
      510 field = adev->vm_manager.fragment_size; in gmc_v6_0_gart_enable()
      534 WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1); in gmc_v6_0_gart_enable()
      555 ((adev->vm_manager.block_size - 9) in gmc_v6_0_gart_enable()
      835 adev->vm_manager.max_pfn = adev->vm_manager.vm_size << 18; in gmc_v6_0_sw_init()
      879 adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS; in gmc_v6_0_sw_init()
      880 adev->vm_manager.num_level = 1; in gmc_v6_0_sw_init()
      888 adev->vm_manager.vram_base_offset = tmp; in gmc_v6_0_sw_init()
      890 adev->vm_manager.vram_base_offset = 0; in gmc_v6_0_sw_init()
|
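gmc_v6_0_sw_init() (and the v7/v8 versions below) derive max_pfn from vm_size with a plain shift; a hedged helper spelling out the unit conversion:

    /* Sketch of the conversion behind "max_pfn = vm_size << 18":
     * vm_size is in GiB and GPU pages are 4 KiB, so
     * 1 GiB = 2^30 / 2^12 = 2^18 pages. */
    static u64 vm_size_gib_to_max_pfn(u64 vm_size_gib)
    {
        return vm_size_gib << 18;   /* last valid PFN is max_pfn - 1 */
    }
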
D | gmc_v7_0.c |
      549 uint32_t high = adev->vm_manager.max_pfn; in gmc_v7_0_set_prt()
      616 field = adev->vm_manager.fragment_size; in gmc_v7_0_gart_enable()
      645 WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1); in gmc_v7_0_gart_enable()
      663 adev->vm_manager.block_size - 9); in gmc_v7_0_gart_enable()
      974 adev->vm_manager.max_pfn = adev->vm_manager.vm_size << 18; in gmc_v7_0_sw_init()
      1028 adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS; in gmc_v7_0_sw_init()
      1029 adev->vm_manager.num_level = 1; in gmc_v7_0_sw_init()
      1037 adev->vm_manager.vram_base_offset = tmp; in gmc_v7_0_sw_init()
      1039 adev->vm_manager.vram_base_offset = 0; in gmc_v7_0_sw_init()
|
D | mmhub_v1_0.c |
      56 adev->vm_manager.vram_base_offset; in mmhub_v1_0_init_gart_pt_regs()
      100 adev->vm_manager.vram_base_offset; in mmhub_v1_0_init_system_aperture_regs()
      209 PAGE_TABLE_DEPTH, adev->vm_manager.num_level); in mmhub_v1_0_setup_vmid_config()
      226 adev->vm_manager.block_size - 9); in mmhub_v1_0_setup_vmid_config()
      234 lower_32_bits(adev->vm_manager.max_pfn - 1)); in mmhub_v1_0_setup_vmid_config()
      236 upper_32_bits(adev->vm_manager.max_pfn - 1)); in mmhub_v1_0_setup_vmid_config()
|
D | gmc_v8_0.c |
      777 uint32_t high = adev->vm_manager.max_pfn; in gmc_v8_0_set_prt()
      845 field = adev->vm_manager.fragment_size; in gmc_v8_0_gart_enable()
      889 WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1); in gmc_v8_0_gart_enable()
      914 adev->vm_manager.block_size - 9); in gmc_v8_0_gart_enable()
      1100 adev->vm_manager.max_pfn = adev->vm_manager.vm_size << 18; in gmc_v8_0_sw_init()
      1154 adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS; in gmc_v8_0_sw_init()
      1155 adev->vm_manager.num_level = 1; in gmc_v8_0_sw_init()
      1163 adev->vm_manager.vram_base_offset = tmp; in gmc_v8_0_sw_init()
      1165 adev->vm_manager.vram_base_offset = 0; in gmc_v8_0_sw_init()
|
D | si_dma.c |
      899 if (adev->vm_manager.vm_pte_funcs == NULL) { in si_dma_set_vm_pte_funcs()
      900 adev->vm_manager.vm_pte_funcs = &si_dma_vm_pte_funcs; in si_dma_set_vm_pte_funcs()
      902 adev->vm_manager.vm_pte_rings[i] = in si_dma_set_vm_pte_funcs()
      905 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in si_dma_set_vm_pte_funcs()
|
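si_dma_set_vm_pte_funcs() and the cik_sdma/sdma_v2_4/v3_0/v4_0 variants below all follow the same registration pattern, guarded by the NULL check because amdgpu_device_init() (see the amdgpu_device.c entry) starts with vm_pte_funcs cleared. A generalized sketch, with the my_dma_* names as placeholders rather than driver symbols:

    /* Generalized sketch of the *_set_vm_pte_funcs() pattern seen in the
     * SDMA/DMA entries of this listing; my_dma_* are placeholder names. */
    static const struct amdgpu_vm_pte_funcs my_dma_vm_pte_funcs = {
        /* .copy_pte / .write_pte / .set_pte_pde callbacks go here */
    };

    static void my_dma_set_vm_pte_funcs(struct amdgpu_device *adev)
    {
        unsigned i;

        if (adev->vm_manager.vm_pte_funcs == NULL) {
            adev->vm_manager.vm_pte_funcs = &my_dma_vm_pte_funcs;
            for (i = 0; i < adev->sdma.num_instances; i++)
                adev->vm_manager.vm_pte_rings[i] =
                    &adev->sdma.instance[i].ring;
            adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;
        }
    }
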
D | cik_sdma.c |
      1399 if (adev->vm_manager.vm_pte_funcs == NULL) { in cik_sdma_set_vm_pte_funcs()
      1400 adev->vm_manager.vm_pte_funcs = &cik_sdma_vm_pte_funcs; in cik_sdma_set_vm_pte_funcs()
      1402 adev->vm_manager.vm_pte_rings[i] = in cik_sdma_set_vm_pte_funcs()
      1405 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in cik_sdma_set_vm_pte_funcs()
|
D | sdma_v2_4.c |
      1336 if (adev->vm_manager.vm_pte_funcs == NULL) { in sdma_v2_4_set_vm_pte_funcs()
      1337 adev->vm_manager.vm_pte_funcs = &sdma_v2_4_vm_pte_funcs; in sdma_v2_4_set_vm_pte_funcs()
      1339 adev->vm_manager.vm_pte_rings[i] = in sdma_v2_4_set_vm_pte_funcs()
      1342 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in sdma_v2_4_set_vm_pte_funcs()
|
D | amdgpu_vm.h | 48 #define AMDGPU_VM_PTE_COUNT(adev) (1 << (adev)->vm_manager.block_size)
|
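AMDGPU_VM_PTE_COUNT() simply turns block_size into a per-table PTE count; with the block_size = 9 used in the gmc entries above, the sizes work out as follows:

    /* block_size = 9 (assuming 8-byte PTEs and 4 KiB GPU pages):
     *   AMDGPU_VM_PTE_COUNT(adev) = 1 << 9       = 512 PTEs per table
     *   512 PTEs * 8 bytes        = 4 KiB per page table
     *   512 PTEs * 4 KiB mapped   = 2 MiB of VA per leaf table
     */
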
D | sdma_v4_0.c |
      1726 if (adev->vm_manager.vm_pte_funcs == NULL) { in sdma_v4_0_set_vm_pte_funcs()
      1727 adev->vm_manager.vm_pte_funcs = &sdma_v4_0_vm_pte_funcs; in sdma_v4_0_set_vm_pte_funcs()
      1729 adev->vm_manager.vm_pte_rings[i] = in sdma_v4_0_set_vm_pte_funcs()
      1732 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in sdma_v4_0_set_vm_pte_funcs()
|
D | sdma_v3_0.c |
      1743 if (adev->vm_manager.vm_pte_funcs == NULL) { in sdma_v3_0_set_vm_pte_funcs()
      1744 adev->vm_manager.vm_pte_funcs = &sdma_v3_0_vm_pte_funcs; in sdma_v3_0_set_vm_pte_funcs()
      1746 adev->vm_manager.vm_pte_rings[i] = in sdma_v3_0_set_vm_pte_funcs()
      1749 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in sdma_v3_0_set_vm_pte_funcs()
|
D | amdgpu.h |
      1480 struct amdgpu_vm_manager vm_manager; member
      1732 #define amdgpu_vm_copy_pte(adev, ib, pe, src, count) ((adev)->vm_manager.vm_pte_funcs->copy_pte((ib…
      1733 #define amdgpu_vm_write_pte(adev, ib, pe, value, count, incr) ((adev)->vm_manager.vm_pte_funcs->wri…
      1734 #define amdgpu_vm_set_pte_pde(adev, ib, pe, addr, count, incr, flags) ((adev)->vm_manager.vm_pte_fu…
|
D | amdgpu_kms.c |
      559 dev_info.virtual_address_max = (uint64_t)adev->vm_manager.max_pfn * AMDGPU_GPU_PAGE_SIZE; in amdgpu_info_ioctl()
      561 dev_info.pte_fragment_size = (1 << adev->vm_manager.fragment_size) * AMDGPU_GPU_PAGE_SIZE; in amdgpu_info_ioctl()
|
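amdgpu_info_ioctl() scales the page-based limits back to bytes for userspace; AMDGPU_GPU_PAGE_SIZE is 4096. Worked through with illustrative inputs (vm_size = 64 GiB and fragment_size = 4 are example values, not numbers read from the excerpts):

    /* Illustrative numbers only:
     *   max_pfn             = 64 << 18        = 2^24 pages
     *   virtual_address_max = 2^24 * 4096     = 64 GiB
     *   pte_fragment_size   = (1 << 4) * 4096 = 64 KiB
     */
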
D | amdgpu_device.c |
      1978 adev->vm_manager.vm_pte_funcs = NULL; in amdgpu_device_init()
      1979 adev->vm_manager.vm_pte_num_rings = 0; in amdgpu_device_init()
|
D | gfx_v7_0.c | 1927 for (i = 0; i < adev->vm_manager.id_mgr[0].num_ids; i++) { in gfx_v7_0_gpu_init()
|
/drivers/gpu/drm/radeon/ |
D | radeon_vm.c |
      62 return rdev->vm_manager.max_pfn >> radeon_vm_block_size; in radeon_vm_num_pdes()
      89 if (!rdev->vm_manager.enabled) { in radeon_vm_manager_init()
      94 rdev->vm_manager.enabled = true; in radeon_vm_manager_init()
      110 if (!rdev->vm_manager.enabled) in radeon_vm_manager_fini()
      114 radeon_fence_unref(&rdev->vm_manager.active[i]); in radeon_vm_manager_fini()
      116 rdev->vm_manager.enabled = false; in radeon_vm_manager_fini()
      188 vm_id->last_id_use == rdev->vm_manager.active[vm_id->id]) in radeon_vm_grab_id()
      195 for (i = 1; i < rdev->vm_manager.nvm; ++i) { in radeon_vm_grab_id()
      196 struct radeon_fence *fence = rdev->vm_manager.active[i]; in radeon_vm_grab_id()
      215 return rdev->vm_manager.active[choices[i]]; in radeon_vm_grab_id()
      [all …]
|
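The radeon_vm_grab_id() lines hint at the VMID-recycling policy: keep the current id if this VM's last use is still the active fence for it, otherwise scan ids 1..nvm-1 for a free slot or reuse the one with the oldest fence. A simplified, hedged sketch (the real function also tracks per-ring candidates and trace points, which are omitted here):

    /* Simplified sketch of the selection loop; radeon_fence_is_earlier() is
     * the fence-ordering helper the real function appears to use. Returns the
     * chosen VMID and, via *wait, the fence the caller must wait on, if any. */
    static unsigned radeon_grab_vmid_sketch(struct radeon_device *rdev,
                                            struct radeon_fence **wait)
    {
        unsigned i, lru = 0;

        /* VMID 0 belongs to the system VM, so user VMs start at 1 */
        for (i = 1; i < rdev->vm_manager.nvm; ++i) {
            struct radeon_fence *fence = rdev->vm_manager.active[i];

            if (!fence) {                       /* free id: take it */
                *wait = NULL;
                return i;
            }
            if (!lru || radeon_fence_is_earlier(fence,
                                                rdev->vm_manager.active[lru]))
                lru = i;                        /* remember the oldest user */
        }
        /* everything busy: reuse the least recently used id and hand the
         * caller its fence to wait on before reprogramming the VMID */
        *wait = rdev->vm_manager.active[lru];
        return lru;
    }
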
D | ni.c |
      1322 rdev->vm_manager.max_pfn - 1); in cayman_pcie_gart_enable()
      1324 rdev->vm_manager.saved_table_addr[i]); in cayman_pcie_gart_enable()
      1359 rdev->vm_manager.saved_table_addr[i] = RREG32( in cayman_pcie_gart_disable()
      2505 rdev->vm_manager.nvm = 8; in cayman_vm_init()
      2510 rdev->vm_manager.vram_base_offset = tmp; in cayman_vm_init()
      2512 rdev->vm_manager.vram_base_offset = 0; in cayman_vm_init()
|
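cayman_pcie_gart_disable() saves each VM context's page-directory base into vm_manager.saved_table_addr[], and cayman_pcie_gart_enable() writes it back; the si.c and cik.c entries further down show the same idiom. A hedged outline, where PAGE_TABLE_BASE_REG(i) is a stand-in for the per-context VM_CONTEXT*_PAGE_TABLE_BASE_ADDR offset computed in the real code and the loop bounds are illustrative:

    /* on *_pcie_gart_disable(): remember where each context's PD lives */
    for (i = 0; i < rdev->vm_manager.nvm; ++i)
        rdev->vm_manager.saved_table_addr[i] = RREG32(PAGE_TABLE_BASE_REG(i));

    /* on the next *_pcie_gart_enable(): restore it so existing VMIDs keep
     * pointing at their page tables across suspend or reset */
    for (i = 0; i < rdev->vm_manager.nvm; ++i)
        WREG32(PAGE_TABLE_BASE_REG(i), rdev->vm_manager.saved_table_addr[i]);
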
D | radeon_gem.c | 606 if (!rdev->vm_manager.enabled) { in radeon_gem_va_ioctl()
|
D | radeon_cs.c | 365 !p->rdev->vm_manager.enabled) { in radeon_cs_parser_init()
|
D | si.c |
      4328 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in si_pcie_gart_enable()
      4336 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
      4339 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
      4379 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in si_pcie_gart_disable()
      4797 rdev->vm_manager.nvm = 16; in si_vm_init()
      4799 rdev->vm_manager.vram_base_offset = 0; in si_vm_init()
|
D | cik.c |
      5508 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in cik_pcie_gart_enable()
      5512 rdev->vm_manager.saved_table_addr[i]); in cik_pcie_gart_enable()
      5515 rdev->vm_manager.saved_table_addr[i]); in cik_pcie_gart_enable()
      5590 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in cik_pcie_gart_disable()
      5663 rdev->vm_manager.nvm = RADEON_NUM_OF_VMIDS; in cik_vm_init()
      5668 rdev->vm_manager.vram_base_offset = tmp; in cik_vm_init()
      5670 rdev->vm_manager.vram_base_offset = 0; in cik_vm_init()
|
D | radeon_device.c | 1362 rdev->vm_manager.max_pfn = radeon_vm_size << 18; in radeon_device_init()
|
D | radeon.h | 2433 struct radeon_vm_manager vm_manager; member
|