/kernel/linux/linux-5.10/drivers/iommu/ |
D | tegra-gart.c |
      58  #define FLUSH_GART_REGS(gart) readl_relaxed((gart)->regs + GART_CONFIG)   argument
      60  #define for_each_gart_pte(gart, iova) \   argument
      61  for (iova = gart->iovmm_base; \
      62  iova < gart->iovmm_end; \
      65  static inline void gart_set_pte(struct gart_device *gart,   in gart_set_pte() argument
      68  writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_set_pte()
      69  writel_relaxed(pte, gart->regs + GART_ENTRY_DATA);   in gart_set_pte()
      72  static inline unsigned long gart_read_pte(struct gart_device *gart,   in gart_read_pte() argument
      77  writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_read_pte()
      78  pte = readl_relaxed(gart->regs + GART_ENTRY_DATA);   in gart_read_pte()
      [all …]
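
The tegra-gart.c hits above show how the driver touches Tegra20 GART page-table entries: an IOVA written to GART_ENTRY_ADDR selects the entry, GART_ENTRY_DATA then reads or writes it, and a read-back of GART_CONFIG is used to post earlier relaxed writes. A minimal sketch of that indexed-register access pattern, assuming placeholder register offsets and a gart_device reduced to the fields the snippets reference:

#include <linux/io.h>
#include <linux/types.h>

/* Offsets are placeholders for illustration; the real values live in tegra-gart.c. */
#define GART_CONFIG		0x24
#define GART_ENTRY_ADDR		0x28
#define GART_ENTRY_DATA		0x2c

/* Read back a register so earlier relaxed writes reach the hardware. */
#define FLUSH_GART_REGS(gart)	readl_relaxed((gart)->regs + GART_CONFIG)

struct gart_device {
	void __iomem	*regs;		/* MMIO base of the GART block */
	unsigned long	iovmm_base;	/* first IOVA of the aperture  */
	unsigned long	iovmm_end;	/* one past the last IOVA      */
};

/* Program one PTE: select the IOVA, then write the entry value. */
static inline void gart_set_pte(struct gart_device *gart,
				unsigned long iova, unsigned long pte)
{
	writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);
	writel_relaxed(pte, gart->regs + GART_ENTRY_DATA);
}

/* Read one PTE back through the same indexed window. */
static inline unsigned long gart_read_pte(struct gart_device *gart,
					  unsigned long iova)
{
	writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);
	return readl_relaxed(gart->regs + GART_ENTRY_DATA);
}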
|
D | Makefile | 23 obj-$(CONFIG_TEGRA_IOMMU_GART) += tegra-gart.o
|
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/ |
D | radeon_gart.c |
      75  ptr = dma_alloc_coherent(&rdev->pdev->dev, rdev->gart.table_size,   in radeon_gart_table_ram_alloc()
      76  &rdev->gart.table_addr, GFP_KERNEL);   in radeon_gart_table_ram_alloc()
      84  rdev->gart.table_size >> PAGE_SHIFT);   in radeon_gart_table_ram_alloc()
      87  rdev->gart.ptr = ptr;   in radeon_gart_table_ram_alloc()
     102  if (rdev->gart.ptr == NULL) {   in radeon_gart_table_ram_free()
     108  set_memory_wb((unsigned long)rdev->gart.ptr,   in radeon_gart_table_ram_free()
     109  rdev->gart.table_size >> PAGE_SHIFT);   in radeon_gart_table_ram_free()
     112  dma_free_coherent(&rdev->pdev->dev, rdev->gart.table_size,   in radeon_gart_table_ram_free()
     113  (void *)rdev->gart.ptr, rdev->gart.table_addr);   in radeon_gart_table_ram_free()
     114  rdev->gart.ptr = NULL;   in radeon_gart_table_ram_free()
      [all …]
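
For the older parts, radeon_gart.c keeps the GART page table in ordinary system memory: one dma_alloc_coherent() buffer of gart.table_size bytes that the GPU is pointed at by bus address, restored to write-back caching before it is freed. A hedged sketch of that allocate/free pairing (the gart struct below is a stand-in for the fields the snippets use, and the set_memory_uc() call on the allocation path is an assumption inferred from the matching set_memory_wb() in the free path):

#include <linux/dma-mapping.h>
#include <linux/set_memory.h>
#include <linux/mm.h>

struct my_gart {			/* illustrative stand-in for rdev->gart */
	void		*ptr;		/* CPU pointer to the page table       */
	dma_addr_t	table_addr;	/* bus address handed to the GPU       */
	unsigned	table_size;	/* bytes, page aligned                 */
};

static int gart_table_ram_alloc(struct device *dev, struct my_gart *gart)
{
	void *ptr;

	ptr = dma_alloc_coherent(dev, gart->table_size,
				 &gart->table_addr, GFP_KERNEL);
	if (!ptr)
		return -ENOMEM;

#ifdef CONFIG_X86
	/* Assumption: some chips want the table mapped uncached while the
	 * GPU owns it; the free path below undoes this with set_memory_wb(). */
	set_memory_uc((unsigned long)ptr, gart->table_size >> PAGE_SHIFT);
#endif
	gart->ptr = ptr;
	return 0;
}

static void gart_table_ram_free(struct device *dev, struct my_gart *gart)
{
	if (!gart->ptr)
		return;
#ifdef CONFIG_X86
	set_memory_wb((unsigned long)gart->ptr,
		      gart->table_size >> PAGE_SHIFT);
#endif
	dma_free_coherent(dev, gart->table_size, gart->ptr, gart->table_addr);
	gart->ptr = NULL;
}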
|
D | rs400.c |
      85  if (rdev->gart.ptr) {   in rs400_gart_init()
     108  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in rs400_gart_init()
     166  tmp = (u32)rdev->gart.table_addr & 0xfffff000;   in rs400_gart_enable()
     167  tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4;   in rs400_gart_enable()
     194  (unsigned long long)rdev->gart.table_addr);   in rs400_gart_enable()
     195  rdev->gart.ready = true;   in rs400_gart_enable()
     238  u32 *gtt = rdev->gart.ptr;   in rs400_gart_set_page()
|
D | radeon_asic.c |
     167  rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;   in radeon_agp_disable()
     168  rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;   in radeon_agp_disable()
     169  rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;   in radeon_agp_disable()
     173  rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;   in radeon_agp_disable()
     174  rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;   in radeon_agp_disable()
     175  rdev->asic->gart.set_page = &r100_pci_gart_set_page;   in radeon_agp_disable()
     209  .gart = {
     277  .gart = {
     373  .gart = {
     441  .gart = {
      [all …]
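
radeon_asic.c shows the per-chip dispatch structure: every ASIC table carries a .gart block of callbacks (tlb_flush, get_page_entry, set_page), and radeon_agp_disable() can repoint them from the AGP/PCI path to the PCIe-GART variants at runtime. A reduced, hypothetical version of that ops-table pattern (the callback signatures here are simplified, not the driver's real prototypes):

#include <stdint.h>

/* Trimmed-down stand-in for the .gart block inside struct radeon_asic. */
struct gart_ops {
	void     (*tlb_flush)(void *rdev);
	uint64_t (*get_page_entry)(uint64_t addr, uint32_t flags);
	void     (*set_page)(void *rdev, unsigned int gpu_page_idx, uint64_t entry);
};

struct asic_ops {
	struct gart_ops gart;
};

/* The move radeon_agp_disable() makes above: when AGP cannot be used,
 * swap in the PCIe-GART callbacks so the rest of the driver is unchanged. */
static void fall_back_to_pcie_gart(struct asic_ops *asic,
				   const struct gart_ops *pcie_gart)
{
	asic->gart = *pcie_gart;
}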
|
D | rs600.c |
     543  if (rdev->gart.robj) {   in rs600_gart_init()
     552  rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;   in rs600_gart_init()
     561  if (rdev->gart.robj == NULL) {   in rs600_gart_enable()
     598  rdev->gart.table_addr);   in rs600_gart_enable()
     615  (unsigned long long)rdev->gart.table_addr);   in rs600_gart_enable()
     616  rdev->gart.ready = true;   in rs600_gart_enable()
     656  void __iomem *ptr = (void *)rdev->gart.ptr;   in rs600_gart_set_page()
|
D | r300.c |
     123  void __iomem *ptr = rdev->gart.ptr;   in rv370_pcie_gart_set_page()
     135  if (rdev->gart.robj) {   in rv370_pcie_gart_init()
     146  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in rv370_pcie_gart_init()
     147  rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;   in rv370_pcie_gart_init()
     148  rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;   in rv370_pcie_gart_init()
     149  rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;   in rv370_pcie_gart_init()
     159  if (rdev->gart.robj == NULL) {   in rv370_pcie_gart_enable()
     174  table_addr = rdev->gart.table_addr;   in rv370_pcie_gart_enable()
     189  rdev->gart.ready = true;   in rv370_pcie_gart_enable()
|
D | radeon_ttm.c |
    1051  if (p >= rdev->gart.num_cpu_pages)   in radeon_ttm_gtt_read()
    1054  page = rdev->gart.pages[p];   in radeon_ttm_gtt_read()
    1060  kunmap(rdev->gart.pages[p]);   in radeon_ttm_gtt_read()
|
D | rv770.c |
     902  if (rdev->gart.robj == NULL) {   in rv770_pcie_gart_enable()
     931  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);   in rv770_pcie_gart_enable()
     942  (unsigned long long)rdev->gart.table_addr);   in rv770_pcie_gart_enable()
     943  rdev->gart.ready = true;   in rv770_pcie_gart_enable()
|
D | r100.c |
     645  if (rdev->gart.ptr) {   in r100_pci_gart_init()
     653  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in r100_pci_gart_init()
     654  rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;   in r100_pci_gart_init()
     655  rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;   in r100_pci_gart_init()
     656  rdev->asic->gart.set_page = &r100_pci_gart_set_page;   in r100_pci_gart_init()
     671  WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr);   in r100_pci_gart_enable()
     677  (unsigned long long)rdev->gart.table_addr);   in r100_pci_gart_enable()
     678  rdev->gart.ready = true;   in r100_pci_gart_enable()
     701  u32 *gtt = rdev->gart.ptr;   in r100_pci_gart_set_page()
|
D | radeon_vm.c |
     368  uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8;   in radeon_vm_set_pages()
     600  result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT];   in radeon_vm_map_gart()
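
The radeon_vm.c hit makes the page-table geometry explicit: GPU pages are 4 KiB (the shift by 12) and each GART entry on this path is 8 bytes, so the PTE backing a GTT address sits at table_addr + (addr >> 12) * 8. A small, self-contained example of that arithmetic with made-up addresses:

#include <stdint.h>
#include <stdio.h>

#define RADEON_GPU_PAGE_SHIFT 12	/* 4 KiB GPU pages */
#define GART_PTE_SIZE          8	/* bytes per entry on this path */

int main(void)
{
	uint64_t table_addr = 0x80000000ULL;	/* made-up GART table base      */
	uint64_t addr       = 0x00203000ULL;	/* made-up GTT offset to look up */

	/* Same arithmetic as the radeon_vm_set_pages() hit above: page index
	 * times entry size gives the byte offset of the PTE inside the table. */
	uint64_t page = addr >> RADEON_GPU_PAGE_SHIFT;      /* 0x203       */
	uint64_t pte  = table_addr + page * GART_PTE_SIZE;  /* 0x80001018  */

	printf("page %#llx -> pte address %#llx\n",
	       (unsigned long long)page, (unsigned long long)pte);
	return 0;
}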
|
D | ni.c |
    1278  if (rdev->gart.robj == NULL) {   in cayman_pcie_gart_enable()
    1307  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);   in cayman_pcie_gart_enable()
    1353  (unsigned long long)rdev->gart.table_addr);   in cayman_pcie_gart_enable()
    1354  rdev->gart.ready = true;   in cayman_pcie_gart_enable()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_gart.c |
     118  if (adev->gart.bo == NULL) {   in amdgpu_gart_table_vram_alloc()
     122  bp.size = adev->gart.table_size;   in amdgpu_gart_table_vram_alloc()
     129  r = amdgpu_bo_create(adev, &bp, &adev->gart.bo);   in amdgpu_gart_table_vram_alloc()
     151  r = amdgpu_bo_reserve(adev->gart.bo, false);   in amdgpu_gart_table_vram_pin()
     154  r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);   in amdgpu_gart_table_vram_pin()
     156  amdgpu_bo_unreserve(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     159  r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);   in amdgpu_gart_table_vram_pin()
     161  amdgpu_bo_unpin(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     162  amdgpu_bo_unreserve(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     178  if (adev->gart.bo == NULL) {   in amdgpu_gart_table_vram_unpin()
      [all …]
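
amdgpu keeps the GART table in a VRAM buffer object instead; the amdgpu_gart_table_vram_pin() fragments above show the reserve, pin-in-VRAM, kmap sequence, with each failure step unwinding the previous one. Reassembled as a sketch (assumed to be close to, but not a verbatim copy of, the 5.10 function; it relies on the driver's internal amdgpu.h):

#include "amdgpu.h"	/* driver-internal header: struct amdgpu_device, BO helpers */

static int gart_table_vram_pin(struct amdgpu_device *adev)
{
	int r;

	/* Lock the BO so pinning and mapping happen without racing eviction. */
	r = amdgpu_bo_reserve(adev->gart.bo, false);
	if (r)
		return r;

	/* Pin in VRAM so the GPU address of the page table never moves. */
	r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);
	if (r) {
		amdgpu_bo_unreserve(adev->gart.bo);
		return r;
	}

	/* Map it for the CPU; PTE updates are written through adev->gart.ptr. */
	r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);
	if (r)
		amdgpu_bo_unpin(adev->gart.bo);

	amdgpu_bo_unreserve(adev->gart.bo);
	return r;
}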
|
D | gmc_v10_0.c |
     333  job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);   in gmc_v10_0_flush_gpu_tlb()
     766  if (adev->gart.bo) {   in gmc_v10_0_gart_init()
     776  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v10_0_gart_init()
     777  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(MTYPE_UC) |   in gmc_v10_0_gart_init()
     942  if (adev->gart.bo == NULL) {   in gmc_v10_0_gart_enable()
     979  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));   in gmc_v10_0_gart_enable()
     981  adev->gart.ready = true;   in gmc_v10_0_gart_enable()
|
D | gmc_v6_0.c |
     474  if (adev->gart.bo == NULL) {   in gmc_v6_0_gart_enable()
     482  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v6_0_gart_enable()
     560  adev->gart.ready = true;   in gmc_v6_0_gart_enable()
     568  if (adev->gart.bo) {   in gmc_v6_0_gart_init()
     575  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v6_0_gart_init()
     576  adev->gart.gart_pte_flags = 0;   in gmc_v6_0_gart_init()
|
D | gmc_v9_0.c |
    1319  if (adev->gart.bo) {   in gmc_v9_0_gart_init()
    1327  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v9_0_gart_init()
    1328  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(MTYPE_UC) |   in gmc_v9_0_gart_init()
    1574  if (adev->gart.bo == NULL) {   in gmc_v9_0_gart_enable()
    1592  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));   in gmc_v9_0_gart_enable()
    1593  adev->gart.ready = true;   in gmc_v9_0_gart_enable()
|
D | gmc_v7_0.c |
     614  if (adev->gart.bo == NULL) {   in gmc_v7_0_gart_enable()
     622  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v7_0_gart_enable()
     710  adev->gart.ready = true;   in gmc_v7_0_gart_enable()
     718  if (adev->gart.bo) {   in gmc_v7_0_gart_init()
     726  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v7_0_gart_init()
     727  adev->gart.gart_pte_flags = 0;   in gmc_v7_0_gart_init()
|
D | gmc_v8_0.c |
     847  if (adev->gart.bo == NULL) {   in gmc_v8_0_gart_enable()
     855  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v8_0_gart_enable()
     960  adev->gart.ready = true;   in gmc_v8_0_gart_enable()
     968  if (adev->gart.bo) {   in gmc_v8_0_gart_init()
     976  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v8_0_gart_init()
     977  adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE;   in gmc_v8_0_gart_init()
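
All of the gmc_v*_gart_init() hits above size the table the same way: table_size = num_gpu_pages * 8, i.e. one 8-byte PTE per 4 KiB GPU page of GART aperture, with only the per-generation gart_pte_flags differing. A worked example of that sizing, using an assumed 256 MiB aperture:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Illustrative numbers only: a 256 MiB GART aperture with 4 KiB GPU pages. */
	uint64_t gart_size     = 256ULL << 20;
	uint64_t gpu_page_size = 4096;

	uint64_t num_gpu_pages = gart_size / gpu_page_size;	/* 65536 entries    */
	uint64_t table_size    = num_gpu_pages * 8;		/* 512 KiB of PTEs  */

	printf("%llu entries, %llu KiB page table\n",
	       (unsigned long long)num_gpu_pages,
	       (unsigned long long)(table_size >> 10));
	return 0;
}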
|
D | amdgpu_ttm.c |
     280  dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in amdgpu_ttm_map_buffer()
    1553  flags |= adev->gart.gart_pte_flags;   in amdgpu_ttm_tt_pte_flags()
    2137  job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);   in amdgpu_copy_buffer()
    2405  if (p >= adev->gart.num_cpu_pages)   in amdgpu_ttm_gtt_read()
    2408  page = adev->gart.pages[p];   in amdgpu_ttm_gtt_read()
    2414  kunmap(adev->gart.pages[p]);   in amdgpu_ttm_gtt_read()
|
D | gfxhub_v1_0.c | 54 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in gfxhub_v1_0_init_gart_aperture_regs()
|
/kernel/linux/linux-5.10/include/soc/tegra/ |
D | mc.h |
     100  int tegra_gart_suspend(struct gart_device *gart);
     101  int tegra_gart_resume(struct gart_device *gart);
     109  static inline int tegra_gart_suspend(struct gart_device *gart)   in tegra_gart_suspend() argument
     114  static inline int tegra_gart_resume(struct gart_device *gart)   in tegra_gart_resume() argument
     168  struct gart_device *gart;   member
|
/kernel/linux/linux-5.10/drivers/memory/tegra/ |
D | mc.c |
     694  mc->gart = tegra_gart_probe(&pdev->dev, mc);   in tegra_mc_probe()
     695  if (IS_ERR(mc->gart)) {   in tegra_mc_probe()
     697  PTR_ERR(mc->gart));   in tegra_mc_probe()
     698  mc->gart = NULL;   in tegra_mc_probe()
     710  if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {   in tegra_mc_suspend()
     711  err = tegra_gart_suspend(mc->gart);   in tegra_mc_suspend()
     724  if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {   in tegra_mc_resume()
     725  err = tegra_gart_resume(mc->gart);   in tegra_mc_resume()
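
Taken together, the mc.h and mc.c hits show how the Tegra memory controller treats the GART as optional: mc.h declares tegra_gart_suspend()/tegra_gart_resume() and provides static inline stubs for kernels built without the GART driver, and mc.c only calls them behind IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart. A reduced sketch of that pattern (the stub return values and the trimmed tegra_mc layout are assumptions for illustration):

#include <linux/errno.h>
#include <linux/kconfig.h>

struct gart_device;
struct tegra_mc {
	struct gart_device *gart;	/* NULL when no GART was probed */
};

#ifdef CONFIG_TEGRA_IOMMU_GART
int tegra_gart_suspend(struct gart_device *gart);
int tegra_gart_resume(struct gart_device *gart);
#else
/* Stubs keep callers compiling when the GART driver is configured out. */
static inline int tegra_gart_suspend(struct gart_device *gart)
{
	return -ENOSYS;
}
static inline int tegra_gart_resume(struct gart_device *gart)
{
	return -ENOSYS;
}
#endif

static int tegra_mc_suspend(struct tegra_mc *mc)
{
	/* IS_ENABLED() lets the compiler drop the call entirely when GART
	 * support is not built, without an #ifdef at the call site. */
	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {
		int err = tegra_gart_suspend(mc->gart);

		if (err < 0)
			return err;
	}

	return 0;
}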
|
/kernel/linux/linux-5.10/Documentation/devicetree/bindings/memory-controllers/ |
D | nvidia,tegra20-mc.txt |
       4  - compatible : "nvidia,tegra20-mc-gart"
      22  compatible = "nvidia,tegra20-mc-gart";
|
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/ |
D | nouveau_chan.h | 24 struct nvif_object gart; member
|
D | nouveau_chan.c |
     103  nvif_object_dtor(&chan->gart);   in nouveau_channel_del()
     389  nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart)   in nouveau_channel_init() argument
     451  ret = nvif_object_ctor(&chan->user, "abi16ChanGartCtxDma", gart,   in nouveau_channel_init()
     453  &chan->gart);   in nouveau_channel_init()
|