Home
last modified time | relevance | path

Searched refs:pgtable (Results 1 – 9 of 9) sorted by relevance

/drivers/iommu/amd/
io_pgtable.c:316 static u64 *fetch_pte(struct amd_io_pgtable *pgtable, in fetch_pte() argument
325 if (address > PM_LEVEL_SIZE(pgtable->mode)) in fetch_pte()
328 level = pgtable->mode - 1; in fetch_pte()
329 pte = &pgtable->root[PM_LEVEL_INDEX(level, address)]; in fetch_pte()
462 struct amd_io_pgtable *pgtable = io_pgtable_ops_to_data(ops); in iommu_v1_unmap_page() local
472 pte = fetch_pte(pgtable, iova, &unmap_size); in iommu_v1_unmap_page()
492 struct amd_io_pgtable *pgtable = io_pgtable_ops_to_data(ops); in iommu_v1_iova_to_phys() local
496 pte = fetch_pte(pgtable, iova, &pte_pgsize); in iommu_v1_iova_to_phys()
512 struct amd_io_pgtable *pgtable = container_of(iop, struct amd_io_pgtable, iop); in v1_free_pgtable() local
517 if (pgtable->mode == PAGE_MODE_NONE) in v1_free_pgtable()
[all …]
iommu.c:1909 int pgtable = amd_iommu_pgtable; in protection_domain_alloc() local
1922 pgtable = AMD_IOMMU_V1; in protection_domain_alloc()
1925 pgtable = AMD_IOMMU_V1; in protection_domain_alloc()
1928 switch (pgtable) { in protection_domain_alloc()
1939 pgtbl_ops = alloc_io_pgtable_ops(pgtable, &domain->iop.pgtbl_cfg, domain); in protection_domain_alloc()
/drivers/gpu/drm/etnaviv/
etnaviv_iommu.c:93 u32 pgtable; in etnaviv_iommuv1_restore() local
107 pgtable = (u32)v1_context->pgtable_dma; in etnaviv_iommuv1_restore()
109 gpu_write(gpu, VIVS_MC_MMU_FE_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
110 gpu_write(gpu, VIVS_MC_MMU_TX_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
111 gpu_write(gpu, VIVS_MC_MMU_PE_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
112 gpu_write(gpu, VIVS_MC_MMU_PEZ_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
113 gpu_write(gpu, VIVS_MC_MMU_RA_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
/drivers/iommu/
exynos-iommu.c:181 static sysmmu_pte_t *section_entry(sysmmu_pte_t *pgtable, sysmmu_iova_t iova) in section_entry() argument
183 return pgtable + lv1ent_offset(iova); in section_entry()
246 sysmmu_pte_t *pgtable; /* lv1 page table, 16KB */ member
273 phys_addr_t pgtable; /* assigned page table structure */ member
393 dev_dbg(data->sysmmu, "Page table base: %pa\n", &data->pgtable); in show_fault_information()
394 ent = section_entry(phys_to_virt(data->pgtable), fault_addr); in show_fault_information()
498 __sysmmu_set_ptbase(data, data->pgtable); in __sysmmu_enable()
748 domain->pgtable = (sysmmu_pte_t *)__get_free_pages(GFP_KERNEL, 2); in exynos_iommu_domain_alloc()
749 if (!domain->pgtable) in exynos_iommu_domain_alloc()
758 domain->pgtable[i] = ZERO_LV2LINK; in exynos_iommu_domain_alloc()
[all …]
Makefile:8 obj-$(CONFIG_IOMMU_IO_PGTABLE) += io-pgtable.o
9 obj-$(CONFIG_IOMMU_IO_PGTABLE_ARMV7S) += io-pgtable-arm-v7s.o
10 obj-$(CONFIG_IOMMU_IO_PGTABLE_LPAE) += io-pgtable-arm.o
omap-iommu.h:34 u32 *pgtable; member
omap-iommu.c:1426 iommu->pgtable = kzalloc(IOPGD_TABLE_SIZE, GFP_ATOMIC); in omap_iommu_attach_init()
1427 if (!iommu->pgtable) in omap_iommu_attach_init()
1434 if (WARN_ON(!IS_ALIGNED((long)iommu->pgtable, in omap_iommu_attach_init()
1448 kfree(iommu->pgtable); in omap_iommu_detach_fini()
1490 ret = omap_iommu_attach(oiommu, iommu->pgtable); in omap_iommu_attach_dev()
/drivers/iommu/arm/arm-smmu/
arm-smmu-qcom.c:108 struct io_pgtable *pgtable = in qcom_adreno_smmu_get_ttbr1_cfg() local
110 return &pgtable->cfg; in qcom_adreno_smmu_get_ttbr1_cfg()
123 struct io_pgtable *pgtable = io_pgtable_ops_to_pgtable(smmu_domain->pgtbl_ops); in qcom_adreno_smmu_set_ttbr0_cfg() local
138 cb->tcr[0] = arm_smmu_lpae_tcr(&pgtable->cfg); in qcom_adreno_smmu_set_ttbr0_cfg()
qcom_iommu.c:469 struct io_pgtable *pgtable = container_of(qcom_domain->pgtbl_ops, in qcom_iommu_flush_iotlb_all() local
475 qcom_iommu_tlb_sync(pgtable->cookie); in qcom_iommu_flush_iotlb_all()