Lines matching references to the identifier mmu (drivers/gpu/drm/etnaviv/etnaviv_mmu.c):
96 static void etnaviv_iommu_remove_mapping(struct etnaviv_iommu *mmu, in etnaviv_iommu_remove_mapping() argument
101 etnaviv_iommu_unmap(mmu, mapping->vram_node.start, in etnaviv_iommu_remove_mapping()
106 static int etnaviv_iommu_find_iova(struct etnaviv_iommu *mmu, in etnaviv_iommu_find_iova() argument
112 lockdep_assert_held(&mmu->lock); in etnaviv_iommu_find_iova()
124 ret = drm_mm_insert_node_in_range(&mmu->mm, node, in etnaviv_iommu_find_iova()
125 size, 0, mmu->last_iova, ~0UL, in etnaviv_iommu_find_iova()
126 mmu->last_iova ? DRM_MM_SEARCH_DEFAULT : DRM_MM_SEARCH_BELOW); in etnaviv_iommu_find_iova()
135 if (mmu->last_iova) { in etnaviv_iommu_find_iova()
136 mmu->last_iova = 0; in etnaviv_iommu_find_iova()
137 mmu->need_flush = true; in etnaviv_iommu_find_iova()
142 drm_mm_init_scan(&mmu->mm, size, 0, 0); in etnaviv_iommu_find_iova()
146 list_for_each_entry(free, &mmu->mappings, mmu_node) { in etnaviv_iommu_find_iova()
188 etnaviv_iommu_remove_mapping(mmu, m); in etnaviv_iommu_find_iova()
189 m->mmu = NULL; in etnaviv_iommu_find_iova()
200 mmu->need_flush = true; in etnaviv_iommu_find_iova()
206 int etnaviv_iommu_map_gem(struct etnaviv_iommu *mmu, in etnaviv_iommu_map_gem() argument
216 mutex_lock(&mmu->lock); in etnaviv_iommu_map_gem()
219 if (mmu->version == ETNAVIV_IOMMU_V1 && in etnaviv_iommu_map_gem()
226 list_add_tail(&mapping->mmu_node, &mmu->mappings); in etnaviv_iommu_map_gem()
227 mutex_unlock(&mmu->lock); in etnaviv_iommu_map_gem()
234 ret = etnaviv_iommu_find_iova(mmu, node, etnaviv_obj->base.size); in etnaviv_iommu_map_gem()
236 mutex_unlock(&mmu->lock); in etnaviv_iommu_map_gem()
240 mmu->last_iova = node->start + etnaviv_obj->base.size; in etnaviv_iommu_map_gem()
242 ret = etnaviv_iommu_map(mmu, node->start, sgt, etnaviv_obj->base.size, in etnaviv_iommu_map_gem()
247 mutex_unlock(&mmu->lock); in etnaviv_iommu_map_gem()
251 list_add_tail(&mapping->mmu_node, &mmu->mappings); in etnaviv_iommu_map_gem()
252 mutex_unlock(&mmu->lock); in etnaviv_iommu_map_gem()
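
The etnaviv_iommu_map_gem() lines above all follow one pattern: finding an IOVA range, programming the page tables and publishing the mapping on mmu->mappings happen under mmu->lock, because etnaviv_iommu_find_iova() may walk that same list to evict old mappings (see the lockdep_assert_held() and list_for_each_entry() lines). A minimal sketch of that pattern, assuming it sits alongside the functions above so it can call the static etnaviv_iommu_find_iova(); the example_ name and reduced argument list are illustrative, not the real signature:

/*
 * Hypothetical, simplified sketch of the locking pattern visible in the
 * etnaviv_iommu_map_gem() lines above.  The real function also handles
 * the MMUv1 linear-window fast path and programs the page tables via
 * etnaviv_iommu_map() before publishing the mapping.
 */
static int example_map_under_lock(struct etnaviv_iommu *mmu,
				  struct etnaviv_vram_mapping *mapping,
				  size_t size)
{
	struct drm_mm_node *node = &mapping->vram_node;
	int ret;

	mutex_lock(&mmu->lock);

	/* carve an IOVA range out of the GPU address space */
	ret = etnaviv_iommu_find_iova(mmu, node, size);
	if (ret < 0) {
		mutex_unlock(&mmu->lock);
		return ret;
	}

	/* remember where the linear allocator should resume searching */
	mmu->last_iova = node->start + size;

	/* ... etnaviv_iommu_map() would fill the page tables here ... */

	/* publish the mapping so eviction and teardown can find it */
	list_add_tail(&mapping->mmu_node, &mmu->mappings);
	mutex_unlock(&mmu->lock);

	return 0;
}

The unmap path in etnaviv_iommu_unmap_gem() takes the same lock before etnaviv_iommu_remove_mapping() tears the node down, so the drm_mm and the mappings list always stay consistent with each other.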
257 void etnaviv_iommu_unmap_gem(struct etnaviv_iommu *mmu, in etnaviv_iommu_unmap_gem() argument
262 mutex_lock(&mmu->lock); in etnaviv_iommu_unmap_gem()
265 if (mapping->vram_node.mm == &mmu->mm) in etnaviv_iommu_unmap_gem()
266 etnaviv_iommu_remove_mapping(mmu, mapping); in etnaviv_iommu_unmap_gem()
269 mutex_unlock(&mmu->lock); in etnaviv_iommu_unmap_gem()
272 void etnaviv_iommu_destroy(struct etnaviv_iommu *mmu) in etnaviv_iommu_destroy() argument
274 drm_mm_takedown(&mmu->mm); in etnaviv_iommu_destroy()
275 iommu_domain_free(mmu->domain); in etnaviv_iommu_destroy()
276 kfree(mmu); in etnaviv_iommu_destroy()
282 struct etnaviv_iommu *mmu; in etnaviv_iommu_new() local
284 mmu = kzalloc(sizeof(*mmu), GFP_KERNEL); in etnaviv_iommu_new()
285 if (!mmu) in etnaviv_iommu_new()
289 mmu->domain = etnaviv_iommuv1_domain_alloc(gpu); in etnaviv_iommu_new()
292 mmu->domain = etnaviv_iommuv2_domain_alloc(gpu); in etnaviv_iommu_new()
296 if (!mmu->domain) { in etnaviv_iommu_new()
298 kfree(mmu); in etnaviv_iommu_new()
302 mmu->gpu = gpu; in etnaviv_iommu_new()
303 mmu->version = version; in etnaviv_iommu_new()
304 mutex_init(&mmu->lock); in etnaviv_iommu_new()
305 INIT_LIST_HEAD(&mmu->mappings); in etnaviv_iommu_new()
307 drm_mm_init(&mmu->mm, mmu->domain->geometry.aperture_start, in etnaviv_iommu_new()
308 mmu->domain->geometry.aperture_end - in etnaviv_iommu_new()
309 mmu->domain->geometry.aperture_start + 1); in etnaviv_iommu_new()
311 iommu_set_fault_handler(mmu->domain, etnaviv_fault_handler, gpu->dev); in etnaviv_iommu_new()
313 return mmu; in etnaviv_iommu_new()
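
The etnaviv_iommu_new() lines show the setup sequence: allocate the context, pick a V1 or V2 page-table domain, initialise the lock and mapping list, hand the domain's aperture to a drm_mm that manages IOVA allocation, and install the fault handler. A condensed, hypothetical reassembly of those lines; the version parameter, the enum type name and the simplified error handling are assumptions for illustration:

static struct etnaviv_iommu *example_iommu_new(struct etnaviv_gpu *gpu,
					       enum etnaviv_iommu_version version)
{
	struct etnaviv_iommu *mmu;

	mmu = kzalloc(sizeof(*mmu), GFP_KERNEL);
	if (!mmu)
		return ERR_PTR(-ENOMEM);

	/* pick the page-table format matching the GPU's MMU revision */
	if (version == ETNAVIV_IOMMU_V1)
		mmu->domain = etnaviv_iommuv1_domain_alloc(gpu);
	else
		mmu->domain = etnaviv_iommuv2_domain_alloc(gpu);

	if (!mmu->domain) {
		kfree(mmu);
		return ERR_PTR(-ENOMEM);
	}

	mmu->gpu = gpu;
	mmu->version = version;
	mutex_init(&mmu->lock);
	INIT_LIST_HEAD(&mmu->mappings);

	/* the drm_mm hands out IOVAs from the domain's aperture */
	drm_mm_init(&mmu->mm, mmu->domain->geometry.aperture_start,
		    mmu->domain->geometry.aperture_end -
		    mmu->domain->geometry.aperture_start + 1);

	iommu_set_fault_handler(mmu->domain, etnaviv_fault_handler, gpu->dev);

	return mmu;
}

etnaviv_iommu_destroy() undoes this in the opposite order: drm_mm_takedown(), iommu_domain_free(), then kfree() of the context, as the lines above show.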
318 if (gpu->mmu->version == ETNAVIV_IOMMU_V1) in etnaviv_iommu_restore()
327 struct etnaviv_iommu *mmu = gpu->mmu; in etnaviv_iommu_get_cmdbuf_va() local
329 if (mmu->version == ETNAVIV_IOMMU_V1) { in etnaviv_iommu_get_cmdbuf_va()
337 mutex_lock(&mmu->lock); in etnaviv_iommu_get_cmdbuf_va()
338 ret = etnaviv_iommu_find_iova(mmu, &buf->vram_node, in etnaviv_iommu_get_cmdbuf_va()
341 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_cmdbuf_va()
344 ret = iommu_map(mmu->domain, buf->vram_node.start, buf->paddr, in etnaviv_iommu_get_cmdbuf_va()
348 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_cmdbuf_va()
356 mmu->last_iova = buf->vram_node.start + buf->size + SZ_64K; in etnaviv_iommu_get_cmdbuf_va()
357 gpu->mmu->need_flush = true; in etnaviv_iommu_get_cmdbuf_va()
358 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_cmdbuf_va()
367 struct etnaviv_iommu *mmu = gpu->mmu; in etnaviv_iommu_put_cmdbuf_va() local
369 if (mmu->version == ETNAVIV_IOMMU_V2 && buf->vram_node.allocated) { in etnaviv_iommu_put_cmdbuf_va()
370 mutex_lock(&mmu->lock); in etnaviv_iommu_put_cmdbuf_va()
371 iommu_unmap(mmu->domain, buf->vram_node.start, buf->size); in etnaviv_iommu_put_cmdbuf_va()
373 mutex_unlock(&mmu->lock); in etnaviv_iommu_put_cmdbuf_va()
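
The last two groups show the command-buffer handling: with MMUv1 the buffer is reachable directly and no per-buffer mapping is created, while with MMUv2 it gets its own drm_mm node and is mapped and unmapped straight through iommu_map()/iommu_unmap() on the domain. A hypothetical sketch of the MMUv2 mapping side, assuming it lives alongside the functions above; the IOMMU_READ flag and the error handling are assumptions:

static int example_map_cmdbuf_v2(struct etnaviv_iommu *mmu,
				 struct drm_mm_node *node,
				 phys_addr_t paddr, size_t size)
{
	int ret;

	mutex_lock(&mmu->lock);

	ret = etnaviv_iommu_find_iova(mmu, node, size);
	if (ret < 0) {
		mutex_unlock(&mmu->lock);
		return ret;
	}

	/* map the buffer's physical pages at the chosen IOVA */
	ret = iommu_map(mmu->domain, node->start, paddr, size, IOMMU_READ);
	if (ret < 0) {
		drm_mm_remove_node(node);
		mutex_unlock(&mmu->lock);
		return ret;
	}

	/*
	 * Leave a gap behind the buffer (the listing uses SZ_64K) so the
	 * front-end's read-ahead never runs into an unmapped page, and
	 * flag that the GPU must flush its MMU TLB before the next submit.
	 */
	mmu->last_iova = node->start + size + SZ_64K;
	mmu->need_flush = true;

	mutex_unlock(&mmu->lock);
	return 0;
}

etnaviv_iommu_put_cmdbuf_va() reverses this under the same lock with iommu_unmap(), as the final lines show.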