Searched refs:aspace (Results 1 – 25 of 44) sorted by relevance

/drivers/gpu/drm/msm/
msm_gem_vma.c
14 struct msm_gem_address_space *aspace = container_of(kref, in msm_gem_address_space_destroy() local
17 drm_mm_takedown(&aspace->mm); in msm_gem_address_space_destroy()
18 if (aspace->mmu) in msm_gem_address_space_destroy()
19 aspace->mmu->funcs->destroy(aspace->mmu); in msm_gem_address_space_destroy()
20 put_pid(aspace->pid); in msm_gem_address_space_destroy()
21 kfree(aspace); in msm_gem_address_space_destroy()
25 void msm_gem_address_space_put(struct msm_gem_address_space *aspace) in msm_gem_address_space_put() argument
27 if (aspace) in msm_gem_address_space_put()
28 kref_put(&aspace->kref, msm_gem_address_space_destroy); in msm_gem_address_space_put()
32 msm_gem_address_space_get(struct msm_gem_address_space *aspace) in msm_gem_address_space_get() argument
[all …]
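
The msm_gem_vma.c hits sketch the address-space lifecycle: msm_gem_address_space_get()/_put() wrap a kref, and the final put runs msm_gem_address_space_destroy(), which takes down the drm_mm, destroys the MMU, and drops the pid before freeing. A minimal userspace sketch of that refcounting shape, with a plain int standing in for struct kref (all names below are illustrative, not the kernel API):

    #include <stdio.h>
    #include <stdlib.h>

    struct address_space {
        int refcount;            /* stand-in for struct kref */
        const char *name;
    };

    static void aspace_destroy(struct address_space *as)
    {
        /* the kernel version also runs drm_mm_takedown() and the
         * MMU destroy hook before freeing */
        printf("destroying %s\n", as->name);
        free(as);
    }

    static struct address_space *aspace_get(struct address_space *as)
    {
        if (as)
            as->refcount++;
        return as;
    }

    static void aspace_put(struct address_space *as)
    {
        /* NULL-tolerant, like msm_gem_address_space_put() */
        if (as && --as->refcount == 0)
            aspace_destroy(as);
    }

    int main(void)
    {
        struct address_space *as = malloc(sizeof(*as));

        as->refcount = 1;
        as->name = "gpu";

        aspace_get(as);    /* a second user takes a reference */
        aspace_put(as);    /* first user done: object survives */
        aspace_put(as);    /* final put: destroy runs */
        return 0;
    }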

msm_gem.c
298 struct msm_gem_address_space *aspace) in add_vma() argument
309 vma->aspace = aspace; in add_vma()
317 struct msm_gem_address_space *aspace) in lookup_vma() argument
325 if (vma->aspace == aspace) in lookup_vma()
356 if (vma->aspace) { in put_iova_spaces()
357 msm_gem_purge_vma(vma->aspace, vma); in put_iova_spaces()
359 msm_gem_close_vma(vma->aspace, vma); in put_iova_spaces()
379 struct msm_gem_address_space *aspace, uint64_t *iova, in get_iova_locked() argument
387 vma = lookup_vma(obj, aspace); in get_iova_locked()
390 vma = add_vma(obj, aspace); in get_iova_locked()
[all …]
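
The msm_gem.c hits show the per-object mapping model: each GEM object keeps at most one VMA per address space, and get_iova_locked() tries lookup_vma() before falling back to add_vma(). A hedged sketch of that lookup-then-create shape with simplified, hypothetical types (the kernel versions also hold the object lock and allocate the iova):

    #include <stdlib.h>

    struct vma {
        struct vma *next;
        const void *aspace;      /* identity of the owning address space */
    };

    struct object { struct vma *vmas; };

    static struct vma *lookup_vma(struct object *obj, const void *aspace)
    {
        for (struct vma *v = obj->vmas; v; v = v->next)
            if (v->aspace == aspace)   /* match by aspace, as in msm_gem.c */
                return v;
        return NULL;
    }

    static struct vma *get_or_add_vma(struct object *obj, const void *aspace)
    {
        struct vma *v = lookup_vma(obj, aspace);

        if (!v) {                      /* first use in this aspace: add_vma() */
            v = calloc(1, sizeof(*v));
            if (!v)
                return NULL;
            v->aspace = aspace;
            v->next = obj->vmas;
            obj->vmas = v;
        }
        return v;
    }

    int main(void)
    {
        struct object obj = { 0 };
        int gpu, kms;                  /* two dummy address-space identities */

        struct vma *a = get_or_add_vma(&obj, &gpu);
        struct vma *b = get_or_add_vma(&obj, &gpu);   /* same vma returned */
        struct vma *c = get_or_add_vma(&obj, &kms);   /* distinct vma */

        return !(a == b && a != c);
    }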

msm_gem.h
43 struct msm_gem_address_space *aspace; member
111 struct msm_gem_address_space *aspace, uint64_t *iova);
113 struct msm_gem_address_space *aspace, uint64_t *iova,
116 struct msm_gem_address_space *aspace, uint64_t *iova);
118 struct msm_gem_address_space *aspace, uint64_t *iova);
120 struct msm_gem_address_space *aspace);
122 struct msm_gem_address_space *aspace);
124 struct msm_gem_address_space *aspace);
147 uint32_t flags, struct msm_gem_address_space *aspace,
150 struct msm_gem_address_space *aspace);
[all …]

msm_gpu.c
490 gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu); in fault_worker()
805 struct msm_gem_address_space *aspace = NULL; in msm_gpu_create_private_address_space() local
814 aspace = gpu->funcs->create_private_address_space(gpu); in msm_gpu_create_private_address_space()
815 if (!IS_ERR(aspace)) in msm_gpu_create_private_address_space()
816 aspace->pid = get_pid(task_pid(task)); in msm_gpu_create_private_address_space()
819 if (IS_ERR_OR_NULL(aspace)) in msm_gpu_create_private_address_space()
820 aspace = msm_gem_address_space_get(gpu->aspace); in msm_gpu_create_private_address_space()
822 return aspace; in msm_gpu_create_private_address_space()
909 gpu->aspace = gpu->funcs->create_address_space(gpu, pdev); in msm_gpu_init()
911 if (gpu->aspace == NULL) in msm_gpu_init()
[all …]
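
msm_gpu.c shows the per-process address-space policy: msm_gpu_create_private_address_space() asks the GPU's create_private_address_space() hook for a private space and, if that yields an error or NULL, falls back to taking a reference on the shared gpu->aspace. A compact userspace model of that decision, with ERR_PTR()/IS_ERR_OR_NULL() re-implemented locally for illustration:

    #include <errno.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MAX_ERRNO 4095
    static void *ERR_PTR(long err) { return (void *)err; }
    static int IS_ERR_OR_NULL(const void *p)
    {
        return !p || (uintptr_t)p >= (uintptr_t)-MAX_ERRNO;
    }

    struct aspace { int refs; const char *name; };

    static struct aspace shared = { 1, "shared" };

    static struct aspace *aspace_get(struct aspace *as) { as->refs++; return as; }

    /* stand-in for gpu->funcs->create_private_address_space() */
    static struct aspace *create_private(int per_process_mmu)
    {
        static struct aspace priv = { 1, "private" };

        if (!per_process_mmu)
            return ERR_PTR(-ENODEV);  /* hw can't do per-process tables */
        return &priv;
    }

    static struct aspace *create_private_or_fallback(int per_process_mmu)
    {
        struct aspace *as = create_private(per_process_mmu);

        /* same decision as msm_gpu_create_private_address_space():
         * on error or NULL, share the global space via a new reference */
        if (IS_ERR_OR_NULL(as))
            as = aspace_get(&shared);
        return as;
    }

    int main(void)
    {
        printf("%s\n", create_private_or_fallback(0)->name);  /* shared */
        printf("%s\n", create_private_or_fallback(1)->name);  /* private */
        return 0;
    }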

msm_drv.h
261 int msm_gem_init_vma(struct msm_gem_address_space *aspace,
264 void msm_gem_purge_vma(struct msm_gem_address_space *aspace,
266 void msm_gem_unmap_vma(struct msm_gem_address_space *aspace,
268 int msm_gem_map_vma(struct msm_gem_address_space *aspace,
271 void msm_gem_close_vma(struct msm_gem_address_space *aspace,
276 msm_gem_address_space_get(struct msm_gem_address_space *aspace);
278 void msm_gem_address_space_put(struct msm_gem_address_space *aspace);
309 struct msm_gem_address_space *aspace, bool needs_dirtyfb);
311 struct msm_gem_address_space *aspace, bool needed_dirtyfb);
313 struct msm_gem_address_space *aspace, int plane);

msm_fb.c
74 struct msm_gem_address_space *aspace, in msm_framebuffer_prepare() argument
85 ret = msm_gem_get_and_pin_iova(fb->obj[i], aspace, &iova); in msm_framebuffer_prepare()
95 struct msm_gem_address_space *aspace, in msm_framebuffer_cleanup() argument
105 msm_gem_unpin_iova(fb->obj[i], aspace); in msm_framebuffer_cleanup()
109 struct msm_gem_address_space *aspace, int plane) in msm_framebuffer_iova() argument
113 return msm_gem_iova(fb->obj[plane], aspace) + fb->offsets[plane]; in msm_framebuffer_iova()
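
msm_framebuffer_iova() resolves a plane's DMA address as the pinned iova of the plane's backing object plus the plane's byte offset. The same arithmetic, sketched with a hypothetical standalone type and an NV12-style two-plane layout as the example:

    #include <stdint.h>
    #include <stdio.h>

    /* hypothetical stand-in for the framebuffer's plane bookkeeping */
    struct framebuffer {
        uint64_t obj_iova[4];   /* pinned base iova of each plane's object */
        uint32_t offsets[4];    /* byte offset of the plane in the object */
    };

    static uint64_t fb_plane_iova(const struct framebuffer *fb, int plane)
    {
        /* same shape as msm_framebuffer_iova(): pinned base + offset */
        return fb->obj_iova[plane] + fb->offsets[plane];
    }

    int main(void)
    {
        struct framebuffer fb = {
            .obj_iova = { 0x100000, 0x100000 },  /* NV12: one BO, two planes */
            .offsets  = { 0, 1920 * 1080 },      /* UV data follows Y plane */
        };

        printf("Y: %#llx  UV: %#llx\n",
               (unsigned long long)fb_plane_iova(&fb, 0),
               (unsigned long long)fb_plane_iova(&fb, 1));
        return 0;
    }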

msm_ringbuffer.c
82 gpu->aspace, &ring->bo, &ring->iova); in msm_ringbuffer_new()
133 msm_gem_kernel_put(ring->bo, ring->gpu->aspace); in msm_ringbuffer_destroy()

/drivers/vme/bridges/
vme_fake.c
48 u32 aspace; member
56 u32 aspace; member
156 dma_addr_t buf_base, u32 aspace, u32 cycle) in fake_slave_set() argument
168 switch (aspace) { in fake_slave_set()
212 bridge->slaves[i].aspace = aspace; in fake_slave_set()
225 dma_addr_t *buf_base, u32 *aspace, u32 *cycle) in fake_slave_get() argument
240 *aspace = bridge->slaves[i].aspace; in fake_slave_get()
253 u32 aspace, u32 cycle, u32 dwidth) in fake_master_set() argument
296 switch (aspace) { in fake_master_set()
320 bridge->masters[i].aspace = aspace; in fake_master_set()
[all …]

vme_tsi148.c
473 dma_addr_t pci_base, u32 aspace, u32 cycle) in tsi148_slave_set() argument
489 switch (aspace) { in tsi148_slave_set()
617 dma_addr_t *pci_base, u32 *aspace, u32 *cycle) in tsi148_slave_get() argument
655 *aspace = 0; in tsi148_slave_get()
663 *aspace |= VME_A16; in tsi148_slave_get()
667 *aspace |= VME_A24; in tsi148_slave_get()
671 *aspace |= VME_A32; in tsi148_slave_get()
675 *aspace |= VME_A64; in tsi148_slave_get()
807 unsigned long long vme_base, unsigned long long size, u32 aspace, in tsi148_master_set() argument
964 switch (aspace) { in tsi148_master_set()
[all …]
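
tsi148_slave_get() reconstructs the aspace flags from the window's hardware control register, OR-ing in one VME_Axx bit per decoded mode. A hedged sketch of that decode, with both the flag values and the register masks invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    /* illustrative flag values; the real VME_Axx flags come from vme.h */
    enum { VME_A16 = 1, VME_A24 = 2, VME_A32 = 4, VME_A64 = 8 };

    /* invented control-register field layout, for the sketch only */
    #define CTL_AS_MASK 0x3u
    #define CTL_AS_A16  0x0u
    #define CTL_AS_A24  0x1u
    #define CTL_AS_A32  0x2u
    #define CTL_AS_A64  0x3u

    static uint32_t decode_aspace(uint32_t ctl)
    {
        uint32_t aspace = 0;

        /* one VME_Axx bit OR-ed in per decoded mode, as in tsi148_slave_get() */
        switch (ctl & CTL_AS_MASK) {
        case CTL_AS_A16: aspace |= VME_A16; break;
        case CTL_AS_A24: aspace |= VME_A24; break;
        case CTL_AS_A32: aspace |= VME_A32; break;
        case CTL_AS_A64: aspace |= VME_A64; break;
        }
        return aspace;
    }

    int main(void)
    {
        printf("%u\n", (unsigned)decode_aspace(CTL_AS_A24));  /* 2 == VME_A24 */
        return 0;
    }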

vme_ca91cx42.c
335 dma_addr_t pci_base, u32 aspace, u32 cycle) in ca91cx42_slave_set() argument
349 switch (aspace) { in ca91cx42_slave_set()
441 dma_addr_t *pci_base, u32 *aspace, u32 *cycle) in ca91cx42_slave_get() argument
467 *aspace = 0; in ca91cx42_slave_get()
474 *aspace = VME_A16; in ca91cx42_slave_get()
476 *aspace = VME_A24; in ca91cx42_slave_get()
478 *aspace = VME_A32; in ca91cx42_slave_get()
480 *aspace = VME_USER1; in ca91cx42_slave_get()
482 *aspace = VME_USER2; in ca91cx42_slave_get()
590 unsigned long long vme_base, unsigned long long size, u32 aspace, in ca91cx42_master_set() argument
[all …]

/drivers/vme/
vme.c
167 u32 aspace, cycle, dwidth; in vme_get_size() local
172 &aspace, &cycle, &dwidth); in vme_get_size()
179 &buf_base, &aspace, &cycle); in vme_get_size()
193 int vme_check_window(u32 aspace, unsigned long long vme_base, in vme_check_window() argument
201 switch (aspace) { in vme_check_window()
363 dma_addr_t buf_base, u32 aspace, u32 cycle) in vme_slave_set() argument
381 if (!(((image->address_attr & aspace) == aspace) && in vme_slave_set()
387 retval = vme_check_window(aspace, vme_base, size); in vme_slave_set()
392 aspace, cycle); in vme_slave_set()
413 dma_addr_t *buf_base, u32 *aspace, u32 *cycle) in vme_slave_get() argument
[all …]
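
vme_check_window() validates that a requested window fits within the range addressable by the given aspace flag (A16 means 16 address lines, and so on). A self-contained sketch of that bounds check, assuming illustrative flag values and covering only three address spaces:

    #include <errno.h>
    #include <stdint.h>
    #include <stdio.h>

    /* illustrative flag values; the real VME_Axx definitions live in vme.h */
    enum { VME_A16 = 1, VME_A24 = 2, VME_A32 = 4 };

    static int check_window(uint32_t aspace, unsigned long long vme_base,
                            unsigned long long size)
    {
        unsigned long long limit;

        switch (aspace) {
        case VME_A16: limit = 1ULL << 16; break;
        case VME_A24: limit = 1ULL << 24; break;
        case VME_A32: limit = 1ULL << 32; break;
        default:      return -EINVAL;
        }
        /* the window must not wrap and must end inside the address space */
        if (vme_base + size < vme_base || vme_base + size > limit)
            return -EFAULT;
        return 0;
    }

    int main(void)
    {
        printf("%d\n", check_window(VME_A16, 0xF000, 0x1000));  /* 0: fits */
        printf("%d\n", check_window(VME_A16, 0xF000, 0x2000));  /* -EFAULT */
        return 0;
    }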

vme_bridge.h
49 u32 aspace; member
86 u32 aspace; /* Address space of error window*/ member
186 struct vme_bridge *bridge, u32 aspace,

/drivers/gpu/drm/msm/disp/mdp4/
mdp4_kms.c
130 struct msm_gem_address_space *aspace = kms->aspace; in mdp4_destroy() local
133 msm_gem_unpin_iova(mdp4_kms->blank_cursor_bo, kms->aspace); in mdp4_destroy()
136 if (aspace) { in mdp4_destroy()
137 aspace->mmu->funcs->detach(aspace->mmu); in mdp4_destroy()
138 msm_gem_address_space_put(aspace); in mdp4_destroy()
407 struct msm_gem_address_space *aspace; in mdp4_kms_init() local
519 aspace = msm_gem_address_space_create(mmu, in mdp4_kms_init()
522 if (IS_ERR(aspace)) { in mdp4_kms_init()
525 ret = PTR_ERR(aspace); in mdp4_kms_init()
529 kms->aspace = aspace; in mdp4_kms_init()
[all …]
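
The KMS teardown here (mdp4_destroy(), and likewise mdp5_kms_destroy() and a6xx_gmu_memory_free() further down) follows a fixed order: detach the MMU through aspace->mmu->funcs->detach() first, then drop the address-space reference. A minimal sketch of that ordering with invented stand-in types:

    #include <stdio.h>
    #include <stdlib.h>

    struct mmu;
    struct mmu_funcs { void (*detach)(struct mmu *); };
    struct mmu { const struct mmu_funcs *funcs; };

    struct address_space { int refcount; struct mmu *mmu; };

    static void mmu_detach(struct mmu *mmu)
    {
        (void)mmu;
        printf("mmu detached\n");
    }

    static const struct mmu_funcs fake_funcs = { .detach = mmu_detach };

    static void aspace_put(struct address_space *as)
    {
        if (--as->refcount == 0) {
            printf("aspace released\n");
            free(as->mmu);
            free(as);
        }
    }

    int main(void)
    {
        struct address_space *as = malloc(sizeof(*as));

        as->refcount = 1;
        as->mmu = malloc(sizeof(*as->mmu));
        as->mmu->funcs = &fake_funcs;

        /* mdp4_destroy() ordering: detach the MMU first, then drop
         * the last reference on the address space */
        as->mmu->funcs->detach(as->mmu);
        aspace_put(as);
        return 0;
    }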

mdp4_plane.c
105 return msm_framebuffer_prepare(new_state->fb, kms->aspace, false); in mdp4_plane_prepare_fb()
120 msm_framebuffer_cleanup(fb, kms->aspace, false); in mdp4_plane_cleanup_fb()
171 msm_framebuffer_iova(fb, kms->aspace, 0)); in mdp4_plane_set_scanout()
173 msm_framebuffer_iova(fb, kms->aspace, 1)); in mdp4_plane_set_scanout()
175 msm_framebuffer_iova(fb, kms->aspace, 2)); in mdp4_plane_set_scanout()
177 msm_framebuffer_iova(fb, kms->aspace, 3)); in mdp4_plane_set_scanout()

/drivers/gpu/drm/msm/adreno/
a5xx_preempt.c
234 MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->aspace, &bo, &iova); in preempt_init_ring()
242 MSM_BO_WC, gpu->aspace, &counters_bo, &counters_iova); in preempt_init_ring()
244 msm_gem_kernel_put(bo, gpu->aspace); in preempt_init_ring()
275 msm_gem_kernel_put(a5xx_gpu->preempt_bo[i], gpu->aspace); in a5xx_preempt_fini()
276 msm_gem_kernel_put(a5xx_gpu->preempt_counters_bo[i], gpu->aspace); in a5xx_preempt_fini()

a2xx_gpu.c
109 msm_gpummu_params(gpu->aspace->mmu, &pt_base, &tran_error); in a2xx_hw_init()
455 struct msm_gem_address_space *aspace; in a2xx_create_address_space() local
457 aspace = msm_gem_address_space_create(mmu, "gpu", SZ_16M, in a2xx_create_address_space()
460 if (IS_ERR(aspace) && !IS_ERR(mmu)) in a2xx_create_address_space()
463 return aspace; in a2xx_create_address_space()
535 if (!gpu->aspace) { in a2xx_gpu_init()

a6xx_gmu.c
1137 msm_gem_kernel_put(gmu->hfi.obj, gmu->aspace); in a6xx_gmu_memory_free()
1138 msm_gem_kernel_put(gmu->debug.obj, gmu->aspace); in a6xx_gmu_memory_free()
1139 msm_gem_kernel_put(gmu->icache.obj, gmu->aspace); in a6xx_gmu_memory_free()
1140 msm_gem_kernel_put(gmu->dcache.obj, gmu->aspace); in a6xx_gmu_memory_free()
1141 msm_gem_kernel_put(gmu->dummy.obj, gmu->aspace); in a6xx_gmu_memory_free()
1142 msm_gem_kernel_put(gmu->log.obj, gmu->aspace); in a6xx_gmu_memory_free()
1144 gmu->aspace->mmu->funcs->detach(gmu->aspace->mmu); in a6xx_gmu_memory_free()
1145 msm_gem_address_space_put(gmu->aspace); in a6xx_gmu_memory_free()
1174 ret = msm_gem_get_and_pin_iova_range(bo->obj, gmu->aspace, &bo->iova, in a6xx_gmu_memory_alloc()
1197 gmu->aspace = msm_gem_address_space_create(mmu, "gmu", 0x0, 0x80000000); in a6xx_gmu_memory_probe()
[all …]

a6xx_gpu.c
112 if (msm_iommu_pagetable_params(ctx->aspace->mmu, &ttbr, &asid)) in a6xx_set_pagetable()
848 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_ucode_init()
1073 gpu->aspace, &a6xx_gpu->shadow_bo, in hw_init()
1249 gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu); in a6xx_fault_handler()
1603 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
1608 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
1666 struct msm_gem_address_space *aspace; in a6xx_create_address_space() local
1694 aspace = msm_gem_address_space_create(mmu, "gpu", in a6xx_create_address_space()
1697 if (IS_ERR(aspace) && !IS_ERR(mmu)) in a6xx_create_address_space()
1700 return aspace; in a6xx_create_address_space()
[all …]
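
a2xx_create_address_space() and a6xx_create_address_space() share one cleanup rule: when address-space creation fails but the MMU was created, the MMU has no owner yet and must be destroyed before the error propagates (the IS_ERR(aspace) && !IS_ERR(mmu) branch). A sketch of that ownership hand-off with simplified types, free() standing in for the MMU destroy hook:

    #include <errno.h>
    #include <stdint.h>
    #include <stdlib.h>

    #define MAX_ERRNO 4095
    static void *ERR_PTR(long err) { return (void *)err; }
    static int IS_ERR(const void *p) { return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO; }

    struct mmu { int dummy; };
    struct aspace { struct mmu *mmu; };   /* owns the mmu once created */

    /* stand-in for msm_gem_address_space_create(); 'fail' forces the error path */
    static struct aspace *aspace_create(struct mmu *mmu, int fail)
    {
        struct aspace *as;

        if (fail)
            return ERR_PTR(-ENOMEM);
        as = malloc(sizeof(*as));
        if (!as)
            return ERR_PTR(-ENOMEM);
        as->mmu = mmu;                    /* ownership transfers on success */
        return as;
    }

    static struct aspace *create_address_space(int fail)
    {
        struct mmu *mmu = malloc(sizeof(*mmu));
        struct aspace *as = aspace_create(mmu, fail);

        /* the aspace never took ownership on failure, so release the
         * mmu here, mirroring IS_ERR(aspace) && !IS_ERR(mmu) above */
        if (IS_ERR(as))
            free(mmu);
        return as;
    }

    int main(void)
    {
        struct aspace *ok = create_address_space(0);
        struct aspace *bad = create_address_space(1);

        return !(!IS_ERR(ok) && IS_ERR(bad));
    }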

a5xx_gpu.c
925 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_hw_init()
1034 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
1039 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
1044 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
1049 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
1438 SZ_1M, MSM_BO_WC, gpu->aspace, in a5xx_crashdumper_init()
1539 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1547 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1785 if (gpu->aspace) in a5xx_gpu_init()
1786 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, a5xx_fault_handler); in a5xx_gpu_init()

a5xx_debugfs.c
119 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in reset_set()
125 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in reset_set()

/drivers/gpu/drm/msm/disp/mdp5/
mdp5_kms.c
215 struct msm_gem_address_space *aspace = kms->aspace; in mdp5_kms_destroy() local
224 if (aspace) { in mdp5_kms_destroy()
225 aspace->mmu->funcs->detach(aspace->mmu); in mdp5_kms_destroy()
226 msm_gem_address_space_put(aspace); in mdp5_kms_destroy()
582 struct msm_gem_address_space *aspace; in mdp5_kms_init() local
635 aspace = msm_gem_address_space_create(mmu, "mdp5", in mdp5_kms_init()
638 if (IS_ERR(aspace)) { in mdp5_kms_init()
641 ret = PTR_ERR(aspace); in mdp5_kms_init()
645 kms->aspace = aspace; in mdp5_kms_init()
649 aspace = NULL; in mdp5_kms_init()

/drivers/staging/vme/devices/
vme_user.h
14 __u32 aspace; /* Address Space */ member
36 __u32 aspace; /* Address Space */ member

/drivers/gpu/drm/msm/disp/dpu1/
dpu_formats.c
773 struct msm_gem_address_space *aspace, in _dpu_format_populate_addrs_ubwc() argument
785 if (aspace) in _dpu_format_populate_addrs_ubwc()
786 base_addr = msm_framebuffer_iova(fb, aspace, 0); in _dpu_format_populate_addrs_ubwc()
864 struct msm_gem_address_space *aspace, in _dpu_format_populate_addrs_linear() argument
881 if (aspace) in _dpu_format_populate_addrs_linear()
883 msm_framebuffer_iova(fb, aspace, i); in _dpu_format_populate_addrs_linear()
894 struct msm_gem_address_space *aspace, in dpu_format_populate_layout() argument
926 ret = _dpu_format_populate_addrs_ubwc(aspace, fb, layout); in dpu_format_populate_layout()
928 ret = _dpu_format_populate_addrs_linear(aspace, fb, layout); in dpu_format_populate_layout()
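
Both _dpu_format_populate_addrs_ubwc() and _dpu_format_populate_addrs_linear() guard the lookup with if (aspace): plane addresses are resolved through msm_framebuffer_iova() only when an address space exists, and dpu_format_populate_layout() then dispatches to the UBWC or linear variant. A small sketch of that guard, with the types and plane stride invented:

    #include <stdint.h>
    #include <stdio.h>

    struct aspace { uint64_t base; };    /* hypothetical stand-in */

    /* stand-in for msm_framebuffer_iova(): needs an aspace to resolve */
    static uint64_t fb_iova(const struct aspace *as, int plane)
    {
        return as->base + (uint64_t)plane * 0x1000;  /* invented stride */
    }

    /* mirrors the "if (aspace)" guard in both populate helpers above */
    static uint64_t plane_addr(const struct aspace *as, int plane)
    {
        if (as)
            return fb_iova(as, plane);
        return 0;                        /* no aspace: no iova to report */
    }

    int main(void)
    {
        struct aspace as = { 0x40000000 };

        printf("%#llx %#llx\n",
               (unsigned long long)plane_addr(&as, 1),
               (unsigned long long)plane_addr(NULL, 1));
        return 0;
    }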

dpu_kms.c
889 if (!dpu_kms->base.aspace) in _dpu_kms_mmu_destroy()
892 mmu = dpu_kms->base.aspace->mmu; in _dpu_kms_mmu_destroy()
895 msm_gem_address_space_put(dpu_kms->base.aspace); in _dpu_kms_mmu_destroy()
897 dpu_kms->base.aspace = NULL; in _dpu_kms_mmu_destroy()
903 struct msm_gem_address_space *aspace; in _dpu_kms_mmu_init() local
915 aspace = msm_gem_address_space_create(mmu, "dpu1", in _dpu_kms_mmu_init()
918 if (IS_ERR(aspace)) { in _dpu_kms_mmu_init()
920 return PTR_ERR(aspace); in _dpu_kms_mmu_init()
923 dpu_kms->base.aspace = aspace; in _dpu_kms_mmu_init()

dpu_formats.h
62 struct msm_gem_address_space *aspace,
