
Searched refs:vmm (Results 1 – 25 of 93) sorted by relevance


/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
75 struct nvkm_vmm *vmm; member
113 VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \
129 if (it->vmm->func->flush) { in nvkm_vmm_flush()
131 it->vmm->func->flush(it->vmm, it->flush); in nvkm_vmm_flush()
145 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_unref_pdes() local
159 func->sparse(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
162 func->unmap(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
170 func->pde(vmm, pgd, pdei); in nvkm_vmm_unref_pdes()
177 func->pde(vmm, pgd, pdei); in nvkm_vmm_unref_pdes()
190 nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt); in nvkm_vmm_unref_pdes()
[all …]
uvmm.c
42 return nvkm_uvmm(object)->vmm; in nvkm_uvmm_search()
51 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_pfnclr() local
62 mutex_lock(&vmm->mutex); in nvkm_uvmm_mthd_pfnclr()
63 ret = nvkm_vmm_pfn_unmap(vmm, addr, size); in nvkm_uvmm_mthd_pfnclr()
64 mutex_unlock(&vmm->mutex); in nvkm_uvmm_mthd_pfnclr()
76 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_pfnmap() local
92 mutex_lock(&vmm->mutex); in nvkm_uvmm_mthd_pfnmap()
93 ret = nvkm_vmm_pfn_map(vmm, page, addr, size, phys); in nvkm_uvmm_mthd_pfnmap()
94 mutex_unlock(&vmm->mutex); in nvkm_uvmm_mthd_pfnmap()
106 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_unmap() local
[all …]
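The uvmm methods above share one shape: decode the method arguments, then take the per-VMM mutex around the page-tree update (nvkm_vmm_pfn_map()/nvkm_vmm_pfn_unmap()). A minimal user-space model of that locking pattern, using stand-in types rather than the real nvkm API:

#include <pthread.h>
#include <stdint.h>

struct vmm {
        pthread_mutex_t mutex;                 /* models vmm->mutex */
};

/* stand-in for nvkm_vmm_pfn_unmap(); the real one edits page tables */
static int vmm_pfn_unmap(struct vmm *vmm, uint64_t addr, uint64_t size)
{
        (void)vmm; (void)addr; (void)size;
        return 0;
}

/* mirrors nvkm_uvmm_mthd_pfnclr(): all updates serialize on the mutex */
static int uvmm_pfnclr(struct vmm *vmm, uint64_t addr, uint64_t size)
{
        int ret;

        pthread_mutex_lock(&vmm->mutex);
        ret = vmm_pfn_unmap(vmm, addr, size);
        pthread_mutex_unlock(&vmm->mutex);
        return ret;
}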
vmmnv44.c
27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_fill() argument
39 u32 addr = (list ? *list++ : vmm->null) >> 12; in nv44_vmm_pgt_fill()
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
69 VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000); in nv44_vmm_pgt_fill()
73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_pte() argument
82 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten); in nv44_vmm_pgt_pte()
90 VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27); in nv44_vmm_pgt_pte()
91 VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22); in nv44_vmm_pgt_pte()
[all …]
vmmgp100.c
34 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, in gp100_vmm_pfn_unmap() argument
37 struct device *dev = vmm->mmu->subdev.device->dev; in gp100_vmm_pfn_unmap()
55 gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, in gp100_vmm_pfn_clear() argument
65 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); in gp100_vmm_pfn_clear()
75 gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gp100_vmm_pgt_pfn() argument
78 struct device *dev = vmm->mmu->subdev.device->dev; in gp100_vmm_pgt_pfn()
109 VMM_WO064(pt, vmm, ptei++ * 8, data); in gp100_vmm_pgt_pfn()
115 gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gp100_vmm_pgt_pte() argument
123 VMM_WO064(pt, vmm, ptei++ * 8, data); in gp100_vmm_pgt_pte()
129 gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gp100_vmm_pgt_sgl() argument
[all …]
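gp100_vmm_pgt_pte() above (and its nv50/gf100 siblings below) is the same loop on a different PTE layout: build a 64-bit entry and store it at byte offset ptei * 8, advancing per page. A hedged stand-alone sketch of that shape; the real bit layout and address stride are per-generation and not reproduced here:

#include <stdint.h>

/* Sketch of the VMM_WO064() loops: pt is the CPU mapping of one page
 * table, ptei an entry index, so entry ptei lives at byte ptei * 8. */
static void pgt_write(uint64_t *pt, uint32_t ptei, uint32_t ptes,
                      uint64_t data, uint64_t stride)
{
        while (ptes--) {
                pt[ptei++] = data;  /* VMM_WO064(pt, vmm, ptei++ * 8, data) */
                data += stride;     /* assumption: contiguous pages */
        }
}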
vmmnv50.c
32 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_pte() argument
53 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_pte()
58 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl()
65 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_dma()
84 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_mem() argument
87 VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_mem()
[all …]
vmmgf100.c
32 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_pte() argument
44 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
51 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
58 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl()
65 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma()
84 gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_mem() argument
[all …]
vmmnv04.c
28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_pte() argument
33 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); in nv04_vmm_pgt_pte()
39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_dma() argument
52 VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003); in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
60 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
80 nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc, in nv04_vmm_valid() argument
[all …]
vmmnv41.c
27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_pte() argument
32 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_pte()
38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_dma() argument
52 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
61 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
81 nv41_vmm_flush(struct nvkm_vmm *vmm, int level) in nv41_vmm_flush() argument
[all …]
vmmgm200.c
28 gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
53 gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgd_sparse() argument
57 VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes); in gm200_vmm_pgd_sparse()
96 gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base) in gm200_vmm_join_() argument
98 if (vmm->func->page[1].shift == 16) in gm200_vmm_join_()
100 return gf100_vmm_join_(vmm, inst, base); in gm200_vmm_join_()
104 gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst) in gm200_vmm_join() argument
106 return gm200_vmm_join_(vmm, inst, 0); in gm200_vmm_join()
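gm200_vmm_pgt_sparse() and gm200_vmm_pgd_sparse() above are fill operations: every entry in the range gets the same value, carrying only a "volatile" bit (bit 32 for PTEs, bit 35 for PDEs, per the comments in the snippet) and no physical address. A stand-alone model of that VMM_FO064() fill:

#include <stdint.h>

#define BIT_ULL(n) (1ULL << (n))

/* model of VMM_FO064(): fill 'count' 64-bit entries with one value */
static void fo064(uint64_t *base, uint32_t first, uint64_t value,
                  uint32_t count)
{
        while (count--)
                base[first++] = value;
}

/* a sparse PTE carries only the VOL bit, no physical address */
static void pgt_sparse(uint64_t *pt, uint32_t ptei, uint32_t ptes)
{
        fo064(pt, ptei, BIT_ULL(32) /* VOL */, ptes);
}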
vmmtu102.c
27 tu102_vmm_flush(struct nvkm_vmm *vmm, int depth) in tu102_vmm_flush() argument
29 struct nvkm_device *device = vmm->mmu->subdev.device; in tu102_vmm_flush()
33 if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR])) in tu102_vmm_flush()
36 mutex_lock(&vmm->mmu->mutex); in tu102_vmm_flush()
38 nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8); in tu102_vmm_flush()
48 mutex_unlock(&vmm->mmu->mutex); in tu102_vmm_flush()
nv44.c
35 struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory; in nv44_mmu_init()
46 nvkm_wr32(device, 0x100818, mmu->vmm->null); in nv44_mmu_init()
61 .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv44_vmm_new, true },
/drivers/gpu/drm/nouveau/
nouveau_vmm.c
32 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr); in nouveau_vma_unmap()
41 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); in nouveau_vma_map()
49 nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm) in nouveau_vma_find() argument
54 if (vma->vmm == vmm) in nouveau_vma_find()
68 nvif_vmm_put(&vma->vmm->vmm, &tmp); in nouveau_vma_del()
77 nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm, in nouveau_vma_new() argument
85 if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) { in nouveau_vma_new()
92 vma->vmm = vmm; in nouveau_vma_new()
101 ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0, in nouveau_vma_new()
109 ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0, in nouveau_vma_new()
[all …]
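nouveau_vma_new() above is a find-or-create: if the BO already has a mapping in this VMM it is reused (and, in the real code, reference-counted), otherwise address space is fetched with nvif_vmm_get() and mapped. A simplified stand-alone model of that lookup-then-create flow, with stand-in types and no real nvif calls:

#include <stdlib.h>

struct vma {
        struct vma *next;
        const void *vmm;   /* the address space this mapping lives in */
        int refs;
};

struct bo {
        struct vma *vmas;
};

/* matches nouveau_vma_find(): at most one vma per (bo, vmm) pair */
static struct vma *vma_find(struct bo *bo, const void *vmm)
{
        struct vma *vma;

        for (vma = bo->vmas; vma; vma = vma->next)
                if (vma->vmm == vmm)
                        return vma;
        return NULL;
}

static struct vma *vma_new(struct bo *bo, const void *vmm)
{
        struct vma *vma = vma_find(bo, vmm);

        if (vma) {              /* already mapped here: share it */
                vma->refs++;
                return vma;
        }

        vma = calloc(1, sizeof(*vma));
        if (!vma)
                return NULL;
        vma->vmm = vmm;
        vma->refs = 1;
        /* real code: nvif_vmm_get(&vmm->vmm, LAZY or PTES, ...) then map */
        vma->next = bo->vmas;
        bo->vmas = vma;
        return vma;
}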
nouveau_chan.c
94 nouveau_svmm_part(chan->vmm->svmm, chan->inst); in nouveau_channel_del()
153 chan->vmm = cli->svm.cli ? &cli->svm : &cli->vmm; in nouveau_channel_prep()
189 ret = nouveau_vma_new(chan->push.buffer, chan->vmm, in nouveau_channel_prep()
204 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
234 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
288 args.volta.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ind()
297 args.kepler.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ind()
305 args.fermi.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ind()
312 args.nv50.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ind()
411 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
[all …]
nouveau_svm.c
103 NV_DEBUG((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
105 NV_WARN((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
211 mutex_lock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_part()
212 ivmm = nouveau_ivmm_find(svmm->vmm->cli->drm->svm, inst); in nouveau_svmm_part()
217 mutex_unlock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_part()
232 mutex_lock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_join()
233 list_add(&ivmm->head, &svmm->vmm->cli->drm->svm->inst); in nouveau_svmm_join()
234 mutex_unlock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_join()
244 nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR, in nouveau_svmm_invalidate()
267 if (unlikely(!svmm->vmm)) in nouveau_svmm_invalidate_range_start()
[all …]
nouveau_mem.c
37 struct nvif_vmm *vmm, struct nvif_vma *vma) in nouveau_mem_map() argument
45 switch (vmm->object.oclass) { in nouveau_mem_map()
74 return nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem, 0); in nouveau_mem_map()
80 nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]); in nouveau_mem_fini()
81 nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]); in nouveau_mem_fini()
nouveau_gem.c
106 struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm; in nouveau_gem_object_open() local
110 if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50) in nouveau_gem_object_open()
123 ret = nouveau_vma_new(nvbo, vmm, &vma); in nouveau_gem_object_open()
173 nouveau_cli_work_queue(vma->vmm->cli, fence, &work->work); in nouveau_gem_object_unmap()
183 struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm; in nouveau_gem_object_close() local
187 if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50) in nouveau_gem_object_close()
194 vma = nouveau_vma_find(nvbo, vmm); in nouveau_gem_object_close()
272 struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm; in nouveau_gem_info() local
282 if (vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) { in nouveau_gem_info()
283 vma = nouveau_vma_find(nvbo, vmm); in nouveau_gem_info()
[all …]
nouveau_bo.c
206 struct nvif_vmm *vmm = cli->svm.cli ? &cli->svm.vmm : &cli->vmm.vmm; in nouveau_bo_alloc() local
257 for (i = 0; i < vmm->page_nr; i++) { in nouveau_bo_alloc()
266 (domain & NOUVEAU_GEM_DOMAIN_VRAM) && !vmm->page[i].vram) in nouveau_bo_alloc()
269 (!vmm->page[i].host || vmm->page[i].shift > PAGE_SHIFT)) in nouveau_bo_alloc()
276 if (pi < 0 || !nvbo->comp || vmm->page[i].comp) in nouveau_bo_alloc()
280 if (*size >= 1ULL << vmm->page[i].shift) in nouveau_bo_alloc()
290 if (nvbo->comp && !vmm->page[pi].comp) { in nouveau_bo_alloc()
295 nvbo->page = vmm->page[pi].shift; in nouveau_bo_alloc()
768 struct nvif_vmm *vmm = &drm->client.vmm.vmm; in nouveau_bo_move_prep() local
771 ret = nvif_vmm_get(vmm, LAZY, false, old_mem->mem.page, 0, in nouveau_bo_move_prep()
[all …]
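The loop in nouveau_bo_alloc() above picks a GPU page size: it scans vmm->page[], skips sizes the placement cannot use (non-VRAM-capable sizes for VRAM BOs, host sizes larger than the CPU page), prefers compression-capable sizes when compression is wanted, and stops at the first size the BO is large enough to fill. A stand-alone model, assuming the list is ordered largest to smallest (which the early break implies):

#include <stdint.h>
#include <stdbool.h>

/* fields mirror the checks visible above; values are illustrative */
struct vmm_page { uint8_t shift; bool vram; bool host; bool comp; };

static int pick_page(const struct vmm_page *page, int page_nr,
                     uint64_t size, bool want_vram, bool want_comp,
                     unsigned cpu_page_shift)
{
        int i, pi = -1;

        for (i = 0; i < page_nr; i++) {
                /* VRAM placement needs a VRAM-capable page size */
                if (want_vram && !page[i].vram)
                        continue;
                /* host pages can't exceed the CPU page size */
                if (!want_vram &&
                    (!page[i].host || page[i].shift > cpu_page_shift))
                        continue;
                /* prefer compression-capable sizes when compressing */
                if (pi < 0 || !want_comp || page[i].comp)
                        pi = i;
                /* stop at the first size the BO is big enough for */
                if (size >= 1ULL << page[i].shift)
                        break;
        }
        return pi;      /* index into page[], or -1 if nothing fits */
}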
/drivers/gpu/drm/nouveau/nvif/
vmm.c
28 nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr) in nvif_vmm_unmap() argument
30 return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP, in nvif_vmm_unmap()
36 nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc, in nvif_vmm_map() argument
57 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP, in nvif_vmm_map()
65 nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma) in nvif_vmm_put() argument
68 WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT, in nvif_vmm_put()
77 nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse, in nvif_vmm_get() argument
98 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET, in nvif_vmm_get()
108 nvif_vmm_dtor(struct nvif_vmm *vmm) in nvif_vmm_dtor() argument
110 kfree(vmm->page); in nvif_vmm_dtor()
[all …]
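nvif/vmm.c above is the client-side API: nvif_vmm_get() reserves a range of address space, nvif_vmm_map() backs it, and nvif_vmm_unmap() plus nvif_vmm_put() tear it down, each forwarded to the kernel object as an NVIF_VMM_V0_* method. A compilable toy model of that lifecycle; the stubs stand in for the real nvif_object_mthd() calls and elide most parameters:

#include <stdint.h>

struct nvif_vma { uint64_t addr, size; };
struct nvif_vmm { int unused; };

static int vmm_get(struct nvif_vmm *vmm, uint64_t size, struct nvif_vma *vma)
{       /* real code: nvif_object_mthd(..., NVIF_VMM_V0_GET, ...) */
        (void)vmm;
        vma->addr = 0x100000;   /* illustrative address */
        vma->size = size;
        return 0;
}

static int vmm_map(struct nvif_vmm *vmm, uint64_t addr, uint64_t size)
{       /* real code: NVIF_VMM_V0_MAP with the backing memory's args */
        (void)vmm; (void)addr; (void)size;
        return 0;
}

static void vmm_unmap(struct nvif_vmm *vmm, uint64_t addr)
{       /* real code: NVIF_VMM_V0_UNMAP, keyed by address alone */
        (void)vmm; (void)addr;
}

static void vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma)
{       /* real code: NVIF_VMM_V0_PUT; failure there is a WARN_ON() */
        (void)vmm;
        vma->addr = 0;
}

int main(void)
{
        struct nvif_vmm vmm = {0};
        struct nvif_vma vma;

        if (vmm_get(&vmm, 0x10000, &vma))        /* reserve a range */
                return 1;
        if (!vmm_map(&vmm, vma.addr, vma.size))  /* back it, then undo */
                vmm_unmap(&vmm, vma.addr);
        vmm_put(&vmm, &vma);                     /* release the range */
        return 0;
}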
/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
gf100.c
34 return gf100_bar(base)->bar[1].vmm; in gf100_bar_bar1_vmm()
63 return gf100_bar(base)->bar[0].vmm; in gf100_bar_bar2_vmm()
103 (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm); in gf100_bar_oneinit_bar()
107 atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]); in gf100_bar_oneinit_bar()
108 bar_vm->vmm->debug = bar->base.subdev.debug; in gf100_bar_oneinit_bar()
114 ret = nvkm_vmm_boot(bar_vm->vmm); in gf100_bar_oneinit_bar()
119 return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst); in gf100_bar_oneinit_bar()
153 nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst); in gf100_bar_dtor()
154 nvkm_vmm_unref(&bar->bar[1].vmm); in gf100_bar_dtor()
157 nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst); in gf100_bar_dtor()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv50.c
120 nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm) in nv50_instobj_kmap() argument
137 while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) { in nv50_instobj_kmap()
158 nvkm_vmm_put(vmm, &ebar); in nv50_instobj_kmap()
162 ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0); in nv50_instobj_kmap()
167 nvkm_vmm_put(vmm, &bar); in nv50_instobj_kmap()
178 nvkm_vmm_put(vmm, &iobj->bar); in nv50_instobj_kmap()
183 nv50_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm, in nv50_instobj_map() argument
187 return nvkm_memory_map(memory, offset, vmm, vma, argv, argc); in nv50_instobj_map()
220 struct nvkm_vmm *vmm; in nv50_instobj_acquire() local
237 if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { in nv50_instobj_acquire()
[all …]
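nv50_instobj_kmap() above manages a scarce resource: BAR2 address space. The retry loop is visible in the snippet (while ((ret = nvkm_vmm_get(vmm, 12, size, &bar)))), and on a map failure the freshly acquired range is returned with nvkm_vmm_put(). A sketch of that retry shape, assuming (as the surrounding code suggests) that each failed attempt evicts another object's mapping; the helpers are stubs, not the real nvkm calls:

#include <stdbool.h>
#include <stdint.h>

struct vma { uint64_t addr; };

static int bar2_get(uint64_t size, struct vma *out)
{       /* stand-in for nvkm_vmm_get(vmm, 12, size, &bar); always succeeds */
        (void)size;
        out->addr = 0;
        return 0;
}

static bool evict_one(void)
{       /* stub standing in for unmapping some other kmapped object */
        return false;
}

static int instobj_kmap(uint64_t size, struct vma *bar)
{
        int ret;

        while ((ret = bar2_get(size, bar)) != 0) {
                if (!evict_one())      /* nothing left to evict: give up */
                        return ret;
        }
        /* real code: nvkm_memory_map(...), nvkm_vmm_put() on failure */
        return 0;
}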
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
ga102.c
53 struct nvkm_vmm *vmm; member
127 if (chan->vmm) { in ga102_chan_dtor()
128 nvkm_vmm_part(chan->vmm, chan->inst); in ga102_chan_dtor()
129 nvkm_vmm_unref(&chan->vmm); in ga102_chan_dtor()
154 struct nvkm_vmm *vmm; in ga102_chan_new() local
161 vmm = nvkm_uvmm_search(oclass->client, args->vmm); in ga102_chan_new()
162 if (IS_ERR(vmm)) in ga102_chan_new()
163 return PTR_ERR(vmm); in ga102_chan_new()
232 ret = nvkm_vmm_join(vmm, chan->inst); in ga102_chan_new()
236 chan->vmm = nvkm_vmm_ref(vmm); in ga102_chan_new()
chan.c
129 if (chan->vmm) in nvkm_fifo_chan_child_del()
130 atomic_dec(&chan->vmm->engref[engine->subdev.type]); in nvkm_fifo_chan_child_del()
163 if (chan->vmm) in nvkm_fifo_chan_child_new()
164 atomic_inc(&chan->vmm->engref[engine->subdev.type]); in nvkm_fifo_chan_child_new()
338 if (chan->vmm) { in nvkm_fifo_chan_dtor()
339 nvkm_vmm_part(chan->vmm, chan->inst->memory); in nvkm_fifo_chan_dtor()
340 nvkm_vmm_unref(&chan->vmm); in nvkm_fifo_chan_dtor()
398 struct nvkm_vmm *vmm = nvkm_uvmm_search(client, hvmm); in nvkm_fifo_chan_ctor() local
399 if (IS_ERR(vmm)) in nvkm_fifo_chan_ctor()
400 return PTR_ERR(vmm); in nvkm_fifo_chan_ctor()
[all …]
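The fifo side above shows the VMM's lifetime rules: a channel constructor looks the VMM up from the client handle (nvkm_uvmm_search()), joins its instance block (nvkm_vmm_join()) and keeps a reference (nvkm_vmm_ref()); the destructor parts and unrefs; attaching an engine context bumps the per-engine user count (vmm->engref). A toy model of that reference discipline, with stand-in types:

#include <stdatomic.h>
#include <stddef.h>

struct vmm {
        atomic_int refs;
        atomic_int engref;     /* models vmm->engref[engine type] */
};

static struct vmm *vmm_ref(struct vmm *vmm)
{
        atomic_fetch_add(&vmm->refs, 1);
        return vmm;
}

static void vmm_unref(struct vmm **pvmm)
{       /* real code frees the vmm once the count hits zero */
        atomic_fetch_sub(&(*pvmm)->refs, 1);
        *pvmm = NULL;
}

struct chan {
        struct vmm *vmm;
};

static void chan_ctor(struct chan *chan, struct vmm *vmm)
{       /* real code: nvkm_vmm_join(vmm, inst) before taking the ref */
        chan->vmm = vmm_ref(vmm);
}

static void chan_child_new(struct chan *chan)
{       /* engine context attached: count the engine as a vmm user */
        if (chan->vmm)
                atomic_fetch_add(&chan->vmm->engref, 1);
}

static void chan_child_del(struct chan *chan)
{
        if (chan->vmm)
                atomic_fetch_sub(&chan->vmm->engref, 1);
}

static void chan_dtor(struct chan *chan)
{       /* real code: nvkm_vmm_part(chan->vmm, inst) before the unref */
        if (chan->vmm)
                vmm_unref(&chan->vmm);
}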
gpfifogk104.c
165 nvkm_vmm_put(chan->base.vmm, &engn->vma); in gk104_fifo_gpfifo_engine_dtor()
185 ret = nvkm_vmm_get(chan->base.vmm, 12, engn->inst->size, &engn->vma); in gk104_fifo_gpfifo_engine_ctor()
189 return nvkm_memory_map(engn->inst, 0, chan->base.vmm, engn->vma, NULL, 0); in gk104_fifo_gpfifo_engine_ctor()
253 u64 vmm, u64 ioffset, u64 ilength, u64 *inst, bool priv, in gk104_fifo_gpfifo_new_() argument
261 if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr) in gk104_fifo_gpfifo_new_()
274 0x1000, 0x1000, true, vmm, 0, fifo->runlist[runlist].engm_sw, in gk104_fifo_gpfifo_new_()
342 args->v0.version, args->v0.vmm, args->v0.ioffset, in gk104_fifo_gpfifo_new()
347 args->v0.vmm, in gk104_fifo_gpfifo_new()
gpfifogf100.c
138 nvkm_vmm_put(chan->base.vmm, &engn->vma); in gf100_fifo_gpfifo_engine_dtor()
158 ret = nvkm_vmm_get(chan->base.vmm, 12, engn->inst->size, &engn->vma); in gf100_fifo_gpfifo_engine_ctor()
162 return nvkm_memory_map(engn->inst, 0, chan->base.vmm, engn->vma, NULL, 0); in gf100_fifo_gpfifo_engine_ctor()
237 args->v0.version, args->v0.vmm, args->v0.ioffset, in gf100_fifo_gpfifo_new()
239 if (!args->v0.vmm) in gf100_fifo_gpfifo_new()
252 0x1000, 0x1000, true, args->v0.vmm, 0, in gf100_fifo_gpfifo_new()
/drivers/gpu/drm/
drm_gem_vram_helper.c
191 struct drm_vram_mm *vmm = dev->vram_mm; in drm_gem_vram_create() local
195 if (WARN_ONCE(!vmm, "VRAM MM not initialized")) in drm_gem_vram_create()
219 bdev = &vmm->bdev; in drm_gem_vram_create()
922 struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bdev); in bo_driver_io_mem_reserve() local
928 mem->bus.offset = (mem->start << PAGE_SHIFT) + vmm->vram_base; in bo_driver_io_mem_reserve()
956 struct drm_vram_mm *vmm = node->minor->dev->vram_mm; in drm_vram_mm_debugfs() local
957 struct ttm_resource_manager *man = ttm_manager_type(&vmm->bdev, TTM_PL_VRAM); in drm_vram_mm_debugfs()
982 static int drm_vram_mm_init(struct drm_vram_mm *vmm, struct drm_device *dev, in drm_vram_mm_init() argument
987 vmm->vram_base = vram_base; in drm_vram_mm_init()
988 vmm->vram_size = vram_size; in drm_vram_mm_init()
[all …]
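The only arithmetic in bo_driver_io_mem_reserve() above is the bus-address computation: TTM records a placement in pages (mem->start), and the CPU-visible offset is that value shifted to bytes plus the VRAM base the helper recorded at init (vmm->vram_base). As a one-liner, assuming 4 KiB pages:

#include <stdint.h>

#define PAGE_SHIFT 12   /* assumption: 4 KiB system pages */

/* mem->bus.offset = (mem->start << PAGE_SHIFT) + vmm->vram_base */
static uint64_t vram_bus_offset(uint64_t start_page, uint64_t vram_base)
{
        return (start_page << PAGE_SHIFT) + vram_base;
}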
