
Searched refs:pgt (Results 1 – 10 of 10) sorted by relevance

/drivers/gpu/drm/nouveau/core/subdev/vm/
nv44.c
41 nv44_vm_fill(struct nouveau_gpuobj *pgt, dma_addr_t null, in nv44_vm_fill() argument
47 tmp[0] = nv_ro32(pgt, base + 0x0); in nv44_vm_fill()
48 tmp[1] = nv_ro32(pgt, base + 0x4); in nv44_vm_fill()
49 tmp[2] = nv_ro32(pgt, base + 0x8); in nv44_vm_fill()
50 tmp[3] = nv_ro32(pgt, base + 0xc); in nv44_vm_fill()
80 nv_wo32(pgt, base + 0x0, tmp[0]); in nv44_vm_fill()
81 nv_wo32(pgt, base + 0x4, tmp[1]); in nv44_vm_fill()
82 nv_wo32(pgt, base + 0x8, tmp[2]); in nv44_vm_fill()
83 nv_wo32(pgt, base + 0xc, tmp[3] | 0x40000000); in nv44_vm_fill()
87 nv44_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nv44_vm_map_sg() argument
[all …]
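
The nv44 excerpt shows a read-modify-write of a four-entry PTE group: four 32-bit words are read with nv_ro32(), updated, and written back with nv_wo32(), with the last word OR'd with 0x40000000. Below is a minimal standalone sketch of that access pattern on a plain array; the helper name and the meaning of the high bit are assumptions, not taken from the driver.

#include <stdint.h>

/* Hypothetical stand-in for a 4-entry nv44-style PTE group held in a
 * plain array instead of a nouveau_gpuobj. Mirrors the read-modify-write
 * shape visible at nv44.c lines 47-50 and 80-83. */
static void fill_group(uint32_t *pgt, uint32_t base_idx, const uint32_t *new_vals)
{
        uint32_t tmp[4];

        /* read the whole 16-byte group */
        tmp[0] = pgt[base_idx + 0];
        tmp[1] = pgt[base_idx + 1];
        tmp[2] = pgt[base_idx + 2];
        tmp[3] = pgt[base_idx + 3];

        /* ... merge new_vals into tmp here, as the driver does for the
         * entries actually being (re)mapped ... */
        (void)new_vals;

        /* write the group back; the driver sets bit 30 of the last word,
         * presumably a valid/terminator flag (assumption) */
        pgt[base_idx + 0] = tmp[0];
        pgt[base_idx + 1] = tmp[1];
        pgt[base_idx + 2] = tmp[2];
        pgt[base_idx + 3] = tmp[3] | 0x40000000;
}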
nv50.c
39 struct nouveau_gpuobj *pgt[2]) in nv50_vm_map_pgt()
44 if (pgt[0]) { in nv50_vm_map_pgt()
45 phys = 0x00000003 | pgt[0]->addr; /* present, 4KiB pages */ in nv50_vm_map_pgt()
46 coverage = (pgt[0]->size >> 3) << 12; in nv50_vm_map_pgt()
48 if (pgt[1]) { in nv50_vm_map_pgt()
49 phys = 0x00000001 | pgt[1]->addr; /* present */ in nv50_vm_map_pgt()
50 coverage = (pgt[1]->size >> 3) << 16; in nv50_vm_map_pgt()
80 nv50_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nv50_vm_map() argument
118 nv_wo32(pgt, pte + 0, offset_l); in nv50_vm_map()
119 nv_wo32(pgt, pte + 4, offset_h); in nv50_vm_map()
[all …]
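
In the nv50 excerpt the page-directory entry packs the page table's address with a present bit, plus a coverage value derived from the table size: pgt->size is in bytes, each PTE is 8 bytes (hence >> 3), and the entry count is scaled by the page size (<< 12 for 4 KiB pages, << 16 for the large-page case). A small sketch of just that arithmetic, with hypothetical names standing in for pgt[i]->addr and ->size:

#include <stdint.h>

/* Hypothetical mirror of the nv50 PDE arithmetic visible at nv50.c
 * lines 44-50. */
struct pde_info {
        uint64_t phys;      /* address + flag bits written to the PDE */
        uint64_t coverage;  /* bytes mapped by this page table */
};

static struct pde_info nv50_pde(uint64_t pgt_addr, uint32_t pgt_size, int big)
{
        struct pde_info p;
        uint32_t ptes = pgt_size >> 3;              /* 8 bytes per PTE */

        if (!big) {
                p.phys     = 0x00000003 | pgt_addr; /* present, 4 KiB pages */
                p.coverage = (uint64_t)ptes << 12;  /* ptes * 4 KiB */
        } else {
                p.phys     = 0x00000001 | pgt_addr; /* present */
                p.coverage = (uint64_t)ptes << 16;  /* ptes * 64 KiB (assumed large-page size) */
        }
        return p;
}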
nvc0.c
81 struct nouveau_gpuobj *pgt[2]) in nvc0_vm_map_pgt()
85 if (pgt[0]) in nvc0_vm_map_pgt()
86 pde[1] = 0x00000001 | (pgt[0]->addr >> 8); in nvc0_vm_map_pgt()
87 if (pgt[1]) in nvc0_vm_map_pgt()
88 pde[0] = 0x00000001 | (pgt[1]->addr >> 8); in nvc0_vm_map_pgt()
110 nvc0_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nvc0_vm_map() argument
128 nv_wo32(pgt, pte + 0, lower_32_bits(phys)); in nvc0_vm_map()
129 nv_wo32(pgt, pte + 4, upper_32_bits(phys)); in nvc0_vm_map()
136 nvc0_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nvc0_vm_map_sg() argument
146 nv_wo32(pgt, pte + 0, lower_32_bits(phys)); in nvc0_vm_map_sg()
[all …]
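
The nvc0 excerpt shows the same idea with a different encoding: the PDE stores the table address shifted down by 8 bits plus a present bit, with the small-page table going into pde[1] and the large-page table into pde[0], and each PTE is written as two 32-bit halves. A sketch of those two steps, using stand-ins for the kernel's lower_32_bits()/upper_32_bits() and nv_wo32():

#include <stdint.h>

/* Stand-ins for the kernel helpers used at nvc0.c lines 128-129. */
static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* Hypothetical sketch: write one 8-byte nvc0-style PTE into a plain
 * 32-bit array at byte offset 'pte', low word first, as the excerpt does
 * with nv_wo32(pgt, pte + 0/4, ...). */
static void write_pte(uint32_t *pgt_words, uint32_t pte, uint64_t phys)
{
        pgt_words[(pte + 0) / 4] = lower_32_bits(phys);
        pgt_words[(pte + 4) / 4] = upper_32_bits(phys);
}

/* PDE encoding visible at nvc0.c lines 85-88: present bit plus the page
 * table address shifted down by 8. */
static uint32_t encode_pde(uint64_t pgt_addr)
{
        return 0x00000001 | (uint32_t)(pgt_addr >> 8);
}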
base.c
51 struct nouveau_gpuobj *pgt = vm->pgt[pde].obj[big]; in nouveau_vm_map_at() local
58 vmm->map(vma, pgt, node, pte, len, phys, delta); in nouveau_vm_map_at()
100 struct nouveau_gpuobj *pgt = vm->pgt[pde].obj[big]; in nouveau_vm_map_sg_table() local
111 vmm->map_sg(vma, pgt, mem, pte, 1, &addr); in nouveau_vm_map_sg_table()
126 vmm->map_sg(vma, pgt, mem, pte, 1, &addr); in nouveau_vm_map_sg_table()
156 struct nouveau_gpuobj *pgt = vm->pgt[pde].obj[big]; in nouveau_vm_map_sg() local
163 vmm->map_sg(vma, pgt, mem, pte, len, list); in nouveau_vm_map_sg()
192 struct nouveau_gpuobj *pgt = vm->pgt[pde].obj[big]; in nouveau_vm_unmap_at() local
199 vmm->unmap(pgt, pte, len); in nouveau_vm_unmap_at()
224 struct nouveau_gpuobj *pgt; in nouveau_vm_unmap_pgt() local
[all …]
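
base.c is the generation-independent walker: each path first indexes the software page directory with pde, picks the small- or large-page table via obj[big], and then hands the actual PTE writes to the per-generation vmm callbacks (map, map_sg, unmap). A rough sketch of that two-level lookup, with the field names taken from the excerpts and everything else assumed:

/* Hypothetical reduction of the structures implied by vm.h line 63 and
 * the base.c excerpts; the real definitions live in nouveau's vm.h. */
struct gpuobj;                       /* stands in for struct nouveau_gpuobj */

struct vm_pgt {
        struct gpuobj *obj[2];       /* [0] = small pages, [1] = big pages */
        int refcount[2];
};

struct vm {
        struct vm_pgt *pgt;          /* indexed by page-directory entry */
};

/* One step of the walk seen in nouveau_vm_map_at()/_unmap_at(): pick the
 * page table for this PDE and page size, then let the chipset-specific
 * callback touch the PTEs. */
static struct gpuobj *lookup_pgt(struct vm *vm, unsigned int pde, int big)
{
        return vm->pgt[pde].obj[big];
}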
nv04.c
37 nv04_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nv04_vm_map_sg() argument
45 nv_wo32(pgt, pte, phys | 3); in nv04_vm_map_sg()
54 nv04_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) in nv04_vm_unmap() argument
58 nv_wo32(pgt, pte, 0x00000000); in nv04_vm_unmap()
116 &priv->vm->pgt[0].obj[0]); in nv04_vmmgr_ctor()
117 dma = priv->vm->pgt[0].obj[0]; in nv04_vmmgr_ctor()
118 priv->vm->pgt[0].refcount[0] = 1; in nv04_vmmgr_ctor()
132 nouveau_gpuobj_ref(NULL, &priv->vm->pgt[0].obj[0]); in nv04_vmmgr_dtor()
nv41.c
41 nv41_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, in nv41_vm_map_sg() argument
49 nv_wo32(pgt, pte, (phys >> 7) | 1); in nv41_vm_map_sg()
58 nv41_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) in nv41_vm_unmap() argument
62 nv_wo32(pgt, pte, 0x00000000); in nv41_vm_unmap()
125 &priv->vm->pgt[0].obj[0]); in nv41_vmmgr_ctor()
126 priv->vm->pgt[0].refcount[0] = 1; in nv41_vmmgr_ctor()
137 struct nouveau_gpuobj *dma = priv->vm->pgt[0].obj[0]; in nv41_vmmgr_init()
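
The nv04 and nv41 excerpts above differ only in PTE encoding: nv04 stores the full physical address with its low flag bits set (phys | 3), while nv41 stores the address shifted down by 7 bits plus a present bit ((phys >> 7) | 1); both clear entries to 0 on unmap. A toy sketch of the two encoders, with the meaning of the flag bits assumed rather than taken from documentation:

#include <stdint.h>

/* Hypothetical encoders mirroring nv04.c line 45 and nv41.c line 49. */
static uint32_t nv04_pte(uint64_t phys)
{
        return (uint32_t)(phys | 3);        /* low bits presumably valid/rw flags */
}

static uint32_t nv41_pte(uint64_t phys)
{
        return (uint32_t)((phys >> 7) | 1); /* 128-byte-shifted address + present bit */
}

/* Unmap on both generations simply zeroes the PTEs (nv04.c line 58,
 * nv41.c line 62). */
static void clear_ptes(uint32_t *pgt, uint32_t first, uint32_t cnt)
{
        while (cnt--)
                pgt[first++] = 0;
}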
/drivers/gpu/drm/nouveau/core/engine/dmaobj/
nv04.c
66 struct nouveau_gpuobj *pgt = vmm->vm->pgt[0].obj[0]; in nv04_dmaobj_bind() local
68 return nouveau_gpuobj_dup(parent, pgt, pgpuobj); in nv04_dmaobj_bind()
69 offset = nv_ro32(pgt, 8 + (offset >> 10)); in nv04_dmaobj_bind()
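
The dmaobj binding on nv04 either hands out a reference to the whole page table (nouveau_gpuobj_dup) or, for an offset binding, reads the PTE covering the requested offset. With 4-byte PTEs describing 4 KiB pages, that PTE sits at byte (offset >> 12) * 4 = offset >> 10, past what appears to be an 8-byte table header; the page and header sizes here are inferences from the expression, not confirmed by the excerpt. A one-liner sketch of the translation:

#include <stdint.h>

/* Hypothetical sketch of the lookup at dmaobj/nv04.c line 69: translate a
 * linear offset into the byte position of the word holding its PTE.
 * Assumes 4 KiB pages, 4-byte PTEs and an 8-byte table header. */
static uint32_t pte_byte_offset(uint64_t offset)
{
        return 8 + (uint32_t)(offset >> 10);  /* header + (offset / 4096) * 4 */
}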
/drivers/gpu/drm/nouveau/core/include/subdev/
vm.h
63 struct nouveau_vm_pgt *pgt; member
81 struct nouveau_gpuobj *pgt[2]);
87 void (*unmap)(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt);
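
vm.h ties the two halves together: the pgt member shown at line 63 is the software page directory walked by base.c, while the vmmgr supplies the per-generation callbacks whose pgt parameters are the hardware page-table objects seen in the other files. A stripped-down sketch of that callback table follows; the unmap prototype matches line 87, the rest of the argument lists and names are guesses reconstructed from the calls visible in base.c.

/* Hypothetical condensation of the vmmgr callback interface implied by
 * vm.h lines 81/87 and the base.c call sites. */
struct nouveau_gpuobj;
struct nouveau_vma;
typedef unsigned int u32;
typedef unsigned long long u64;

struct vmmgr_ops {
        /* publish the small/big page tables for one PDE */
        void (*map_pgt)(struct nouveau_gpuobj *pgd, u32 pde,
                        struct nouveau_gpuobj *pgt[2]);
        /* write 'cnt' PTEs for contiguous VRAM starting at 'phys' */
        void (*map)(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,
                    void *mem, u32 pte, u32 cnt, u64 phys, u64 delta);
        /* write 'cnt' PTEs from a list of system pages */
        void (*map_sg)(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,
                       void *mem, u32 pte, u32 cnt, void *list);
        /* clear 'cnt' PTEs starting at 'pte' (vm.h line 87) */
        void (*unmap)(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt);
};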
/drivers/gpu/drm/nouveau/core/subdev/bar/
nvc0.c
122 &vm->pgt[0].obj[0]); in nvc0_bar_ctor()
123 vm->pgt[0].refcount[0] = 1; in nvc0_bar_ctor()
182 nouveau_gpuobj_ref(NULL, &priv->bar[0].vm->pgt[0].obj[0]); in nvc0_bar_dtor()
nv50.c
152 NVOBJ_FLAG_ZERO_ALLOC, &vm->pgt[0].obj[0]); in nv50_bar_ctor()
153 vm->pgt[0].refcount[0] = 1; in nv50_bar_ctor()
219 nouveau_gpuobj_ref(NULL, &priv->bar3_vm->pgt[0].obj[0]); in nv50_bar_dtor()