Home
last modified time | relevance | path

Searched refs:dmaobj (Results 1 – 8 of 8) sorted by relevance

/drivers/gpu/drm/nouveau/nvkm/engine/dma/
usernv04.c:44 struct nv04_dmaobj *dmaobj = nv04_dmaobj(base); in nv04_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv04_dmaobj_bind()
46 u64 offset = dmaobj->base.start & 0xfffff000; in nv04_dmaobj_bind()
47 u64 adjust = dmaobj->base.start & 0x00000fff; in nv04_dmaobj_bind()
48 u32 length = dmaobj->base.limit - dmaobj->base.start; in nv04_dmaobj_bind()
51 if (dmaobj->clone) { in nv04_dmaobj_bind()
54 if (!dmaobj->base.start) in nv04_dmaobj_bind()
65 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0 | (adjust << 20)); in nv04_dmaobj_bind()
67 nvkm_wo32(*pgpuobj, 0x08, dmaobj->flags2 | offset); in nv04_dmaobj_bind()
68 nvkm_wo32(*pgpuobj, 0x0c, dmaobj->flags2 | offset); in nv04_dmaobj_bind()
[all …]
usergf100.c:44 struct gf100_dmaobj *dmaobj = gf100_dmaobj(base); in gf100_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf100_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf100_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in gf100_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in gf100_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in gf100_dmaobj_bind()
77 struct gf100_dmaobj *dmaobj; in gf100_dmaobj_new() local
81 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gf100_dmaobj_new()
[all …]
user.c:50 struct nvkm_dmaobj *dmaobj = nvkm_dmaobj(base); in nvkm_dmaobj_bind() local
51 return dmaobj->func->bind(dmaobj, gpuobj, align, pgpuobj); in nvkm_dmaobj_bind()
69 struct nvkm_dmaobj *dmaobj) in nvkm_dmaobj_ctor() argument
79 nvkm_object_ctor(&nvkm_dmaobj_func, oclass, &dmaobj->object); in nvkm_dmaobj_ctor()
80 dmaobj->func = func; in nvkm_dmaobj_ctor()
81 dmaobj->dma = dma; in nvkm_dmaobj_ctor()
89 dmaobj->target = args->v0.target; in nvkm_dmaobj_ctor()
90 dmaobj->access = args->v0.access; in nvkm_dmaobj_ctor()
91 dmaobj->start = args->v0.start; in nvkm_dmaobj_ctor()
92 dmaobj->limit = args->v0.limit; in nvkm_dmaobj_ctor()
[all …]
usernv50.c:44 struct nv50_dmaobj *dmaobj = nv50_dmaobj(base); in nv50_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv50_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in nv50_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in nv50_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in nv50_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in nv50_dmaobj_bind()
77 struct nv50_dmaobj *dmaobj; in nv50_dmaobj_new() local
81 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in nv50_dmaobj_new()
[all …]
usergv100.c:41 struct gv100_dmaobj *dmaobj = gv100_dmaobj(base); in gv100_dmaobj_bind() local
42 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gv100_dmaobj_bind()
43 u64 start = dmaobj->base.start >> 8; in gv100_dmaobj_bind()
44 u64 limit = dmaobj->base.limit >> 8; in gv100_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gv100_dmaobj_bind()
74 struct gv100_dmaobj *dmaobj; in gv100_dmaobj_new() local
78 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gv100_dmaobj_new()
80 *pdmaobj = &dmaobj->base; in gv100_dmaobj_new()
83 &data, &size, &dmaobj->base); in gv100_dmaobj_new()
105 dmaobj->flags0 |= 0x00100000; in gv100_dmaobj_new()
[all …]
usergf119.c:43 struct gf119_dmaobj *dmaobj = gf119_dmaobj(base); in gf119_dmaobj_bind() local
44 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf119_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf119_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x04, dmaobj->base.start >> 8); in gf119_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x08, dmaobj->base.limit >> 8); in gf119_dmaobj_bind()
75 struct gf119_dmaobj *dmaobj; in gf119_dmaobj_new() local
79 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gf119_dmaobj_new()
81 *pdmaobj = &dmaobj->base; in gf119_dmaobj_new()
84 &data, &size, &dmaobj->base); in gf119_dmaobj_new()
100 if (dmaobj->base.target != NV_MEM_TARGET_VM) { in gf119_dmaobj_new()
[all …]
base.c:37 struct nvkm_dmaobj *dmaobj = NULL; in nvkm_dma_oclass_new() local
40 ret = dma->func->class_new(dma, oclass, data, size, &dmaobj); in nvkm_dma_oclass_new()
41 if (dmaobj) in nvkm_dma_oclass_new()
42 *pobject = &dmaobj->object; in nvkm_dma_oclass_new()
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
chan.c:348 struct nvkm_dmaobj *dmaobj, u64 offset, u64 length, in nvkm_chan_new_() argument
360 (!func->ramfc->ctxdma != !dmaobj) || in nvkm_chan_new_()
366 func->userd->bar < 0, userd, func->ramfc->ctxdma, dmaobj, in nvkm_chan_new_()
433 ret = nvkm_object_bind(&dmaobj->object, chan->inst, -16, &chan->push); in nvkm_chan_new_()