
Searched refs:fctx (Results 1 – 10 of 10) sorted by relevance

/drivers/gpu/drm/nouveau/
nouveau_fence.c
65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal() local
67 if (!--fctx->notify_ref) in nouveau_fence_signal()
91 nouveau_fence_context_del(struct nouveau_fence_chan *fctx) in nouveau_fence_context_del() argument
95 spin_lock_irq(&fctx->lock); in nouveau_fence_context_del()
96 while (!list_empty(&fctx->pending)) { in nouveau_fence_context_del()
97 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_context_del()
100 nvif_notify_put(&fctx->notify); in nouveau_fence_context_del()
102 spin_unlock_irq(&fctx->lock); in nouveau_fence_context_del()
104 nvif_notify_fini(&fctx->notify); in nouveau_fence_context_del()
105 fctx->dead = 1; in nouveau_fence_context_del()
[all …]
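
The nouveau_fence.c hits are the core fence-context teardown: under fctx->lock, every fence still on the pending list is popped and signalled, the notify reference is dropped, and the context is marked dead. A minimal, self-contained sketch of that ordering, with simplified types and a plain loop standing in for the kernel list and spinlock (not the real nouveau structs):

#include <stdio.h>
#include <stdlib.h>

struct fence {
	struct fence *next;
	unsigned int seqno;
};

struct fence_chan {
	struct fence *pending;	/* fences not yet signalled, oldest first */
	int notify_ref;		/* dropped once per signalled fence */
	int dead;		/* set when the channel context is torn down */
};

/* Same shape as nouveau_fence_context_del(): drain the pending list,
 * signalling each fence, then mark the context dead.  The real code does
 * this under spin_lock_irq(&fctx->lock) and also puts and finalises the
 * nvif notify object. */
static void fence_context_del(struct fence_chan *fctx)
{
	while (fctx->pending) {
		struct fence *fence = fctx->pending;

		fctx->pending = fence->next;
		printf("signal fence %u\n", fence->seqno);
		--fctx->notify_ref;
		free(fence);
	}
	fctx->dead = 1;
}
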
nv84_fence.c
34 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_crtc() local
35 return fctx->dispc_vma[crtc].offset; in nv84_fence_crtc()
77 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_emit() local
81 addr += fctx->vma_gart.offset; in nv84_fence_emit()
83 addr += fctx->vma.offset; in nv84_fence_emit()
85 return fctx->base.emit32(chan, addr, fence->base.seqno); in nv84_fence_emit()
92 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_sync() local
96 addr += fctx->vma_gart.offset; in nv84_fence_sync()
98 addr += fctx->vma.offset; in nv84_fence_sync()
100 return fctx->base.sync32(chan, addr, fence->base.seqno); in nv84_fence_sync()
[all …]
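
The nv84_fence.c hits show how emit and sync pick the fence address: an offset is added to either the GART mapping (fctx->vma_gart, for fences kept in system memory) or the VRAM mapping (fctx->vma) before the 32-bit emit32/sync32 hook is called with the fence seqno. A rough stand-alone sketch of that selection; the 16-byte per-channel slot is an assumption for illustration, it is not visible in these hits:

typedef unsigned long long u64;

struct mapping { u64 offset; };

struct fence_chan84 {
	struct mapping vma;		/* fence buffer mapped in VRAM */
	struct mapping vma_gart;	/* fence buffer mapped through the GART */
};

/* Pick the address the fence value is written to (emit) or waited on (sync).
 * 'sysmem' mirrors fence->sysmem in the real driver; the per-channel slot
 * size is assumed. */
static u64 fence_addr(const struct fence_chan84 *fctx, int chid, int sysmem)
{
	u64 addr = (u64)chid * 16;

	if (sysmem)
		addr += fctx->vma_gart.offset;
	else
		addr += fctx->vma.offset;
	return addr;
}
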
nv10_fence.c
59 struct nv10_fence_chan *fctx = chan->fence; in nv10_fence_context_del() local
61 nouveau_fence_context_del(&fctx->base); in nv10_fence_context_del()
62 for (i = 0; i < ARRAY_SIZE(fctx->head); i++) in nv10_fence_context_del()
63 nvif_object_fini(&fctx->head[i]); in nv10_fence_context_del()
64 nvif_object_fini(&fctx->sema); in nv10_fence_context_del()
66 nouveau_fence_context_free(&fctx->base); in nv10_fence_context_del()
72 struct nv10_fence_chan *fctx; in nv10_fence_context_new() local
74 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv10_fence_context_new()
75 if (!fctx) in nv10_fence_context_new()
78 nouveau_fence_context_new(chan, &fctx->base); in nv10_fence_context_new()
[all …]
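
nv10_fence_context_del() adds a second teardown step on top of the common one above: each per-head display object and the semaphore object are finalised before the base context is released. A short sketch with a hypothetical object_fini() standing in for nvif_object_fini():

#define NR_HEADS 4	/* assumed; the real code uses ARRAY_SIZE(fctx->head) */

struct object { int valid; };

static void object_fini(struct object *obj)	/* stand-in for nvif_object_fini() */
{
	obj->valid = 0;
}

struct nv10_ctx {
	struct object head[NR_HEADS];	/* one object per display head */
	struct object sema;		/* shared semaphore buffer object */
};

static void nv10_ctx_del(struct nv10_ctx *fctx)
{
	for (int i = 0; i < NR_HEADS; i++)
		object_fini(&fctx->head[i]);
	object_fini(&fctx->sema);
	/* the real code brackets this with nouveau_fence_context_del()
	 * before and nouveau_fence_context_free() after */
}
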
nv04_fence.c
69 struct nv04_fence_chan *fctx = chan->fence; in nv04_fence_context_del() local
70 nouveau_fence_context_del(&fctx->base); in nv04_fence_context_del()
72 nouveau_fence_context_free(&fctx->base); in nv04_fence_context_del()
78 struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv04_fence_context_new() local
79 if (fctx) { in nv04_fence_context_new()
80 nouveau_fence_context_new(chan, &fctx->base); in nv04_fence_context_new()
81 fctx->base.emit = nv04_fence_emit; in nv04_fence_context_new()
82 fctx->base.sync = nv04_fence_sync; in nv04_fence_context_new()
83 fctx->base.read = nv04_fence_read; in nv04_fence_context_new()
84 chan->fence = fctx; in nv04_fence_context_new()
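
nv04_fence_context_new() (and the nv10/nv17/nv50 constructors around it) all follow one construction pattern: zero-allocate the per-channel context, initialise the shared base, point the base's emit/sync/read hooks at the generation-specific implementations, then attach the context to the channel. A condensed sketch with stub callbacks in place of the real nv04 hooks:

#include <stdlib.h>

struct chan;	/* opaque channel; fields omitted */

struct fence_base {
	int (*emit)(struct chan *chan);
	int (*sync)(struct chan *chan, struct chan *prev);
	unsigned int (*read)(struct chan *chan);
};

struct nv04_ctx { struct fence_base base; };

/* stand-ins for nv04_fence_emit()/nv04_fence_sync()/nv04_fence_read() */
static int emit_stub(struct chan *chan) { (void)chan; return 0; }
static int sync_stub(struct chan *chan, struct chan *prev) { (void)chan; (void)prev; return 0; }
static unsigned int read_stub(struct chan *chan) { (void)chan; return 0; }

static struct nv04_ctx *nv04_ctx_new(void)
{
	struct nv04_ctx *fctx = calloc(1, sizeof(*fctx));	/* kzalloc in the kernel */

	if (fctx) {
		fctx->base.emit = emit_stub;
		fctx->base.sync = sync_stub;
		fctx->base.read = read_stub;
	}
	return fctx;	/* the caller stores this in chan->fence */
}
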
nv17_fence.c
38 struct nv10_fence_chan *fctx = chan->fence; in nv17_fence_sync() local
53 OUT_RING (prev, fctx->sema.handle); in nv17_fence_sync()
62 OUT_RING (chan, fctx->sema.handle); in nv17_fence_sync()
77 struct nv10_fence_chan *fctx; in nv17_fence_context_new() local
83 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv17_fence_context_new()
84 if (!fctx) in nv17_fence_context_new()
87 nouveau_fence_context_new(chan, &fctx->base); in nv17_fence_context_new()
88 fctx->base.emit = nv10_fence_emit; in nv17_fence_context_new()
89 fctx->base.read = nv10_fence_read; in nv17_fence_context_new()
90 fctx->base.sync = nv17_fence_sync; in nv17_fence_context_new()
[all …]
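
nv17_fence_sync() is the cross-channel case: the same semaphore object handle (fctx->sema.handle) is pushed into both channels, so the previously-submitted channel releases the semaphore when its work completes and the waiting channel acquires it before running. A toy model of that release/acquire ordering, with a plain shared integer standing in for the hardware semaphore (the real method stream and offsets are not reproduced here):

/* toy shared semaphore; fctx->sema backs this with a real buffer object */
static volatile unsigned int sema_value;

/* what the previously-submitted channel is asked to do when it finishes */
static void prev_channel_release(unsigned int token)
{
	sema_value = token;
}

/* what the waiting channel does before it is allowed to proceed */
static void chan_acquire(unsigned int token)
{
	while (sema_value != token)
		;	/* the GPU performs this wait in hardware */
}
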
nv50_fence.c
39 struct nv10_fence_chan *fctx; in nv50_fence_context_new() local
45 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv50_fence_context_new()
46 if (!fctx) in nv50_fence_context_new()
49 nouveau_fence_context_new(chan, &fctx->base); in nv50_fence_context_new()
50 fctx->base.emit = nv10_fence_emit; in nv50_fence_context_new()
51 fctx->base.read = nv10_fence_read; in nv50_fence_context_new()
52 fctx->base.sync = nv17_fence_sync; in nv50_fence_context_new()
61 &fctx->sema); in nv50_fence_context_new()
76 &fctx->head[i]); in nv50_fence_context_new()
nvc0_fence.c
68 struct nv84_fence_chan *fctx = chan->fence; in nvc0_fence_context_new() local
69 fctx->base.emit32 = nvc0_fence_emit32; in nvc0_fence_context_new()
70 fctx->base.sync32 = nvc0_fence_sync32; in nvc0_fence_context_new()
nouveau_display.c
671 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_page_flip_emit() local
679 list_add_tail(&s->head, &fctx->flip); in nouveau_page_flip_emit()
828 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_finish_page_flip() local
836 if (list_empty(&fctx->flip)) { in nouveau_finish_page_flip()
842 s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head); in nouveau_finish_page_flip()
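
The nouveau_display.c hits show the flip bookkeeping hanging off the same fence context: nouveau_page_flip_emit() queues a nouveau_page_flip_state on fctx->flip, and nouveau_finish_page_flip() later pops the oldest entry, bailing out if the list is empty. A small FIFO sketch of that pairing, with a singly linked list in place of the kernel list_head:

#include <stddef.h>

struct flip_state {
	struct flip_state *next;
	/* crtc, buffer offsets, etc. omitted */
};

struct flip_queue {
	struct flip_state *head;	/* plays the role of fctx->flip */
	struct flip_state **tail;	/* must start as &head on an empty queue */
};

/* nouveau_page_flip_emit(): append the new flip behind any pending ones */
static void flip_emit(struct flip_queue *q, struct flip_state *s)
{
	s->next = NULL;
	*q->tail = s;
	q->tail = &s->next;
}

/* nouveau_finish_page_flip(): an empty list means a spurious completion */
static struct flip_state *flip_finish(struct flip_queue *q)
{
	struct flip_state *s = q->head;

	if (!s)
		return NULL;
	q->head = s->next;
	if (!q->head)
		q->tail = &q->head;
	return s;
}
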
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
dmanv04.c
78 struct nvkm_memory *fctx = device->imem->ramfc; in nv04_fifo_dma_fini() local
101 u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm); in nv04_fifo_dma_fini()
102 nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs)); in nv04_fifo_dma_fini()
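
The dmanv04.c hit is different in kind: here fctx is the RAMFC instance memory for the channel, and the fini path does a masked read-modify-write on each saved context word with nvkm_ro32()/nvkm_wo32(). A generic sketch of that masked update on a plain array, with hypothetical ro32/wo32 helpers:

typedef unsigned int u32;

static u32 ramfc[64];	/* stands in for the RAMFC instance memory */

static u32 ro32(u32 offset) { return ramfc[offset / 4]; }	/* ~ nvkm_ro32() */
static void wo32(u32 offset, u32 v) { ramfc[offset / 4] = v; }	/* ~ nvkm_wo32() */

/* Clear the field selected by 'mask' at 'offset' and insert the register
 * value 'rv' shifted into place -- the pattern the fini loop applies to
 * every saved context word. */
static void ramfc_update(u32 offset, u32 mask, u32 rv, u32 shift)
{
	u32 cv = ro32(offset) & ~mask;

	wo32(offset, cv | (rv << shift));
}
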
/drivers/net/ethernet/broadcom/
cnic.c
2350 struct fcoe_context *fctx; in cnic_bnx2x_fcoe_ofld1() local
2384 fctx = cnic_get_bnx2x_ctx(dev, cid, 1, &ctx_addr); in cnic_bnx2x_fcoe_ofld1()
2385 if (fctx) { in cnic_bnx2x_fcoe_ofld1()
2391 fctx->xstorm_ag_context.cdu_reserved = val; in cnic_bnx2x_fcoe_ofld1()
2394 fctx->ustorm_ag_context.cdu_usage = val; in cnic_bnx2x_fcoe_ofld1()