/drivers/gpu/drm/msm/ |
D | msm_fence.c |
    17    struct msm_fence_context *fctx; in msm_fence_context_alloc() local
    19    fctx = kzalloc(sizeof(*fctx), GFP_KERNEL); in msm_fence_context_alloc()
    20    if (!fctx) in msm_fence_context_alloc()
    23    fctx->dev = dev; in msm_fence_context_alloc()
    24    strscpy(fctx->name, name, sizeof(fctx->name)); in msm_fence_context_alloc()
    25    fctx->context = dma_fence_context_alloc(1); in msm_fence_context_alloc()
    26    fctx->fenceptr = fenceptr; in msm_fence_context_alloc()
    27    spin_lock_init(&fctx->spinlock); in msm_fence_context_alloc()
    29    return fctx; in msm_fence_context_alloc()
    32    void msm_fence_context_free(struct msm_fence_context *fctx) in msm_fence_context_free() argument
    [all …]
|
D | msm_fence.h |
    57    void msm_fence_context_free(struct msm_fence_context *fctx);
    59    void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence);
    61    struct dma_fence * msm_fence_alloc(struct msm_fence_context *fctx);
|
D | msm_ringbuffer.c |
    30    submit->hw_fence = msm_fence_alloc(submit->ring->fctx); in msm_job_run()
    115   ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name); in msm_ringbuffer_new()
    131   msm_fence_context_free(ring->fctx); in msm_ringbuffer_destroy()
|
D | msm_ringbuffer.h | 66 struct msm_fence_context *fctx; member
|
D | msm_gpu.c | 178 msm_update_fence(submit->ring->fctx, in update_fences()
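Taken together, the msm hits trace one lifecycle: a fence context is allocated per ring, hands out hardware fences at submit time, is advanced as the GPU retires work, and is freed with the ring. A minimal sketch of that call order, pieced together from the excerpts above; the completed_seqno variable name is illustrative, not from the source:

    /* msm_ringbuffer_new(): one context per ring; the GPU writes the
     * completed seqno into ring->memptrs->fence, which fenceptr points at. */
    ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name);

    /* msm_job_run(): back each submit with a dma_fence from the ring's
     * context. */
    submit->hw_fence = msm_fence_alloc(submit->ring->fctx);

    /* update_fences(): signal every fence up to the seqno the GPU has
     * reached (completed_seqno is an assumed name). */
    msm_update_fence(submit->ring->fctx, completed_seqno);

    /* msm_ringbuffer_destroy(): tear the context down with the ring. */
    msm_fence_context_free(ring->fctx);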
|
/drivers/gpu/drm/nouveau/ |
D | nouveau_fence.c |
    65    struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal() local
    67    if (!--fctx->notify_ref) in nouveau_fence_signal()
    90    nouveau_fence_context_kill(struct nouveau_fence_chan *fctx, int error) in nouveau_fence_context_kill() argument
    94    spin_lock_irq(&fctx->lock); in nouveau_fence_context_kill()
    95    while (!list_empty(&fctx->pending)) { in nouveau_fence_context_kill()
    96    fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_context_kill()
    102   nvif_notify_put(&fctx->notify); in nouveau_fence_context_kill()
    104   spin_unlock_irq(&fctx->lock); in nouveau_fence_context_kill()
    108   nouveau_fence_context_del(struct nouveau_fence_chan *fctx) in nouveau_fence_context_del() argument
    110   nouveau_fence_context_kill(fctx, 0); in nouveau_fence_context_del()
    [all …]
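These hits outline the teardown path: nouveau_fence_context_kill() drains every pending fence under the context lock (stamping an error code first when one is given) and drops the notify reference; nouveau_fence_context_del() is simply kill with error 0 plus resource cleanup. A condensed sketch of the drain loop; the per-fence error/signal/unlink body is truncated in the hit list and summarized as a comment:

    spin_lock_irq(&fctx->lock);
    while (!list_empty(&fctx->pending)) {
            fence = list_entry(fctx->pending.next, typeof(*fence), head);
            /* ... stamp fence with 'error' if nonzero, then signal and
             * unlink it from the pending list (elided in the hit list) ... */
    }
    nvif_notify_put(&fctx->notify);
    spin_unlock_irq(&fctx->lock);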
|
D | nv84_fence.c |
    83    struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_emit() local
    84    u64 addr = fctx->vma->addr + chan->chid * 16; in nv84_fence_emit()
    86    return fctx->base.emit32(chan, addr, fence->base.seqno); in nv84_fence_emit()
    93    struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_sync() local
    94    u64 addr = fctx->vma->addr + prev->chid * 16; in nv84_fence_sync()
    96    return fctx->base.sync32(chan, addr, fence->base.seqno); in nv84_fence_sync()
    110   struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_context_del() local
    112   nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); in nv84_fence_context_del()
    114   nouveau_vma_del(&fctx->vma); in nv84_fence_context_del()
    116   nouveau_fence_context_del(&fctx->base); in nv84_fence_context_del()
    [all …]
|
D | nv10_fence.c |
    61    struct nv10_fence_chan *fctx = chan->fence; in nv10_fence_context_del() local
    62    nouveau_fence_context_del(&fctx->base); in nv10_fence_context_del()
    63    nvif_object_dtor(&fctx->sema); in nv10_fence_context_del()
    65    nouveau_fence_context_free(&fctx->base); in nv10_fence_context_del()
    71    struct nv10_fence_chan *fctx; in nv10_fence_context_new() local
    73    fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv10_fence_context_new()
    74    if (!fctx) in nv10_fence_context_new()
    77    nouveau_fence_context_new(chan, &fctx->base); in nv10_fence_context_new()
    78    fctx->base.emit = nv10_fence_emit; in nv10_fence_context_new()
    79    fctx->base.read = nv10_fence_read; in nv10_fence_context_new()
    [all …]
|
D | nv04_fence.c |
    70    struct nv04_fence_chan *fctx = chan->fence; in nv04_fence_context_del() local
    71    nouveau_fence_context_del(&fctx->base); in nv04_fence_context_del()
    73    nouveau_fence_context_free(&fctx->base); in nv04_fence_context_del()
    79    struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv04_fence_context_new() local
    80    if (fctx) { in nv04_fence_context_new()
    81    nouveau_fence_context_new(chan, &fctx->base); in nv04_fence_context_new()
    82    fctx->base.emit = nv04_fence_emit; in nv04_fence_context_new()
    83    fctx->base.sync = nv04_fence_sync; in nv04_fence_context_new()
    84    fctx->base.read = nv04_fence_read; in nv04_fence_context_new()
    85    chan->fence = fctx; in nv04_fence_context_new()
|
D | nv17_fence.c |
    41    struct nv10_fence_chan *fctx = chan->fence; in nv17_fence_sync() local
    57    PUSH_MTHD(ppush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle, in nv17_fence_sync()
    65    PUSH_MTHD(npush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle, in nv17_fence_sync()
    81    struct nv10_fence_chan *fctx; in nv17_fence_context_new() local
    86    fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv17_fence_context_new()
    87    if (!fctx) in nv17_fence_context_new()
    90    nouveau_fence_context_new(chan, &fctx->base); in nv17_fence_context_new()
    91    fctx->base.emit = nv10_fence_emit; in nv17_fence_context_new()
    92    fctx->base.read = nv10_fence_read; in nv17_fence_context_new()
    93    fctx->base.sync = nv17_fence_sync; in nv17_fence_context_new()
    [all …]
|
D | nv50_fence.c |
    39    struct nv10_fence_chan *fctx; in nv50_fence_context_new() local
    45    fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv50_fence_context_new()
    46    if (!fctx) in nv50_fence_context_new()
    49    nouveau_fence_context_new(chan, &fctx->base); in nv50_fence_context_new()
    50    fctx->base.emit = nv10_fence_emit; in nv50_fence_context_new()
    51    fctx->base.read = nv10_fence_read; in nv50_fence_context_new()
    52    fctx->base.sync = nv17_fence_sync; in nv50_fence_context_new()
    62    &fctx->sema); in nv50_fence_context_new()
|
D | nvc0_fence.c |
    82    struct nv84_fence_chan *fctx = chan->fence; in nvc0_fence_context_new() local
    83    fctx->base.emit32 = nvc0_fence_emit32; in nvc0_fence_context_new()
    84    fctx->base.sync32 = nvc0_fence_sync32; in nvc0_fence_context_new()
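Across nv04/nv10/nv17/nv50, these constructors share one shape: kzalloc the per-channel fctx, initialize the common base with nouveau_fence_context_new(), then plug in the generation-specific emit/sync/read hooks (nvc0 goes one step further and only overrides the nv84 base's emit32/sync32). A condensed sketch following the nv17/nv50 variant, with the error path abbreviated:

    struct nv10_fence_chan *fctx;

    /* One fence state block per channel, owned by chan->fence. */
    fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
    if (!fctx)
            return -ENOMEM;

    /* Common base init, then the generation-specific hooks. */
    nouveau_fence_context_new(chan, &fctx->base);
    fctx->base.emit = nv10_fence_emit;
    fctx->base.read = nv10_fence_read;
    fctx->base.sync = nv17_fence_sync;   /* semaphore-based sync on nv17+ */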
|
/drivers/crypto/cavium/nitrox/ |
D | nitrox_aead.c |
    38    struct flexi_crypto_context *fctx; in nitrox_aes_gcm_setkey() local
    46    fctx = nctx->u.fctx; in nitrox_aes_gcm_setkey()
    47    flags.fu = be64_to_cpu(fctx->flags.f); in nitrox_aes_gcm_setkey()
    49    fctx->flags.f = cpu_to_be64(flags.fu); in nitrox_aes_gcm_setkey()
    52    memset(&fctx->crypto, 0, sizeof(fctx->crypto)); in nitrox_aes_gcm_setkey()
    53    memcpy(fctx->crypto.u.key, key, keylen); in nitrox_aes_gcm_setkey()
    62    struct flexi_crypto_context *fctx = nctx->u.fctx; in nitrox_aead_setauthsize() local
    65    flags.fu = be64_to_cpu(fctx->flags.f); in nitrox_aead_setauthsize()
    67    fctx->flags.f = cpu_to_be64(flags.fu); in nitrox_aead_setauthsize()
    219   struct flexi_crypto_context *fctx = nctx->u.fctx; in nitrox_aes_gcm_enc() local
    [all …]
|
D | nitrox_skcipher.c |
    153   struct flexi_crypto_context *fctx = nctx->u.fctx; in nitrox_skcipher_exit() local
    155   memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys)); in nitrox_skcipher_exit()
    156   memzero_explicit(&fctx->auth, sizeof(struct auth_keys)); in nitrox_skcipher_exit()
    171   struct flexi_crypto_context *fctx; in nitrox_skcipher_setkey() local
    184   fctx = nctx->u.fctx; in nitrox_skcipher_setkey()
    185   flags = &fctx->flags; in nitrox_skcipher_setkey()
    192   memcpy(fctx->crypto.u.key, key, keylen); in nitrox_skcipher_setkey()
    342   struct flexi_crypto_context *fctx; in nitrox_aes_xts_setkey() local
    355   fctx = nctx->u.fctx; in nitrox_aes_xts_setkey()
    357   memcpy(fctx->auth.u.key2, (key + keylen), keylen); in nitrox_aes_xts_setkey()
    [all …]
|
D | nitrox_req.h | 206 struct flexi_crypto_context *fctx; member
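Both nitrox files repeat one idiom around the flexi crypto context: convert the device's big-endian flags word to CPU order, edit it, convert it back, then swap in fresh key material. A sketch of that round trip from the setkey hits above; the local union's type name and the flag-bit edits are assumptions, since the hit list truncates them:

    struct flexi_crypto_context *fctx = nctx->u.fctx;
    union fc_flags flags;                 /* union type name assumed */

    /* The hardware reads flags big-endian: convert, edit, write back. */
    flags.fu = be64_to_cpu(fctx->flags.f);
    /* ... cipher/auth bits updated here (truncated in the hit list) ... */
    fctx->flags.f = cpu_to_be64(flags.fu);

    /* Clear stale key material before installing the new key. */
    memset(&fctx->crypto, 0, sizeof(fctx->crypto));
    memcpy(fctx->crypto.u.key, key, keylen);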
|
/drivers/crypto/cavium/cpt/ |
D | cptvf_algs.c |
    101   struct fc_context *fctx = &rctx->fctx; in create_ctx_hdr() local
    121   fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type; in create_ctx_hdr()
    122   fctx->enc.enc_ctrl.e.aes_key = ctx->key_type; in create_ctx_hdr()
    123   fctx->enc.enc_ctrl.e.iv_source = FROM_DPTR; in create_ctx_hdr()
    126   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2); in create_ctx_hdr()
    128   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len); in create_ctx_hdr()
    129   ctrl_flags = (__be64 *)&fctx->enc.enc_ctrl.flags; in create_ctx_hdr()
    130   *ctrl_flags = cpu_to_be64(fctx->enc.enc_ctrl.flags); in create_ctx_hdr()
    142   req_info->in[*argcnt].vptr = (u8 *)fctx; in create_ctx_hdr()
    198   struct fc_context *fctx = &rctx->fctx; in cvm_enc_dec() local
    [all …]
|
D | cptvf_algs.h | 113 struct fc_context fctx; member
|
/drivers/crypto/marvell/octeontx/ |
D | otx_cptvf_algs.c |
    107   if (memcmp(rctx->fctx.hmac.s.hmac_calc, in validate_hmac_cipher_null()
    108   rctx->fctx.hmac.s.hmac_recv, in validate_hmac_cipher_null()
    240   struct otx_cpt_fc_ctx *fctx = &rctx->fctx; in create_ctx_hdr() local
    272   fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type; in create_ctx_hdr()
    273   fctx->enc.enc_ctrl.e.aes_key = ctx->key_type; in create_ctx_hdr()
    274   fctx->enc.enc_ctrl.e.iv_source = OTX_CPT_FROM_CPTR; in create_ctx_hdr()
    277   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2); in create_ctx_hdr()
    279   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len); in create_ctx_hdr()
    281   memcpy(fctx->enc.encr_iv, req->iv, crypto_skcipher_ivsize(stfm)); in create_ctx_hdr()
    283   fctx->enc.enc_ctrl.flags = cpu_to_be64(fctx->enc.enc_ctrl.cflags); in create_ctx_hdr()
    [all …]
|
D | otx_cptvf_algs.h | 162 struct otx_cpt_fc_ctx fctx; member
|
/drivers/crypto/marvell/octeontx2/ |
D | otx2_cptvf_algs.c |
    91    if (memcmp(rctx->fctx.hmac.s.hmac_calc, in validate_hmac_cipher_null()
    92    rctx->fctx.hmac.s.hmac_recv, in validate_hmac_cipher_null()
    225   struct otx2_cpt_fc_ctx *fctx = &rctx->fctx; in create_ctx_hdr() local
    257   fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type; in create_ctx_hdr()
    258   fctx->enc.enc_ctrl.e.aes_key = ctx->key_type; in create_ctx_hdr()
    259   fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_CPTR; in create_ctx_hdr()
    262   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2); in create_ctx_hdr()
    264   memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len); in create_ctx_hdr()
    266   memcpy(fctx->enc.encr_iv, req->iv, crypto_skcipher_ivsize(stfm)); in create_ctx_hdr()
    268   cpu_to_be64s(&fctx->enc.enc_ctrl.u); in create_ctx_hdr()
    [all …]
|
D | otx2_cptvf_algs.h | 148 struct otx2_cpt_fc_ctx fctx; member
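All three CPT generations (cavium/cpt, octeontx, octeontx2) build the same fc_ctx header in create_ctx_hdr(): select the cipher and key type, pick the IV source, copy the key (doubled length for the two-key case such as XTS), then byte-swap the control word to big-endian for the engine; only the swap helper and constant names differ per generation. A sketch following the octeontx2 hits; the two-key condition is an assumption, since the hit list truncates the branch:

    struct otx2_cpt_fc_ctx *fctx = &rctx->fctx;

    /* Describe the operation to the crypto engine. */
    fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
    fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
    fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_CPTR;

    /* Two-key ciphers carry both keys back to back, hence key_len * 2
     * (the exact condition is elided in the hit list). */
    if (two_key_cipher)
            memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
    else
            memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);

    memcpy(fctx->enc.encr_iv, req->iv, crypto_skcipher_ivsize(stfm));

    /* The engine expects the control word big-endian; swap in place. */
    cpu_to_be64s(&fctx->enc.enc_ctrl.u);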
|
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
D | dmanv04.c |
    79    struct nvkm_memory *fctx = device->imem->ramfc; in nv04_fifo_dma_fini() local
    98    nvkm_kmap(fctx); in nv04_fifo_dma_fini()
    103   u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm); in nv04_fifo_dma_fini()
    104   nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs)); in nv04_fifo_dma_fini()
    106   nvkm_done(fctx); in nv04_fifo_dma_fini()
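Note that fctx here is not a fence context: it names the RAMFC instance memory, which the teardown path patches with the standard nvkm map/read-modify-write/unmap sequence. Condensed from the hits above:

    struct nvkm_memory *fctx = device->imem->ramfc;

    nvkm_kmap(fctx);                                        /* map instance memory */
    u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm);       /* read, mask out field */
    nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs));  /* write merged value */
    nvkm_done(fctx);                                        /* flush and unmap */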
|
/drivers/block/ |
D | rbd.c |
    2500  struct rbd_img_fill_ctx *fctx) in rbd_img_fill_request_nocopy() argument
    2505  img_req->data_type = fctx->pos_type; in rbd_img_fill_request_nocopy()
    2511  fctx->iter = *fctx->pos; in rbd_img_fill_request_nocopy()
    2518  fctx->set_pos_fn, &fctx->iter); in rbd_img_fill_request_nocopy()
    2542  struct rbd_img_fill_ctx *fctx) in rbd_img_fill_request() argument
    2549  if (fctx->pos_type == OBJ_REQUEST_NODATA || in rbd_img_fill_request()
    2552  num_img_extents, fctx); in rbd_img_fill_request()
    2563  fctx->iter = *fctx->pos; in rbd_img_fill_request()
    2570  fctx->count_fn, &fctx->iter); in rbd_img_fill_request()
    2587  fctx->iter = *fctx->pos; in rbd_img_fill_request()
    [all …]
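The rbd hits sketch a callback-driven fill context: fctx carries the data-position type, the caller's starting position, a working iterator that is reset before each walk (fctx->iter = *fctx->pos), and set_pos/count callbacks handed to the extent walker. From that usage the structure's rough shape is as below; this is inferred for illustration, not the kernel's actual definition, and the callback typedef name is an assumption:

    /* Inferred from usage above -- illustrative only. */
    struct rbd_img_fill_ctx {
            enum obj_request_type   pos_type;   /* e.g. OBJ_REQUEST_NODATA */
            union rbd_img_fill_iter *pos;       /* caller's starting position */
            union rbd_img_fill_iter iter;       /* working copy, reset per walk */
            /* callbacks the extent walker invokes per object request
             * (typedef name assumed) */
            ceph_object_extent_fn_t set_pos_fn;
            ceph_object_extent_fn_t count_fn;
    };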
|
/drivers/gpu/drm/nouveau/dispnv04/ |
D | crtc.c |
    1051  struct nouveau_fence_chan *fctx = chan->fence; in nv04_finish_page_flip() local
    1059  if (list_empty(&fctx->flip)) { in nv04_finish_page_flip()
    1065  s = list_first_entry(&fctx->flip, struct nv04_page_flip_state, head); in nv04_finish_page_flip()
    1107  struct nouveau_fence_chan *fctx = chan->fence; in nv04_page_flip_emit()
    1116  list_add_tail(&s->head, &fctx->flip); in nv04_page_flip_emit()
|
/drivers/gpu/drm/msm/adreno/ |
D | adreno_gpu.c | 427 ring->memptrs->fence = ring->fctx->completed_fence; in adreno_hw_init()
|