Lines Matching refs:op

24 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_need_fallback() local
25 struct sl3516_ce_dev *ce = op->ce; in sl3516_ce_need_fallback()
102 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_cipher_fallback() local
111 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in sl3516_ce_cipher_fallback()
126 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_cipher() local
127 struct sl3516_ce_dev *ce = op->ce; in sl3516_ce_cipher()
145 op->keylen); in sl3516_ce_cipher()
231 cpu_to_be32_array((__be32 *)ecb->key, (u32 *)op->key, op->keylen / 4); in sl3516_ce_cipher()
237 ecb->control.aesnk = op->keylen / 4; in sl3516_ce_cipher()
277 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_skdecrypt() local
287 engine = op->ce->engine; in sl3516_ce_skdecrypt()
295 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_skencrypt() local
305 engine = op->ce->engine; in sl3516_ce_skencrypt()
312 struct sl3516_ce_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm); in sl3516_ce_cipher_init() local
319 memset(op, 0, sizeof(struct sl3516_ce_cipher_tfm_ctx)); in sl3516_ce_cipher_init()
322 op->ce = algt->ce; in sl3516_ce_cipher_init()
324 op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK); in sl3516_ce_cipher_init()
325 if (IS_ERR(op->fallback_tfm)) { in sl3516_ce_cipher_init()
326 dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n", in sl3516_ce_cipher_init()
327 name, PTR_ERR(op->fallback_tfm)); in sl3516_ce_cipher_init()
328 return PTR_ERR(op->fallback_tfm); in sl3516_ce_cipher_init()
332 crypto_skcipher_reqsize(op->fallback_tfm); in sl3516_ce_cipher_init()
334 dev_info(op->ce->dev, "Fallback for %s is %s\n", in sl3516_ce_cipher_init()
336 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm))); in sl3516_ce_cipher_init()
338 op->enginectx.op.do_one_request = sl3516_ce_handle_cipher_request; in sl3516_ce_cipher_init()
339 op->enginectx.op.prepare_request = NULL; in sl3516_ce_cipher_init()
340 op->enginectx.op.unprepare_request = NULL; in sl3516_ce_cipher_init()
342 err = pm_runtime_get_sync(op->ce->dev); in sl3516_ce_cipher_init()
348 pm_runtime_put_noidle(op->ce->dev); in sl3516_ce_cipher_init()
349 crypto_free_skcipher(op->fallback_tfm); in sl3516_ce_cipher_init()
355 struct sl3516_ce_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm); in sl3516_ce_cipher_exit() local
357 kfree_sensitive(op->key); in sl3516_ce_cipher_exit()
358 crypto_free_skcipher(op->fallback_tfm); in sl3516_ce_cipher_exit()
359 pm_runtime_put_sync_suspend(op->ce->dev); in sl3516_ce_cipher_exit()
365 struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm); in sl3516_ce_aes_setkey() local
366 struct sl3516_ce_dev *ce = op->ce; in sl3516_ce_aes_setkey()
379 kfree_sensitive(op->key); in sl3516_ce_aes_setkey()
380 op->keylen = keylen; in sl3516_ce_aes_setkey()
381 op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA); in sl3516_ce_aes_setkey()
382 if (!op->key) in sl3516_ce_aes_setkey()
385 crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK); in sl3516_ce_aes_setkey()
386 crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK); in sl3516_ce_aes_setkey()
388 return crypto_skcipher_setkey(op->fallback_tfm, key, keylen); in sl3516_ce_aes_setkey()
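Read together, these references trace the usual crypto-engine fallback pattern: the per-tfm context op holds the device pointer, a copy of the key and a software fallback_tfm that is allocated in sl3516_ce_cipher_init(), freed in sl3516_ce_cipher_exit(), and kept in sync with every new key in sl3516_ce_aes_setkey(). A minimal sketch of that setkey half follows; the context layout beyond the fields visible above is assumed, and the driver's key-length validation (between lines 366 and 379) is elided.

/*
 * Sketch of the setkey pattern the references above trace: the per-tfm
 * context ("op") caches a DMA-able copy of the key and forwards both the
 * key and the request flags to the software fallback. Field layout not
 * shown in the listing is assumed, not taken from the driver.
 */
#include <crypto/skcipher.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>

struct sl3516_ce_dev;

struct sl3516_ce_cipher_tfm_ctx {
	struct sl3516_ce_dev *ce;              /* parent device */
	struct crypto_skcipher *fallback_tfm;  /* software fallback cipher */
	u32 *key;                              /* DMA-able key copy */
	unsigned int keylen;
};

static int sl3516_ce_aes_setkey_sketch(struct crypto_skcipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);

	/* Drop any previously cached key before duplicating the new one. */
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	/* Keep the fallback cipher's request flags in sync with the caller. */
	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm,
				  tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

The kmemdup() with GFP_DMA keeps a key copy the engine can transfer directly, while propagating CRYPTO_TFM_REQ_MASK makes the fallback honour the same request constraints as the hardware path.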