Lines Matching refs:sgd
21 struct scatterlist *sgs, *sgd; in rk_cipher_need_fallback() local
29 sgd = req->dst; in rk_cipher_need_fallback()
30 while (sgs && sgd) { in rk_cipher_need_fallback()
34 if (!IS_ALIGNED(sgd->offset, sizeof(u32))) { in rk_cipher_need_fallback()
41 dtodo = min(len, sgd->length); in rk_cipher_need_fallback()
50 sgd = sg_next(sgd); in rk_cipher_need_fallback()
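
The hits above all sit in the fallback check: the driver walks req->src and req->dst in lockstep and punts to the software fallback whenever an entry is not 32-bit aligned or the source and destination segments do not cover the same amount of data. A minimal sketch of that walk, reconstructed from the matched lines only (stodo, the len bookkeeping and the return conventions are assumptions, not the driver's exact code):

	#include <linux/kernel.h>
	#include <linux/scatterlist.h>
	#include <crypto/skcipher.h>

	static bool rk_cipher_need_fallback(struct skcipher_request *req)
	{
		struct scatterlist *sgs, *sgd;
		unsigned int len = req->cryptlen;
		unsigned int stodo, dtodo;

		sgs = req->src;
		sgd = req->dst;
		while (sgs && sgd) {
			/* the DMA engine wants 32-bit aligned buffers on both sides */
			if (!IS_ALIGNED(sgs->offset, sizeof(u32)))
				return true;
			if (!IS_ALIGNED(sgd->offset, sizeof(u32)))
				return true;

			stodo = min(len, sgs->length);
			dtodo = min(len, sgd->length);
			/* src and dst segments must line up for one hardware run */
			if (stodo != dtodo)
				return true;

			len -= stodo;
			sgs = sg_next(sgs);
			sgd = sg_next(sgd);
		}
		return false;
	}
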
303 struct scatterlist *sgd, unsigned int todo) in crypto_dma_start() argument
307 CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd)); in crypto_dma_start()
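
File line 307 is the only hit inside crypto_dma_start(): the destination scatterlist's DMA address is programmed into the block-transmit DMA address register. A hedged sketch of the helper; only CRYPTO_WRITE, RK_CRYPTO_BTDMAS and the sgd/todo arguments are confirmed by the matches, while the device struct name, the source/length register names and the start sequence are assumptions:

	static void crypto_dma_start(struct rk_crypto_info *dev,	/* assumed device type */
				     struct scatterlist *sgs,
				     struct scatterlist *sgd, unsigned int todo)
	{
		CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));	/* src address: assumed register name */
		CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);			/* length in 32-bit words: assumed register name */
		CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));	/* dst address: confirmed by the matched line */
		/* a write to the control register then kicks off the block transfer (bit names omitted) */
	}
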
318 struct scatterlist *sgs, *sgd; in rk_cipher_run() local
338 sgd = areq->dst; in rk_cipher_run()
340 while (sgs && sgd && len) { in rk_cipher_run()
343 sgd = sg_next(sgd); in rk_cipher_run()
351 if (sgs == sgd) { in rk_cipher_run()
363 err = dma_map_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
382 crypto_dma_start(ctx->dev, sgs, sgd, todo / 4); in rk_cipher_run()
390 if (sgs == sgd) { in rk_cipher_run()
394 dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
400 offset = sgd->length - ivsize; in rk_cipher_run()
401 scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0); in rk_cipher_run()
405 sgd = sg_next(sgd); in rk_cipher_run()
426 if (sgs == sgd) { in rk_cipher_run()
430 dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE); in rk_cipher_run()
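
The remaining matches are all in rk_cipher_run(), which feeds the request to the hardware one src/dst scatterlist pair at a time: when source and destination are the same entry it is mapped once (in-place), otherwise sgd gets its own DMA_FROM_DEVICE mapping; the transfer length is passed to crypto_dma_start() as todo / 4 because the engine counts 32-bit words; after completion the entries are unmapped and, on encryption, the last ivsize bytes of the destination segment are copied out to chain the next block's IV. A condensed sketch of that destination-side flow, built around the matched lines (error handling, the decrypt IV path and the wait logic are simplified; the completion field, the in-place DMA_BIDIRECTIONAL direction and the todo computation are assumptions):

	static int rk_cipher_run_sketch(struct rk_cipher_ctx *ctx,
					struct skcipher_request *areq,
					u8 *iv, unsigned int ivsize)
	{
		struct scatterlist *sgs = areq->src;
		struct scatterlist *sgd = areq->dst;
		unsigned int len = areq->cryptlen;
		unsigned int todo, offset;
		int err;

		while (sgs && sgd && len) {
			if (sgs == sgd) {
				/* in-place request: one mapping covers both directions */
				err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
				if (err != 1)
					return -EINVAL;
			} else {
				err = dma_map_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
				if (err != 1)
					return -EINVAL;
				err = dma_map_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
				if (err != 1)
					return -EINVAL;
			}

			todo = min(sg_dma_len(sgs), len);
			len -= todo;
			/* the engine is programmed in 32-bit words, hence todo / 4 */
			crypto_dma_start(ctx->dev, sgs, sgd, todo / 4);
			wait_for_completion(&ctx->dev->complete);	/* assumed completion field */

			if (sgs == sgd) {
				dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_BIDIRECTIONAL);
			} else {
				dma_unmap_sg(ctx->dev->dev, sgs, 1, DMA_TO_DEVICE);
				dma_unmap_sg(ctx->dev->dev, sgd, 1, DMA_FROM_DEVICE);
			}

			/* encryption: the next IV is the tail of what was just written to dst */
			offset = sgd->length - ivsize;
			scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);

			sgs = sg_next(sgs);
			sgd = sg_next(sgd);
		}
		return 0;
	}
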