/drivers/crypto/ |
D | mv_cesa.c |
    600  struct crypto_async_request *async_req = NULL;      in queue_manag()  local
    611  async_req = crypto_dequeue_request(&cpg->queue);     in queue_manag()
    612  if (async_req) {                                     in queue_manag()
    624  if (async_req) {                                     in queue_manag()
    625  if (async_req->tfm->__crt_alg->cra_type !=           in queue_manag()
    628  ablkcipher_request_cast(async_req);                  in queue_manag()
    632  ahash_request_cast(async_req);                       in queue_manag()
    635  async_req = NULL;                                    in queue_manag()
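The mv_cesa.c hits above show the first half of the common pattern: a generic crypto_async_request is pulled off the driver queue and then told apart as a cipher or a hash request before being cast to its concrete type. A minimal sketch of that dispatch, assuming the old ablkcipher/ahash API of this kernel generation; it uses crypto_tfm_alg_type() where mv_cesa compares cra_type pointers directly, and the helper name is invented:

    #include <crypto/algapi.h>   /* struct crypto_queue, crypto_dequeue_request() */
    #include <crypto/hash.h>     /* ahash_request_cast() */
    #include <linux/crypto.h>    /* ablkcipher_request_cast(), crypto_tfm_alg_type() */

    /* Hypothetical dispatch helper, modeled on queue_manag() in mv_cesa.c. */
    static void dispatch_one(struct crypto_queue *queue)
    {
        struct crypto_async_request *async_req;

        async_req = crypto_dequeue_request(queue);
        if (!async_req)
            return;

        if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_ABLKCIPHER) {
            struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
            /* ... program the engine for a cipher operation on req ... */
        } else {
            struct ahash_request *req = ahash_request_cast(async_req);
            /* ... program the engine for a hash operation on req ... */
        }
    }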
|
D | bfin_crc.c |
    305  struct crypto_async_request *async_req, *backlog;    in bfin_crypto_crc_handle_queue()  local
    321  async_req = crypto_dequeue_request(&crc->queue);     in bfin_crypto_crc_handle_queue()
    322  if (async_req)                                       in bfin_crypto_crc_handle_queue()
    326  if (!async_req)                                      in bfin_crypto_crc_handle_queue()
    332  req = ahash_request_cast(async_req);                 in bfin_crypto_crc_handle_queue()
|
D | s5p-sss.c |
    406  struct crypto_async_request *async_req, *backlog;    in s5p_tasklet_cb()  local
    412  async_req = crypto_dequeue_request(&dev->queue);     in s5p_tasklet_cb()
    415  if (!async_req)                                      in s5p_tasklet_cb()
    421  dev->req = ablkcipher_request_cast(async_req);       in s5p_tasklet_cb()
|
D | tegra-aes.c |
    389  struct crypto_async_request *async_req, *backlog;    in tegra_aes_handle_req()  local
    406  async_req = crypto_dequeue_request(&dd->queue);      in tegra_aes_handle_req()
    407  if (!async_req)                                      in tegra_aes_handle_req()
    411  if (!async_req)                                      in tegra_aes_handle_req()
    417  req = ablkcipher_request_cast(async_req);            in tegra_aes_handle_req()
|
D | sahara.c |
    535  struct crypto_async_request *async_req, *backlog;    in sahara_aes_queue_task()  local
    543  async_req = crypto_dequeue_request(&dev->queue);     in sahara_aes_queue_task()
    544  if (!async_req)                                      in sahara_aes_queue_task()
    548  if (!async_req)                                      in sahara_aes_queue_task()
    554  req = ablkcipher_request_cast(async_req);            in sahara_aes_queue_task()
|
D | omap-aes.c |
    679  struct crypto_async_request *async_req, *backlog;    in omap_aes_handle_queue()  local
    693  async_req = crypto_dequeue_request(&dd->queue);      in omap_aes_handle_queue()
    694  if (async_req)                                       in omap_aes_handle_queue()
    698  if (!async_req)                                      in omap_aes_handle_queue()
    704  req = ablkcipher_request_cast(async_req);            in omap_aes_handle_queue()
|
D | atmel-sha.c |
    825  struct crypto_async_request *async_req, *backlog;    in atmel_sha_handle_queue()  local
    840  async_req = crypto_dequeue_request(&dd->queue);      in atmel_sha_handle_queue()
    841  if (async_req)                                       in atmel_sha_handle_queue()
    846  if (!async_req)                                      in atmel_sha_handle_queue()
    852  req = ahash_request_cast(async_req);                 in atmel_sha_handle_queue()
|
D | atmel-aes.c |
    551  struct crypto_async_request *async_req, *backlog;    in atmel_aes_handle_queue()  local
    565  async_req = crypto_dequeue_request(&dd->queue);      in atmel_aes_handle_queue()
    566  if (async_req)                                       in atmel_aes_handle_queue()
    570  if (!async_req)                                      in atmel_aes_handle_queue()
    576  req = ablkcipher_request_cast(async_req);            in atmel_aes_handle_queue()
|
D | atmel-tdes.c |
    586  struct crypto_async_request *async_req, *backlog;    in atmel_tdes_handle_queue()  local
    600  async_req = crypto_dequeue_request(&dd->queue);      in atmel_tdes_handle_queue()
    601  if (async_req)                                       in atmel_tdes_handle_queue()
    605  if (!async_req)                                      in atmel_tdes_handle_queue()
    611  req = ablkcipher_request_cast(async_req);            in atmel_tdes_handle_queue()
|
D | omap-sham.c |
    938  struct crypto_async_request *async_req, *backlog;    in omap_sham_handle_queue()  local
    951  async_req = crypto_dequeue_request(&dd->queue);      in omap_sham_handle_queue()
    952  if (async_req)                                       in omap_sham_handle_queue()
    956  if (!async_req)                                      in omap_sham_handle_queue()
    962  req = ahash_request_cast(async_req);                 in omap_sham_handle_queue()
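Most of the *_handle_queue() and tasklet entries above (bfin_crc, s5p-sss, tegra-aes, sahara, omap-aes, atmel-sha/aes/tdes, omap-sham) share one dequeue-with-backlog pattern: enqueue the new request and pull the next one off the queue under a spinlock, notify a backlogged submitter with -EINPROGRESS, then cast the generic request and start the hardware. A minimal sketch of that pattern, assuming a hypothetical driver context (struct my_dev and its fields are invented) and the old ablkcipher API:

    #include <crypto/algapi.h>
    #include <linux/crypto.h>
    #include <linux/err.h>
    #include <linux/spinlock.h>

    struct my_dev {                          /* hypothetical driver context */
        spinlock_t lock;
        struct crypto_queue queue;
        struct ablkcipher_request *req;      /* request currently on the engine */
        bool busy;
    };

    static int my_handle_queue(struct my_dev *dd, struct ablkcipher_request *req)
    {
        struct crypto_async_request *async_req, *backlog;
        unsigned long flags;
        int ret = 0;

        spin_lock_irqsave(&dd->lock, flags);
        if (req)
            ret = ablkcipher_enqueue_request(&dd->queue, req);
        if (dd->busy) {                      /* engine already processing a request */
            spin_unlock_irqrestore(&dd->lock, flags);
            return ret;
        }
        backlog = crypto_get_backlog(&dd->queue);
        async_req = crypto_dequeue_request(&dd->queue);
        if (async_req)
            dd->busy = true;
        spin_unlock_irqrestore(&dd->lock, flags);

        if (!async_req)
            return ret;

        if (backlog)
            backlog->complete(backlog, -EINPROGRESS);

        dd->req = ablkcipher_request_cast(async_req);
        /* ... map scatterlists and program the hardware for dd->req ... */
        return ret;
    }

The caller sees -EINPROGRESS (or -EBUSY for a backlogged request) from the enqueue step and is completed asynchronously later, which is why the drivers above never complete the dequeued request inside the handler itself.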
|
D | hifn_795x.c |
    2020  struct crypto_async_request *async_req;                           in hifn_flush()  local
    2036  while ((async_req = crypto_dequeue_request(&dev->queue))) {       in hifn_flush()
    2037  req = container_of(async_req, struct ablkcipher_request, base);   in hifn_flush()
    2139  struct crypto_async_request *async_req, *backlog;                 in hifn_process_queue()  local
    2147  async_req = crypto_dequeue_request(&dev->queue);                  in hifn_process_queue()
    2150  if (!async_req)                                                   in hifn_process_queue()
    2156  req = container_of(async_req, struct ablkcipher_request, base);   in hifn_process_queue()
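hifn_795x.c uses the same queue in two ways: hifn_process_queue() dequeues one request at a time like the handlers above, while hifn_flush() drains everything still pending and completes it with an error when the device goes away. A hedged sketch of that drain loop (helper name invented; hifn itself completes through hifn_process_ready()):

    #include <crypto/algapi.h>
    #include <linux/crypto.h>
    #include <linux/kernel.h>    /* container_of() */

    /* Hypothetical teardown helper: empty the queue and fail every request. */
    static void my_flush_queue(struct crypto_queue *queue, int err)
    {
        struct crypto_async_request *async_req;
        struct ablkcipher_request *req;

        while ((async_req = crypto_dequeue_request(queue))) {
            req = container_of(async_req, struct ablkcipher_request, base);
            req->base.complete(&req->base, err);
        }
    }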
|
/drivers/net/usb/ |
D | rtl8150.c |
    145  struct async_req {                                            struct
    173  struct async_req *req = (struct async_req *)urb->context;     in async_set_reg_cb()
    186  struct async_req *req;                                        in async_set_registers()
    188  req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);          in async_set_registers()
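rtl8150.c is the odd one out: async_req here is the driver's own struct, a small context block that keeps the USB setup packet and the register value alive while an asynchronous control transfer is in flight; the completion callback recovers it from urb->context and frees it. A sketch of that technique under invented names and a placeholder vendor request code (not the rtl8150 values):

    #include <linux/slab.h>
    #include <linux/usb.h>

    struct async_req {                      /* lives until the control URB completes */
        struct usb_ctrlrequest dr;          /* setup packet, must stay allocated */
        __le16 value;                       /* register value being written */
    };

    static void async_write_cb(struct urb *urb)
    {
        struct async_req *req = urb->context;

        if (urb->status < 0)
            pr_debug("async register write failed: %d\n", urb->status);
        kfree(req);
        usb_free_urb(urb);
    }

    /* Fire-and-forget register write from atomic context. */
    static int async_write_reg(struct usb_device *udev, u16 indx, u16 value)
    {
        struct async_req *req;
        struct urb *urb;
        int ret;

        req = kmalloc(sizeof(*req), GFP_ATOMIC);
        if (!req)
            return -ENOMEM;
        urb = usb_alloc_urb(0, GFP_ATOMIC);
        if (!urb) {
            kfree(req);
            return -ENOMEM;
        }

        req->value = cpu_to_le16(value);
        req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
        req->dr.bRequest = 0x05;            /* hypothetical "set registers" request */
        req->dr.wValue = cpu_to_le16(indx);
        req->dr.wIndex = 0;
        req->dr.wLength = cpu_to_le16(sizeof(req->value));

        usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
                             (void *)&req->dr, &req->value,
                             sizeof(req->value), async_write_cb, req);
        ret = usb_submit_urb(urb, GFP_ATOMIC);
        if (ret) {
            kfree(req);
            usb_free_urb(urb);
        }
        return ret;
    }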
|
/drivers/crypto/amcc/ |
D | crypto4xx_core.h |
    66  struct crypto_async_request *async_req;  /* base crypto request     member
|
D | crypto4xx_core.c |
    632  ablk_req = ablkcipher_request_cast(pd_uinfo->async_req);    in crypto4xx_ablkcipher_done()
    656  ahash_req = ahash_request_cast(pd_uinfo->async_req);        in crypto4xx_ahash_done()
    676  if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==        in crypto4xx_pd_done()
    855  pd_uinfo->async_req = req;                                  in crypto4xx_build_pd()
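In the amcc driver the base request is not carried in a queue entry but stashed in the per-descriptor bookkeeping (pd_uinfo) when the hardware descriptor is built, then cast back to its concrete type in the completion paths. A minimal sketch of that stash-and-recover pattern, with hypothetical structure and helper names:

    #include <linux/crypto.h>

    struct my_pd_uinfo {                              /* hypothetical, cf. pd_uinfo */
        struct crypto_async_request *async_req;       /* base crypto request */
        /* ... DMA addresses, descriptor state ... */
    };

    /* At submission time (cf. crypto4xx_build_pd()): remember the base request. */
    static void my_build_pd(struct my_pd_uinfo *pd_uinfo,
                            struct crypto_async_request *req)
    {
        pd_uinfo->async_req = req;
        /* ... fill in the hardware descriptor ... */
    }

    /* On completion (cf. crypto4xx_ablkcipher_done()): recover the typed request. */
    static void my_ablkcipher_done(struct my_pd_uinfo *pd_uinfo, int err)
    {
        struct ablkcipher_request *req =
            ablkcipher_request_cast(pd_uinfo->async_req);

        /* ... unmap DMA buffers, copy back the IV ... */
        req->base.complete(&req->base, err);
    }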
|
/drivers/staging/sep/ |
D | sep_crypto.c |
    2502  struct crypto_async_request *async_req;           in sep_dequeuer()  local
    2515  async_req = crypto_dequeue_request(this_queue);    in sep_dequeuer()
    2518  if (!async_req) {                                  in sep_dequeuer()
    2530  if (!async_req->tfm) {                             in sep_dequeuer()
    2535  if (!async_req->tfm->__crt_alg) {                  in sep_dequeuer()
    2540  if (!async_req->tfm->__crt_alg->cra_type) {        in sep_dequeuer()
    2546  if (async_req->tfm->__crt_alg->cra_type !=         in sep_dequeuer()
    2550  cypher_req = container_of(async_req,               in sep_dequeuer()
    2567  hash_req = ahash_request_cast(async_req);          in sep_dequeuer()
|
/drivers/md/ |
D | dm-crypt.c |
    722   static void kcryptd_async_done(struct crypto_async_request *async_req,
    1176  static void kcryptd_async_done(struct crypto_async_request *async_req,    in kcryptd_async_done()  argument
    1179  struct dm_crypt_request *dmreq = async_req->data;                         in kcryptd_async_done()
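dm-crypt sits on the other side of the API: it is a submitter rather than an engine driver, so async_req only appears in its completion callback, where the private dm_crypt_request is recovered from async_req->data. A hedged sketch of that callback wiring, using the ablkcipher API of this era and invented struct/helper names:

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>

    struct my_crypt_request {                 /* hypothetical per-sector private state */
        struct scatterlist sg_in;
        struct scatterlist sg_out;
        /* ... */
    };

    /* Completion callback: the private request comes back via async_req->data,
     * which was handed to ablkcipher_request_set_callback() at submission time. */
    static void my_async_done(struct crypto_async_request *async_req, int error)
    {
        struct my_crypt_request *mreq = async_req->data;

        if (error == -EINPROGRESS)            /* backlogged request was accepted;
                                                 the real completion comes later */
            return;
        /* ... account for the finished sector, complete the bio ... */
        (void)mreq;
    }

    /* At submission: install the callback and the private pointer. */
    static int my_submit(struct ablkcipher_request *req,
                         struct my_crypt_request *mreq)
    {
        ablkcipher_request_set_callback(req,
                CRYPTO_TFM_REQ_MAY_BACKLOG | CRYPTO_TFM_REQ_MAY_SLEEP,
                my_async_done, mreq);
        return crypto_ablkcipher_encrypt(req);
    }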
|