Lines Matching refs:req_ctx

343 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; in cc_unmap_cipher_request() local
345 if (req_ctx->gen_ctx.iv_dma_addr) { in cc_unmap_cipher_request()
347 &req_ctx->gen_ctx.iv_dma_addr, ivsize); in cc_unmap_cipher_request()
348 dma_unmap_single(dev, req_ctx->gen_ctx.iv_dma_addr, in cc_unmap_cipher_request()
352 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI && in cc_unmap_cipher_request()
353 req_ctx->mlli_params.mlli_virt_addr) { in cc_unmap_cipher_request()
354 dma_pool_free(req_ctx->mlli_params.curr_pool, in cc_unmap_cipher_request()
355 req_ctx->mlli_params.mlli_virt_addr, in cc_unmap_cipher_request()
356 req_ctx->mlli_params.mlli_dma_addr); in cc_unmap_cipher_request()
360 dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_TO_DEVICE); in cc_unmap_cipher_request()
361 dma_unmap_sg(dev, dst, req_ctx->out_nents, DMA_FROM_DEVICE); in cc_unmap_cipher_request()
365 dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_BIDIRECTIONAL); in cc_unmap_cipher_request()
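
Taken together, the cc_unmap_cipher_request() matches above describe a three-step teardown: unmap the IV if one was DMA-mapped, return the MLLI link table to its DMA pool when the request used an MLLI descriptor, and unmap the data scatterlists (per-buffer directions for out-of-place requests, one bidirectional unmap when source and destination are the same buffer). The sketch below reconstructs that flow from the matched lines only, using the standard kernel DMA API; the signature is inferred from the call at line 453, and the lines the search skipped (debug prints), the in-place test, and the IV unmap direction are assumptions marked as such.

/*
 * Sketch reconstructed from the matches above; not verbatim ccree code.
 * Driver-private definitions (struct cipher_req_ctx, CC_DMA_BUF_MLLI)
 * come from the ccree headers, which are omitted here.
 */
#include <linux/dma-mapping.h>  /* dma_unmap_single(), dma_unmap_sg() */
#include <linux/dmapool.h>      /* dma_pool_free() */

void cc_unmap_cipher_request(struct device *dev, void *ctx,
                             unsigned int ivsize,
                             struct scatterlist *src, struct scatterlist *dst)
{
    struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx;

    /* 1. Drop the streaming mapping of the IV, if one was set up (line 345). */
    if (req_ctx->gen_ctx.iv_dma_addr)
        dma_unmap_single(dev, req_ctx->gen_ctx.iv_dma_addr, ivsize,
                         DMA_BIDIRECTIONAL); /* direction assumed */

    /* 2. Return the MLLI link table to its DMA pool, if one was built (352-356). */
    if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI &&
        req_ctx->mlli_params.mlli_virt_addr)
        dma_pool_free(req_ctx->mlli_params.curr_pool,
                      req_ctx->mlli_params.mlli_virt_addr,
                      req_ctx->mlli_params.mlli_dma_addr);

    /* 3. Unmap the data scatterlists (360-365). */
    if (src != dst) {               /* out-of-place: condition assumed */
        dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_TO_DEVICE);
        dma_unmap_sg(dev, dst, req_ctx->out_nents, DMA_FROM_DEVICE);
    } else {                        /* in-place: one bidirectional unmap */
        dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_BIDIRECTIONAL);
    }
}
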
375 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; in cc_map_cipher_request() local
376 struct mlli_params *mlli_params = &req_ctx->mlli_params; in cc_map_cipher_request()
384 req_ctx->dma_buf_type = CC_DMA_BUF_DLLI; in cc_map_cipher_request()
391 req_ctx->gen_ctx.iv_dma_addr = in cc_map_cipher_request()
393 if (dma_mapping_error(dev, req_ctx->gen_ctx.iv_dma_addr)) { in cc_map_cipher_request()
399 ivsize, info, &req_ctx->gen_ctx.iv_dma_addr); in cc_map_cipher_request()
401 req_ctx->gen_ctx.iv_dma_addr = 0; in cc_map_cipher_request()
405 rc = cc_map_sg(dev, src, nbytes, src_direction, &req_ctx->in_nents, in cc_map_cipher_request()
410 req_ctx->dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_cipher_request()
414 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_cipher_request()
415 req_ctx->out_nents = 0; in cc_map_cipher_request()
416 cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src, in cc_map_cipher_request()
418 &req_ctx->in_mlli_nents); in cc_map_cipher_request()
423 &req_ctx->out_nents, LLI_MAX_NUM_OF_DATA_ENTRIES, in cc_map_cipher_request()
428 req_ctx->dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_cipher_request()
430 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_cipher_request()
431 cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src, in cc_map_cipher_request()
433 &req_ctx->in_mlli_nents); in cc_map_cipher_request()
434 cc_add_sg_entry(dev, &sg_data, req_ctx->out_nents, dst, in cc_map_cipher_request()
436 &req_ctx->out_mlli_nents); in cc_map_cipher_request()
440 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_cipher_request()
448 cc_dma_buf_type(req_ctx->dma_buf_type)); in cc_map_cipher_request()
453 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); in cc_map_cipher_request()
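
The cc_map_cipher_request() matches trace the mirror-image setup path: the request starts out tagged as a direct (DLLI) buffer (line 384), the IV is DMA-mapped when the caller supplies one (391-401), the source scatterlist is mapped and the request is promoted to an MLLI (linked-list) descriptor as soon as more than one SG entry is involved (405-410), out-of-place requests map and table the destination the same way (423-436), and any failure unwinds through cc_unmap_cipher_request() (453). The skeleton below is pieced together from those matches only: the function name and signature, the trimmed cc_map_sg()/cc_add_sg_entry() argument lists, the sg_data/mapped_nents locals, the src_direction derivation, and the cc_generate_mlli() step are all assumptions flagged in the comments; the real helpers take more parameters than the listing shows.

/* Same kernel headers as the unmap sketch above; ccree-private types assumed. */

/* Helper prototypes trimmed to the arguments visible in the listing. */
int cc_map_sg(struct device *dev, struct scatterlist *sg,
              unsigned int nbytes, enum dma_data_direction dir,
              u32 *nents, u32 max_nents, u32 *mapped_nents);
void cc_add_sg_entry(struct device *dev, struct buffer_array *sg_data,
                     unsigned int nents, struct scatterlist *sgl,
                     u32 *mlli_nents);
int cc_generate_mlli(struct device *dev, struct buffer_array *sg_data,
                     struct mlli_params *mlli_params); /* name assumed */

int cc_map_cipher_request_sketch(struct device *dev, void *ctx,
                                 unsigned int ivsize, unsigned int nbytes,
                                 void *info, struct scatterlist *src,
                                 struct scatterlist *dst)
{
    struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx;
    struct mlli_params *mlli_params = &req_ctx->mlli_params;
    enum dma_data_direction src_direction =
        (src == dst) ? DMA_BIDIRECTIONAL : DMA_TO_DEVICE; /* assumed */
    struct buffer_array sg_data = { 0 };                  /* type assumed */
    u32 mapped_nents = 0;
    int rc;

    /* Start optimistic: a single, directly addressed (DLLI) buffer (384). */
    req_ctx->dma_buf_type = CC_DMA_BUF_DLLI;

    /* Map the IV, if the request carries one (391-401). */
    if (info) {
        req_ctx->gen_ctx.iv_dma_addr =
            dma_map_single(dev, info, ivsize, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(dev, req_ctx->gen_ctx.iv_dma_addr))
            return -ENOMEM;
    } else {
        req_ctx->gen_ctx.iv_dma_addr = 0;
    }

    /* Map the source; more than one mapped entry forces MLLI (405-410). */
    rc = cc_map_sg(dev, src, nbytes, src_direction, &req_ctx->in_nents,
                   LLI_MAX_NUM_OF_DATA_ENTRIES, &mapped_nents);
    if (rc)
        goto cipher_exit;
    if (mapped_nents > 1)
        req_ctx->dma_buf_type = CC_DMA_BUF_MLLI;

    if (src == dst) {
        /* In-place: only the source feeds the link table (414-418). */
        if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
            req_ctx->out_nents = 0;
            cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src,
                            &req_ctx->in_mlli_nents);
        }
    } else {
        /* Out-of-place: map the destination, then table both sides (423-436). */
        rc = cc_map_sg(dev, dst, nbytes, DMA_FROM_DEVICE, &req_ctx->out_nents,
                       LLI_MAX_NUM_OF_DATA_ENTRIES, &mapped_nents);
        if (rc)
            goto cipher_exit;
        if (mapped_nents > 1)
            req_ctx->dma_buf_type = CC_DMA_BUF_MLLI;
        if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
            cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src,
                            &req_ctx->in_mlli_nents);
            cc_add_sg_entry(dev, &sg_data, req_ctx->out_nents, dst,
                            &req_ctx->out_mlli_nents);
        }
    }

    /* Build the hardware link table when MLLI was selected (440). */
    if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
        rc = cc_generate_mlli(dev, &sg_data, mlli_params);
        if (rc)
            goto cipher_exit;
    }

    return 0;

cipher_exit:
    /* Any failure unwinds through the unmap path sketched earlier (453). */
    cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
    return rc;
}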