Lines Matching +full:op +full:- +full:mode (drivers/crypto/ccp/ccp-ops.c, Linux kernel AMD Cryptographic Coprocessor driver)
22 #include "ccp-dev.h"
59 #define CCP_NEW_JOBID(ccp) ((ccp->vdata->version == CCP_VERSION(3, 0)) ? \
64 return atomic_inc_return(&ccp->current_id) & CCP_JOBID_MASK; in ccp_gen_jobid()
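The CCP_NEW_JOBID()/ccp_gen_jobid() fragments above tag each submitted operation with a small wrapping id taken from an atomic counter. A minimal userspace sketch of the same technique in C11; the 0x3f mask width is an assumption standing in for CCP_JOBID_MASK, which is defined elsewhere (ccp-dev.h):

    #include <stdatomic.h>
    #include <stdio.h>

    #define JOBID_MASK 0x3f   /* assumed width, stand-in for CCP_JOBID_MASK */

    static atomic_uint current_id;

    static unsigned int gen_jobid(void)
    {
        /* Increment atomically, then mask so the id wraps within the tag
         * field. atomic_inc_return() in the kernel yields the post-increment
         * value, hence the +1 here. */
        return (atomic_fetch_add(&current_id, 1) + 1) & JOBID_MASK;
    }

    int main(void)
    {
        for (int i = 0; i < 70; i++)
            printf("%u ", gen_jobid());
        printf("\n");
        return 0;
    }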
69 if (wa->dma_count) in ccp_sg_free()
70 dma_unmap_sg(wa->dma_dev, wa->dma_sg_head, wa->nents, wa->dma_dir); in ccp_sg_free()
72 wa->dma_count = 0; in ccp_sg_free()
81 wa->sg = sg; in ccp_init_sg_workarea()
85 wa->nents = sg_nents_for_len(sg, len); in ccp_init_sg_workarea()
86 if (wa->nents < 0) in ccp_init_sg_workarea()
87 return wa->nents; in ccp_init_sg_workarea()
89 wa->bytes_left = len; in ccp_init_sg_workarea()
90 wa->sg_used = 0; in ccp_init_sg_workarea()
98 wa->dma_sg = sg; in ccp_init_sg_workarea()
99 wa->dma_sg_head = sg; in ccp_init_sg_workarea()
100 wa->dma_dev = dev; in ccp_init_sg_workarea()
101 wa->dma_dir = dma_dir; in ccp_init_sg_workarea()
102 wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); in ccp_init_sg_workarea()
103 if (!wa->dma_count) in ccp_init_sg_workarea()
104 return -ENOMEM; in ccp_init_sg_workarea()
111 unsigned int nbytes = min_t(u64, len, wa->bytes_left); in ccp_update_sg_workarea()
114 if (!wa->sg) in ccp_update_sg_workarea()
117 wa->sg_used += nbytes; in ccp_update_sg_workarea()
118 wa->bytes_left -= nbytes; in ccp_update_sg_workarea()
119 if (wa->sg_used == sg_dma_len(wa->dma_sg)) { in ccp_update_sg_workarea()
121 wa->dma_sg = sg_next(wa->dma_sg); in ccp_update_sg_workarea()
124 * that have been merged, the non-DMA mapped scatterlist in ccp_update_sg_workarea()
126 * This ensures that the current non-DMA mapped entry in ccp_update_sg_workarea()
130 sg_combined_len += wa->sg->length; in ccp_update_sg_workarea()
131 wa->sg = sg_next(wa->sg); in ccp_update_sg_workarea()
132 } while (wa->sg_used > sg_combined_len); in ccp_update_sg_workarea()
134 wa->sg_used = 0; in ccp_update_sg_workarea()
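The loop above handles scatterlist entries that the DMA mapping step may have merged: once the bytes consumed reach the current DMA segment's length, the driver walks the original (non-DMA) entries forward until their combined length accounts for what the DMA segment carried. A userspace sketch of that catch-up walk, with made-up chunk sizes instead of real scatterlists:

    #include <stddef.h>
    #include <stdio.h>

    struct chunk { size_t len; };   /* stand-in for a non-DMA scatterlist entry */

    int main(void)
    {
        /* Three original chunks that the mapping step merged into one
         * 2048-byte DMA segment. */
        struct chunk orig[] = { { 512 }, { 512 }, { 1024 } };
        size_t nchunks = sizeof(orig) / sizeof(orig[0]);
        size_t sg_used = 2048;      /* bytes consumed from the merged segment */

        size_t i = 0, combined = 0;
        do {
            combined += orig[i].len;
            i++;
        } while (sg_used > combined && i < nchunks);

        printf("advanced past %zu original entries (%zu bytes)\n", i, combined);
        return 0;
    }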
140 if (wa->length <= CCP_DMAPOOL_MAX_SIZE) { in ccp_dm_free()
141 if (wa->address) in ccp_dm_free()
142 dma_pool_free(wa->dma_pool, wa->address, in ccp_dm_free()
143 wa->dma.address); in ccp_dm_free()
145 if (wa->dma.address) in ccp_dm_free()
146 dma_unmap_single(wa->dev, wa->dma.address, wa->length, in ccp_dm_free()
147 wa->dma.dir); in ccp_dm_free()
148 kfree(wa->address); in ccp_dm_free()
151 wa->address = NULL; in ccp_dm_free()
152 wa->dma.address = 0; in ccp_dm_free()
165 wa->dev = cmd_q->ccp->dev; in ccp_init_dm_workarea()
166 wa->length = len; in ccp_init_dm_workarea()
169 wa->dma_pool = cmd_q->dma_pool; in ccp_init_dm_workarea()
171 wa->address = dma_pool_alloc(wa->dma_pool, GFP_KERNEL, in ccp_init_dm_workarea()
172 &wa->dma.address); in ccp_init_dm_workarea()
173 if (!wa->address) in ccp_init_dm_workarea()
174 return -ENOMEM; in ccp_init_dm_workarea()
176 wa->dma.length = CCP_DMAPOOL_MAX_SIZE; in ccp_init_dm_workarea()
178 memset(wa->address, 0, CCP_DMAPOOL_MAX_SIZE); in ccp_init_dm_workarea()
180 wa->address = kzalloc(len, GFP_KERNEL); in ccp_init_dm_workarea()
181 if (!wa->address) in ccp_init_dm_workarea()
182 return -ENOMEM; in ccp_init_dm_workarea()
184 wa->dma.address = dma_map_single(wa->dev, wa->address, len, in ccp_init_dm_workarea()
186 if (dma_mapping_error(wa->dev, wa->dma.address)) in ccp_init_dm_workarea()
187 return -ENOMEM; in ccp_init_dm_workarea()
189 wa->dma.length = len; in ccp_init_dm_workarea()
191 wa->dma.dir = dir; in ccp_init_dm_workarea()
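ccp_init_dm_workarea() and ccp_dm_free() above pick one of two allocation paths by size: requests up to CCP_DMAPOOL_MAX_SIZE come out of the per-queue DMA pool, larger ones are kzalloc'd and then DMA-mapped, and the free path repeats the same size test. A userspace analogue of that split, with a single static slot standing in for the pool and calloc()/free() for the kzalloc/dma_map_single pair; the 64-byte threshold is illustrative, not the driver's constant:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define POOL_MAX 64                        /* illustrative threshold */

    static unsigned char pool_slot[POOL_MAX];  /* one-slot stand-in for the dma_pool */

    struct dm_workarea {
        unsigned char *address;
        size_t length;
    };

    static int dm_init(struct dm_workarea *wa, size_t len)
    {
        wa->length = len;
        if (len <= POOL_MAX) {
            wa->address = pool_slot;           /* pool path: fixed-size, zeroed block */
            memset(wa->address, 0, POOL_MAX);
        } else {
            wa->address = calloc(1, len);      /* heap path, analogous to kzalloc() */
            if (!wa->address)
                return -1;
        }
        return 0;
    }

    static void dm_free(struct dm_workarea *wa)
    {
        if (wa->length > POOL_MAX)             /* same size test selects the path */
            free(wa->address);
        wa->address = NULL;
    }

    int main(void)
    {
        struct dm_workarea small, big;

        if (dm_init(&small, 32) || dm_init(&big, 4096))
            return 1;
        printf("small uses pool: %d, big uses pool: %d\n",
               small.address == pool_slot, big.address == pool_slot);
        dm_free(&big);
        dm_free(&small);
        return 0;
    }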
200 WARN_ON(!wa->address); in ccp_set_dm_area()
202 if (len > (wa->length - wa_offset)) in ccp_set_dm_area()
203 return -EINVAL; in ccp_set_dm_area()
205 scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len, in ccp_set_dm_area()
214 WARN_ON(!wa->address); in ccp_get_dm_area()
216 scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len, in ccp_get_dm_area()
233 p = wa->address + wa_offset; in ccp_reverse_set_dm_area()
234 q = p + len - 1; in ccp_reverse_set_dm_area()
240 q--; in ccp_reverse_set_dm_area()
253 p = wa->address + wa_offset; in ccp_reverse_get_dm_area()
254 q = p + len - 1; in ccp_reverse_get_dm_area()
260 q--; in ccp_reverse_get_dm_area()
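ccp_reverse_set_dm_area() and ccp_reverse_get_dm_area() copy the data and then reverse it in place with a head/tail pointer pair (the p and q in the fragments), so big-endian RSA/ECC operands end up in the little-endian layout the engine consumes, and results come back out the same way. A minimal sketch of that in-place reversal:

    #include <stdio.h>
    #include <stddef.h>

    static void reverse_in_place(unsigned char *buf, size_t len)
    {
        unsigned char *p, *q;

        if (len < 2)
            return;
        p = buf;
        q = buf + len - 1;
        while (p < q) {
            unsigned char tmp = *p;   /* plain swap of head and tail bytes */
            *p = *q;
            *q = tmp;
            p++;
            q--;
        }
    }

    int main(void)
    {
        unsigned char operand[4] = { 0x12, 0x34, 0x56, 0x78 };

        reverse_in_place(operand, sizeof(operand));
        for (size_t i = 0; i < sizeof(operand); i++)
            printf("%02x", operand[i]);
        printf("\n");   /* prints 78563412 */
        return 0;
    }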
268 ccp_dm_free(&data->dm_wa); in ccp_free_data()
269 ccp_sg_free(&data->sg_wa); in ccp_free_data()
281 ret = ccp_init_sg_workarea(&data->sg_wa, cmd_q->ccp->dev, sg, sg_len, in ccp_init_data()
286 ret = ccp_init_dm_workarea(&data->dm_wa, cmd_q, dm_len, dir); in ccp_init_data()
300 struct ccp_sg_workarea *sg_wa = &data->sg_wa; in ccp_queue_buf()
301 struct ccp_dm_workarea *dm_wa = &data->dm_wa; in ccp_queue_buf()
306 memset(dm_wa->address, 0, dm_wa->length); in ccp_queue_buf()
308 if (!sg_wa->sg) in ccp_queue_buf()
312 * nbytes will always be <= UINT_MAX because dm_wa->length is in ccp_queue_buf()
315 nbytes = min_t(u64, sg_wa->bytes_left, dm_wa->length); in ccp_queue_buf()
316 scatterwalk_map_and_copy(dm_wa->address, sg_wa->sg, sg_wa->sg_used, in ccp_queue_buf()
321 while (sg_wa->bytes_left && (buf_count < dm_wa->length)) { in ccp_queue_buf()
322 nbytes = min(sg_dma_len(sg_wa->dma_sg) - sg_wa->sg_used, in ccp_queue_buf()
323 dm_wa->length - buf_count); in ccp_queue_buf()
324 nbytes = min_t(u64, sg_wa->bytes_left, nbytes); in ccp_queue_buf()
344 struct ccp_op *op, unsigned int block_size, in ccp_prepare_data() argument
354 sg_src_len = sg_dma_len(src->sg_wa.dma_sg) - src->sg_wa.sg_used; in ccp_prepare_data()
355 sg_src_len = min_t(u64, src->sg_wa.bytes_left, sg_src_len); in ccp_prepare_data()
358 sg_dst_len = sg_dma_len(dst->sg_wa.dma_sg) - dst->sg_wa.sg_used; in ccp_prepare_data()
359 sg_dst_len = min_t(u64, src->sg_wa.bytes_left, sg_dst_len); in ccp_prepare_data()
372 op->soc = 0; in ccp_prepare_data()
380 op->soc = 1; in ccp_prepare_data()
381 op->src.u.dma.address = src->dm_wa.dma.address; in ccp_prepare_data()
382 op->src.u.dma.offset = 0; in ccp_prepare_data()
383 op->src.u.dma.length = (blocksize_op) ? block_size : cp_len; in ccp_prepare_data()
388 op->src.u.dma.address = sg_dma_address(src->sg_wa.dma_sg); in ccp_prepare_data()
389 op->src.u.dma.offset = src->sg_wa.sg_used; in ccp_prepare_data()
390 op->src.u.dma.length = op_len & ~(block_size - 1); in ccp_prepare_data()
392 ccp_update_sg_workarea(&src->sg_wa, op->src.u.dma.length); in ccp_prepare_data()
401 op->soc = 1; in ccp_prepare_data()
402 op->dst.u.dma.address = dst->dm_wa.dma.address; in ccp_prepare_data()
403 op->dst.u.dma.offset = 0; in ccp_prepare_data()
404 op->dst.u.dma.length = op->src.u.dma.length; in ccp_prepare_data()
409 op->dst.u.dma.address = sg_dma_address(dst->sg_wa.dma_sg); in ccp_prepare_data()
410 op->dst.u.dma.offset = dst->sg_wa.sg_used; in ccp_prepare_data()
411 op->dst.u.dma.length = op->src.u.dma.length; in ccp_prepare_data()
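In ccp_prepare_data() above, op_len & ~(block_size - 1) trims the amount submitted in one pass down to a whole number of cipher blocks; the trick relies on block_size being a power of two (16 for AES, 8 for 3DES). A tiny demonstration:

    #include <stdio.h>

    static unsigned int round_down_to_block(unsigned int len, unsigned int block_size)
    {
        /* Valid only for power-of-two block sizes: clearing the low bits
         * rounds down to the nearest multiple. */
        return len & ~(block_size - 1);
    }

    int main(void)
    {
        printf("%u\n", round_down_to_block(100, 16));  /* 96 */
        printf("%u\n", round_down_to_block(100, 8));   /* 96 */
        printf("%u\n", round_down_to_block(15, 16));   /* 0  */
        return 0;
    }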
417 struct ccp_op *op) in ccp_process_data() argument
419 op->init = 0; in ccp_process_data()
422 if (op->dst.u.dma.address == dst->dm_wa.dma.address) in ccp_process_data()
425 ccp_update_sg_workarea(&dst->sg_wa, in ccp_process_data()
426 op->dst.u.dma.length); in ccp_process_data()
434 struct ccp_op op; in ccp_copy_to_from_sb() local
436 memset(&op, 0, sizeof(op)); in ccp_copy_to_from_sb()
438 op.cmd_q = cmd_q; in ccp_copy_to_from_sb()
439 op.jobid = jobid; in ccp_copy_to_from_sb()
440 op.eom = 1; in ccp_copy_to_from_sb()
443 op.soc = 1; in ccp_copy_to_from_sb()
444 op.src.type = CCP_MEMTYPE_SB; in ccp_copy_to_from_sb()
445 op.src.u.sb = sb; in ccp_copy_to_from_sb()
446 op.dst.type = CCP_MEMTYPE_SYSTEM; in ccp_copy_to_from_sb()
447 op.dst.u.dma.address = wa->dma.address; in ccp_copy_to_from_sb()
448 op.dst.u.dma.length = wa->length; in ccp_copy_to_from_sb()
450 op.src.type = CCP_MEMTYPE_SYSTEM; in ccp_copy_to_from_sb()
451 op.src.u.dma.address = wa->dma.address; in ccp_copy_to_from_sb()
452 op.src.u.dma.length = wa->length; in ccp_copy_to_from_sb()
453 op.dst.type = CCP_MEMTYPE_SB; in ccp_copy_to_from_sb()
454 op.dst.u.sb = sb; in ccp_copy_to_from_sb()
457 op.u.passthru.byte_swap = byte_swap; in ccp_copy_to_from_sb()
459 return cmd_q->ccp->vdata->perform->passthru(&op); in ccp_copy_to_from_sb()
479 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_cmac_cmd()
482 struct ccp_op op; in ccp_run_aes_cmac_cmd() local
486 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_cmac_cmd()
487 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_cmac_cmd()
488 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_cmac_cmd()
489 return -EINVAL; in ccp_run_aes_cmac_cmd()
491 if (aes->src_len & (AES_BLOCK_SIZE - 1)) in ccp_run_aes_cmac_cmd()
492 return -EINVAL; in ccp_run_aes_cmac_cmd()
494 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
495 return -EINVAL; in ccp_run_aes_cmac_cmd()
497 if (!aes->key || !aes->iv || !aes->src) in ccp_run_aes_cmac_cmd()
498 return -EINVAL; in ccp_run_aes_cmac_cmd()
500 if (aes->cmac_final) { in ccp_run_aes_cmac_cmd()
501 if (aes->cmac_key_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
502 return -EINVAL; in ccp_run_aes_cmac_cmd()
504 if (!aes->cmac_key) in ccp_run_aes_cmac_cmd()
505 return -EINVAL; in ccp_run_aes_cmac_cmd()
511 ret = -EIO; in ccp_run_aes_cmac_cmd()
512 memset(&op, 0, sizeof(op)); in ccp_run_aes_cmac_cmd()
513 op.cmd_q = cmd_q; in ccp_run_aes_cmac_cmd()
514 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_cmac_cmd()
515 op.sb_key = cmd_q->sb_key; in ccp_run_aes_cmac_cmd()
516 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_aes_cmac_cmd()
517 op.init = 1; in ccp_run_aes_cmac_cmd()
518 op.u.aes.type = aes->type; in ccp_run_aes_cmac_cmd()
519 op.u.aes.mode = aes->mode; in ccp_run_aes_cmac_cmd()
520 op.u.aes.action = aes->action; in ccp_run_aes_cmac_cmd()
522 /* All supported key sizes fit in a single (32-byte) SB entry in ccp_run_aes_cmac_cmd()
523 * and must be in little endian format. Use the 256-bit byte in ccp_run_aes_cmac_cmd()
533 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_cmac_cmd()
534 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_cmac_cmd()
537 ret = ccp_copy_to_sb(cmd_q, &key, op.jobid, op.sb_key, in ccp_run_aes_cmac_cmd()
540 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
544 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_aes_cmac_cmd()
545 * must be in little endian format. Use the 256-bit byte swap in ccp_run_aes_cmac_cmd()
554 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
555 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmac_cmd()
558 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_cmac_cmd()
561 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
566 ret = ccp_init_data(&src, cmd_q, aes->src, aes->src_len, in ccp_run_aes_cmac_cmd()
572 ccp_prepare_data(&src, NULL, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_cmac_cmd()
573 if (aes->cmac_final && !src.sg_wa.bytes_left) { in ccp_run_aes_cmac_cmd()
574 op.eom = 1; in ccp_run_aes_cmac_cmd()
577 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, in ccp_run_aes_cmac_cmd()
578 op.sb_ctx, in ccp_run_aes_cmac_cmd()
581 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
585 ret = ccp_set_dm_area(&ctx, 0, aes->cmac_key, 0, in ccp_run_aes_cmac_cmd()
586 aes->cmac_key_len); in ccp_run_aes_cmac_cmd()
589 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_cmac_cmd()
592 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
597 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_cmac_cmd()
599 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
603 ccp_process_data(&src, NULL, &op); in ccp_run_aes_cmac_cmd()
606 /* Retrieve the AES context - convert from LE to BE using in ccp_run_aes_cmac_cmd()
607 * 32-byte (256-bit) byteswapping in ccp_run_aes_cmac_cmd()
609 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_cmac_cmd()
612 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
617 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
618 ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmac_cmd()
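Throughout the AES paths above the key is staged at dm_offset = CCP_SB_BYTES - aes->key_len, i.e. right-aligned inside the single 32-byte storage-block entry with zero padding in front, before the 256-bit byte-swap passthrough loads it. A userspace sketch of that placement; the 32-byte slot size follows the "(32-byte) SB entry" wording in the comments:

    #include <stdio.h>
    #include <string.h>

    #define SB_BYTES 32            /* "single (32-byte) SB entry" per the comments */

    int main(void)
    {
        unsigned char slot[SB_BYTES] = { 0 };
        unsigned char key[16];                      /* AES-128 key, dummy bytes */
        memset(key, 0xaa, sizeof(key));

        size_t dm_offset = SB_BYTES - sizeof(key);  /* right-align in the slot */
        memcpy(slot + dm_offset, key, sizeof(key));

        for (size_t i = 0; i < SB_BYTES; i++)
            printf("%02x%s", slot[i], (i + 1) % 16 ? " " : "\n");
        return 0;
    }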
635 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_gcm_cmd()
639 struct ccp_op op; in ccp_run_aes_gcm_cmd() local
654 if (!aes->iv) in ccp_run_aes_gcm_cmd()
655 return -EINVAL; in ccp_run_aes_gcm_cmd()
657 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_gcm_cmd()
658 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_gcm_cmd()
659 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_gcm_cmd()
660 return -EINVAL; in ccp_run_aes_gcm_cmd()
662 if (!aes->key) /* Gotta have a key SGL */ in ccp_run_aes_gcm_cmd()
663 return -EINVAL; in ccp_run_aes_gcm_cmd()
666 authsize = aes->authsize ? aes->authsize : AES_BLOCK_SIZE; in ccp_run_aes_gcm_cmd()
677 return -EINVAL; in ccp_run_aes_gcm_cmd()
686 p_aad = aes->src; in ccp_run_aes_gcm_cmd()
687 p_inp = scatterwalk_ffwd(sg_inp, aes->src, aes->aad_len); in ccp_run_aes_gcm_cmd()
688 p_outp = scatterwalk_ffwd(sg_outp, aes->dst, aes->aad_len); in ccp_run_aes_gcm_cmd()
689 if (aes->action == CCP_AES_ACTION_ENCRYPT) { in ccp_run_aes_gcm_cmd()
690 ilen = aes->src_len; in ccp_run_aes_gcm_cmd()
694 ilen = aes->src_len - authsize; in ccp_run_aes_gcm_cmd()
698 jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_gcm_cmd()
700 memset(&op, 0, sizeof(op)); in ccp_run_aes_gcm_cmd()
701 op.cmd_q = cmd_q; in ccp_run_aes_gcm_cmd()
702 op.jobid = jobid; in ccp_run_aes_gcm_cmd()
703 op.sb_key = cmd_q->sb_key; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
704 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
705 op.init = 1; in ccp_run_aes_gcm_cmd()
706 op.u.aes.type = aes->type; in ccp_run_aes_gcm_cmd()
715 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_gcm_cmd()
716 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_gcm_cmd()
719 ret = ccp_copy_to_sb(cmd_q, &key, op.jobid, op.sb_key, in ccp_run_aes_gcm_cmd()
722 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
736 dm_offset = CCP_AES_CTX_SB_COUNT * CCP_SB_BYTES - aes->iv_len; in ccp_run_aes_gcm_cmd()
737 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_gcm_cmd()
741 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_gcm_cmd()
744 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
748 op.init = 1; in ccp_run_aes_gcm_cmd()
749 if (aes->aad_len > 0) { in ccp_run_aes_gcm_cmd()
751 ret = ccp_init_data(&aad, cmd_q, p_aad, aes->aad_len, in ccp_run_aes_gcm_cmd()
757 op.u.aes.mode = CCP_AES_MODE_GHASH; in ccp_run_aes_gcm_cmd()
758 op.u.aes.action = CCP_AES_GHASHAAD; in ccp_run_aes_gcm_cmd()
761 ccp_prepare_data(&aad, NULL, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_gcm_cmd()
763 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
765 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
769 ccp_process_data(&aad, NULL, &op); in ccp_run_aes_gcm_cmd()
770 op.init = 0; in ccp_run_aes_gcm_cmd()
774 op.u.aes.mode = CCP_AES_MODE_GCTR; in ccp_run_aes_gcm_cmd()
775 op.u.aes.action = aes->action; in ccp_run_aes_gcm_cmd()
797 op.soc = 0; in ccp_run_aes_gcm_cmd()
798 op.eom = 0; in ccp_run_aes_gcm_cmd()
799 op.init = 1; in ccp_run_aes_gcm_cmd()
801 ccp_prepare_data(&src, &dst, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_gcm_cmd()
806 op.eom = 1; in ccp_run_aes_gcm_cmd()
807 op.u.aes.size = (nbytes * 8) - 1; in ccp_run_aes_gcm_cmd()
811 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
813 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
817 ccp_process_data(&src, &dst, &op); in ccp_run_aes_gcm_cmd()
818 op.init = 0; in ccp_run_aes_gcm_cmd()
823 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_gcm_cmd()
826 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
830 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_gcm_cmd()
834 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_gcm_cmd()
837 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
849 final[0] = cpu_to_be64(aes->aad_len * 8); in ccp_run_aes_gcm_cmd()
852 memset(&op, 0, sizeof(op)); in ccp_run_aes_gcm_cmd()
853 op.cmd_q = cmd_q; in ccp_run_aes_gcm_cmd()
854 op.jobid = jobid; in ccp_run_aes_gcm_cmd()
855 op.sb_key = cmd_q->sb_key; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
856 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
857 op.init = 1; in ccp_run_aes_gcm_cmd()
858 op.u.aes.type = aes->type; in ccp_run_aes_gcm_cmd()
859 op.u.aes.mode = CCP_AES_MODE_GHASH; in ccp_run_aes_gcm_cmd()
860 op.u.aes.action = CCP_AES_GHASHFINAL; in ccp_run_aes_gcm_cmd()
861 op.src.type = CCP_MEMTYPE_SYSTEM; in ccp_run_aes_gcm_cmd()
862 op.src.u.dma.address = final_wa.dma.address; in ccp_run_aes_gcm_cmd()
863 op.src.u.dma.length = AES_BLOCK_SIZE; in ccp_run_aes_gcm_cmd()
864 op.dst.type = CCP_MEMTYPE_SYSTEM; in ccp_run_aes_gcm_cmd()
865 op.dst.u.dma.address = final_wa.dma.address; in ccp_run_aes_gcm_cmd()
866 op.dst.u.dma.length = AES_BLOCK_SIZE; in ccp_run_aes_gcm_cmd()
867 op.eom = 1; in ccp_run_aes_gcm_cmd()
868 op.u.aes.size = 0; in ccp_run_aes_gcm_cmd()
869 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
873 if (aes->action == CCP_AES_ACTION_ENCRYPT) { in ccp_run_aes_gcm_cmd()
889 authsize) ? -EBADMSG : 0; in ccp_run_aes_gcm_cmd()
905 if (aes->aad_len) in ccp_run_aes_gcm_cmd()
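The final[0] = cpu_to_be64(aes->aad_len * 8) fragment above is the first half of the 16-byte GHASH length block that GCM hashes last: the AAD length in bits followed by the payload length in bits, both big-endian. The second half is not visible in these fragments and is assumed to mirror the first. A standalone sketch of building that block:

    #include <stdint.h>
    #include <stdio.h>

    static void put_be64(unsigned char *out, uint64_t v)
    {
        for (int i = 0; i < 8; i++)
            out[i] = (unsigned char)(v >> (56 - 8 * i));   /* MSB first */
    }

    int main(void)
    {
        uint64_t aad_len = 20;     /* bytes of additional authenticated data */
        uint64_t ilen = 100;       /* bytes of plaintext/ciphertext */
        unsigned char final_block[16];

        put_be64(final_block, aad_len * 8);      /* AAD length in bits */
        put_be64(final_block + 8, ilen * 8);     /* payload length in bits */

        for (int i = 0; i < 16; i++)
            printf("%02x", final_block[i]);
        printf("\n");
        return 0;
    }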
920 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_cmd()
923 struct ccp_op op; in ccp_run_aes_cmd() local
928 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_cmd()
929 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_cmd()
930 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_cmd()
931 return -EINVAL; in ccp_run_aes_cmd()
933 if (((aes->mode == CCP_AES_MODE_ECB) || in ccp_run_aes_cmd()
934 (aes->mode == CCP_AES_MODE_CBC) || in ccp_run_aes_cmd()
935 (aes->mode == CCP_AES_MODE_CFB)) && in ccp_run_aes_cmd()
936 (aes->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_aes_cmd()
937 return -EINVAL; in ccp_run_aes_cmd()
939 if (!aes->key || !aes->src || !aes->dst) in ccp_run_aes_cmd()
940 return -EINVAL; in ccp_run_aes_cmd()
942 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
943 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmd()
944 return -EINVAL; in ccp_run_aes_cmd()
946 if (!aes->iv) in ccp_run_aes_cmd()
947 return -EINVAL; in ccp_run_aes_cmd()
953 ret = -EIO; in ccp_run_aes_cmd()
954 memset(&op, 0, sizeof(op)); in ccp_run_aes_cmd()
955 op.cmd_q = cmd_q; in ccp_run_aes_cmd()
956 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_cmd()
957 op.sb_key = cmd_q->sb_key; in ccp_run_aes_cmd()
958 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_aes_cmd()
959 op.init = (aes->mode == CCP_AES_MODE_ECB) ? 0 : 1; in ccp_run_aes_cmd()
960 op.u.aes.type = aes->type; in ccp_run_aes_cmd()
961 op.u.aes.mode = aes->mode; in ccp_run_aes_cmd()
962 op.u.aes.action = aes->action; in ccp_run_aes_cmd()
964 /* All supported key sizes fit in a single (32-byte) SB entry in ccp_run_aes_cmd()
965 * and must be in little endian format. Use the 256-bit byte in ccp_run_aes_cmd()
975 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_cmd()
976 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_cmd()
979 ret = ccp_copy_to_sb(cmd_q, &key, op.jobid, op.sb_key, in ccp_run_aes_cmd()
982 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
986 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_aes_cmd()
987 * must be in little endian format. Use the 256-bit byte swap in ccp_run_aes_cmd()
996 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
997 /* Load the AES context - convert to LE */ in ccp_run_aes_cmd()
998 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmd()
999 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmd()
1002 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_cmd()
1005 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1009 switch (aes->mode) { in ccp_run_aes_cmd()
1012 op.u.aes.size = AES_BLOCK_SIZE * BITS_PER_BYTE - 1; in ccp_run_aes_cmd()
1015 op.u.aes.size = 0; in ccp_run_aes_cmd()
1018 /* Prepare the input and output data workareas. For in-place in ccp_run_aes_cmd()
1022 if (sg_virt(aes->src) == sg_virt(aes->dst)) in ccp_run_aes_cmd()
1025 ret = ccp_init_data(&src, cmd_q, aes->src, aes->src_len, in ccp_run_aes_cmd()
1034 ret = ccp_init_data(&dst, cmd_q, aes->dst, aes->src_len, in ccp_run_aes_cmd()
1042 ccp_prepare_data(&src, &dst, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_cmd()
1044 op.eom = 1; in ccp_run_aes_cmd()
1047 * mode we have to wait for the operation to complete in ccp_run_aes_cmd()
1050 if (aes->mode == CCP_AES_MODE_ECB) in ccp_run_aes_cmd()
1051 op.soc = 1; in ccp_run_aes_cmd()
1054 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_cmd()
1056 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1060 ccp_process_data(&src, &dst, &op); in ccp_run_aes_cmd()
1063 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
1064 /* Retrieve the AES context - convert from LE to BE using in ccp_run_aes_cmd()
1065 * 32-byte (256-bit) byteswapping in ccp_run_aes_cmd()
1067 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_aes_cmd()
1070 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1075 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmd()
1076 ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmd()
1098 struct ccp_xts_aes_engine *xts = &cmd->u.xts; in ccp_run_xts_aes_cmd()
1101 struct ccp_op op; in ccp_run_xts_aes_cmd() local
1108 switch (xts->unit_size) { in ccp_run_xts_aes_cmd()
1126 return -EINVAL; in ccp_run_xts_aes_cmd()
1129 if (xts->key_len == AES_KEYSIZE_128) in ccp_run_xts_aes_cmd()
1131 else if (xts->key_len == AES_KEYSIZE_256) in ccp_run_xts_aes_cmd()
1134 return -EINVAL; in ccp_run_xts_aes_cmd()
1136 if (!xts->final && (xts->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_xts_aes_cmd()
1137 return -EINVAL; in ccp_run_xts_aes_cmd()
1139 if (xts->iv_len != AES_BLOCK_SIZE) in ccp_run_xts_aes_cmd()
1140 return -EINVAL; in ccp_run_xts_aes_cmd()
1142 if (!xts->key || !xts->iv || !xts->src || !xts->dst) in ccp_run_xts_aes_cmd()
1143 return -EINVAL; in ccp_run_xts_aes_cmd()
1148 ret = -EIO; in ccp_run_xts_aes_cmd()
1149 memset(&op, 0, sizeof(op)); in ccp_run_xts_aes_cmd()
1150 op.cmd_q = cmd_q; in ccp_run_xts_aes_cmd()
1151 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_xts_aes_cmd()
1152 op.sb_key = cmd_q->sb_key; in ccp_run_xts_aes_cmd()
1153 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_xts_aes_cmd()
1154 op.init = 1; in ccp_run_xts_aes_cmd()
1155 op.u.xts.type = aestype; in ccp_run_xts_aes_cmd()
1156 op.u.xts.action = xts->action; in ccp_run_xts_aes_cmd()
1157 op.u.xts.unit_size = xts->unit_size; in ccp_run_xts_aes_cmd()
1159 /* A version 3 device only supports 128-bit keys, which fits into a in ccp_run_xts_aes_cmd()
1160 * single SB entry. A version 5 device uses a 512-bit vector, so two in ccp_run_xts_aes_cmd()
1163 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) in ccp_run_xts_aes_cmd()
1173 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) { in ccp_run_xts_aes_cmd()
1175 * Use the 256-bit byte swap passthru option to convert from in ccp_run_xts_aes_cmd()
1178 dm_offset = CCP_SB_BYTES - AES_KEYSIZE_128; in ccp_run_xts_aes_cmd()
1179 ret = ccp_set_dm_area(&key, dm_offset, xts->key, 0, xts->key_len); in ccp_run_xts_aes_cmd()
1182 ret = ccp_set_dm_area(&key, 0, xts->key, xts->key_len, xts->key_len); in ccp_run_xts_aes_cmd()
1186 /* Version 5 CCPs use a 512-bit space for the key: each portion in ccp_run_xts_aes_cmd()
1187 * occupies 256 bits, or one entire slot, and is zero-padded. in ccp_run_xts_aes_cmd()
1192 pad = dm_offset - xts->key_len; in ccp_run_xts_aes_cmd()
1193 ret = ccp_set_dm_area(&key, pad, xts->key, 0, xts->key_len); in ccp_run_xts_aes_cmd()
1196 ret = ccp_set_dm_area(&key, dm_offset + pad, xts->key, in ccp_run_xts_aes_cmd()
1197 xts->key_len, xts->key_len); in ccp_run_xts_aes_cmd()
1201 ret = ccp_copy_to_sb(cmd_q, &key, op.jobid, op.sb_key, in ccp_run_xts_aes_cmd()
1204 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1208 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_xts_aes_cmd()
1218 ret = ccp_set_dm_area(&ctx, 0, xts->iv, 0, xts->iv_len); in ccp_run_xts_aes_cmd()
1221 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_xts_aes_cmd()
1224 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1228 /* Prepare the input and output data workareas. For in-place in ccp_run_xts_aes_cmd()
1232 if (sg_virt(xts->src) == sg_virt(xts->dst)) in ccp_run_xts_aes_cmd()
1235 ret = ccp_init_data(&src, cmd_q, xts->src, xts->src_len, in ccp_run_xts_aes_cmd()
1244 ret = ccp_init_data(&dst, cmd_q, xts->dst, xts->src_len, in ccp_run_xts_aes_cmd()
1252 ccp_prepare_data(&src, &dst, &op, unit_size, true); in ccp_run_xts_aes_cmd()
1254 op.eom = 1; in ccp_run_xts_aes_cmd()
1256 ret = cmd_q->ccp->vdata->perform->xts_aes(&op); in ccp_run_xts_aes_cmd()
1258 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1262 ccp_process_data(&src, &dst, &op); in ccp_run_xts_aes_cmd()
1265 /* Retrieve the AES context - convert from LE to BE using in ccp_run_xts_aes_cmd()
1266 * 32-byte (256-bit) byteswapping in ccp_run_xts_aes_cmd()
1268 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_xts_aes_cmd()
1271 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1276 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_xts_aes_cmd()
1277 ccp_get_dm_area(&ctx, dm_offset, xts->iv, 0, xts->iv_len); in ccp_run_xts_aes_cmd()
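The XTS key staging above distinguishes device generations: version 3 parts take a 128-bit key right-aligned in one storage-block entry, while version 5 parts use a 512-bit area in which each half of the caller-supplied key occupies its own 256-bit slot, right-aligned and zero-padded (pad = dm_offset - xts->key_len). A userspace sketch of the version-5 layout for a 32-byte XTS key; the slot size follows the "one entire slot" wording in the comments:

    #include <stdio.h>
    #include <string.h>

    #define SLOT 32                  /* one 256-bit slot */

    int main(void)
    {
        unsigned char area[2 * SLOT] = { 0 };
        unsigned char xts_key[32];
        memset(xts_key, 0x11, 16);             /* first half of the caller's key  */
        memset(xts_key + 16, 0x22, 16);        /* second half of the caller's key */
        size_t key_len = 16;
        size_t pad = SLOT - key_len;

        memcpy(area + pad, xts_key, key_len);                   /* first slot  */
        memcpy(area + SLOT + pad, xts_key + key_len, key_len);  /* second slot */

        for (size_t i = 0; i < sizeof(area); i++)
            printf("%02x%s", area[i], (i + 1) % 32 ? "" : "\n");
        return 0;
    }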
1298 struct ccp_des3_engine *des3 = &cmd->u.des3; in ccp_run_des3_cmd()
1302 struct ccp_op op; in ccp_run_des3_cmd() local
1309 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) in ccp_run_des3_cmd()
1310 return -EINVAL; in ccp_run_des3_cmd()
1312 if (!cmd_q->ccp->vdata->perform->des3) in ccp_run_des3_cmd()
1313 return -EINVAL; in ccp_run_des3_cmd()
1315 if (des3->key_len != DES3_EDE_KEY_SIZE) in ccp_run_des3_cmd()
1316 return -EINVAL; in ccp_run_des3_cmd()
1318 if (((des3->mode == CCP_DES3_MODE_ECB) || in ccp_run_des3_cmd()
1319 (des3->mode == CCP_DES3_MODE_CBC)) && in ccp_run_des3_cmd()
1320 (des3->src_len & (DES3_EDE_BLOCK_SIZE - 1))) in ccp_run_des3_cmd()
1321 return -EINVAL; in ccp_run_des3_cmd()
1323 if (!des3->key || !des3->src || !des3->dst) in ccp_run_des3_cmd()
1324 return -EINVAL; in ccp_run_des3_cmd()
1326 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1327 if (des3->iv_len != DES3_EDE_BLOCK_SIZE) in ccp_run_des3_cmd()
1328 return -EINVAL; in ccp_run_des3_cmd()
1330 if (!des3->iv) in ccp_run_des3_cmd()
1331 return -EINVAL; in ccp_run_des3_cmd()
1334 ret = -EIO; in ccp_run_des3_cmd()
1336 memset(&op, 0, sizeof(op)); in ccp_run_des3_cmd()
1339 op.cmd_q = cmd_q; in ccp_run_des3_cmd()
1340 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_des3_cmd()
1341 op.sb_key = cmd_q->sb_key; in ccp_run_des3_cmd()
1343 op.init = (des3->mode == CCP_DES3_MODE_ECB) ? 0 : 1; in ccp_run_des3_cmd()
1344 op.u.des3.type = des3->type; in ccp_run_des3_cmd()
1345 op.u.des3.mode = des3->mode; in ccp_run_des3_cmd()
1346 op.u.des3.action = des3->action; in ccp_run_des3_cmd()
1349 * All supported key sizes fit in a single (32-byte) KSB entry and in ccp_run_des3_cmd()
1350 * (like AES) must be in little endian format. Use the 256-bit byte in ccp_run_des3_cmd()
1364 dm_offset = CCP_SB_BYTES - des3->key_len; /* Basic offset */ in ccp_run_des3_cmd()
1366 len_singlekey = des3->key_len / 3; in ccp_run_des3_cmd()
1368 des3->key, 0, len_singlekey); in ccp_run_des3_cmd()
1372 des3->key, len_singlekey, len_singlekey); in ccp_run_des3_cmd()
1376 des3->key, 2 * len_singlekey, len_singlekey); in ccp_run_des3_cmd()
1381 ret = ccp_copy_to_sb(cmd_q, &key, op.jobid, op.sb_key, in ccp_run_des3_cmd()
1384 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1389 * The DES3 context fits in a single (32-byte) KSB entry and in ccp_run_des3_cmd()
1390 * must be in little endian format. Use the 256-bit byte swap in ccp_run_des3_cmd()
1393 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1394 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_des3_cmd()
1403 dm_offset = CCP_SB_BYTES - des3->iv_len; in ccp_run_des3_cmd()
1404 ret = ccp_set_dm_area(&ctx, dm_offset, des3->iv, 0, in ccp_run_des3_cmd()
1405 des3->iv_len); in ccp_run_des3_cmd()
1409 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_des3_cmd()
1412 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1418 * Prepare the input and output data workareas. For in-place in ccp_run_des3_cmd()
1422 if (sg_virt(des3->src) == sg_virt(des3->dst)) in ccp_run_des3_cmd()
1425 ret = ccp_init_data(&src, cmd_q, des3->src, des3->src_len, in ccp_run_des3_cmd()
1434 ret = ccp_init_data(&dst, cmd_q, des3->dst, des3->src_len, in ccp_run_des3_cmd()
1442 ccp_prepare_data(&src, &dst, &op, DES3_EDE_BLOCK_SIZE, true); in ccp_run_des3_cmd()
1444 op.eom = 1; in ccp_run_des3_cmd()
1446 /* Since we don't retrieve the context in ECB mode in ccp_run_des3_cmd()
1450 op.soc = 0; in ccp_run_des3_cmd()
1453 ret = cmd_q->ccp->vdata->perform->des3(&op); in ccp_run_des3_cmd()
1455 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1459 ccp_process_data(&src, &dst, &op); in ccp_run_des3_cmd()
1462 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1464 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_des3_cmd()
1467 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1472 ccp_get_dm_area(&ctx, dm_offset, des3->iv, 0, in ccp_run_des3_cmd()
1483 if (des3->mode != CCP_DES3_MODE_ECB) in ccp_run_des3_cmd()
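The 3DES key loading above splits the 24-byte EDE key into three 8-byte single-DES keys (len_singlekey = des3->key_len / 3) and stages them inside a 32-byte slot starting at dm_offset = CCP_SB_BYTES - des3->key_len. The destination offset of each piece is not visible in these fragments, so the sketch below keeps source order and only illustrates the offset arithmetic:

    #include <stdio.h>
    #include <string.h>

    #define SB_BYTES 32
    #define DES3_KEY 24

    int main(void)
    {
        unsigned char slot[SB_BYTES] = { 0 };
        unsigned char key[DES3_KEY];
        memset(key, 0x11, 8);                  /* K1 */
        memset(key + 8, 0x22, 8);              /* K2 */
        memset(key + 16, 0x33, 8);             /* K3 */

        size_t dm_offset = SB_BYTES - DES3_KEY;    /* 8 */
        size_t len_singlekey = DES3_KEY / 3;       /* 8 */

        for (int i = 0; i < 3; i++)
            memcpy(slot + dm_offset + i * len_singlekey,
                   key + i * len_singlekey, len_singlekey);

        for (size_t i = 0; i < SB_BYTES; i++)
            printf("%02x%s", slot[i], (i + 1) % 16 ? " " : "\n");
        return 0;
    }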
1495 struct ccp_sha_engine *sha = &cmd->u.sha; in ccp_run_sha_cmd()
1498 struct ccp_op op; in ccp_run_sha_cmd() local
1507 switch (sha->type) { in ccp_run_sha_cmd()
1509 if (sha->ctx_len < SHA1_DIGEST_SIZE) in ccp_run_sha_cmd()
1510 return -EINVAL; in ccp_run_sha_cmd()
1514 if (sha->ctx_len < SHA224_DIGEST_SIZE) in ccp_run_sha_cmd()
1515 return -EINVAL; in ccp_run_sha_cmd()
1519 if (sha->ctx_len < SHA256_DIGEST_SIZE) in ccp_run_sha_cmd()
1520 return -EINVAL; in ccp_run_sha_cmd()
1524 if (cmd_q->ccp->vdata->version < CCP_VERSION(4, 0) in ccp_run_sha_cmd()
1525 || sha->ctx_len < SHA384_DIGEST_SIZE) in ccp_run_sha_cmd()
1526 return -EINVAL; in ccp_run_sha_cmd()
1530 if (cmd_q->ccp->vdata->version < CCP_VERSION(4, 0) in ccp_run_sha_cmd()
1531 || sha->ctx_len < SHA512_DIGEST_SIZE) in ccp_run_sha_cmd()
1532 return -EINVAL; in ccp_run_sha_cmd()
1536 return -EINVAL; in ccp_run_sha_cmd()
1539 if (!sha->ctx) in ccp_run_sha_cmd()
1540 return -EINVAL; in ccp_run_sha_cmd()
1542 if (!sha->final && (sha->src_len & (block_size - 1))) in ccp_run_sha_cmd()
1543 return -EINVAL; in ccp_run_sha_cmd()
1545 /* The version 3 device can't handle zero-length input */ in ccp_run_sha_cmd()
1546 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) { in ccp_run_sha_cmd()
1548 if (!sha->src_len) { in ccp_run_sha_cmd()
1553 if (!sha->final) in ccp_run_sha_cmd()
1559 if (sha->msg_bits) in ccp_run_sha_cmd()
1560 return -EINVAL; in ccp_run_sha_cmd()
1562 /* The CCP cannot perform zero-length sha operations in ccp_run_sha_cmd()
1568 switch (sha->type) { in ccp_run_sha_cmd()
1582 return -EINVAL; in ccp_run_sha_cmd()
1585 scatterwalk_map_and_copy((void *)sha_zero, sha->ctx, 0, in ccp_run_sha_cmd()
1593 switch (sha->type) { in ccp_run_sha_cmd()
1599 if (cmd_q->ccp->vdata->version != CCP_VERSION(3, 0)) in ccp_run_sha_cmd()
1600 ooffset = ioffset = CCP_SB_BYTES - SHA1_DIGEST_SIZE; in ccp_run_sha_cmd()
1610 if (cmd_q->ccp->vdata->version != CCP_VERSION(3, 0)) in ccp_run_sha_cmd()
1611 ooffset = CCP_SB_BYTES - SHA224_DIGEST_SIZE; in ccp_run_sha_cmd()
1628 ooffset = 2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE; in ccp_run_sha_cmd()
1638 ret = -EINVAL; in ccp_run_sha_cmd()
1642 /* For zero-length plaintext the src pointer is ignored; in ccp_run_sha_cmd()
1645 if (sha->src_len && !sha->src) in ccp_run_sha_cmd()
1646 return -EINVAL; in ccp_run_sha_cmd()
1648 memset(&op, 0, sizeof(op)); in ccp_run_sha_cmd()
1649 op.cmd_q = cmd_q; in ccp_run_sha_cmd()
1650 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_sha_cmd()
1651 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_sha_cmd()
1652 op.u.sha.type = sha->type; in ccp_run_sha_cmd()
1653 op.u.sha.msg_bits = sha->msg_bits; in ccp_run_sha_cmd()
1655 /* For SHA1/224/256 the context fits in a single (32-byte) SB entry; in ccp_run_sha_cmd()
1658 * be in little endian format: use the 256-bit byte swap option. in ccp_run_sha_cmd()
1664 if (sha->first) { in ccp_run_sha_cmd()
1665 switch (sha->type) { in ccp_run_sha_cmd()
1679 ret = -EINVAL; in ccp_run_sha_cmd()
1684 ret = ccp_set_dm_area(&ctx, 0, sha->ctx, 0, in ccp_run_sha_cmd()
1690 ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_sha_cmd()
1693 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1697 if (sha->src) { in ccp_run_sha_cmd()
1699 ret = ccp_init_data(&src, cmd_q, sha->src, sha->src_len, in ccp_run_sha_cmd()
1705 ccp_prepare_data(&src, NULL, &op, block_size, false); in ccp_run_sha_cmd()
1706 if (sha->final && !src.sg_wa.bytes_left) in ccp_run_sha_cmd()
1707 op.eom = 1; in ccp_run_sha_cmd()
1709 ret = cmd_q->ccp->vdata->perform->sha(&op); in ccp_run_sha_cmd()
1711 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1715 ccp_process_data(&src, NULL, &op); in ccp_run_sha_cmd()
1718 op.eom = 1; in ccp_run_sha_cmd()
1719 ret = cmd_q->ccp->vdata->perform->sha(&op); in ccp_run_sha_cmd()
1721 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1726 /* Retrieve the SHA context - convert from LE to BE using in ccp_run_sha_cmd()
1727 * 32-byte (256-bit) byteswapping to BE in ccp_run_sha_cmd()
1729 ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx, in ccp_run_sha_cmd()
1732 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1736 if (sha->final) { in ccp_run_sha_cmd()
1738 switch (sha->type) { in ccp_run_sha_cmd()
1743 sha->ctx, 0, in ccp_run_sha_cmd()
1749 sha->ctx, LSB_ITEM_SIZE - ooffset, in ccp_run_sha_cmd()
1752 sha->ctx, 0, in ccp_run_sha_cmd()
1753 LSB_ITEM_SIZE - ooffset); in ccp_run_sha_cmd()
1756 ret = -EINVAL; in ccp_run_sha_cmd()
1761 ccp_get_dm_area(&ctx, 0, sha->ctx, 0, in ccp_run_sha_cmd()
1765 if (sha->final && sha->opad) { in ccp_run_sha_cmd()
1771 if (sha->opad_len != block_size) { in ccp_run_sha_cmd()
1772 ret = -EINVAL; in ccp_run_sha_cmd()
1778 ret = -ENOMEM; in ccp_run_sha_cmd()
1783 scatterwalk_map_and_copy(hmac_buf, sha->opad, 0, block_size, 0); in ccp_run_sha_cmd()
1784 switch (sha->type) { in ccp_run_sha_cmd()
1798 (LSB_ITEM_SIZE - ooffset), in ccp_run_sha_cmd()
1804 ret = -EINVAL; in ccp_run_sha_cmd()
1810 hmac_cmd.u.sha.type = sha->type; in ccp_run_sha_cmd()
1811 hmac_cmd.u.sha.ctx = sha->ctx; in ccp_run_sha_cmd()
1812 hmac_cmd.u.sha.ctx_len = sha->ctx_len; in ccp_run_sha_cmd()
1823 cmd->engine_error = hmac_cmd.engine_error; in ccp_run_sha_cmd()
1829 if (sha->src) in ccp_run_sha_cmd()
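The opad handling near the end of ccp_run_sha_cmd() performs the outer step of HMAC by driving the same engine again: the caller-supplied opad block (block_size bytes) is concatenated with the digest just produced, and that buffer is submitted as a second SHA command (hmac_cmd) whose output is the final HMAC. A sketch of the buffer layout only, sized for SHA-256 for illustration; no hashing is performed here:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define BLOCK_SIZE 64      /* SHA-256 block size */
    #define DIGEST_SIZE 32     /* SHA-256 digest size */

    int main(void)
    {
        unsigned char opad[BLOCK_SIZE];
        unsigned char inner_digest[DIGEST_SIZE];
        memset(opad, 0x5c, sizeof(opad));             /* stand-in for key ^ opad */
        memset(inner_digest, 0xd1, sizeof(inner_digest));

        size_t buf_len = BLOCK_SIZE + DIGEST_SIZE;
        unsigned char *hmac_buf = malloc(buf_len);
        if (!hmac_buf)
            return 1;

        memcpy(hmac_buf, opad, BLOCK_SIZE);                    /* opad block  */
        memcpy(hmac_buf + BLOCK_SIZE, inner_digest, DIGEST_SIZE); /* inner hash */

        /* hmac_buf would now be the source of a second SHA operation whose
         * output is the HMAC value. */
        printf("outer-hash input is %zu bytes\n", buf_len);
        free(hmac_buf);
        return 0;
    }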
1841 struct ccp_rsa_engine *rsa = &cmd->u.rsa; in ccp_run_rsa_cmd()
1843 struct ccp_op op; in ccp_run_rsa_cmd() local
1848 if (rsa->key_size > cmd_q->ccp->vdata->rsamax) in ccp_run_rsa_cmd()
1849 return -EINVAL; in ccp_run_rsa_cmd()
1851 if (!rsa->exp || !rsa->mod || !rsa->src || !rsa->dst) in ccp_run_rsa_cmd()
1852 return -EINVAL; in ccp_run_rsa_cmd()
1854 memset(&op, 0, sizeof(op)); in ccp_run_rsa_cmd()
1855 op.cmd_q = cmd_q; in ccp_run_rsa_cmd()
1856 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_rsa_cmd()
1862 * must be a multiple of 256-bits). Compute o_len, i_len in bytes. in ccp_run_rsa_cmd()
1866 o_len = 32 * ((rsa->key_size + 255) / 256); in ccp_run_rsa_cmd()
1870 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) { in ccp_run_rsa_cmd()
1875 op.sb_key = cmd_q->ccp->vdata->perform->sballoc(cmd_q, in ccp_run_rsa_cmd()
1877 if (!op.sb_key) in ccp_run_rsa_cmd()
1878 return -EIO; in ccp_run_rsa_cmd()
1884 op.sb_key = cmd_q->sb_key; in ccp_run_rsa_cmd()
1894 ret = ccp_reverse_set_dm_area(&exp, 0, rsa->exp, 0, rsa->exp_len); in ccp_run_rsa_cmd()
1898 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) { in ccp_run_rsa_cmd()
1900 * as many 32-byte blocks as were allocated above. It's in ccp_run_rsa_cmd()
1903 ret = ccp_copy_to_sb(cmd_q, &exp, op.jobid, op.sb_key, in ccp_run_rsa_cmd()
1906 cmd->engine_error = cmd_q->cmd_error; in ccp_run_rsa_cmd()
1911 op.exp.u.dma.address = exp.dma.address; in ccp_run_rsa_cmd()
1912 op.exp.u.dma.offset = 0; in ccp_run_rsa_cmd()
1923 ret = ccp_reverse_set_dm_area(&src, 0, rsa->mod, 0, rsa->mod_len); in ccp_run_rsa_cmd()
1926 ret = ccp_reverse_set_dm_area(&src, o_len, rsa->src, 0, rsa->src_len); in ccp_run_rsa_cmd()
1935 op.soc = 1; in ccp_run_rsa_cmd()
1936 op.src.u.dma.address = src.dma.address; in ccp_run_rsa_cmd()
1937 op.src.u.dma.offset = 0; in ccp_run_rsa_cmd()
1938 op.src.u.dma.length = i_len; in ccp_run_rsa_cmd()
1939 op.dst.u.dma.address = dst.dma.address; in ccp_run_rsa_cmd()
1940 op.dst.u.dma.offset = 0; in ccp_run_rsa_cmd()
1941 op.dst.u.dma.length = o_len; in ccp_run_rsa_cmd()
1943 op.u.rsa.mod_size = rsa->key_size; in ccp_run_rsa_cmd()
1944 op.u.rsa.input_len = i_len; in ccp_run_rsa_cmd()
1946 ret = cmd_q->ccp->vdata->perform->rsa(&op); in ccp_run_rsa_cmd()
1948 cmd->engine_error = cmd_q->cmd_error; in ccp_run_rsa_cmd()
1952 ccp_reverse_get_dm_area(&dst, 0, rsa->dst, 0, rsa->mod_len); in ccp_run_rsa_cmd()
1965 cmd_q->ccp->vdata->perform->sbfree(cmd_q, op.sb_key, sb_count); in ccp_run_rsa_cmd()
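The RSA sizing above rounds the key size up to a 256-bit multiple and works in bytes: o_len = 32 * ((rsa->key_size + 255) / 256). The input area holds the modulus at offset 0 and the message at offset o_len (see the two ccp_reverse_set_dm_area() calls), so its length is assumed to be 2 * o_len. A quick check of the arithmetic:

    #include <stdio.h>

    int main(void)
    {
        unsigned int key_bits[] = { 2048, 3072, 4000 };

        for (int i = 0; i < 3; i++) {
            /* key size in bits, rounded up to a 256-bit boundary, in bytes */
            unsigned int o_len = 32 * ((key_bits[i] + 255) / 256);
            unsigned int i_len = o_len * 2;   /* modulus + message (assumed) */
            printf("key %u bits -> o_len %u bytes, i_len %u bytes\n",
                   key_bits[i], o_len, i_len);
        }
        return 0;
    }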
1973 struct ccp_passthru_engine *pt = &cmd->u.passthru; in ccp_run_passthru_cmd()
1976 struct ccp_op op; in ccp_run_passthru_cmd() local
1981 if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1))) in ccp_run_passthru_cmd()
1982 return -EINVAL; in ccp_run_passthru_cmd()
1984 if (!pt->src || !pt->dst) in ccp_run_passthru_cmd()
1985 return -EINVAL; in ccp_run_passthru_cmd()
1987 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_cmd()
1988 if (pt->mask_len != CCP_PASSTHRU_MASKSIZE) in ccp_run_passthru_cmd()
1989 return -EINVAL; in ccp_run_passthru_cmd()
1990 if (!pt->mask) in ccp_run_passthru_cmd()
1991 return -EINVAL; in ccp_run_passthru_cmd()
1996 memset(&op, 0, sizeof(op)); in ccp_run_passthru_cmd()
1997 op.cmd_q = cmd_q; in ccp_run_passthru_cmd()
1998 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_passthru_cmd()
2000 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_cmd()
2002 op.sb_key = cmd_q->sb_key; in ccp_run_passthru_cmd()
2011 ret = ccp_set_dm_area(&mask, 0, pt->mask, 0, pt->mask_len); in ccp_run_passthru_cmd()
2014 ret = ccp_copy_to_sb(cmd_q, &mask, op.jobid, op.sb_key, in ccp_run_passthru_cmd()
2017 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_cmd()
2022 /* Prepare the input and output data workareas. For in-place in ccp_run_passthru_cmd()
2026 if (sg_virt(pt->src) == sg_virt(pt->dst)) in ccp_run_passthru_cmd()
2029 ret = ccp_init_data(&src, cmd_q, pt->src, pt->src_len, in ccp_run_passthru_cmd()
2038 ret = ccp_init_data(&dst, cmd_q, pt->dst, pt->src_len, in ccp_run_passthru_cmd()
2055 ret = -EINVAL; in ccp_run_passthru_cmd()
2060 op.eom = 1; in ccp_run_passthru_cmd()
2061 op.soc = 1; in ccp_run_passthru_cmd()
2064 op.src.type = CCP_MEMTYPE_SYSTEM; in ccp_run_passthru_cmd()
2065 op.src.u.dma.address = sg_dma_address(src.sg_wa.sg); in ccp_run_passthru_cmd()
2066 op.src.u.dma.offset = 0; in ccp_run_passthru_cmd()
2067 op.src.u.dma.length = sg_dma_len(src.sg_wa.sg); in ccp_run_passthru_cmd()
2069 op.dst.type = CCP_MEMTYPE_SYSTEM; in ccp_run_passthru_cmd()
2070 op.dst.u.dma.address = sg_dma_address(dst.sg_wa.sg); in ccp_run_passthru_cmd()
2071 op.dst.u.dma.offset = dst.sg_wa.sg_used; in ccp_run_passthru_cmd()
2072 op.dst.u.dma.length = op.src.u.dma.length; in ccp_run_passthru_cmd()
2074 ret = cmd_q->ccp->vdata->perform->passthru(&op); in ccp_run_passthru_cmd()
2076 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_cmd()
2096 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) in ccp_run_passthru_cmd()
2106 struct ccp_passthru_nomap_engine *pt = &cmd->u.passthru_nomap; in ccp_run_passthru_nomap_cmd()
2108 struct ccp_op op; in ccp_run_passthru_nomap_cmd() local
2111 if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1))) in ccp_run_passthru_nomap_cmd()
2112 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2114 if (!pt->src_dma || !pt->dst_dma) in ccp_run_passthru_nomap_cmd()
2115 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2117 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_nomap_cmd()
2118 if (pt->mask_len != CCP_PASSTHRU_MASKSIZE) in ccp_run_passthru_nomap_cmd()
2119 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2120 if (!pt->mask) in ccp_run_passthru_nomap_cmd()
2121 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2126 memset(&op, 0, sizeof(op)); in ccp_run_passthru_nomap_cmd()
2127 op.cmd_q = cmd_q; in ccp_run_passthru_nomap_cmd()
2128 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_passthru_nomap_cmd()
2130 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_nomap_cmd()
2132 op.sb_key = cmd_q->sb_key; in ccp_run_passthru_nomap_cmd()
2134 mask.length = pt->mask_len; in ccp_run_passthru_nomap_cmd()
2135 mask.dma.address = pt->mask; in ccp_run_passthru_nomap_cmd()
2136 mask.dma.length = pt->mask_len; in ccp_run_passthru_nomap_cmd()
2138 ret = ccp_copy_to_sb(cmd_q, &mask, op.jobid, op.sb_key, in ccp_run_passthru_nomap_cmd()
2141 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_nomap_cmd()
2147 op.eom = 1; in ccp_run_passthru_nomap_cmd()
2148 op.soc = 1; in ccp_run_passthru_nomap_cmd()
2150 op.src.type = CCP_MEMTYPE_SYSTEM; in ccp_run_passthru_nomap_cmd()
2151 op.src.u.dma.address = pt->src_dma; in ccp_run_passthru_nomap_cmd()
2152 op.src.u.dma.offset = 0; in ccp_run_passthru_nomap_cmd()
2153 op.src.u.dma.length = pt->src_len; in ccp_run_passthru_nomap_cmd()
2155 op.dst.type = CCP_MEMTYPE_SYSTEM; in ccp_run_passthru_nomap_cmd()
2156 op.dst.u.dma.address = pt->dst_dma; in ccp_run_passthru_nomap_cmd()
2157 op.dst.u.dma.offset = 0; in ccp_run_passthru_nomap_cmd()
2158 op.dst.u.dma.length = pt->src_len; in ccp_run_passthru_nomap_cmd()
2160 ret = cmd_q->ccp->vdata->perform->passthru(&op); in ccp_run_passthru_nomap_cmd()
2162 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_nomap_cmd()
2169 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_mm_cmd()
2171 struct ccp_op op; in ccp_run_ecc_mm_cmd() local
2175 if (!ecc->u.mm.operand_1 || in ccp_run_ecc_mm_cmd()
2176 (ecc->u.mm.operand_1_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2177 return -EINVAL; in ccp_run_ecc_mm_cmd()
2179 if (ecc->function != CCP_ECC_FUNCTION_MINV_384BIT) in ccp_run_ecc_mm_cmd()
2180 if (!ecc->u.mm.operand_2 || in ccp_run_ecc_mm_cmd()
2181 (ecc->u.mm.operand_2_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2182 return -EINVAL; in ccp_run_ecc_mm_cmd()
2184 if (!ecc->u.mm.result || in ccp_run_ecc_mm_cmd()
2185 (ecc->u.mm.result_len < CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2186 return -EINVAL; in ccp_run_ecc_mm_cmd()
2188 memset(&op, 0, sizeof(op)); in ccp_run_ecc_mm_cmd()
2189 op.cmd_q = cmd_q; in ccp_run_ecc_mm_cmd()
2190 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_ecc_mm_cmd()
2208 ret = ccp_reverse_set_dm_area(&src, 0, ecc->mod, 0, ecc->mod_len); in ccp_run_ecc_mm_cmd()
2214 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.mm.operand_1, 0, in ccp_run_ecc_mm_cmd()
2215 ecc->u.mm.operand_1_len); in ccp_run_ecc_mm_cmd()
2220 if (ecc->function != CCP_ECC_FUNCTION_MINV_384BIT) { in ccp_run_ecc_mm_cmd()
2222 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.mm.operand_2, 0, in ccp_run_ecc_mm_cmd()
2223 ecc->u.mm.operand_2_len); in ccp_run_ecc_mm_cmd()
2238 op.soc = 1; in ccp_run_ecc_mm_cmd()
2239 op.src.u.dma.address = src.dma.address; in ccp_run_ecc_mm_cmd()
2240 op.src.u.dma.offset = 0; in ccp_run_ecc_mm_cmd()
2241 op.src.u.dma.length = src.length; in ccp_run_ecc_mm_cmd()
2242 op.dst.u.dma.address = dst.dma.address; in ccp_run_ecc_mm_cmd()
2243 op.dst.u.dma.offset = 0; in ccp_run_ecc_mm_cmd()
2244 op.dst.u.dma.length = dst.length; in ccp_run_ecc_mm_cmd()
2246 op.u.ecc.function = cmd->u.ecc.function; in ccp_run_ecc_mm_cmd()
2248 ret = cmd_q->ccp->vdata->perform->ecc(&op); in ccp_run_ecc_mm_cmd()
2250 cmd->engine_error = cmd_q->cmd_error; in ccp_run_ecc_mm_cmd()
2254 ecc->ecc_result = le16_to_cpup( in ccp_run_ecc_mm_cmd()
2256 if (!(ecc->ecc_result & CCP_ECC_RESULT_SUCCESS)) { in ccp_run_ecc_mm_cmd()
2257 ret = -EIO; in ccp_run_ecc_mm_cmd()
2262 ccp_reverse_get_dm_area(&dst, 0, ecc->u.mm.result, 0, in ccp_run_ecc_mm_cmd()
2276 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_pm_cmd()
2278 struct ccp_op op; in ccp_run_ecc_pm_cmd() local
2282 if (!ecc->u.pm.point_1.x || in ccp_run_ecc_pm_cmd()
2283 (ecc->u.pm.point_1.x_len > CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2284 !ecc->u.pm.point_1.y || in ccp_run_ecc_pm_cmd()
2285 (ecc->u.pm.point_1.y_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2286 return -EINVAL; in ccp_run_ecc_pm_cmd()
2288 if (ecc->function == CCP_ECC_FUNCTION_PADD_384BIT) { in ccp_run_ecc_pm_cmd()
2289 if (!ecc->u.pm.point_2.x || in ccp_run_ecc_pm_cmd()
2290 (ecc->u.pm.point_2.x_len > CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2291 !ecc->u.pm.point_2.y || in ccp_run_ecc_pm_cmd()
2292 (ecc->u.pm.point_2.y_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2293 return -EINVAL; in ccp_run_ecc_pm_cmd()
2295 if (!ecc->u.pm.domain_a || in ccp_run_ecc_pm_cmd()
2296 (ecc->u.pm.domain_a_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2297 return -EINVAL; in ccp_run_ecc_pm_cmd()
2299 if (ecc->function == CCP_ECC_FUNCTION_PMUL_384BIT) in ccp_run_ecc_pm_cmd()
2300 if (!ecc->u.pm.scalar || in ccp_run_ecc_pm_cmd()
2301 (ecc->u.pm.scalar_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2302 return -EINVAL; in ccp_run_ecc_pm_cmd()
2305 if (!ecc->u.pm.result.x || in ccp_run_ecc_pm_cmd()
2306 (ecc->u.pm.result.x_len < CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2307 !ecc->u.pm.result.y || in ccp_run_ecc_pm_cmd()
2308 (ecc->u.pm.result.y_len < CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2309 return -EINVAL; in ccp_run_ecc_pm_cmd()
2311 memset(&op, 0, sizeof(op)); in ccp_run_ecc_pm_cmd()
2312 op.cmd_q = cmd_q; in ccp_run_ecc_pm_cmd()
2313 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_ecc_pm_cmd()
2331 ret = ccp_reverse_set_dm_area(&src, 0, ecc->mod, 0, ecc->mod_len); in ccp_run_ecc_pm_cmd()
2337 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_1.x, 0, in ccp_run_ecc_pm_cmd()
2338 ecc->u.pm.point_1.x_len); in ccp_run_ecc_pm_cmd()
2342 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_1.y, 0, in ccp_run_ecc_pm_cmd()
2343 ecc->u.pm.point_1.y_len); in ccp_run_ecc_pm_cmd()
2352 if (ecc->function == CCP_ECC_FUNCTION_PADD_384BIT) { in ccp_run_ecc_pm_cmd()
2354 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_2.x, 0, in ccp_run_ecc_pm_cmd()
2355 ecc->u.pm.point_2.x_len); in ccp_run_ecc_pm_cmd()
2359 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_2.y, 0, in ccp_run_ecc_pm_cmd()
2360 ecc->u.pm.point_2.y_len); in ccp_run_ecc_pm_cmd()
2370 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.domain_a, 0, in ccp_run_ecc_pm_cmd()
2371 ecc->u.pm.domain_a_len); in ccp_run_ecc_pm_cmd()
2376 if (ecc->function == CCP_ECC_FUNCTION_PMUL_384BIT) { in ccp_run_ecc_pm_cmd()
2379 ecc->u.pm.scalar, 0, in ccp_run_ecc_pm_cmd()
2380 ecc->u.pm.scalar_len); in ccp_run_ecc_pm_cmd()
2396 op.soc = 1; in ccp_run_ecc_pm_cmd()
2397 op.src.u.dma.address = src.dma.address; in ccp_run_ecc_pm_cmd()
2398 op.src.u.dma.offset = 0; in ccp_run_ecc_pm_cmd()
2399 op.src.u.dma.length = src.length; in ccp_run_ecc_pm_cmd()
2400 op.dst.u.dma.address = dst.dma.address; in ccp_run_ecc_pm_cmd()
2401 op.dst.u.dma.offset = 0; in ccp_run_ecc_pm_cmd()
2402 op.dst.u.dma.length = dst.length; in ccp_run_ecc_pm_cmd()
2404 op.u.ecc.function = cmd->u.ecc.function; in ccp_run_ecc_pm_cmd()
2406 ret = cmd_q->ccp->vdata->perform->ecc(&op); in ccp_run_ecc_pm_cmd()
2408 cmd->engine_error = cmd_q->cmd_error; in ccp_run_ecc_pm_cmd()
2412 ecc->ecc_result = le16_to_cpup( in ccp_run_ecc_pm_cmd()
2414 if (!(ecc->ecc_result & CCP_ECC_RESULT_SUCCESS)) { in ccp_run_ecc_pm_cmd()
2415 ret = -EIO; in ccp_run_ecc_pm_cmd()
2425 ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.x, 0, in ccp_run_ecc_pm_cmd()
2428 ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.y, 0, in ccp_run_ecc_pm_cmd()
2447 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_cmd()
2449 ecc->ecc_result = 0; in ccp_run_ecc_cmd()
2451 if (!ecc->mod || in ccp_run_ecc_cmd()
2452 (ecc->mod_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_cmd()
2453 return -EINVAL; in ccp_run_ecc_cmd()
2455 switch (ecc->function) { in ccp_run_ecc_cmd()
2467 return -EINVAL; in ccp_run_ecc_cmd()
2475 cmd->engine_error = 0; in ccp_run_cmd()
2476 cmd_q->cmd_error = 0; in ccp_run_cmd()
2477 cmd_q->int_rcvd = 0; in ccp_run_cmd()
2478 cmd_q->free_slots = cmd_q->ccp->vdata->perform->get_free_slots(cmd_q); in ccp_run_cmd()
2480 switch (cmd->engine) { in ccp_run_cmd()
2482 switch (cmd->u.aes.mode) { in ccp_run_cmd()
2507 if (cmd->flags & CCP_CMD_PASSTHRU_NO_DMA_MAP) in ccp_run_cmd()
2516 ret = -EINVAL; in ccp_run_cmd()