Lines Matching refs:cpg
(identifier cross-reference over the Marvell CESA driver, drivers/crypto/mv_cesa.c: each hit gives the source line number, the matching line, and the enclosing function)
103 static struct crypto_priv *cpg; variable
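
Nearly every member of the private context can be inferred from the references below. A reconstruction, as a sketch only: field order, exact types, and any members not referenced in this listing are guesses.

    /* inferred from the cpg-> dereferences below; unreferenced details assumed */
    enum engine_status {
        ENGINE_IDLE,
        ENGINE_BUSY,
        ENGINE_W_DEQUEUE,       /* hardware done, result awaiting dequeue */
    };

    struct req_progress {
        struct sg_mapping_iter src_sg_it;
        struct sg_mapping_iter dst_sg_it;
        void (*complete)(void);
        void (*process)(int is_first);

        int crypt_len;          /* bytes staged in SRAM for this chunk */
        int hw_nbytes;          /* total payload of the current request */
        int hw_processed_bytes; /* payload already run through the engine */
        int copy_back;          /* ciphers copy results out; hashes do not */
        int sg_dst_left;
        int dst_start;
    };

    struct crypto_priv {
        void __iomem *reg;      /* engine registers (readl/writel below) */
        void __iomem *sram;     /* staging SRAM (memcpy in/out below) */

        struct task_struct *queue_th;   /* kthread running queue_manag() */
        spinlock_t lock;                /* guards the request queue */
        struct crypto_queue queue;
        enum engine_status eng_st;
        struct crypto_async_request *cur_req;
        struct req_progress p;
        struct timer_list completion_timer;
        int max_req_size;       /* largest chunk the SRAM window holds */
        int has_sha1;
        int has_hmac_sha1;
    };
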
148 int active = readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_EN_SEC_ACCL0; in mv_completion_timer_callback()
154 del_timer(&cpg->completion_timer); in mv_completion_timer_callback()
155 writel(SEC_CMD_DISABLE_SEC, cpg->reg + SEC_ACCEL_CMD); in mv_completion_timer_callback()
156 while (readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_DISABLE_SEC) in mv_completion_timer_callback()
158 cpg->eng_st = ENGINE_W_DEQUEUE; in mv_completion_timer_callback()
159 wake_up_process(cpg->queue_th); in mv_completion_timer_callback()
164 setup_timer(&cpg->completion_timer, &mv_completion_timer_callback, 0); in mv_setup_timer()
165 mod_timer(&cpg->completion_timer, in mv_setup_timer()
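
Hits 148-165 form a completion watchdog: mv_setup_timer() arms a one-shot timer each time the engine is kicked, and the callback force-stops the accelerator if the done interrupt never arrives, then hands the request to the queue thread for cleanup. Stitched together as a sketch; the timeout constant, the error printk, and the empty poll-loop body are assumptions.

    #define MV_CESA_EXPIRE 500      /* msec; assumed timeout value */

    static void mv_completion_timer_callback(unsigned long unused)
    {
        int active = readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_EN_SEC_ACCL0;

        printk(KERN_ERR "mv_crypto: completion timer expired (CESA %sactive)\n",
               active ? "" : "in");
        del_timer(&cpg->completion_timer);
        /* force the accelerator off and wait for it to acknowledge */
        writel(SEC_CMD_DISABLE_SEC, cpg->reg + SEC_ACCEL_CMD);
        while (readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_DISABLE_SEC)
            ;
        /* fake a completion so queue_manag() can retire the request */
        cpg->eng_st = ENGINE_W_DEQUEUE;
        wake_up_process(cpg->queue_th);
    }

    static void mv_setup_timer(void)
    {
        setup_timer(&cpg->completion_timer, &mv_completion_timer_callback, 0);
        mod_timer(&cpg->completion_timer,
                  jiffies + msecs_to_jiffies(MV_CESA_EXPIRE));
    }
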
245 struct req_progress *p = &cpg->p; in setup_data_in()
247 min(p->hw_nbytes - p->hw_processed_bytes, cpg->max_req_size); in setup_data_in()
248 copy_src_to_buf(p, cpg->sram + SRAM_DATA_IN_START + p->crypt_len, in setup_data_in()
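
Hits 245-248 are the chunking step: each pass stages at most max_req_size bytes of the request in the SRAM input window, which is why larger requests take several trips through the engine. A sketch of the surrounding function, assuming copy_src_to_buf() advances the source scatterlist iterator and that p->crypt_len is updated at the end.

    static void setup_data_in(void)
    {
        struct req_progress *p = &cpg->p;
        int data_in_sram =
            min(p->hw_nbytes - p->hw_processed_bytes, cpg->max_req_size);

        /* append the next chunk behind the bytes already staged */
        copy_src_to_buf(p, cpg->sram + SRAM_DATA_IN_START + p->crypt_len,
                        data_in_sram - p->crypt_len);
        p->crypt_len = data_in_sram;    /* assumed bookkeeping update */
    }
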
255 struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req); in mv_process_current_q()
270 memcpy(cpg->sram + SRAM_DATA_IV, req->info, 16); in mv_process_current_q()
275 memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_dec_key, in mv_process_current_q()
279 memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_enc_key, in mv_process_current_q()
299 op.enc_len = cpg->p.crypt_len; in mv_process_current_q()
300 memcpy(cpg->sram + SRAM_CONFIG, &op, in mv_process_current_q()
305 writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD); in mv_process_current_q()
310 struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req); in mv_crypto_algo_completion()
313 sg_miter_stop(&cpg->p.src_sg_it); in mv_crypto_algo_completion()
314 sg_miter_stop(&cpg->p.dst_sg_it); in mv_crypto_algo_completion()
319 memcpy(req->info, cpg->sram + SRAM_DATA_IV_BUF, 16); in mv_crypto_algo_completion()
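
Hits 310-319 are the cipher completion path: both scatterlist iterators are stopped, and the updated IV is copied back out of SRAM into req->info so a chained request can continue the stream. A sketch; the real function presumably guards the 16-byte copy on the cipher mode (CBC), which this listing does not show.

    static void mv_crypto_algo_completion(void)
    {
        struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req);

        sg_miter_stop(&cpg->p.src_sg_it);
        sg_miter_stop(&cpg->p.dst_sg_it);

        /* CBC only (assumed guard): hand the updated chaining value
         * back so a follow-on request sees the right IV */
        memcpy(req->info, cpg->sram + SRAM_DATA_IV_BUF, 16);
    }
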
324 struct ahash_request *req = ahash_request_cast(cpg->cur_req); in mv_process_hash_current()
327 struct req_progress *p = &cpg->p; in mv_process_hash_current()
338 memcpy(cpg->sram + SRAM_HMAC_IV_IN, in mv_process_hash_current()
373 writel(req_ctx->state[0], cpg->reg + DIGEST_INITIAL_VAL_A); in mv_process_hash_current()
374 writel(req_ctx->state[1], cpg->reg + DIGEST_INITIAL_VAL_B); in mv_process_hash_current()
375 writel(req_ctx->state[2], cpg->reg + DIGEST_INITIAL_VAL_C); in mv_process_hash_current()
376 writel(req_ctx->state[3], cpg->reg + DIGEST_INITIAL_VAL_D); in mv_process_hash_current()
377 writel(req_ctx->state[4], cpg->reg + DIGEST_INITIAL_VAL_E); in mv_process_hash_current()
381 memcpy(cpg->sram + SRAM_CONFIG, &op, sizeof(struct sec_accel_config)); in mv_process_hash_current()
385 writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD); in mv_process_hash_current()
428 ctx->state[0] = readl(cpg->reg + DIGEST_INITIAL_VAL_A); in mv_save_digest_state()
429 ctx->state[1] = readl(cpg->reg + DIGEST_INITIAL_VAL_B); in mv_save_digest_state()
430 ctx->state[2] = readl(cpg->reg + DIGEST_INITIAL_VAL_C); in mv_save_digest_state()
431 ctx->state[3] = readl(cpg->reg + DIGEST_INITIAL_VAL_D); in mv_save_digest_state()
432 ctx->state[4] = readl(cpg->reg + DIGEST_INITIAL_VAL_E); in mv_save_digest_state()
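
Hits 373-377 and 428-432 are a matched pair: the DIGEST_INITIAL_VAL registers double as the running digest, so mv_process_hash_current() writes the five SHA-1 chaining words (A-E) in before each chunk and mv_save_digest_state() reads them back out afterwards. This round trip is what lets a hash larger than the SRAM window be processed piecewise. The saving half as a sketch; the context type name is assumed.

    static void mv_save_digest_state(struct mv_req_hash_ctx *ctx)
    {
        /* once the engine halts, the "initial value" registers hold
         * the digest state after the chunk just processed */
        ctx->state[0] = readl(cpg->reg + DIGEST_INITIAL_VAL_A);
        ctx->state[1] = readl(cpg->reg + DIGEST_INITIAL_VAL_B);
        ctx->state[2] = readl(cpg->reg + DIGEST_INITIAL_VAL_C);
        ctx->state[3] = readl(cpg->reg + DIGEST_INITIAL_VAL_D);
        ctx->state[4] = readl(cpg->reg + DIGEST_INITIAL_VAL_E);
    }
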
437 struct ahash_request *req = ahash_request_cast(cpg->cur_req); in mv_hash_algo_completion()
441 copy_src_to_buf(&cpg->p, ctx->buffer, ctx->extra_bytes); in mv_hash_algo_completion()
442 sg_miter_stop(&cpg->p.src_sg_it); in mv_hash_algo_completion()
446 memcpy(req->result, cpg->sram + SRAM_DIGEST_BUF, in mv_hash_algo_completion()
460 struct crypto_async_request *req = cpg->cur_req; in dequeue_complete_req()
463 cpg->p.hw_processed_bytes += cpg->p.crypt_len; in dequeue_complete_req()
464 if (cpg->p.copy_back) { in dequeue_complete_req()
465 int need_copy_len = cpg->p.crypt_len; in dequeue_complete_req()
470 if (!cpg->p.sg_dst_left) { in dequeue_complete_req()
471 ret = sg_miter_next(&cpg->p.dst_sg_it); in dequeue_complete_req()
473 cpg->p.sg_dst_left = cpg->p.dst_sg_it.length; in dequeue_complete_req()
474 cpg->p.dst_start = 0; in dequeue_complete_req()
477 buf = cpg->p.dst_sg_it.addr; in dequeue_complete_req()
478 buf += cpg->p.dst_start; in dequeue_complete_req()
480 dst_copy = min(need_copy_len, cpg->p.sg_dst_left); in dequeue_complete_req()
483 cpg->sram + SRAM_DATA_OUT_START + sram_offset, in dequeue_complete_req()
486 cpg->p.sg_dst_left -= dst_copy; in dequeue_complete_req()
488 cpg->p.dst_start += dst_copy; in dequeue_complete_req()
492 cpg->p.crypt_len = 0; in dequeue_complete_req()
494 BUG_ON(cpg->eng_st != ENGINE_W_DEQUEUE); in dequeue_complete_req()
495 if (cpg->p.hw_processed_bytes < cpg->p.hw_nbytes) { in dequeue_complete_req()
497 cpg->eng_st = ENGINE_BUSY; in dequeue_complete_req()
498 cpg->p.process(0); in dequeue_complete_req()
500 cpg->p.complete(); in dequeue_complete_req()
501 cpg->eng_st = ENGINE_IDLE; in dequeue_complete_req()
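
Hits 460-501 are the post-chunk bookkeeping: ciphers copy the output window back into the destination scatterlist segment by segment, then the engine either starts the next chunk or signals overall completion. Assembled as a sketch; the do/while loop shape, the BUG_ON after sg_miter_next(), and the final completion callback under local_bh_disable() are assumptions.

    static void dequeue_complete_req(void)
    {
        struct crypto_async_request *req = cpg->cur_req;
        void *buf;
        int ret;

        cpg->p.hw_processed_bytes += cpg->p.crypt_len;
        if (cpg->p.copy_back) {
            int need_copy_len = cpg->p.crypt_len;
            int sram_offset = 0;

            do {
                int dst_copy;

                /* step to the next dst scatterlist segment once
                 * the current one is full */
                if (!cpg->p.sg_dst_left) {
                    ret = sg_miter_next(&cpg->p.dst_sg_it);
                    BUG_ON(!ret);
                    cpg->p.sg_dst_left = cpg->p.dst_sg_it.length;
                    cpg->p.dst_start = 0;
                }

                buf = cpg->p.dst_sg_it.addr;
                buf += cpg->p.dst_start;

                dst_copy = min(need_copy_len, cpg->p.sg_dst_left);

                memcpy(buf,
                       cpg->sram + SRAM_DATA_OUT_START + sram_offset,
                       dst_copy);
                sram_offset += dst_copy;
                cpg->p.sg_dst_left -= dst_copy;
                need_copy_len -= dst_copy;
                cpg->p.dst_start += dst_copy;
            } while (need_copy_len > 0);
        }

        cpg->p.crypt_len = 0;

        BUG_ON(cpg->eng_st != ENGINE_W_DEQUEUE);
        if (cpg->p.hw_processed_bytes < cpg->p.hw_nbytes) {
            /* more of this request left: stage and run the next chunk */
            cpg->eng_st = ENGINE_BUSY;
            cpg->p.process(0);
        } else {
            cpg->p.complete();
            cpg->eng_st = ENGINE_IDLE;
            local_bh_disable();
            req->complete(req, 0);
            local_bh_enable();
        }
    }
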
527 struct req_progress *p = &cpg->p; in mv_start_new_crypt_req()
530 cpg->cur_req = &req->base; in mv_start_new_crypt_req()
548 struct req_progress *p = &cpg->p; in mv_start_new_hash_req()
551 cpg->cur_req = &req->base; in mv_start_new_hash_req()
572 memcpy(cpg->sram + SRAM_DATA_IN_START, ctx->buffer, in mv_start_new_hash_req()
586 cpg->eng_st = ENGINE_IDLE; in mv_start_new_hash_req()
595 cpg->eng_st = ENGINE_IDLE; in queue_manag()
602 if (cpg->eng_st == ENGINE_W_DEQUEUE) in queue_manag()
605 spin_lock_irq(&cpg->lock); in queue_manag()
606 if (cpg->eng_st == ENGINE_IDLE) { in queue_manag()
607 backlog = crypto_get_backlog(&cpg->queue); in queue_manag()
608 async_req = crypto_dequeue_request(&cpg->queue); in queue_manag()
610 BUG_ON(cpg->eng_st != ENGINE_IDLE); in queue_manag()
611 cpg->eng_st = ENGINE_BUSY; in queue_manag()
614 spin_unlock_irq(&cpg->lock); in queue_manag()
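
Hits 586-614 outline the worker kthread: it retires finished chunks (ENGINE_W_DEQUEUE), then dequeues the next request under the lock only when the engine is idle. A sketch of the loop; the backlog completion, the dispatch-by-algorithm-type branch, and the schedule() fallback are assumptions consistent with the two mv_start_new_*_req() helpers at 527/548.

    static int queue_manag(void *data)
    {
        cpg->eng_st = ENGINE_IDLE;
        do {
            struct crypto_async_request *async_req = NULL;
            struct crypto_async_request *backlog = NULL;

            __set_current_state(TASK_INTERRUPTIBLE);

            /* finished chunks are post-processed here, in thread
             * context, not in the interrupt handler */
            if (cpg->eng_st == ENGINE_W_DEQUEUE)
                dequeue_complete_req();

            spin_lock_irq(&cpg->lock);
            if (cpg->eng_st == ENGINE_IDLE) {
                backlog = crypto_get_backlog(&cpg->queue);
                async_req = crypto_dequeue_request(&cpg->queue);
                if (async_req) {
                    BUG_ON(cpg->eng_st != ENGINE_IDLE);
                    cpg->eng_st = ENGINE_BUSY;
                }
            }
            spin_unlock_irq(&cpg->lock);

            if (backlog)
                backlog->complete(backlog, -EINPROGRESS);

            if (async_req) {
                /* dispatch by type (assumed test) to the helpers
                 * at 527/548 above */
                if (crypto_tfm_alg_type(async_req->tfm) !=
                    CRYPTO_ALG_TYPE_AHASH)
                    mv_start_new_crypt_req(
                        ablkcipher_request_cast(async_req));
                else
                    mv_start_new_hash_req(
                        ahash_request_cast(async_req));
            } else {
                schedule();
            }
        } while (!kthread_should_stop());

        return 0;
    }
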
646 spin_lock_irqsave(&cpg->lock, flags); in mv_handle_req()
647 ret = crypto_enqueue_request(&cpg->queue, req); in mv_handle_req()
648 spin_unlock_irqrestore(&cpg->lock, flags); in mv_handle_req()
649 wake_up_process(cpg->queue_th); in mv_handle_req()
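
Hits 646-649 are the entire submission path: every cipher and hash entry point funnels into this helper, which queues the request under the lock and pokes the worker thread. The signature and return value are assumptions consistent with crypto_enqueue_request().

    static int mv_handle_req(struct crypto_async_request *req)
    {
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&cpg->lock, flags);
        ret = crypto_enqueue_request(&cpg->queue, req);
        spin_unlock_irqrestore(&cpg->lock, flags);
        wake_up_process(cpg->queue_th);     /* kick queue_manag() */
        return ret;
    }
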
909 val = readl(cpg->reg + SEC_ACCEL_INT_STATUS); in crypto_int()
913 if (!del_timer(&cpg->completion_timer)) { in crypto_int()
918 writel(val, cpg->reg + FPGA_INT_STATUS); in crypto_int()
919 writel(val, cpg->reg + SEC_ACCEL_INT_STATUS); in crypto_int()
920 BUG_ON(cpg->eng_st != ENGINE_BUSY); in crypto_int()
921 cpg->eng_st = ENGINE_W_DEQUEUE; in crypto_int()
922 wake_up_process(cpg->queue_th); in crypto_int()
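
Hits 909-922 show how little the interrupt handler does: cancel the watchdog, acknowledge the DONE bit, flip the engine to W_DEQUEUE, and wake the thread; all result handling is deferred to queue_manag(). A sketch, where the IRQ_NONE early-out and the bit masking before the acknowledge writes are assumptions.

    static irqreturn_t crypto_int(int irq, void *priv)
    {
        u32 val;

        val = readl(cpg->reg + SEC_ACCEL_INT_STATUS);
        if (!(val & SEC_INT_ACCEL0_DONE))
            return IRQ_NONE;

        if (!del_timer(&cpg->completion_timer)) {
            printk(KERN_WARNING
                   "mv_crypto: interrupt arrived with no timer pending\n");
        }

        val &= ~SEC_INT_ACCEL0_DONE;
        writel(val, cpg->reg + FPGA_INT_STATUS);    /* FPGA variants */
        writel(val, cpg->reg + SEC_ACCEL_INT_STATUS);
        BUG_ON(cpg->eng_st != ENGINE_BUSY);
        cpg->eng_st = ENGINE_W_DEQUEUE;
        wake_up_process(cpg->queue_th);
        return IRQ_HANDLED;
    }
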
1029 if (cpg) { in mv_probe()
1074 cpg = cp; in mv_probe()
1093 writel(0, cpg->reg + SEC_ACCEL_INT_STATUS); in mv_probe()
1094 writel(SEC_INT_ACCEL0_DONE, cpg->reg + SEC_ACCEL_INT_MASK); in mv_probe()
1095 writel(SEC_CFG_STOP_DIG_ERR, cpg->reg + SEC_ACCEL_CFG); in mv_probe()
1096 writel(SRAM_CONFIG, cpg->reg + SEC_ACCEL_DESC_P0); in mv_probe()
1114 cpg->has_sha1 = 1; in mv_probe()
1120 cpg->has_hmac_sha1 = 1; in mv_probe()
1143 cpg = NULL; in mv_probe()
1169 cpg = NULL; in mv_remove()
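
Hits 1029, 1074, 1143 and 1169 trace the lifetime of the file-scope singleton declared at 103: mv_probe() bails out if an instance already exists, publishes the freshly set-up state, and programs the engine; the error path (1143) and mv_remove() (1169) both reset the pointer so a later probe can succeed. A minimal sketch of the probe side; everything elided in the comments, plus the exact error handling, is assumed.

    static int mv_probe(struct platform_device *pdev)
    {
        struct crypto_priv *cp;

        if (cpg) {
            printk(KERN_ERR "mv_crypto: second crypto dev?\n");
            return -EEXIST;     /* only one engine instance is supported */
        }

        cp = kzalloc(sizeof(*cp), GFP_KERNEL);
        if (!cp)
            return -ENOMEM;

        /* map registers and SRAM, init cp->lock and cp->queue, claim the
         * IRQ, start cp->queue_th running queue_manag() (elided; any
         * failure frees cp and leaves cpg NULL) */

        cpg = cp;       /* publish only once setup has succeeded */

        writel(0, cpg->reg + SEC_ACCEL_INT_STATUS);     /* clear stale irqs */
        writel(SEC_INT_ACCEL0_DONE, cpg->reg + SEC_ACCEL_INT_MASK);
        writel(SEC_CFG_STOP_DIG_ERR, cpg->reg + SEC_ACCEL_CFG);
        /* point descriptor fetch at the config block staged in SRAM */
        writel(SRAM_CONFIG, cpg->reg + SEC_ACCEL_DESC_P0);

        /* cipher registration plus the SHA-1/HMAC capability probes that
         * set has_sha1 (1114) and has_hmac_sha1 (1120) are elided */
        return 0;
    }
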