• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2020 HiSilicon (Shanghai) Technologies CO., LIMITED.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  *
15  * Description: hal hash. \n
16  *
17  * History: \n
18  * 2023-03-22, Create file. \n
19  */
20 
21 #include "hal_hash.h"
22 
23 #include "crypto_drv_common.h"
24 #include "hal_spacc_reg.h"
25 
26 #ifndef crypto_memory_barrier
27 #define crypto_memory_barrier()
28 #endif
29 
30 #define CACHE_LINE_SIZE                     32
31 #define HASH_COMPAT_ERRNO(err_code)         HAL_COMPAT_ERRNO(ERROR_MODULE_HASH, err_code)
32 
/*
 * One input DMA node descriptor as consumed by the SPACC hash engine.
 * Layout is hardware-defined: do not reorder, resize or pad the fields.
 */
typedef struct {
    td_u32 hash_first_node : 1;   /* set when this node begins a new message chain */
    td_u32 hash_last_node : 1;    /* set when this node ends the message chain */
    td_u32 reserved_0 : 30;       // reserve
    td_u32 hash_alg_length;       // hash message length
    td_u32 hash_start_addr_l;     // hash message address (low 32 bits of physical address)
    td_u32 hash_start_addr_h : 4; // hash message address — presumably bits [35:32]; this driver always writes 0
} hash_entry_in;
41 
/* Software context for one hash hardware channel. */
typedef struct {
    hash_entry_in *entry_in;    /* !< spacc digest in entry struct (ring of HASH_MAX_DEPTH nodes) */
    td_u32 idx_in;              /* !< current hash nodes to be used */
    td_u32 idx_cnt;             /* !< total hash nodes */
    td_void *wait;              /* opaque wait object handed to wait_func */
    crypto_wait_timeout_interruptible wait_func; /* optional interruptible wait callback */
    td_bool wait_func_enable;   /* wait_func usage enabled via hal_cipher_hash_wait_func_config() */
    td_bool done;               /* completion flag set by hal_cipher_hash_done_notify() (IRQ path) */
    td_bool is_wait;            /* TD_TRUE when the current operation was started in interrupt-wait mode */
    td_bool is_last;            /* TD_TRUE when the last queued node carried IN_NODE_TYPE_LAST */
    td_u32 timeout_ms;          /* timeout passed to wait_func, in milliseconds */
} hal_hash_hard_chn_ctx;
54 
/* Hardware algorithm-select values for the channel key-control register. */
typedef enum {
    HASH_CHN_ALG_SEL_SHA1   = 0xa,
    HASH_CHN_ALG_SEL_SHA2   = 0xb,
    HASH_CHN_ALG_SEL_SM3    = 0xc,
} hash_chn_alg_sel_e;
60 
/* Hardware algorithm-mode (digest width variant) values; meaningful for SHA-2. */
typedef enum {
    HASH_CHN_ALG_MODE_224   = 0x0,
    HASH_CHN_ALG_MODE_256   = 0x1,
    HASH_CHN_ALG_MODE_384   = 0x2,
    HASH_CHN_ALG_MODE_512   = 0x3,
} hash_chn_alg_mode_e;
67 
68 #define HASH_MAX_DEPTH              2
69 #define HASH_NODE_SIZE              (sizeof(hash_entry_in) * HASH_MAX_DEPTH)
70 
71 #if defined(CRYPTO_CONFIG_ROMBOOT_ENV)
72 static td_u8 g_node_buffer[HASH_NODE_SIZE * CRYPTO_HASH_HARD_CHN_CNT];
73 #endif
74 
75 static hal_hash_hard_chn_ctx g_hash_hard_ctx[CRYPTO_HASH_HARD_CHN_CNT];
76 static td_bool g_hal_hash_initialize = TD_FALSE;
77 
/*
 * Mask or unmask the raw "hash done" interrupt for one hardware channel.
 *
 * chn_num: channel index; selects the bit position in HASH_CHANN_RAW_INT_EN.
 * enable:  TD_TRUE to unmask the channel interrupt, TD_FALSE to mask it.
 */
static void inner_hash_irq_enable(td_u32 chn_num, td_bool enable)
{
    td_u32 reg_val;

    reg_val = spacc_reg_read(HASH_CHANN_RAW_INT_EN);
    if (enable == TD_TRUE) {
        /* Use an unsigned constant: (1 << 31) on a signed int is undefined behavior. */
        reg_val |= (1U << chn_num);
    } else {
        reg_val &= ~(1U << chn_num);
    }
    spacc_reg_write(HASH_CHANN_RAW_INT_EN, reg_val);
}
90 
hal_hash_clear_channel(td_u32 chn_num)91 static td_s32 hal_hash_clear_channel(td_u32 chn_num)
92 {
93     td_u32 i = 0;
94     td_u32 clear_finish_reg = 0;
95     td_u32 mask = 0x01 << chn_num;
96 
97     inner_hash_irq_enable(chn_num, TD_FALSE);
98 
99     spacc_reg_write(SPACC_HASH_CHN_CLEAR_REQ, mask);
100     for (i = 0; i < HASH_TIME_OUT_US; i++) {
101         clear_finish_reg = spacc_reg_read(SPACC_INT_RAW_HASH_CLEAR_FINISH);
102         if ((mask & clear_finish_reg) != 0) {
103             spacc_reg_write(SPACC_INT_RAW_HASH_CLEAR_FINISH, mask);
104             break;
105         }
106         crypto_udelay(1);
107     }
108 
109     crypto_chk_return((i >= HASH_TIME_OUT_US), TD_FAILURE, "hash clear channel timeout\n");
110 
111     return TD_SUCCESS;
112 }
113 
/*
 * Map a crypto_hash_type onto the hardware algorithm-select and
 * algorithm-mode register values plus the size (in bytes) of the
 * intermediate state the algorithm produces.
 *
 * Returns TD_SUCCESS, TD_FAILURE for an invalid alg/mode pairing, or
 * ERROR_UNSUPPORT for an unknown hash mode.
 */
static td_s32 priv_hal_hash_get_cfg(crypto_hash_type hash_type, hash_chn_alg_mode_e *mode, hash_chn_alg_sel_e *alg,
    td_u32 *state_size)
{
    td_s32 ret = TD_SUCCESS;
    td_u32 hash_alg = crypto_hash_get_alg(hash_type);
    td_u32 hash_mode = crypto_hash_get_mode(hash_type);

    switch (hash_mode) {
        case CRYPTO_HASH_MODE_UNDEF:
            /* SHA-1 has no width variant; the mode field is left at 0. */
            *mode = 0;
            *alg = HASH_CHN_ALG_SEL_SHA1;
            *state_size = CRYPTO_HASH_BLOCK_SIZE_20BYTE;
            break;
        case CRYPTO_HASH_MODE_224: {
            *mode = HASH_CHN_ALG_MODE_224;
            *alg = HASH_CHN_ALG_SEL_SHA2;
            *state_size = CRYPTO_HASH_BLOCK_SIZE_32BYTE;
            break;
        }
        case CRYPTO_HASH_MODE_256: {
            /* 256-bit digests: distinguish SHA-256 from SM3 by algorithm. */
            if (hash_alg == CRYPTO_HASH_ALG_SHA2) {
                *mode = HASH_CHN_ALG_MODE_256;
                *alg = HASH_CHN_ALG_SEL_SHA2;
                *state_size = CRYPTO_HASH_BLOCK_SIZE_32BYTE;
            } else if (hash_alg == CRYPTO_HASH_ALG_SM3) {
                /* NOTE(review): mode is set to HASH_CHN_ALG_MODE_224 (0x0) for
                 * SM3-256 — presumably the mode field is a don't-care once the
                 * SM3 algorithm is selected (SHA-1 also uses 0 above), but
                 * confirm against the SPACC register spec. */
                *mode = HASH_CHN_ALG_MODE_224;
                *alg = HASH_CHN_ALG_SEL_SM3;
                *state_size = CRYPTO_HASH_BLOCK_SIZE_32BYTE;
            } else {
                crypto_log_err("Invalid Hash Alg!\n");
                ret = TD_FAILURE;
            }
            break;
        }
        case CRYPTO_HASH_MODE_384: {
            *mode = HASH_CHN_ALG_MODE_384;
            *alg = HASH_CHN_ALG_SEL_SHA2;
            *state_size = CRYPTO_HASH_BLOCK_SIZE_64BYTE;
            break;
        }
        case CRYPTO_HASH_MODE_512: {
            *mode = HASH_CHN_ALG_MODE_512;
            *alg = HASH_CHN_ALG_SEL_SHA2;
            *state_size = CRYPTO_HASH_BLOCK_SIZE_64BYTE;
            break;
        }
        default: {
            crypto_log_err("Invalid Hash Mode!\n");
            ret = HASH_COMPAT_ERRNO(ERROR_UNSUPPORT);
            break;
        }
    }
    return ret;
}
168 
hal_cipher_hash_init(td_void)169 td_s32 hal_cipher_hash_init(td_void)
170 {
171     td_u32 i;
172     td_s32 ret = TD_FAILURE;
173     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
174     td_void *node_buffer = TD_NULL;
175     crypto_hal_func_enter();
176 
177     if (g_hal_hash_initialize == TD_TRUE) {
178         return TD_SUCCESS;
179     }
180     (td_void)memset_s(g_hash_hard_ctx, sizeof(g_hash_hard_ctx), 0, sizeof(g_hash_hard_ctx));
181 
182 #if defined(CRYPTO_CONFIG_ROMBOOT_ENV)
183     node_buffer = g_node_buffer;
184 #else
185     node_buffer = crypto_malloc_mmz(HASH_NODE_SIZE * CRYPTO_HASH_HARD_CHN_CNT);
186     if (node_buffer == TD_NULL) {
187         crypto_log_err("crypto_malloc_mmz failed\n");
188         return TD_FAILURE;
189     }
190 #endif
191     for (i = 0; i < CRYPTO_HASH_HARD_CHN_CNT; i++) {
192         hard_ctx = &g_hash_hard_ctx[i];
193         hard_ctx->entry_in = (hash_entry_in *)((uintptr_t)node_buffer + HASH_NODE_SIZE * i);
194         hard_ctx->idx_cnt = HASH_MAX_DEPTH;
195         hard_ctx->idx_in = 0;
196     }
197 
198     g_hal_hash_initialize = TD_TRUE;
199 
200     ret = TD_SUCCESS;
201     crypto_hal_func_exit();
202     return ret;
203 }
204 
/*
 * Tear down the hash HAL: unlock every hardware channel, scrub the shared
 * DMA node buffer, and (in non-ROM builds) release it.
 * Safe to call when not initialized; returns TD_SUCCESS in that case.
 */
td_s32 hal_cipher_hash_deinit(td_void)
{
    td_u32 i;
    td_s32 ret = TD_FAILURE;
    td_void *node_buffer = TD_NULL;
    td_u32 node_size = HASH_NODE_SIZE * CRYPTO_HASH_HARD_CHN_CNT;
    crypto_hal_func_enter();

    if (g_hal_hash_initialize == TD_FALSE) {
        return TD_SUCCESS;
    }

    /* All channel contexts share one buffer; channel 0 holds its base address. */
    node_buffer = (td_void *)g_hash_hard_ctx[0].entry_in;
    for (i = 0; i < CRYPTO_HASH_HARD_CHN_CNT; i++) {
        hal_hash_unlock(i);
    }
    /* Scrub the node descriptors (they reference message addresses) before release. */
    (td_void)memset_s(node_buffer, node_size, 0, node_size);
#if !defined(CRYPTO_CONFIG_ROMBOOT_ENV)
    /* NOTE(review): this buffer was allocated with crypto_malloc_mmz() in
     * hal_cipher_hash_init() but is freed with crypto_free_coherent() here —
     * confirm these form a matching alloc/free pair. */
    crypto_free_coherent(node_buffer);
#endif
    g_hal_hash_initialize = TD_FALSE;
    ret = TD_SUCCESS;
    crypto_hal_func_exit();
    return ret;
}
230 
231 #define HASH_SS_SECURE_VAL      0x5
232 #define HASH_SS_NONSECURE_VAL    0xa
233 
hal_hash_lock(td_u32 chn_num)234 td_s32 hal_hash_lock(td_u32 chn_num)
235 {
236     td_u32 used = 0;
237     td_u32 chnn_who_used = 0;
238     spacc_cpu_mask cpu_mask = SPACC_CPU_IDLE;
239     in_hash_chn_ctrl ctrl = { 0 };
240     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
241 
242     crypto_hal_func_enter();
243 
244     if ((chn_num > CRYPTO_HASH_HARD_CHN_CNT) || (chn_num == 0)) {
245         return TD_FAILURE;
246     }
247 
248     hard_ctx = &g_hash_hard_ctx[chn_num];
249     if (crypto_get_cpu_type() == CRYPTO_CPU_TYPE_SCPU) {
250         cpu_mask = SPACC_CPU_TEE;
251     } else {
252         cpu_mask = SPACC_CPU_REE;
253     }
254 
255     used = spacc_reg_read(SPACC_HASH_CHN_LOCK);
256 
257     /* try to use this channel. */
258     chnn_who_used = CHN_WHO_USED_GET(used, chn_num);
259     if (chnn_who_used != SPACC_CPU_IDLE) {
260         return HASH_COMPAT_ERRNO(ERROR_CHN_BUSY);
261     }
262     CHN_WHO_USED_SET(used, chn_num, cpu_mask);
263     spacc_reg_write(SPACC_HASH_CHN_LOCK, used);
264 
265     /* check if lock success. */
266     used = spacc_reg_read(SPACC_HASH_CHN_LOCK);
267     chnn_who_used = CHN_WHO_USED_GET(used, chn_num);
268     if (chnn_who_used != cpu_mask) {
269         return HASH_COMPAT_ERRNO(ERROR_CHN_BUSY);
270     }
271 
272     /* clear hash channel. */
273     hal_hash_clear_channel(chn_num);
274 
275     /* set secure cfg. */
276     ctrl.bits.hash_chn_en = 1;
277     if (crypto_get_cpu_type() == CRYPTO_CPU_TYPE_SCPU) {
278         ctrl.bits.hash_chn_ss = HASH_SS_SECURE_VAL;
279     } else {
280         ctrl.bits.hash_chn_ss = HASH_SS_NONSECURE_VAL;
281     }
282     spacc_reg_write(IN_HASH_CHN_CTRL(chn_num), ctrl.u32);
283 
284     /* set node start addr and node length. */
285     spacc_reg_write(IN_HASH_CHN_NODE_START_ADDR_L(chn_num), crypto_get_phys_addr(hard_ctx->entry_in));
286     spacc_reg_write(IN_HASH_CHN_NODE_START_ADDR_H(chn_num), 0);
287     spacc_reg_write(IN_HASH_CHN_NODE_LENGTH(chn_num), HASH_MAX_DEPTH);
288 
289     hal_cipher_hash_done_try(chn_num);
290 
291     hard_ctx->idx_in = 0;
292     (td_void)memset_s(hard_ctx->entry_in, HASH_NODE_SIZE, 0, HASH_NODE_SIZE);
293 
294     crypto_hal_func_exit();
295     return TD_SUCCESS;
296 }
297 
hal_hash_unlock(td_u32 chn_num)298 td_s32 hal_hash_unlock(td_u32 chn_num)
299 {
300     td_u32 used;
301     crypto_hal_func_enter();
302 
303     used = spacc_reg_read(SPACC_HASH_CHN_LOCK);
304     CHN_WHO_USED_CLR(used, chn_num);
305     spacc_reg_write(SPACC_HASH_CHN_LOCK, used);
306 
307     crypto_hal_func_exit();
308     return TD_SUCCESS;
309 }
310 
hal_cipher_hash_config(td_u32 chn_num,crypto_hash_type hash_type,const td_u32 * state)311 td_s32 hal_cipher_hash_config(td_u32 chn_num, crypto_hash_type hash_type, const td_u32 *state)
312 {
313     td_s32 ret;
314     td_u32 i;
315     in_hash_chn_key_ctrl hash_key_ctrl = { 0 };
316     hash_chn_alg_mode_e mode;
317     hash_chn_alg_sel_e alg;
318     td_u32 state_size;
319     crypto_hal_func_enter();
320 
321     crypto_param_require(g_hal_hash_initialize == TD_TRUE);
322     crypto_param_require(chn_num <= CRYPTO_HASH_HARD_CHN_CNT);
323     crypto_param_require(state != TD_NULL);
324 
325     ret = priv_hal_hash_get_cfg(hash_type, &mode, &alg, &state_size);
326     crypto_chk_return(ret != TD_SUCCESS, ret, "priv_hal_hash_get_cfg failed\n");
327 
328     hash_key_ctrl.u32 = spacc_reg_read(IN_HASH_CHN_KEY_CTRL(chn_num));
329     hash_key_ctrl.bits.hash_chn_alg_mode = mode;
330     hash_key_ctrl.bits.hash_chn_alg_sel = alg;
331     spacc_reg_write(IN_HASH_CHN_KEY_CTRL(chn_num), hash_key_ctrl.u32);
332 
333     /* Write last state */
334     for (i = 0; i < state_size / CRYPTO_WORD_WIDTH; i++) {
335         spacc_reg_write(CHANN_HASH_STATE_VAL_ADDR(chn_num), i);
336         spacc_reg_write(CHANN_HASH_STATE_VAL(chn_num), state[i]);
337     }
338 
339     crypto_hal_func_exit();
340     return TD_SUCCESS;
341 }
342 
hal_cipher_hash_attach(td_u32 chn_num,td_u32 keyslot_chn_num)343 td_s32 hal_cipher_hash_attach(td_u32 chn_num, td_u32 keyslot_chn_num)
344 {
345     in_hash_chn_key_ctrl hash_key_ctrl = { 0 };
346     crypto_hal_func_enter();
347 
348     hash_key_ctrl.u32 = spacc_reg_read(IN_HASH_CHN_KEY_CTRL(chn_num));
349     hash_key_ctrl.bits.hash_key_chn_id = keyslot_chn_num;
350     hash_key_ctrl.bits.hmac_vld = 1;
351     spacc_reg_write(IN_HASH_CHN_KEY_CTRL(chn_num), hash_key_ctrl.u32);
352     crypto_hal_func_exit();
353     return CRYPTO_SUCCESS;
354 }
355 
hal_cipher_hash_add_in_node(td_u32 chn_num,td_phys_addr_t data_phys,td_u32 data_len,in_node_type_e in_node_type)356 td_s32 hal_cipher_hash_add_in_node(td_u32 chn_num, td_phys_addr_t data_phys, td_u32 data_len,
357     in_node_type_e in_node_type)
358 {
359     td_u32 idx;
360     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
361     hash_entry_in *entry_in = TD_NULL;
362     crypto_hal_func_enter();
363 
364     crypto_chk_return(data_phys == 0, HASH_COMPAT_ERRNO(ERROR_INVALID_PHYS_ADDR), "data_phys is invalid\n");
365 
366     hard_ctx = &g_hash_hard_ctx[chn_num];
367 
368     /* clear entry_in. */
369     idx = hard_ctx->idx_in++;
370     hard_ctx->idx_in %= hard_ctx->idx_cnt;
371 
372     entry_in = &hard_ctx->entry_in[idx];
373     (td_void)memset_s(entry_in, sizeof(hash_entry_in), 0, sizeof(hash_entry_in));
374 
375     /* set addr and length. */
376     entry_in->hash_first_node = ((in_node_type & IN_NODE_TYPE_FIRST) == 0) ? 0 : 1;
377     entry_in->hash_last_node = ((in_node_type & IN_NODE_TYPE_LAST) == 0) ? 0 : 1;
378     entry_in->hash_start_addr_l = data_phys;
379     entry_in->hash_start_addr_h = 0;
380     entry_in->hash_alg_length = data_len;
381 
382     if ((in_node_type & IN_NODE_TYPE_LAST) != 0) {
383         hard_ctx->is_last = TD_TRUE;
384     } else {
385         hard_ctx->is_last = TD_FALSE;
386     }
387 
388     crypto_hal_func_exit();
389     return CRYPTO_SUCCESS;
390 }
391 
/*
 * Kick the hardware to process the node(s) queued on channel chn_num.
 *
 * is_wait selects the completion mechanism: when a wait function is
 * registered, enabled, and supported, the channel interrupt is unmasked and
 * hal_cipher_hash_wait_done() will sleep on it; otherwise the interrupt
 * stays masked and completion is detected by polling.
 *
 * Advancing the write pointer by one is what actually starts the engine, so
 * the node ring must be flushed to memory before the register write.
 */
td_s32 hal_cipher_hash_start(td_u32 chn_num, td_bool is_wait)
{
    td_u32 ptr;
    in_hash_chn_node_wr_point in_node_wr_ptr;
    hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
    crypto_hal_func_enter();

    hard_ctx = &g_hash_hard_ctx[chn_num];

    if (hard_ctx->wait_func != TD_NULL && is_wait && hard_ctx->wait_func_enable && crypto_wait_func_is_support()) {
        /* Interrupt mode: arm the done flag and unmask the channel IRQ. */
        hard_ctx->done = TD_FALSE;
        hard_ctx->is_wait = TD_TRUE;
        inner_hash_irq_enable(chn_num, TD_TRUE);
    } else {
        /* Polling mode: keep the channel IRQ masked. */
        hard_ctx->is_wait = TD_FALSE;
        inner_hash_irq_enable(chn_num, TD_FALSE);
    }

    /* Advance the in-node write pointer by one (a single node per start),
     * wrapping at the ring depth. */
    in_node_wr_ptr.u32 = spacc_reg_read(IN_HASH_CHN_NODE_WR_POINT(chn_num));

    ptr = in_node_wr_ptr.bits.hash_chn_node_wr_point + 1;
    in_node_wr_ptr.bits.hash_chn_node_wr_point = ptr % hard_ctx->idx_cnt;

    /* make sure all the above explicit memory accesses and instructions are completed
     * before start the hardware.
     */
    crypto_memory_barrier();
    crypto_cache_flush((uintptr_t)hard_ctx->entry_in, HASH_NODE_SIZE);
    /* Start: the write-pointer update triggers the hardware fetch. */
    spacc_reg_write(IN_HASH_CHN_NODE_WR_POINT(chn_num), in_node_wr_ptr.u32);

    crypto_hal_func_exit();
    return TD_SUCCESS;
}
427 
hal_hash_condition(const td_void * param)428 static td_s32 hal_hash_condition(const td_void *param)
429 {
430     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
431     td_u32 chn_num = *(td_u32 *)param;
432 
433     hard_ctx = &g_hash_hard_ctx[chn_num];
434     if (hard_ctx->done == TD_TRUE) {
435         hard_ctx->done = TD_FALSE;
436         return TD_TRUE;
437     } else {
438         return TD_FALSE;
439     }
440 }
441 
/* Back-off helper for completion polling loops: busy-wait for the first
 * MS_TO_US iterations, then sleep per iteration to avoid hogging the CPU. */
#define crypto_hash_wait(idx) do {  \
    if ((idx) <= MS_TO_US) {  \
        crypto_udelay(1); /* short wait: busy-delay 1 us */ \
    } else {    \
        crypto_msleep(1); /* long wait: sleep 1 ms */ \
    } \
} while (0)
449 
inner_hash_wait_chain_done(td_u32 chn_num,hal_hash_hard_chn_ctx * hard_ctx)450 static td_s32 inner_hash_wait_chain_done(td_u32 chn_num, hal_hash_hard_chn_ctx *hard_ctx)
451 {
452     td_s32 ret = TD_SUCCESS;
453     td_u32 i;
454 
455     if ((hard_ctx->is_wait == TD_TRUE) && (hard_ctx->wait_func != TD_NULL)) {
456         crypto_log_dbg("hash wait_function mode\r\n");
457         ret = hard_ctx->wait_func(hard_ctx->wait, hal_hash_condition, (td_void *)(&chn_num), hard_ctx->timeout_ms);
458         if (ret <= 0) {
459             crypto_log_err("wait_func Timeout!, ret is 0x%x\n", ret);
460             ret = HASH_COMPAT_ERRNO(ERROR_HASH_CALC_TIMEOUT);
461         }
462     } else {
463         crypto_log_dbg("hash busy delay mode\r\n");
464         for (i = 0; i < CRYPTO_HAL_HASH_CALC_TIMEOUT; i++) {
465             if (hal_cipher_hash_done_try(chn_num) != 0) {
466                 break;
467             }
468             crypto_hash_wait(i);
469         }
470         if (i >= CRYPTO_HAL_HASH_CALC_TIMEOUT) {
471             crypto_log_err("hash wait done timeout, chn=%d\n", chn_num);
472             ret = HASH_COMPAT_ERRNO(ERROR_HASH_CALC_TIMEOUT);
473         }
474     }
475     return ret;
476 }
477 
/*
 * Wait for the hardware to consume the queued node(s) on chn_num without
 * waiting for the full-chain digest (used for non-final fragments).
 *
 * Two-phase drain: first wait for the node read pointer to catch up with the
 * write pointer (all nodes fetched), then wait for the channel's unprocessed
 * data length to drop to zero (all bytes absorbed).
 *
 * Returns CRYPTO_SUCCESS or ERROR_HASH_CALC_TIMEOUT.
 */
static td_s32 inner_hash_wait_node_done(td_u32 chn_num)
{
    td_u32 i;
    td_u32 unproc_data_len = 0;
    in_hash_chn_node_wr_point hash_wr_ptr = { 0 };
    in_hash_chn_node_rd_point hash_rd_ptr = { 0 };

    /* Phase 1: read pointer == write pointer means every node was fetched. */
    for (i = 0; i < CRYPTO_HAL_HASH_CALC_TIMEOUT; i++) {
        hash_rd_ptr.u32 = spacc_reg_read(IN_HASH_CHN_NODE_RD_POINT(chn_num));
        hash_wr_ptr.u32 = spacc_reg_read(IN_HASH_CHN_NODE_WR_POINT(chn_num));
        if (hash_rd_ptr.u32 == hash_wr_ptr.u32) {
            break;
        }
        crypto_hash_wait(i);
    }

    if (i >= CRYPTO_HAL_HASH_CALC_TIMEOUT) {
        return HASH_COMPAT_ERRNO(ERROR_HASH_CALC_TIMEOUT);
    }

    // After the value of the read pointer is increased by 1, wait a microsecond to ensure that the
    // value of IN_HASH_CHN_DATA_LEN is switched from 0 to the actual unprocessed data length.
    crypto_udelay(1);
    /* Phase 2: wait until the channel reports no unprocessed data. */
    for (i = 0; i < CRYPTO_HAL_HASH_CALC_TIMEOUT; i++) {
        unproc_data_len = spacc_reg_read(IN_HASH_CHN_DATA_LEN(chn_num));
        if (unproc_data_len == 0) {
            break;
        }
        crypto_hash_wait(i);
    }
    if (i >= CRYPTO_HAL_HASH_CALC_TIMEOUT) {
        return HASH_COMPAT_ERRNO(ERROR_HASH_CALC_TIMEOUT);
    }

    return CRYPTO_SUCCESS;
}
514 
/*
 * Wait for the operation started on chn_num to finish and optionally read
 * back the hash state registers.
 *
 * Waits for full-chain completion when the last queued node carried
 * IN_NODE_TYPE_LAST, otherwise only for the node list / input FIFO to drain.
 * On timeout the debug registers are dumped and the timeout errno returned;
 * any other wait result is normalized to CRYPTO_SUCCESS.
 *
 * state/state_size: destination for the intermediate or final digest words;
 * pass state == TD_NULL to skip the read-back.
 */
td_s32 hal_cipher_hash_wait_done(td_u32 chn_num, td_u32 *state, td_u32 state_size)
{
    td_u32 i;
    td_s32 ret = TD_SUCCESS;
    hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
    crypto_hal_func_enter();

    crypto_param_require((state_size / CRYPTO_WORD_WIDTH) <= CRYPTO_HASH_RESULT_SIZE_MAX_IN_WORD);

    hard_ctx = &g_hash_hard_ctx[chn_num];

    if (hard_ctx->is_last) {
        ret = inner_hash_wait_chain_done(chn_num, hard_ctx);
    } else {
        ret = inner_hash_wait_node_done(chn_num);
    }
    if (ret == HASH_COMPAT_ERRNO(ERROR_HASH_CALC_TIMEOUT)) {
        hal_hash_debug();
        hal_hash_debug_chn(chn_num);
    } else {
        /* wait_func may return a positive count on success; fold to success. */
        ret = CRYPTO_SUCCESS;
    }
    inner_hash_irq_enable(chn_num, TD_FALSE);

    /* read hash result */
    if (state != TD_NULL) {
        /* Indirect register window: write the word index, then read its value. */
        for (i = 0; i < state_size / CRYPTO_WORD_WIDTH; i++) {
            spacc_reg_write(CHANN_HASH_STATE_VAL_ADDR(chn_num), i);
            state[i] = spacc_reg_read(CHANN_HASH_STATE_VAL(chn_num));
        }
    }

    crypto_hal_func_exit();
    return ret;
}
550 
hal_cipher_hash_register_wait_func(td_u32 chn_num,td_void * wait,crypto_wait_timeout_interruptible wait_func,td_u32 timeout_ms)551 td_s32 hal_cipher_hash_register_wait_func(td_u32 chn_num, td_void *wait,
552     crypto_wait_timeout_interruptible wait_func, td_u32 timeout_ms)
553 {
554     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
555     crypto_hal_func_enter();
556 
557     hard_ctx = &g_hash_hard_ctx[chn_num];
558     hard_ctx->wait = wait;
559     hard_ctx->wait_func = wait_func;
560     hard_ctx->timeout_ms = timeout_ms;
561 
562     crypto_hal_func_exit();
563     return TD_SUCCESS;
564 }
565 
hal_cipher_hash_done_notify(td_u32 chn_num)566 td_s32 hal_cipher_hash_done_notify(td_u32 chn_num)
567 {
568     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
569     crypto_hal_func_enter();
570 
571     hard_ctx = &g_hash_hard_ctx[chn_num];
572     hard_ctx->done = TD_TRUE;
573 
574     crypto_hal_func_exit();
575     return TD_SUCCESS;
576 }
577 
/*
 * Test-and-clear the raw completion interrupt of channel chn_num.
 * Returns the channel's raw interrupt bit: non-zero if it had completed.
 */
td_u32 hal_cipher_hash_done_try(td_u32 chn_num)
{
    const td_u32 chn_bit = 0x01 << chn_num;
    hash_chann_raw_int raw_int = { 0 };

    raw_int.u32 = spacc_reg_read(HASH_CHANN_RAW_INT) & chn_bit;
    /* Write-1-to-clear the bit we just sampled (no-op when it was 0). */
    spacc_reg_write(HASH_CHANN_RAW_INT, raw_int.u32);

    return raw_int.u32;
}
588 
hal_hash_debug(td_void)589 td_void hal_hash_debug(td_void)
590 {
591     td_u32 i;
592     td_u32 chnn_who_used;
593     td_u32 used;
594     crypto_hal_func_enter();
595 
596     used = spacc_reg_read(SPACC_HASH_CHN_LOCK);
597 
598     crypto_print("The Status of Hash Hard Channel:\n");
599     for (i = 0; i < CRYPTO_HASH_HARD_CHN_CNT; i++) {
600         chnn_who_used = CHN_WHO_USED_GET(used, i);
601         if (chnn_who_used == SPACC_CPU_REE) {
602             crypto_print("CHN %d is locked by REE CPU\n", i);
603         } else if (chnn_who_used == SPACC_CPU_TEE) {
604             crypto_print("CHN %d is locked by TEE CPU\n", i);
605         } else {
606             crypto_print("CHN %d is idle\n", i);
607         }
608     }
609     crypto_print("HASH_CHANN_RAW_INT is 0x%x\n", spacc_reg_read(HASH_CHANN_RAW_INT));
610     crypto_print("HASH_CHANN_RAW_INT_EN is 0x%x\n", spacc_reg_read(HASH_CHANN_RAW_INT_EN));
611 
612     crypto_print("TEE_HASH_CALC_CTRL_CHECK_ERR is 0x%x\n", spacc_reg_read(TEE_HASH_CALC_CTRL_CHECK_ERR));
613     crypto_print("TEE_HASH_CALC_CTRL_CHECK_ERR_STATUS is 0x%x\n", spacc_reg_read(TEE_HASH_CALC_CTRL_CHECK_ERR_STATUS));
614 
615     crypto_print("REE_HASH_CALC_CTRL_CHECK_ERR is 0x%x\n", spacc_reg_read(REE_HASH_CALC_CTRL_CHECK_ERR));
616     crypto_print("REE_HASH_CALC_CTRL_CHECK_ERR_STATUS is 0x%x\n", spacc_reg_read(REE_HASH_CALC_CTRL_CHECK_ERR_STATUS));
617 
618     crypto_hal_func_exit();
619 }
620 
/*
 * Dump everything about one hash channel for post-mortem debugging: control
 * and node registers, the software context, every in-node descriptor, and
 * the current intermediate state words.
 */
td_void hal_hash_debug_chn(td_u32 chn_num)
{
    td_u32 i;
    hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
    hash_entry_in *entry_in = TD_NULL;
    crypto_hal_func_enter();

    crypto_unused(entry_in);
    if (chn_num >= CRYPTO_HASH_HARD_CHN_CNT) {
        crypto_log_err("Invalid chn_num!\n");
        return;
    }

    hard_ctx = &g_hash_hard_ctx[chn_num];

    crypto_print("The Status of Hash Hard Channel %d:\n", chn_num);
    crypto_print("IN_HASH_CHN_CTRL is 0x%x\n", spacc_reg_read(IN_HASH_CHN_CTRL(chn_num)));
    crypto_print("IN_HASH_CHN_KEY_CTRL is 0x%x\n", spacc_reg_read(IN_HASH_CHN_KEY_CTRL(chn_num)));
    crypto_print("IN_HASH_CHN_NODE_LENGTH: 0x%x\n", spacc_reg_read(IN_HASH_CHN_NODE_LENGTH(chn_num)));
    crypto_print("IN_HASH_CHN_NODE_START_ADDR_L: 0x%x\n", spacc_reg_read(IN_HASH_CHN_NODE_START_ADDR_L(chn_num)));
    crypto_print("IN_HASH_CHN_NODE_START_ADDR_H: 0x%x\n", spacc_reg_read(IN_HASH_CHN_NODE_START_ADDR_H(chn_num)));

    crypto_print("IN_HASH_CHN_NODE_WR_POINT: 0x%x\n", spacc_reg_read(IN_HASH_CHN_NODE_WR_POINT(chn_num)));
    crypto_print("IN_HASH_CHN_NODE_RD_POINT: 0x%x\n", spacc_reg_read(IN_HASH_CHN_NODE_RD_POINT(chn_num)));

    crypto_print("entry_in's virtual addr is %p, physical addr is 0x%lx\n", hard_ctx->entry_in,
        (td_ulong)crypto_get_phys_addr(hard_ctx->entry_in));
    crypto_print("Total Entry Count is %u, Current Entry IDX is %u\n", hard_ctx->idx_cnt, hard_ctx->idx_in);
    if (hard_ctx->wait_func != TD_NULL) {
        crypto_print("Register Wait Func, timeout is %u ms, done flag is %s\n", hard_ctx->timeout_ms, hard_ctx->done ?
            "TD_TRUE" : "TD_FALSE");
    }

    /* Print Entry_In. */
    for (i = 0; i < HASH_MAX_DEPTH; i++) {
        crypto_print("Entry_IN[%u]:\n", i);
        entry_in = &hard_ctx->entry_in[i];
        crypto_print("hash_first_node is 0x%x\n", entry_in->hash_first_node);
        crypto_print("hash_last_node is 0x%x\n", entry_in->hash_last_node);
        crypto_print("hash_alg_length is 0x%x\n", entry_in->hash_alg_length);
        crypto_print("hash_start_addr_l is 0x%x\n", entry_in->hash_start_addr_l);
        crypto_print("hash_start_addr_h is 0x%x\n", entry_in->hash_start_addr_h);
    }

    /* Print last state (indirect window: write the word index, read the value). */
    for (i = 0; i < CRYPTO_HASH_RESULT_SIZE_MAX_IN_WORD; i++) {
        spacc_reg_write(CHANN_HASH_STATE_VAL_ADDR(chn_num), i);
        crypto_print("CHANn_HASH_STATE_VAL[%d] is 0x%x\n", i, spacc_reg_read(CHANN_HASH_STATE_VAL(chn_num)));
    }
    crypto_hal_func_exit();
}
672 
hal_cipher_hash_wait_func_config(td_u32 chn_num,td_bool is_enable)673 td_s32 hal_cipher_hash_wait_func_config(td_u32 chn_num, td_bool is_enable)
674 {
675     hal_hash_hard_chn_ctx *hard_ctx = TD_NULL;
676     crypto_hal_func_enter();
677 
678     hard_ctx = &g_hash_hard_ctx[chn_num];
679     hard_ctx->wait_func_enable = is_enable;
680 
681     crypto_hal_func_exit();
682     return TD_SUCCESS;
683 }