1 /*
2 * Copyright (C) 2021 HiSilicon (Shanghai) Technologies CO., LIMITED.
3 *
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
8 *
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
13 *
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 */
18
19 #include "drv_osal_lib.h"
20 #include "cryp_symc.h"
21 #include "securec.h"
22
23 #ifdef SOFT_SM4_SUPPORT
24
25 /* *********************** Internal Structure Definition ********************* */
#define SM4_BLOCK_SIZE 16

/* FK system parameters: XORed into the four user-key words at the start of the
 * key schedule (see sm4_set_encrypt_key). */
static const hi_u32 g_fk[SM4_BLOCK_SIZE / WORD_WIDTH] = {
    0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc
};

/* CK round constants: one 32-bit constant per round of the key expansion. */
static const hi_u32 g_ck[] = {
    0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269,
    0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
    0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
    0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
    0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
    0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
    0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
    0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
};
42
/* SM4 S-box (256 byte values laid out as a 16x16 table): mix_r indexes it with
 * the high nibble of each input byte as the row and the low nibble as the
 * column. */
static const hi_u32 g_sbox[16][16] = { /* Two-dimensional array 16, 16. */
    {0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7, 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05},
    {0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3, 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99},
    {0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a, 0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62},
    {0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95, 0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6},
    {0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba, 0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8},
    {0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b, 0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35},
    {0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2, 0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87},
    {0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52, 0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e},
    {0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5, 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1},
    {0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55, 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3},
    {0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60, 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f},
    {0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f, 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51},
    {0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f, 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8},
    {0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd, 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0},
    {0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e, 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84},
    {0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20, 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48}
};
61
/* Load a 32-bit word from 4 bytes in big-endian order. */
#define getu32(pt) (((hi_u32)(pt)[0] << 24) ^ ((hi_u32)(pt)[1] << 16) ^ ((hi_u32)(pt)[2] << 8) ^ ((hi_u32)(pt)[3]))
/* Store a 32-bit word to 4 bytes in big-endian order. */
#define putu32(ct, st) \
    do { \
        (ct)[0] = (hi_u8)((st) >> 24); \
        (ct)[1] = (hi_u8)((st) >> 16); \
        (ct)[2] = (hi_u8)((st) >> 8); \
        (ct)[3] = (hi_u8)(st); \
    } while (0)

/* Selects which linear transform mix_r applies: key schedule vs. cipher rounds. */
#define KEY_EXT 0
#define CIPHER 1
#define SM4_RD_KEY_LEN 32     /* number of rounds / round keys */
#define SM4_RD_KEY_BUF_LEN 36 /* 4 working words + 32 round keys */
typedef struct {
    hi_u32 rd_key[SM4_RD_KEY_BUF_LEN]; /* expanded round keys; only the first SM4_RD_KEY_LEN entries are used */
} sm4_key;

/* Word/byte view of a 32-bit value, used to re-assemble S-box output. */
typedef union {
    hi_u32 i; /* descript: i = {c[3], c[2], c[1], c[0]} */
    hi_u8 c[WORD_WIDTH];
} is4;

/* Per-channel soft-SM4 context. */
typedef struct {
    hi_u8 key[SM4_KEY_SIZE]; /* sm4 even round keys, default */
    hi_u32 klen; /* symc key length */
    hi_u8 iv[AES_IV_SIZE];   /* chaining value (CBC) or counter (CTR) */
    symc_mode mode;          /* ECB/CBC/CTR, set by ext_sm4_setmode */
} ext_sm4_context;

/* linearity replace: left shift left, right shift right, max for residual.
 * With right == 32 - left and max masking the low `left` bits, this is a
 * 32-bit rotate-left by `left` (the '+' cannot carry because of the mask). */
#define linearity_replace(tmp, left, right, max) (((tmp) << (left)) + (((tmp) >> (right)) & (max)))
93
94 /* Sbox */
mix_r(const hi_u32 data_in,const hi_s32 type)95 static hi_u32 mix_r(const hi_u32 data_in, const hi_s32 type)
96 {
97 is4 temp;
98 hi_u32 rep_rsl;
99 hi_u8 sbox_tmp[WORD_WIDTH] = {0};
100 hi_u8 sbox_c, sbox_r;
101 hi_s32 j;
102
103 for (j = 0; j < WORD_WIDTH; j++) {
104 sbox_tmp[j] = 0;
105 sbox_r = ((data_in << (j * BYTE_BITS)) >> 28); /* soft sm4 alg: right shift 28 bits. */
106 sbox_c = ((data_in << (j * BYTE_BITS + BYTE_4BIT)) >> 28); /* soft sm4 alg: right shift 28 bits. */
107 sbox_tmp[j] = g_sbox[sbox_r][sbox_c];
108 }
109 temp.c[WORD_IDX_3] = sbox_tmp[WORD_IDX_0];
110 temp.c[WORD_IDX_2] = sbox_tmp[WORD_IDX_1];
111 temp.c[WORD_IDX_1] = sbox_tmp[WORD_IDX_2];
112 temp.c[WORD_IDX_0] = sbox_tmp[WORD_IDX_3];
113
114 /* linearity replace */
115 if (type == KEY_EXT) {
116 rep_rsl = temp.i ^ \
117 linearity_replace(temp.i, 13, 19, 0x00001fff) ^ \ /* linearity replace: left shift 13, 19, 0x00001fff. */
118 linearity_replace(temp.i, 23, 9, 0x007fffff); /* linearity replace: left shift 23, 9, 0x007fffff. */
119 } else {
120 rep_rsl = temp.i ^ \
121 linearity_replace(temp.i, 2, 30, 0x00000003) ^ \ /* linearity replace: left shift 2, 30, 0x00000003. */
122 linearity_replace(temp.i, 10, 22, 0x000003ff) ^ \ /* linearity replace: left shift 10, 22, 0x000003ff. */
123 linearity_replace(temp.i, 18, 14, 0x0003ffff) ^ \ /* linearity replace: left shift 18, 14, 0x0003ffff. */
124 linearity_replace(temp.i, 24, 8, 0x00ffffff); /* linearity replace: left shift 24, 8, 0x00ffffff. */
125 }
126
127 return rep_rsl;
128 }
129
130 /* Set key */
/*
 * Expand the 128-bit user key into the 32 SM4 round keys.
 * Returns 0 on success, -1 on null arguments.  `bits` is accepted for
 * interface compatibility; only 128-bit keys are supported.
 */
static hi_s32 sm4_set_encrypt_key(const hi_u8 *user_key, const hi_s32 bits, sm4_key *key)
{
    hi_s32 i;
    hi_u32 k_temp[SM4_KEY_SIZE / WORD_WIDTH] = {0};
    hi_u32 rk[SM4_RD_KEY_BUF_LEN] = {0}; /* renamed: the old local shadowed the sm4_key type */
    hi_u32 temp;

    crypto_unused(bits);

    if (user_key == HI_NULL || key == HI_NULL) {
        return -1;
    }

    /* Load the user key as four big-endian words and mix in the FK constants. */
    k_temp[WORD_IDX_0] = getu32(user_key);
    k_temp[WORD_IDX_1] = getu32(user_key + WORD_IDX_1 * WORD_WIDTH);
    k_temp[WORD_IDX_2] = getu32(user_key + WORD_IDX_2 * WORD_WIDTH);
    k_temp[WORD_IDX_3] = getu32(user_key + WORD_IDX_3 * WORD_WIDTH);

    for (i = 0; i < SM4_KEY_SIZE / WORD_WIDTH; i++) {
        rk[i] = k_temp[i] ^ g_fk[i];
    }

    /* One round key per iteration; a bounded for-loop replaces the old
     * for(;;) whose trailing cleanup/return was unreachable dead code. */
    for (i = 0; i < SM4_RD_KEY_LEN; i++) {
        temp = rk[i + WORD_IDX_1] ^ rk[i + WORD_IDX_2] ^ rk[i + WORD_IDX_3] ^ g_ck[i];
        rk[i + WORD_IDX_4] = rk[i] ^ mix_r(temp, KEY_EXT);
        key->rd_key[i] = rk[i + WORD_IDX_4];
    }

    /* Bug fix: zeroize the whole schedule.  SM4_RD_KEY_BUF_LEN is an element
     * count (36), so the old call cleared only 36 of sizeof(rk) bytes and left
     * key material on the stack. */
    (hi_void)memset_s(rk, sizeof(rk), 0, sizeof(rk));
    return 0;
}
164
165 /* SM4 Encrypt */
sm4_encrypt(const hi_u8 * in,hi_u8 * out,const sm4_key * key,hi_u32 len)166 static hi_void sm4_encrypt(const hi_u8 *in, hi_u8 *out, const sm4_key *key, hi_u32 len)
167 {
168 hi_u32 s[SM4_RD_KEY_BUF_LEN] = {0};
169 hi_u32 temp;
170 hi_s32 i;
171
172 crypto_unused(len);
173
174 s[WORD_IDX_0] = getu32(in + WORD_IDX_0 * WORD_WIDTH);
175 s[WORD_IDX_1] = getu32(in + WORD_IDX_1 * WORD_WIDTH);
176 s[WORD_IDX_2] = getu32(in + WORD_IDX_2 * WORD_WIDTH);
177 s[WORD_IDX_3] = getu32(in + WORD_IDX_3 * WORD_WIDTH);
178
179 for (i = 0; i < SM4_RD_KEY_LEN; i++) {
180 temp = s[i + WORD_IDX_1] ^ s[i + WORD_IDX_2] ^ s[i + WORD_IDX_3] ^ key->rd_key[i];
181 s[i + WORD_IDX_4] = s[i] ^ mix_r(temp, CIPHER);
182 }
183 putu32(out + WORD_IDX_0 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_3]);
184 putu32(out + WORD_IDX_1 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_2]);
185 putu32(out + WORD_IDX_2 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_1]);
186 putu32(out + WORD_IDX_3 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_0]);
187 (hi_void)memset_s(s, sizeof(s), 0, SM4_RD_KEY_BUF_LEN);
188 return;
189 }
190
191 /* SM4 Decrypt */
sm4_decrypt(const hi_u8 * in,hi_u8 * out,const sm4_key * key,hi_u32 len)192 static hi_void sm4_decrypt(const hi_u8 *in, hi_u8 *out, const sm4_key *key, hi_u32 len)
193 {
194 hi_u32 s[SM4_RD_KEY_BUF_LEN] = {0};
195 hi_u32 temp;
196 hi_s32 i;
197
198 crypto_unused(len);
199
200 s[WORD_IDX_0] = getu32(in + WORD_IDX_0 * WORD_WIDTH);
201 s[WORD_IDX_1] = getu32(in + WORD_IDX_1 * WORD_WIDTH);
202 s[WORD_IDX_2] = getu32(in + WORD_IDX_2 * WORD_WIDTH);
203 s[WORD_IDX_3] = getu32(in + WORD_IDX_3 * WORD_WIDTH);
204
205 for (i = 0; i < SM4_RD_KEY_LEN; i++) {
206 temp = s[i + WORD_IDX_1] ^ s[i + WORD_IDX_2] ^ s[i + WORD_IDX_3] ^ \
207 key->rd_key[SM4_RD_KEY_LEN - BOUND_VAL_1 - i];
208 s[i + WORD_IDX_4] = s[i] ^ mix_r(temp, CIPHER);
209 }
210
211 putu32(out + WORD_IDX_0 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_3]);
212 putu32(out + WORD_IDX_1 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_2]);
213 putu32(out + WORD_IDX_2 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_1]);
214 putu32(out + WORD_IDX_3 * WORD_WIDTH, s[SM4_RD_KEY_LEN + WORD_IDX_0]);
215 (hi_void)memset_s(s, sizeof(s), 0, SM4_RD_KEY_BUF_LEN);
216 return;
217 }
218
219 /* SM4 ECB Crypt */
sm4_ecb_crypt(const hi_u8 * in,hi_u8 * out,hi_u32 len,const sm4_key * key,const hi_s32 dec)220 static hi_void sm4_ecb_crypt(const hi_u8 *in, hi_u8 *out, hi_u32 len, const sm4_key *key, const hi_s32 dec)
221 {
222 if (dec == SYMC_OPERATION_ENCRYPT) {
223 sm4_encrypt(in, out, key, len);
224 } else {
225 sm4_decrypt(in, out, key, len);
226 }
227 return;
228 }
229
230 /* SM4 CBC Crypt */
sm4_cbc_crypt(const hi_u8 * in,hi_u8 * out,const hi_u32 length,const sm4_key * key,hi_u8 * ivec,const hi_s32 dec)231 static hi_void sm4_cbc_crypt(const hi_u8 *in, hi_u8 *out, const hi_u32 length,
232 const sm4_key *key, hi_u8 *ivec, const hi_s32 dec)
233 {
234 hi_u32 n;
235 hi_u32 len = length;
236 hi_u8 tmp[SM4_BLOCK_SIZE] = {0};
237
238 if (dec == SYMC_OPERATION_ENCRYPT) {
239 while (len >= SM4_BLOCK_SIZE) {
240 for (n = 0; n < SM4_BLOCK_SIZE; ++n) {
241 tmp[n] = in[n] ^ ivec[n];
242 }
243
244 sm4_encrypt(tmp, out, key, SM4_BLOCK_SIZE);
245
246 if (memcpy_s((hi_void *)ivec, AES_IV_SIZE, out, SM4_BLOCK_SIZE) != EOK) {
247 hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
248 return;
249 }
250 len -= SM4_BLOCK_SIZE;
251 in += SM4_BLOCK_SIZE;
252 out += SM4_BLOCK_SIZE;
253 }
254 } else {
255 while (len >= SM4_BLOCK_SIZE) {
256 if (memcpy_s(tmp, sizeof(tmp), in, length) != EOK) {
257 hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
258 return;
259 }
260 sm4_decrypt(tmp, out, key, SM4_BLOCK_SIZE);
261 for (n = 0; n < SM4_BLOCK_SIZE; ++n) {
262 out[n] ^= ivec[n];
263 }
264 if (memcpy_s((hi_void *)ivec, SM4_BLOCK_SIZE, tmp, sizeof(tmp)) != EOK) {
265 hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
266 return;
267 }
268 len -= SM4_BLOCK_SIZE;
269 in += SM4_BLOCK_SIZE;
270 out += SM4_BLOCK_SIZE;
271 }
272 }
273
274 return;
275 }
276
277 /* increment counter (128bit hi_s32) by 2^64 */
sm4_ctr128_inc(hi_u8 * counter)278 static hi_void sm4_ctr128_inc(hi_u8 *counter)
279 {
280 hi_s32 i;
281
282 for (i = SM4_BLOCK_SIZE - BOUND_VAL_1; i >= 0; i--) {
283 counter[i]++;
284 if (counter[i] != 0) {
285 return;
286 }
287 }
288 return;
289 }
290
291 /* SM4 CTR Crypt, CTR mode is big-endian. The rest of SM4 code is endian-neutral */
sm4_ctr128_crypt(const hi_u8 * in,hi_u8 * out,hi_s32 length,const sm4_key * key,hi_u8 counter[SM4_BLOCK_SIZE],hi_u8 ecount_buf[SM4_BLOCK_SIZE],hi_s32 * num)292 static hi_void sm4_ctr128_crypt(const hi_u8 *in, hi_u8 *out, hi_s32 length,
293 const sm4_key *key, hi_u8 counter[SM4_BLOCK_SIZE], hi_u8 ecount_buf[SM4_BLOCK_SIZE], hi_s32 *num)
294 {
295 hi_s32 n, l;
296
297 l = length;
298 n = *num;
299 while (l--) {
300 if (n == 0) {
301 sm4_encrypt(counter, ecount_buf, key, SM4_BLOCK_SIZE);
302 sm4_ctr128_inc(counter);
303 }
304 *(out++) = *(in++) ^ ecount_buf[n];
305 n = (n + 1) % SM4_BLOCK_SIZE;
306 }
307 *num = n;
308
309 return;
310 }
311
312 /* SM4 ECB RM */
/*
 * ECB driver: expand the key once, then process every whole block.
 * Returns the number of leftover bytes (tail shorter than one block).
 */
static hi_u32 sm4_ecb_rm(const hi_u8 *data_in,
    hi_u8 *data_out, hi_s32 data_len, const hi_u8 *key, hi_s32 bit, hi_u32 decrypt)
{
    sm4_key round_key;
    const hi_u8 *src = data_in;
    hi_u8 *dst = data_out;
    hi_s32 remain = data_len;

    sm4_set_encrypt_key(key, bit, &round_key);

    for (; remain >= SM4_BLOCK_SIZE; remain -= SM4_BLOCK_SIZE) {
        sm4_ecb_crypt(src, dst, SM4_BLOCK_SIZE, &round_key, decrypt);
        src += SM4_BLOCK_SIZE;
        dst += SM4_BLOCK_SIZE;
    }

    return remain;
}
329
330 /* SM4 CBC RM */
/*
 * CBC driver: expand the key, process the whole-block prefix, and return the
 * number of leftover tail bytes that were not processed.
 */
static hi_u32 sm4_cbc_rm(const hi_u8 *data_in, hi_u8 *data_out, hi_s32 data_len,
    const hi_u8 *key, hi_s32 bit, hi_u32 decrypt, hi_u8 *iv)
{
    sm4_key round_key;
    hi_s32 tail = data_len % SM4_BLOCK_SIZE;

    sm4_set_encrypt_key(key, bit, &round_key);
    sm4_cbc_crypt(data_in, data_out, data_len - tail, &round_key, iv, decrypt);

    return tail;
}
346
347 /* SM4 CTR RM */
/*
 * CTR driver: expand the key and stream the full length (CTR handles partial
 * blocks, so nothing is left over).  Always returns 0.
 * The encrypted-counter buffer and the consumed-byte count start at zero, as
 * sm4_ctr128_crypt requires for a fresh stream.
 */
static hi_u32 sm4_ctr_rm(const hi_u8 *data_in, hi_u8 *data_out, hi_s32 data_len,
    const hi_u8 *key, hi_s32 bit, hi_u32 decrypt, const hi_u8 *iv)
{
    sm4_key round_key;
    hi_u8 keystream[SM4_BLOCK_SIZE] = {0}; /* encrypted counter block */
    hi_s32 used = 0;                       /* keystream bytes already consumed */

    crypto_unused(decrypt); /* CTR encryption and decryption are the same operation */

    sm4_set_encrypt_key(key, bit, &round_key);
    /* NOTE(review): `iv` is declared const but the counter is advanced in
     * place through this cast — confirm callers always pass writable storage. */
    sm4_ctr128_crypt(data_in, data_out, data_len, &round_key, (hi_u8 *)iv, keystream, &used);

    return 0;
}
370
ext_sm4_create(hi_u32 hard_chn)371 hi_void *ext_sm4_create(hi_u32 hard_chn)
372 {
373 ext_sm4_context *ctx = HI_NULL;
374
375 hi_log_func_enter();
376
377 ctx = crypto_malloc(sizeof(ext_sm4_context));
378 if (ctx == HI_NULL) {
379 hi_log_print_err_code(HI_ERR_CIPHER_FAILED_MEM);
380 return HI_NULL;
381 }
382 (hi_void)memset_s(ctx, sizeof(ext_sm4_context), 0, sizeof(ext_sm4_context));
383
384 hi_log_func_exit();
385
386 return ctx;
387 }
388
ext_sm4_destory(hi_void * ctx)389 hi_s32 ext_sm4_destory(hi_void *ctx)
390 {
391 hi_log_func_enter();
392
393 if (ctx != HI_NULL) {
394 crypto_free(ctx);
395 ctx = HI_NULL;
396 }
397
398 hi_log_func_exit();
399 return HI_SUCCESS;
400 }
401
/* Select the block mode for the context.  Only 128-bit width and
 * ECB/CBC/CTR are accepted; invalid arguments leave the context unchanged.
 * `alg` is unused by the software SM4 path. */
hi_void ext_sm4_setmode(hi_void *ctx, symc_alg alg, symc_mode mode, symc_width width)
{
    ext_sm4_context *sm4_ctx = ctx;

    hi_log_func_enter();

    if (sm4_ctx == HI_NULL) {
        hi_log_error("ctx is null\n");
        return;
    }

    if (width != SYMC_DAT_WIDTH_128) {
        hi_log_error("Invalid width: 0x%x\n", width);
        return;
    }

    if ((mode != SYMC_MODE_ECB) && (mode != SYMC_MODE_CBC) && (mode != SYMC_MODE_CTR)) {
        hi_log_error("unsupported mode %d\n", mode);
        return;
    }
    sm4_ctx->mode = mode;

    hi_log_func_exit();
}
436
ext_sm4_setiv(hi_void * ctx,const hi_u8 * iv,hi_u32 ivlen,hi_u32 usage)437 hi_s32 ext_sm4_setiv(hi_void *ctx, const hi_u8 *iv, hi_u32 ivlen, hi_u32 usage)
438 {
439 ext_sm4_context *symc = ctx;
440
441 hi_log_func_enter();
442
443 hi_log_chk_param_return(symc == HI_NULL);
444 hi_log_chk_param_return(iv == HI_NULL);
445 hi_log_chk_param_return(ivlen != HI_NULL);
446
447 if (memcpy_s(symc->iv, AES_IV_SIZE, iv, ivlen) != EOK) {
448 hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
449 return HI_ERR_CIPHER_MEMCPY_S_FAILED;
450 }
451
452 hi_log_func_exit();
453 return HI_SUCCESS;
454 }
455
/* Copy the context's current IV (updated by CBC/CTR processing) back to the
 * caller and report its length.  Null arguments are silently ignored. */
hi_void ext_sm4_getiv(hi_void *ctx, hi_u8 *iv, hi_u32 *ivlen)
{
    ext_sm4_context *sm4_ctx = ctx;

    hi_log_func_enter();

    if (sm4_ctx == HI_NULL || iv == HI_NULL || ivlen == HI_NULL) {
        return;
    }

    if (memcpy_s(iv, AES_IV_SIZE, sm4_ctx->iv, sizeof(sm4_ctx->iv)) != EOK) {
        hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
        return;
    }
    *ivlen = AES_IV_SIZE;

    hi_log_func_exit();
}
476
ext_sm4_setkey(hi_void * ctx,const hi_u8 * fkey,const hi_u8 * skey,hi_u32 * hisi_klen)477 hi_s32 ext_sm4_setkey(hi_void *ctx, const hi_u8 *fkey, const hi_u8 *skey, hi_u32 *hisi_klen)
478 {
479 hi_u32 klen = 0;
480 ext_sm4_context *symc = ctx;
481
482 hi_log_func_enter();
483
484 hi_log_chk_param_return(symc == HI_NULL);
485 hi_log_chk_param_return(fkey == HI_NULL);
486 hi_log_chk_param_return(hisi_klen == HI_NULL);
487
488 switch (*hisi_klen) {
489 case HI_CIPHER_KEY_AES_128BIT:
490 klen = AES_KEY_128BIT;
491 break;
492 default:
493 hi_log_error("Invalid aes key len: 0x%u\n", *hisi_klen);
494 hi_log_print_err_code(HI_ERR_CIPHER_INVALID_PARAM);
495 return HI_ERR_CIPHER_INVALID_PARAM;
496 }
497 hi_log_info("key len %u, type %u\n", klen, *hisi_klen);
498
499 if (memcpy_s(symc->key, sizeof(symc->key), fkey, klen) != EOK) {
500 hi_log_print_func_err(memcpy_s, HI_ERR_CIPHER_MEMCPY_S_FAILED);
501 return HI_ERR_CIPHER_MEMCPY_S_FAILED;
502 }
503 symc->klen = klen;
504 *hisi_klen = klen;
505
506 hi_log_func_exit();
507 return HI_SUCCESS;
508 }
509
ext_sm4_rm(ext_sm4_context * symc,crypto_mem * mem_in,crypto_mem * mem_out,hi_u32 len,hi_u32 operation)510 static hi_s32 ext_sm4_rm(ext_sm4_context *symc, crypto_mem *mem_in, crypto_mem *mem_out, hi_u32 len, hi_u32 operation)
511 {
512 switch (symc->mode) {
513 case SYMC_MODE_ECB: {
514 sm4_ecb_rm(crypto_mem_virt(mem_in), crypto_mem_virt(mem_out),
515 len, symc->key, symc->klen, operation);
516 break;
517 }
518 case SYMC_MODE_CBC: {
519 sm4_cbc_rm(crypto_mem_virt(mem_in), crypto_mem_virt(mem_out),
520 len, symc->key, symc->klen, operation, symc->iv);
521 break;
522 }
523 case SYMC_MODE_CTR: {
524 sm4_ctr_rm(crypto_mem_virt(mem_in), crypto_mem_virt(mem_out),
525 len, symc->key, symc->klen, operation, symc->iv);
526 break;
527 }
528 default: {
529 HI_PRINT("Err, Invalid mode 0x%x\n", symc->mode);
530 hi_log_print_err_code(HI_ERR_CIPHER_INVALID_PARAM);
531 return HI_ERR_CIPHER_INVALID_PARAM;
532 }
533 }
534
535 return HI_SUCCESS;
536 }
537
ext_sm4_crypto(hi_void * ctx,hi_u32 operation,symc_multi_pack * pack,hi_u32 last)538 hi_s32 ext_sm4_crypto(hi_void *ctx, hi_u32 operation, symc_multi_pack *pack, hi_u32 last)
539 {
540 ext_sm4_context *symc = ctx;
541 crypto_mem mem_in, mem_out;
542 hi_s32 ret;
543
544 hi_log_func_enter();
545
546 hi_log_chk_param_return(symc == HI_NULL);
547 hi_log_chk_param_return(pack == HI_NULL);
548 hi_log_chk_param_return(pack->len == HI_NULL);
549 hi_log_chk_param_return(pack->in == HI_NULL);
550 hi_log_chk_param_return(pack->out == HI_NULL);
551 hi_log_chk_param_return(pack->usage == HI_NULL);
552 hi_log_chk_param_return(pack->num != 0x01);
553
554 (hi_void)memset_s(&mem_in, sizeof(mem_in), 0, sizeof(mem_in));
555 (hi_void)memset_s(&mem_out, sizeof(mem_out), 0, sizeof(mem_out));
556
557 ret = crypto_mem_open(&mem_in, pack->in[0], pack->len[0]);
558 if (ret != HI_SUCCESS) {
559 hi_log_print_func_err(crypto_mem_open, ret);
560 return ret;
561 }
562
563 ret = crypto_mem_open(&mem_out, pack->out[0], pack->len[0]);
564 if (ret != HI_SUCCESS) {
565 hi_log_print_func_err(crypto_mem_open, ret);
566 crypto_mem_close(&mem_in);
567 return ret;
568 }
569
570 ret = ext_sm4_rm(symc, &mem_in, &mem_out, pack->len[0], operation);
571 if (ret != HI_SUCCESS) {
572 hi_log_print_func_err(ext_sm4_rm, ret);
573 crypto_mem_close(&mem_out);
574 crypto_mem_close(&mem_in);
575 return ret;
576 }
577
578 crypto_mem_close(&mem_out);
579 crypto_mem_close(&mem_in);
580 hi_log_func_exit();
581 return HI_SUCCESS;
582 }
583
584 #endif
585