1 /*
2 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15 #include <stddef.h>
16 #include <hvb_sysdeps.h>
17 #include "hvb_crypto.h"
18 #include "hvb_hash_sha256.h"
19
/*
 * Fallback 32-bit byte-swap for platforms whose headers do not provide
 * htobe32. NOTE(review): evaluates its argument four times — do not pass
 * expressions with side effects (current call sites only pass lvalues).
 */
#ifndef htobe32
#define htobe32(value) \
    ((((value)&0x000000FF) << 24) | (((value)&0x0000FF00) << 8) | (((value)&0x00FF0000) >> 8) | \
     (((value)&0xFF000000) >> 24))
#endif
25
/* convert a count of 32-bit words to a byte count */
#define word2byte(w) ((w) * sizeof(uint32_t))
#define PAD_BLK_WORD_SIZE_SHA256 (BLK_WORD_SIZE_SHA256 * 2)
/* fix: previously expanded WOR2BYTE(...), a typo for word2byte() above;
 * it only compiled because this macro was never expanded in this file */
#define PAD_BLK_BYTE_SIZE_SHA256 word2byte(PAD_BLK_WORD_SIZE_SHA256)
#define PAD_INFO_BYTE_LEN_SHA256 8 /* 64-bit big-endian bit-length trailer */
30
/* logical shift right and rotate right on 32-bit values */
#define shr(x, n) (((uint32_t)(x)) >> (n))
#define rotr(x, n) (shr(x, n) | (((uint32_t)(x)) << (32 - (n))))

/* SHA-256 Sigma0/Sigma1 (upper-case, used on working variables) and
 * sigma0/sigma1 (lower-case, used in the message schedule) — FIPS 180-4 §4.1.2 */
#define sigma_0(x) (rotr(x, 2) ^ rotr(x, 13) ^ rotr(x, 22))
#define sigma_1(x) (rotr(x, 6) ^ rotr(x, 11) ^ rotr(x, 25))
#define sigma_2(x) (rotr(x, 7) ^ rotr(x, 18) ^ shr(x, 3))
#define sigma_3(x) (rotr(x, 17) ^ rotr(x, 19) ^ shr(x, 10))

/* majority and choose bitwise functions — FIPS 180-4 §4.1.2 */
#define maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
#define ch(x, y, z) (((x) & (y)) ^ ((~(x)) & (z)))
41
/* SHA-256 round constants K[0..63] (FIPS 180-4 §4.2.2): the first 32 bits
 * of the fractional parts of the cube roots of the first 64 primes. */
static const uint32_t const_key[] = {
    0x428A2F98,
    0x71374491,
    0xB5C0FBCF,
    0xE9B5DBA5,
    0x3956C25B,
    0x59F111F1,
    0x923F82A4,
    0xAB1C5ED5,
    0xD807AA98,
    0x12835B01,
    0x243185BE,
    0x550C7DC3,
    0x72BE5D74,
    0x80DEB1FE,
    0x9BDC06A7,
    0xC19BF174,
    0xE49B69C1,
    0xEFBE4786,
    0x0FC19DC6,
    0x240CA1CC,
    0x2DE92C6F,
    0x4A7484AA,
    0x5CB0A9DC,
    0x76F988DA,
    0x983E5152,
    0xA831C66D,
    0xB00327C8,
    0xBF597FC7,
    0xC6E00BF3,
    0xD5A79147,
    0x06CA6351,
    0x14292967,
    0x27B70A85,
    0x2E1B2138,
    0x4D2C6DFC,
    0x53380D13,
    0x650A7354,
    0x766A0ABB,
    0x81C2C92E,
    0x92722C85,
    0xA2BFE8A1,
    0xA81A664B,
    0xC24B8B70,
    0xC76C51A3,
    0xD192E819,
    0xD6990624,
    0xF40E3585,
    0x106AA070,
    0x19A4C116,
    0x1E376C08,
    0x2748774C,
    0x34B0BCB5,
    0x391C0CB3,
    0x4ED8AA4A,
    0x5B9CCA4F,
    0x682E6FF3,
    0x748F82EE,
    0x78A5636F,
    0x84C87814,
    0x8CC70208,
    0x90BEFFFA,
    0xA4506CEB,
    0xBEF9A3F7,
    0xC67178F2,
};
108
109
110 static uint32_t sha256_iv_init[IV_WORD_SIZE_SHA256] = {
111 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 };
112
/* Load one 32-bit word from a big-endian byte stream. */
static inline uint32_t bigend_read_word(const uint8_t *data)
{
    return ((uint32_t)data[0] << 24) | ((uint32_t)data[1] << 16) |
           ((uint32_t)data[2] << 8) | (uint32_t)data[3];
}
124
/* Message-schedule expansion for word t (16 <= t < 64), FIPS 180-4 §6.2.2:
 * W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]. */
static inline uint32_t w_schedule(uint32_t w[64], uint32_t t)
{
    uint32_t s0 = sigma_2(w[t - 15]);
    uint32_t s1 = sigma_3(w[t - 2]);

    return w[t - 16] + s0 + w[t - 7] + s1;
}
129
/* Rotate the eight working variables one position toward higher indices
 * (new[0] = old[7], new[i] = old[i-1]), so each round can address the
 * variables a..h at fixed indices. */
static inline void rotate_regs(uint32_t regs[8])
{
    uint32_t last = regs[7];

    for (int i = 7; i > 0; i--) {
        regs[i] = regs[i - 1];
    }
    regs[0] = last;
}
143
sha256_block_calc(uint32_t regs[8],const uint8_t * data)144 static void sha256_block_calc(uint32_t regs[8], const uint8_t *data)
145 {
146 uint32_t t1;
147 uint32_t w[64];
148 uint32_t t;
149
150 for (t = 0; t < 64; t++, data += 4) {
151 w[t] = t < 16 ? bigend_read_word(data) : w_schedule(w, t);
152 t1 = regs[7] + sigma_1(regs[4]) + ch(regs[4], regs[5], regs[6]) + const_key[t] + w[t];
153 regs[3] += t1;
154 regs[7] = sigma_0(regs[0]) + maj(regs[0], regs[1], regs[2]) + t1;
155
156 rotate_regs(regs);
157 }
158 }
159
int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len);

/*
 * Compress every complete 64-byte block of msg into the hash state iv.
 * Trailing bytes (len % 64) are ignored; the caller handles padding.
 * Declared weak so a platform can substitute an accelerated version.
 */
__attribute__((weak)) int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len)
{
    const uint8_t *blk = msg;
    uint64_t remaining = len / 64;
    uint32_t regs[8];
    uint32_t k;

    while (remaining != 0) {
        /* seed the working variables from the current state */
        for (k = 0; k < 8; k++) {
            regs[k] = iv[k];
        }

        sha256_block_calc(regs, blk);

        /* fold the compressed block back into the state */
        for (k = 0; k < 8; k++) {
            iv[k] += regs[k];
        }

        blk += 64;
        remaining--;
    }

    return 0;
}
182
/*
 * Absorb the final partial block: append the 0x80 marker, zero fill, and
 * the 64-bit big-endian message bit count, then compress the padded data.
 *
 * iv            - running hash state (8 words), updated in place
 * left_msg      - tail bytes not yet compressed; left_len must be
 *                 < BLK_BYTE_SIZE_SHA256 (callers guarantee this)
 * left_len      - number of tail bytes
 * total_bit_len - total message length in bits
 */
static void hash_sha256_pad_update(uint32_t *iv, const void *left_msg, uint64_t left_len, uint64_t total_bit_len)
{
    uint32_t pad_word_len;
    uint32_t sha256_pad[PAD_BLK_WORD_SIZE_SHA256]; /* room for two blocks */
    uint8_t *pad_ptr = NULL;
    uint32_t fill_zero_len;

    if (left_len != 0) {
        hvb_memcpy(sha256_pad, left_msg, (uint32_t)left_len);
    }

    pad_ptr = (uint8_t *)sha256_pad;
    pad_ptr[left_len] = 0x80; // padding 0x80
    left_len++;

    /* one block if the marker plus the 8-byte length trailer still fit,
     * otherwise the padding spills into a second block */
    if (left_len + PAD_INFO_BYTE_LEN_SHA256 <= BLK_BYTE_SIZE_SHA256) {
        pad_word_len = BLK_WORD_SIZE_SHA256;
    } else {
        pad_word_len = PAD_BLK_WORD_SIZE_SHA256;
    }

    fill_zero_len = word2byte(pad_word_len) - (uint32_t)left_len - PAD_INFO_BYTE_LEN_SHA256;
    hvb_memset(pad_ptr + left_len, 0, fill_zero_len);

    /* store the 64-bit bit count big-endian in the last two words:
     * low 32 bits in the final word, high 32 bits just before it */
    sha256_pad[pad_word_len - 1] = htobe32((uint32_t)total_bit_len);
    total_bit_len = total_bit_len >> 32;
    sha256_pad[pad_word_len - 2] = htobe32((uint32_t)total_bit_len);

    sha256_data_blk_update(iv, sha256_pad, word2byte(pad_word_len));
}
213
hash_sha256_output_iv(uint32_t * iv,uint8_t * out,uint32_t out_len)214 static int hash_sha256_output_iv(uint32_t *iv, uint8_t *out, uint32_t out_len)
215 {
216 if (out_len < IV_BYTE_SIZE_SHA256) {
217 return HASH_ERR_OUTBUF_NO_ENOUGH;
218 }
219
220 for (int i = 0; i < IV_WORD_SIZE_SHA256; i++) {
221 iv[i] = htobe32(iv[i]);
222 }
223
224 hvb_memcpy(out, iv, IV_BYTE_SIZE_SHA256);
225
226 return HASH_OK;
227 }
228
hash_sha256_single(const void * msg,uint32_t msg_len,uint8_t * out,uint32_t out_len)229 int hash_sha256_single(const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
230 {
231 uint64_t data_size;
232 uint64_t total_bit_len;
233 uint32_t iv[IV_WORD_SIZE_SHA256];
234
235 total_bit_len = (uint64_t)msg_len * 8; // 8bit per byte
236 if (total_bit_len < msg_len) {
237 return HASH_ERR_TOTAL_LEN;
238 }
239
240 hvb_memcpy(iv, sha256_iv_init, sizeof(sha256_iv_init));
241
242 data_size = (msg_len / BLK_BYTE_SIZE_SHA256) * BLK_BYTE_SIZE_SHA256;
243
244 if (data_size > 0) {
245 sha256_data_blk_update(iv, msg, data_size);
246 }
247
248 hash_sha256_pad_update(iv, (uint8_t *)msg + data_size, msg_len - data_size, total_bit_len);
249
250 return hash_sha256_output_iv(iv, out, out_len);
251 }
252
hash_alg_get_blklen(enum hash_alg_type alg_type)253 static uint32_t hash_alg_get_blklen(enum hash_alg_type alg_type)
254 {
255 switch (alg_type) {
256 case HASH_ALG_SHA256:
257 return BLK_BYTE_SIZE_SHA256;
258 default:
259 return 0;
260 }
261 return 0;
262 }
263
264
hash_ctx_init(struct hash_ctx_t * hash_ctx,enum hash_alg_type alg_type)265 int hash_ctx_init(struct hash_ctx_t *hash_ctx, enum hash_alg_type alg_type)
266 {
267 if (alg_type != HASH_ALG_SHA256) {
268 return HASH_ERR_ALG_NO_SUPPORT;
269 }
270
271 if (hash_ctx == NULL) {
272 return HASH_ERR_PARAM_NULL;
273 }
274
275 hash_ctx->alg_type = (uint32_t)alg_type;
276 hash_ctx->buf_len = 0;
277 hash_ctx->total_len = 0;
278
279 (void)hvb_memcpy(hash_ctx->iv, sha256_iv_init, sizeof(sha256_iv_init));
280
281 return HASH_OK;
282 }
283
/*
 * Streaming update: buffer partial-block input in the context and
 * compress every complete block into the running state.
 *
 * Returns HASH_OK, or HASH_ERR_PARAM_NULL / HASH_ERR_ALG_NO_SUPPORT /
 * HASH_ERR_BUF_LEN on a bad context.
 */
int hash_calc_update(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len)
{
    uint32_t left_len;
    uint32_t blk_len;
    uint32_t calc_len;

    if (hash_ctx == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    blk_len = hash_alg_get_blklen(hash_ctx->alg_type);
    if (blk_len == 0) {
        return HASH_ERR_ALG_NO_SUPPORT;
    }

    /* invariant: the context buffer always holds a strict partial block */
    if (hash_ctx->buf_len >= blk_len) {
        return HASH_ERR_BUF_LEN;
    }

    /* NOTE(review): this addition is unchecked; total_len could wrap for
     * inputs near 2^64 bytes — confirm finalization catches that case. */
    hash_ctx->total_len = hash_ctx->total_len + msg_len;

    left_len = blk_len - hash_ctx->buf_len;

    /* first, top up and compress a previously buffered partial block */
    if (hash_ctx->buf_len != 0 && msg_len >= left_len) {
        hvb_memcpy(hash_ctx->blk_buf + hash_ctx->buf_len, msg, left_len);
        sha256_data_blk_update(hash_ctx->iv, hash_ctx->blk_buf, blk_len);

        hash_ctx->buf_len = 0;

        msg_len = msg_len - left_len;
        msg = (uint8_t *)msg + left_len;
    }

    /* then compress all whole blocks directly from the caller's buffer */
    if (msg_len >= blk_len) {
        calc_len = msg_len / blk_len * blk_len;
        sha256_data_blk_update(hash_ctx->iv, msg, calc_len);

        msg_len = msg_len - calc_len;
        msg = (uint8_t *)msg + calc_len;
    }

    /* finally, stash any remaining tail for the next call */
    if (msg_len != 0) {
        hvb_memcpy(hash_ctx->blk_buf + hash_ctx->buf_len, msg, msg_len);
        hash_ctx->buf_len = hash_ctx->buf_len + msg_len;
    }

    return HASH_OK;
}
332
hash_calc_do_final(struct hash_ctx_t * hash_ctx,const void * msg,uint32_t msg_len,uint8_t * out,uint32_t out_len)333 int hash_calc_do_final(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
334 {
335 uint64_t total_bit_len;
336 int ret;
337
338 ret = hash_calc_update(hash_ctx, msg, msg_len);
339 if (ret != HASH_OK) {
340 return ret;
341 }
342
343 total_bit_len = hash_ctx->total_len * 8;
344 if (total_bit_len <= hash_ctx->total_len) {
345 return HASH_ERR_TOTAL_LEN;
346 }
347
348 hash_sha256_pad_update(hash_ctx->iv, hash_ctx->blk_buf, hash_ctx->buf_len, total_bit_len);
349
350 return hash_sha256_output_iv(hash_ctx->iv, out, out_len);
351 }
352