1 /**
2 * Copyright (c) 2020 HiSilicon (Shanghai) Technologies CO., LIMITED.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 *
15 * Description: Provides cipher driver common utils. \n
16 *
17 * History: \n
18 * 2023-03-22, Create file. \n
19 */
20 #include "drv_common.h"
21 #include "drv_trng.h"
22 #include "crypto_drv_common.h"
23
/* crypto dump functions. */
/* NOTE(review): "LENGHT" is a typo for "LENGTH"; name kept as-is to avoid breaking existing users. */
#define MAX_DUMP_LENGHT (512)
/* Number of hex bytes printed per output line by crypto_dump_data(). */
#define BYTES_IN_ONE_LINE (16)
/* Linux printk line-continuation marker; define empty for non-Linux builds. */
#ifndef KERN_CONT
#define KERN_CONT
#endif
30
/*
 * Platform hook table. All entries stay TD_NULL until the platform installs
 * its callbacks via drv_cipher_register_func(); every accessor below checks
 * its hook for TD_NULL before use. Fields without explicit initializers
 * (e.g. get_smmu_table_addr) are zero-initialized by C aggregate rules.
 */
static crypto_drv_func g_drv_func = {
    .malloc_coherent = TD_NULL,
    .free_coherent = TD_NULL,
    .get_phys_addr = TD_NULL
};
36
crypto_malloc_coherent(td_u32 size)37 td_void *crypto_malloc_coherent(td_u32 size)
38 {
39 crypto_mem_type mem_type = CRYPTO_MEM_TYPE_MMZ;
40 if (g_drv_func.malloc_coherent == TD_NULL) {
41 return TD_NULL;
42 }
43 if (crypto_smmu_support()) {
44 mem_type = CRYPTO_MEM_TYPE_SMMU;
45 }
46 return g_drv_func.malloc_coherent(size, mem_type);
47 }
48
crypto_malloc_mmz(td_u32 size)49 td_void *crypto_malloc_mmz(td_u32 size)
50 {
51 if (g_drv_func.malloc_coherent == TD_NULL) {
52 return TD_NULL;
53 }
54 return g_drv_func.malloc_coherent(size, CRYPTO_MEM_TYPE_MMZ);
55 }
56
/*
 * Release memory obtained from crypto_malloc_coherent()/crypto_malloc_mmz().
 * Silently does nothing when no free hook is registered.
 */
td_void crypto_free_coherent(td_void *ptr)
{
    if (g_drv_func.free_coherent != TD_NULL) {
        g_drv_func.free_coherent(ptr);
    }
}
64
crypto_get_phys_addr(td_void * ptr)65 td_phys_addr_t crypto_get_phys_addr(td_void *ptr)
66 {
67 if (g_drv_func.get_phys_addr == TD_NULL) {
68 return (td_phys_addr_t)(uintptr_t)(ptr);
69 }
70 return g_drv_func.get_phys_addr(ptr);
71 }
72
crypto_smmu_support(td_void)73 td_bool crypto_smmu_support(td_void)
74 {
75 if (g_drv_func.get_smmu_table_addr != TD_NULL) {
76 return TD_TRUE;
77 } else {
78 return TD_FALSE;
79 }
80 }
81
crypto_get_smmu_table_addr(unsigned long * table,unsigned long * rdaddr,unsigned long * wraddr)82 td_void crypto_get_smmu_table_addr(unsigned long *table, unsigned long *rdaddr, unsigned long *wraddr)
83 {
84 if (g_drv_func.get_smmu_table_addr == TD_NULL) {
85 return;
86 }
87 g_drv_func.get_smmu_table_addr(table, rdaddr, wraddr);
88 }
89
drv_cipher_register_func(const crypto_drv_func * drv_func_list)90 td_s32 drv_cipher_register_func(const crypto_drv_func *drv_func_list)
91 {
92 if (drv_func_list == TD_NULL) {
93 return TD_FAILURE;
94 }
95
96 g_drv_func.malloc_coherent = drv_func_list->malloc_coherent;
97 g_drv_func.free_coherent = drv_func_list->free_coherent;
98 g_drv_func.get_phys_addr = drv_func_list->get_phys_addr;
99 g_drv_func.get_smmu_table_addr = drv_func_list->get_smmu_table_addr;
100 g_drv_func.get_cpu_type = drv_func_list->get_cpu_type;
101 g_drv_func.get_pke_rom_lib_start_addr = drv_func_list->get_pke_rom_lib_start_addr;
102
103 return TD_SUCCESS;
104 }
105
crypto_get_cpu_type(td_void)106 crypto_cpu_type crypto_get_cpu_type(td_void)
107 {
108 if (g_drv_func.get_cpu_type == TD_NULL) {
109 return CRYPTO_CPU_TYPE_ACPU;
110 }
111 return g_drv_func.get_cpu_type();
112 }
113
crypto_get_pke_rom_lib_start_addr(td_void)114 td_u32 crypto_get_pke_rom_lib_start_addr(td_void)
115 {
116 if (g_drv_func.get_pke_rom_lib_start_addr == TD_NULL) {
117 return PKE_ROM_LIB_INVALID_ADDR;
118 }
119 return g_drv_func.get_pke_rom_lib_start_addr();
120 }
121
/*
 * Weak default: report SM (ShangMi) algorithm support. Always TD_TRUE here;
 * platforms without SM hardware override this symbol with their own check.
 */
td_bool __attribute__((weak)) crypto_sm_support(crypto_sm_alg alg)
{
    crypto_unused(alg);
    return TD_TRUE;
}
127
/*
 * Report RSA capability for a given key length and padding scheme.
 * This build supports all combinations, so both arguments are ignored.
 */
td_bool crypto_rsa_support(td_u32 klen, drv_pke_rsa_scheme scheme)
{
    crypto_unused(klen);
    crypto_unused(scheme);

    return TD_TRUE;
}
135
/*
 * Report hash-algorithm capability. This build supports every hash type,
 * so the argument is ignored.
 */
td_bool crypto_hash_support(crypto_hash_type hash_type)
{
    crypto_unused(hash_type);

    return TD_TRUE;
}
142
/*
 * Report symmetric-cipher capability for an algorithm/mode/key-length/
 * bit-width combination. This build supports all combinations, so every
 * argument is ignored.
 */
td_bool crypto_symc_support(crypto_symc_alg alg, crypto_symc_work_mode mode, crypto_symc_key_length key_len,
    crypto_symc_bit_width bit_width)
{
    crypto_unused(alg);
    crypto_unused(mode);
    crypto_unused(key_len);
    crypto_unused(bit_width);

    return TD_TRUE;
}
153
/*
 * Hex-dump a buffer for debugging: prints a header line with the buffer's
 * name, address and size, then up to MAX_DUMP_LENGHT bytes as hex,
 * BYTES_IN_ONE_LINE per row.
 *
 * Fixes vs. previous version:
 * - The address was printed with "%x" but passed as uintptr_t, which is
 *   undefined behavior on LP64 targets; now printed as unsigned long with
 *   a matching "%lx" specifier. Likewise data_len is unsigned, so "%u".
 * - A NULL data pointer with a non-zero length no longer dereferences NULL.
 */
void crypto_dump_data(const char *name, const td_u8 *data, td_u32 data_len)
{
    td_u32 i;
    td_u32 dump_length = data_len;
    /* crypto_unused keeps the parameters referenced when crypto_print
     * expands to nothing in release builds. */
    crypto_unused(name);
    crypto_unused(data);
    crypto_print("%s(addr is 0x%lx, size is %u Bytes):\r\n", name, (unsigned long)(uintptr_t)data, data_len);

    if (data == TD_NULL) {
        return;
    }
    if (dump_length > MAX_DUMP_LENGHT) {
        dump_length = MAX_DUMP_LENGHT;
    }
    for (i = 0; i < dump_length; i++) {
        crypto_print(KERN_CONT"%02x ", data[i]);
        if ((i + 1) % BYTES_IN_ONE_LINE == 0) {
            crypto_print("\r\n");
        }
    }
    /* Terminate the final partial row, if any. */
    if (dump_length % BYTES_IN_ONE_LINE != 0) {
        crypto_print("\r\n");
    }
}
175
/*
 * Weak default: fill randnum with `size` random bytes from the TRNG driver.
 * Platforms may override this symbol with a hardened implementation.
 * Returns the TRNG driver's status code.
 */
td_s32 __attribute__((weak)) crypto_get_multi_random(td_u32 size, td_u8 *randnum)
{
    return drv_cipher_trng_get_multi_random(size, randnum);
}
180
181 #if defined(CRYPTO_SYMC_ADDR_NOT_ALIGN_SUPPORT)
crypto_virt_xor_phys_copy_to_phys(td_phys_addr_t dst_phys_addr,const td_u8 * a_virt_addr,td_phys_addr_t b_phys_addr,td_u32 length)182 td_s32 __attribute__((weak)) crypto_virt_xor_phys_copy_to_phys(td_phys_addr_t dst_phys_addr, const td_u8 *a_virt_addr,
183 td_phys_addr_t b_phys_addr, td_u32 length)
184 {
185 td_s32 ret;
186 td_u32 i;
187 td_u8 *b_virt_addr = TD_NULL;
188 td_u8 *dst_virt_addr = TD_NULL;
189
190 b_virt_addr = crypto_ioremap_nocache(b_phys_addr, length);
191 crypto_chk_goto_with_ret(ret, b_virt_addr == TD_NULL, exit, TD_FAILURE, "crypto_ioremap_nocache failed\n");
192
193 dst_virt_addr = crypto_ioremap_nocache(dst_phys_addr, length);
194 crypto_chk_goto_with_ret(ret, dst_virt_addr == TD_NULL, exit, TD_FAILURE, "crypto_ioremap_nocache failed\n");
195
196 #if defined(CRYPTO_CTR_TRACE_ENABLE)
197 crypto_dump_data("a_virt_addr", a_virt_addr, length);
198 crypto_dump_data("b_virt_addr", b_virt_addr, length);
199 #endif
200
201 for (i = 0; i < length; i++) {
202 dst_virt_addr[i] = a_virt_addr[i] ^ b_virt_addr[i];
203 }
204
205 #if defined(CRYPTO_CTR_TRACE_ENABLE)
206 crypto_dump_data("dst_virt_addr", dst_virt_addr, length);
207 #endif
208 ret = TD_SUCCESS;
209 exit:
210 if (b_virt_addr != TD_NULL) {
211 crypto_iounmap(b_virt_addr, length);
212 }
213 if (dst_virt_addr != TD_NULL) {
214 crypto_iounmap(dst_virt_addr, length);
215 }
216 return ret;
217 }
218
crypto_virt_copy_to_phys(td_phys_addr_t dst_phys_addr,const td_u8 * src_virt_addr,td_u32 length)219 td_s32 __attribute__((weak)) crypto_virt_copy_to_phys(td_phys_addr_t dst_phys_addr,
220 const td_u8 *src_virt_addr, td_u32 length)
221 {
222 td_s32 ret;
223 td_u8 *dst_virt_addr = TD_NULL;
224
225 dst_virt_addr = crypto_ioremap_nocache(dst_phys_addr, length);
226 crypto_chk_goto_with_ret(ret, dst_virt_addr == TD_NULL, exit, TD_FAILURE, "crypto_ioremap_nocache failed\n");
227
228 ret = memcpy_s(dst_virt_addr, length, src_virt_addr, length);
229 crypto_chk_goto_with_ret(ret, ret != EOK, exit, TD_FAILURE, "memcpy_s failed\n");
230
231 ret = TD_SUCCESS;
232 exit:
233 if (dst_virt_addr != TD_NULL) {
234 crypto_iounmap(dst_virt_addr, length);
235 }
236 return ret;
237 }
238
crypto_phys_copy_to_virt(td_u8 * dst_virt_addr,td_phys_addr_t src_phys_addr,td_u32 length)239 td_s32 __attribute__((weak)) crypto_phys_copy_to_virt(td_u8 *dst_virt_addr, td_phys_addr_t src_phys_addr, td_u32 length)
240 {
241 td_s32 ret;
242 td_u8 *src_virt_addr = TD_NULL;
243
244 src_virt_addr = crypto_ioremap_nocache(src_phys_addr, length);
245 crypto_chk_goto_with_ret(ret, src_virt_addr == TD_NULL, exit, TD_FAILURE, "crypto_ioremap_nocache failed\n");
246
247 ret = memcpy_s(dst_virt_addr, length, src_virt_addr, length);
248 crypto_chk_goto_with_ret(ret, ret != EOK, exit, TD_FAILURE, "memcpy_s failed\n");
249
250 ret = TD_SUCCESS;
251 exit:
252 if (src_virt_addr != TD_NULL) {
253 crypto_iounmap(src_virt_addr, length);
254 }
255 return ret;
256 }
257 #endif /* CRYPTO_SYMC_ADDR_NOT_ALIGN_SUPPORT */
258
crypto_get_value_by_index(const crypto_table_item * table,td_u32 table_size,td_u32 index,td_u32 * value)259 td_s32 crypto_get_value_by_index(const crypto_table_item *table, td_u32 table_size,
260 td_u32 index, td_u32 *value)
261 {
262 const crypto_table_item *item = TD_NULL;
263 td_u32 i;
264
265 crypto_chk_return(table == TD_NULL, TD_FAILURE, "table is NULL\n");
266 crypto_chk_return(value == TD_NULL, TD_FAILURE, "value is NULL\n");
267
268 for (i = 0; i < table_size; i++) {
269 item = &table[i];
270 if (item->index == index) {
271 *value = item->value;
272 return TD_SUCCESS;
273 }
274 }
275 crypto_log_err("Invalid Index!\n");
276 return TD_FAILURE;
277 }