/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/string.h>
#include <asm/page.h>
#include <linux/errno.h>

#define crypto_register_alg crypto_register_alg_rsl
#define crypto_unregister_alg crypto_unregister_alg_rsl
#define crypto_alloc_tfm crypto_alloc_tfm_rsl
#define crypto_free_tfm crypto_free_tfm_rsl
#define crypto_alg_available crypto_alg_available_rsl

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x000000ff
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000004

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK		0x000000ff
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_MODE_ECB		0x00000001
#define CRYPTO_TFM_MODE_CBC		0x00000002
#define CRYPTO_TFM_MODE_CFB		0x00000004
#define CRYPTO_TFM_MODE_CTR		0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC			0
#define CRYPTO_MAX_ALG_NAME		64

struct scatterlist;

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(void *ctx, const u8 *key,
			  unsigned int keylen, u32 *flags);
	void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
	void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(void *ctx);
	void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
	void (*dia_final)(void *ctx, u8 *out);
	int (*dia_setkey)(void *ctx, const u8 *key,
			  unsigned int keylen, u32 *flags);
};

struct compress_alg {
	int (*coa_init)(void *ctx);
	void (*coa_exit)(void *ctx);
	int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_compress	cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	const char cra_name[CRYPTO_MAX_ALG_NAME];

	union {
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct compress_alg compress;
	} cra_u;

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
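
/*
 * Example (illustrative sketch only, not part of the original header):
 * a module providing a block cipher would typically fill in a
 * struct crypto_alg and register it at init time.  The name "example",
 * the key/block sizes, the context structure and the callbacks below
 * are placeholders.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	= "example",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct example_ctx),
 *		.cra_module	= THIS_MODULE,
 *		.cra_u		= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */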
124 
125 /*
126  * Algorithm query interface.
127  */
128 int crypto_alg_available(const char *name, u32 flags);
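
/*
 * Example (sketch): callers can probe for an algorithm before relying
 * on it; the name "sha1" here is illustrative.
 *
 *	if (!crypto_alg_available("sha1", 0))
 *		return -EINVAL;
 */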

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */
struct crypto_tfm;

struct cipher_tfm {
	void *cit_iv;
	unsigned int cit_ivsize;
	u32 cit_mode;
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	void (*cit_xor_block)(u8 *dst, const u8 *src);
};

struct digest_tfm {
	void (*dit_init)(struct crypto_tfm *tfm);
	void (*dit_update)(struct crypto_tfm *tfm,
			   struct scatterlist *sg, unsigned int nsg);
	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
			   unsigned int nsg, u8 *out);
	int (*dit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define crt_cipher	crt_u.cipher
#define crt_digest	crt_u.digest
#define crt_compress	crt_u.compress

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct cipher_tfm cipher;
		struct digest_tfm digest;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;
};

/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
 * If that fails and the kernel supports dynamically loadable modules, it
 * will then attempt to load a module of the same name or alias.  A refcount
 * is grabbed on the algorithm which is then associated with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
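
/*
 * Example (sketch, error handling abbreviated): allocate a CBC-mode
 * transform by algorithm name, then release it when done.  The name
 * "aes" is an assumption; crypto_alloc_tfm() returns NULL on failure.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	... use the setkey/encrypt/decrypt helpers below ...
 *
 *	crypto_free_tfm(tfm);
 */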

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;

	if (alg->cra_module)
		return alg->cra_module->name;
	else
		return NULL;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	return tfm->__crt_alg->cra_digest.dia_digestsize;
}
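
/*
 * Example (sketch): the helpers above can be used to size key, IV and
 * output buffers before calling into a transform.
 *
 *	unsigned int blocksize	= crypto_tfm_alg_blocksize(tfm);
 *	unsigned int min_keylen	= crypto_tfm_alg_min_keysize(tfm);
 *	unsigned int ivlen	= crypto_tfm_alg_ivsize(tfm);
 */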

/*
 * API wrappers.
 */
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
					struct scatterlist *sg,
					unsigned int nsg)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
					struct scatterlist *sg,
					unsigned int nsg, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}

static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
				       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	if (tfm->crt_digest.dit_setkey == NULL)
		return -ENOSYS;
	return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
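
/*
 * Example (sketch): hash one linear buffer that fits within a single
 * page.  The algorithm name "sha1", the 20-byte output and the manual
 * scatterlist setup are assumptions for illustration; the output size
 * should really come from crypto_tfm_alg_digestsize().
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *	struct scatterlist sg;
 *	u8 out[20];
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	sg.page   = virt_to_page(buf);
 *	sg.offset = offset_in_page(buf);
 *	sg.length = len;
 *	crypto_digest_digest(tfm, &sg, 1, out);
 *	crypto_free_tfm(tfm);
 */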

static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
				       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
					const u8 *src, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
					u8 *dst, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
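
/*
 * Example (sketch): CBC-encrypt one page-contained buffer in place,
 * assuming tfm was allocated with CRYPTO_TFM_MODE_CBC.  Key, IV and
 * scatterlist setup are illustrative; nbytes must be a multiple of the
 * cipher block size.
 *
 *	struct scatterlist sg;
 *	int err;
 *
 *	sg.page   = virt_to_page(buf);
 *	sg.offset = offset_in_page(buf);
 *	sg.length = nbytes;
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	err = crypto_cipher_encrypt(tfm, &sg, &sg, nbytes);
 */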

static inline int crypto_comp_compress(struct crypto_tfm *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
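
/*
 * Example (sketch): compress a buffer with a transform allocated for a
 * compression algorithm (e.g. "deflate"; the name is illustrative).
 * On entry dlen holds the destination buffer size; on success it is
 * updated to the compressed length.
 *
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	if (err)
 *		return err;
 */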

#endif	/* _LINUX_CRYPTO_H */