/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
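
/*
 * Example (illustrative sketch, not part of this header's API): a module
 * implementing the "aes" cipher declares both aliases with a single line,
 * making it autoloadable via the prefixed "crypto-aes" name as well as the
 * bare "aes" name:
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 */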

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080
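
/*
 * Example (illustrative sketch): the (type, mask) pair passed to the
 * crypto_alloc_*() helpers matches algorithms against these bits.  A caller
 * that must avoid asynchronous implementations leaves CRYPTO_ALG_ASYNC
 * clear in the type and sets it in the mask (crypto_alloc_ahash() is from
 * <crypto/hash.h>):
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */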

/*
 * Set if the algorithm (or an algorithm which it uses) requires another
 * algorithm of the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/*
 * Set this bit if the algorithm is hardware accelerated but not available
 * to userspace via an instruction set or similar means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading
 */
#define CRYPTO_NOLOAD			0x00008000

/*
 * The algorithm may allocate memory during request processing, i.e. during
 * encryption, decryption, or hashing.  Users can request an algorithm with this
 * flag unset if they can't handle memory allocation failures.
 *
 * This flag is currently only implemented for algorithms of type "skcipher",
 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
 * have this flag set even if they allocate memory.
 *
 * In some edge cases, algorithms can allocate memory regardless of this flag.
 * To avoid these cases, users must obey the following usage constraints:
 *    skcipher:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- If the data were to be divided into chunks of size
 *	  crypto_skcipher_walksize() (with any remainder going at the end), no
 *	  chunk can cross a page boundary or a scatterlist element boundary.
 *    aead:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- The first scatterlist element must contain all the associated data,
 *	  and its pages must be !PageHighMem.
 *	- If the plaintext/ciphertext were to be divided into chunks of size
 *	  crypto_aead_walksize() (with the remainder going at the end), no chunk
 *	  can cross a page boundary or a scatterlist element boundary.
 *    ahash:
 *	- The result buffer must be aligned to the algorithm's alignmask.
 *	- crypto_ahash_finup() must not be used unless the algorithm implements
 *	  ->finup() natively.
 */
#define CRYPTO_ALG_ALLOCATES_MEMORY	0x00010000
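
/*
 * Example (illustrative sketch): a caller that cannot tolerate allocation
 * failures in the request path, as described above, sets this flag in the
 * mask and leaves it clear in the type so that only non-allocating
 * implementations match (crypto_alloc_skcipher() is from
 * <crypto/skcipher.h>):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0,
 *				    CRYPTO_ALG_ALLOCATES_MEMORY);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */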

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  On architectures that support non-cache coherent
 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
 * that is required to ensure that the context struct member does not share any
 * cachelines with the rest of the struct. This is needed to ensure that cache
 * maintenance for non-coherent DMA (cache invalidation in particular) does not
 * affect data that may be accessed by the CPU concurrently.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef struct crypto_async_request crypto_completion_data_t;
typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		     the largest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MAX_KEY_SIZE"
 *		     include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *	        program a supplied key into the hardware or store the key in the
 *	        transformation context for programming it later. Note that this
 *	        function does modify the transformation context. This function
 *	        can be called multiple times during the existence of the
 *	        transformation object, so one must make sure the key is properly
 *	        reprogrammed into the hardware. This function is also
 *	        responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize big. This
 *		 always operates on a full @cra_blocksize and it is not possible
 *		 to encrypt a block of smaller size. The supplied buffers must
 *		 therefore also be at least of @cra_blocksize size. Both the
 *		 input and output buffers are always aligned to @cra_alignmask.
 *		 In case either of the input or output buffer supplied by user
 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
 *		 API will re-align the buffers. The re-alignment means that a
 *		 new buffer will be allocated, the data will be copied into the
 *		 new buffer, then the processing will happen on the new buffer,
 *		 then the data will be copied back into the original buffer and
 *		 finally the new buffer will be freed. In case a software
 *		 fallback was put in place in the @cra_init call, this function
 *		 might need to use the fallback if the algorithm doesn't support
 *		 all of the key sizes. In case the key was stored in
 *		 transformation context, the key might need to be re-programmed
 *		 into the hardware in this function. This function shall not
 *		 modify the transformation context, as this function may be
 *		 called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
	                  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};
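
/*
 * Example (illustrative sketch): a trivial single-block "cipher" showing
 * the shapes of the cia_* callbacks.  The mycipher_* names, the context
 * layout, and the XOR transform are hypothetical placeholders, not a real
 * algorithm:
 *
 *	struct mycipher_ctx {
 *		u8 key[16];
 *	};
 *
 *	static int mycipher_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				   unsigned int keylen)
 *	{
 *		struct mycipher_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen != sizeof(ctx->key))
 *			return -EINVAL;
 *		memcpy(ctx->key, key, keylen);
 *		return 0;
 *	}
 *
 *	static void mycipher_encrypt(struct crypto_tfm *tfm, u8 *dst,
 *				     const u8 *src)
 *	{
 *		struct mycipher_ctx *ctx = crypto_tfm_ctx(tfm);
 *		int i;
 *
 *		for (i = 0; i < 16; i++)
 *			dst[i] = src[i] ^ ctx->key[i];
 *	}
 */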

/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for AEAD requests
 */
struct crypto_istat_aead {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @verify_cnt:		number of verify operations
 * @sign_cnt:		number of sign requests
 * @err_cnt:		number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t verify_cnt;
	atomic64_t sign_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:	number of compress requests
 * @compress_tlen:	total data size handled by compress requests
 * @decompress_cnt:	number of decompress requests
 * @decompress_tlen:	total data size handled by decompress requests
 * @err_cnt:		number of errors for compress requests
 */
struct crypto_istat_compress {
	atomic64_t compress_cnt;
	atomic64_t compress_tlen;
	atomic64_t decompress_cnt;
	atomic64_t decompress_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:		number of hash requests
 * @hash_tlen:		total data size hashed
 * @err_cnt:		number of errors for hash requests
 */
struct crypto_istat_hash {
	atomic64_t hash_cnt;
	atomic64_t hash_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:		number of setsecret operations
 * @generate_public_key_cnt:	number of generate_public_key operations
 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
 * @err_cnt:			number of errors for KPP requests
 */
struct crypto_istat_kpp {
	atomic64_t setsecret_cnt;
	atomic64_t generate_public_key_cnt;
	atomic64_t compute_shared_secret_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng - statistics for RNG algorithm
 * @generate_cnt:	number of RNG generate requests
 * @generate_tlen:	total size of data generated by the RNG
 * @seed_cnt:		number of times the RNG was seeded
 * @err_cnt:		number of errors for RNG requests
 */
struct crypto_istat_rng {
	atomic64_t generate_cnt;
	atomic64_t generate_tlen;
	atomic64_t seed_cnt;
	atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. Users must respect this value. For HASH
 *		   transformations, a block smaller than @cra_blocksize may be
 *		   passed to the crypto API for transformation; for any other
 *		   transformation type, an error is returned upon any attempt
 *		   to transform a chunk smaller than @cra_blocksize.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *		   buffer containing the input data for the algorithm must be
 *		   aligned to this alignment mask. The data buffer for the
 *		   output data must be aligned to this alignment mask. Note that
 *		   the Crypto API will do the re-alignment in software, but
 *		   only under special conditions and there is a performance hit.
 *		   The re-alignment happens in these cases for the different
 *		   @cra_u types: cipher -- For both input data and output data
 *		   buffer; ahash -- For output hash destination buf; shash --
 *		   For output hash destination buf.
 *		   This is needed on hardware which is flawed by design and
 *		   cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with the same @cra_name are
 *		  available to the Crypto API, the kernel will use the one with
 *		  the highest @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See struct cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See struct compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE.
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:		statistics for AEAD algorithm
 * @stats.akcipher:	statistics for akcipher algorithm
 * @stats.cipher:	statistics for cipher algorithm
 * @stats.compress:	statistics for compress algorithm
 * @stats.hash:		statistics for hash algorithm
 * @stats.rng:		statistics for rng algorithm
 * @stats.kpp:		statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
	union {
		struct crypto_istat_aead aead;
		struct crypto_istat_akcipher akcipher;
		struct crypto_istat_cipher cipher;
		struct crypto_istat_compress compress;
		struct crypto_istat_hash hash;
		struct crypto_istat_rng rng;
		struct crypto_istat_kpp kpp;
	} stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;
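
/*
 * Example (illustrative sketch): a complete algorithm definition wiring up
 * the hypothetical mycipher_* callbacks sketched after struct cipher_alg
 * above.  Since XOR is its own inverse, decryption reuses the encrypt
 * callback here:
 *
 *	static struct crypto_alg mycipher_alg = {
 *		.cra_name		= "mycipher",
 *		.cra_driver_name	= "mycipher-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct mycipher_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= {
 *			.cipher = {
 *				.cia_min_keysize	= 16,
 *				.cia_max_keysize	= 16,
 *				.cia_setkey		= mycipher_setkey,
 *				.cia_encrypt		= mycipher_encrypt,
 *				.cia_decrypt		= mycipher_encrypt,
 *			},
 *		},
 *	};
 */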

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif

/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
static inline void *crypto_get_completion_data(crypto_completion_data_t *req)
{
	return req->data;
}

void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}
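
/*
 * Example (illustrative sketch): driving an asynchronous request to
 * completion with the helpers above.  Request setup is elided, and
 * skcipher_request_set_callback() / crypto_skcipher_encrypt() come from
 * <crypto/skcipher.h>:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int rc;
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	rc = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */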

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);
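
/*
 * Example (illustrative sketch): registering the hypothetical mycipher_alg
 * sketched earlier from module init/exit; crypto_register_algs() /
 * crypto_unregister_algs() handle arrays of definitions the same way:
 *
 *	static int __init mycipher_mod_init(void)
 *	{
 *		return crypto_register_alg(&mycipher_alg);
 *	}
 *	module_init(mycipher_mod_init);
 *
 *	static void __exit mycipher_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&mycipher_alg);
 *	}
 *	module_exit(mycipher_mod_exit);
 */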

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);
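
/*
 * Example (illustrative sketch): probing whether any "sha256"
 * implementation is available (a type/mask of 0/0 matches any); a zero
 * return means none was found or could be loaded:
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOENT;
 */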

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {
	u32 crt_flags;

	int node;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_comp {
	struct crypto_tfm base;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);
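
/*
 * Example (illustrative sketch): a one-shot compression call.  "deflate"
 * availability depends on the kernel configuration, and the src/slen input
 * plus destination buffer management are elided; dlen is updated in place
 * to the compressed length:
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_comp(tfm);
 */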

#endif	/* _LINUX_CRYPTO_H */