xref: /linux/include/crypto/skcipher.h (revision 82f89f1aa6ca33a6c1e50ddb165e2d5b882025e1)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * Symmetric key ciphers.
4  *
5  * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
6  */
7 
8 #ifndef _CRYPTO_SKCIPHER_H
9 #define _CRYPTO_SKCIPHER_H
10 
11 #include <linux/atomic.h>
12 #include <linux/container_of.h>
13 #include <linux/crypto.h>
14 #include <linux/slab.h>
15 #include <linux/string.h>
16 #include <linux/types.h>
17 
/*
 * Flags passed in the "flags" argument of the lskcipher ->encrypt() and
 * ->decrypt() hooks (see struct lskcipher_alg below).
 */
/* Set this bit if the lskcipher operation is a continuation. */
#define CRYPTO_LSKCIPHER_FLAG_CONT	0x00000001
/* Set this bit if the lskcipher operation is final. */
#define CRYPTO_LSKCIPHER_FLAG_FINAL	0x00000002
/* The bit CRYPTO_TFM_REQ_MAY_SLEEP can also be set if needed. */

/*
 * Request flags for the skcipher API.  NOTE(review): presumably carried
 * in skcipher_request::base.flags — confirm against the implementation.
 */
/* Set this bit if the skcipher operation is a continuation. */
#define CRYPTO_SKCIPHER_REQ_CONT	0x00000001
/* Set this bit if the skcipher operation is not final. */
#define CRYPTO_SKCIPHER_REQ_NOTFINAL	0x00000002
28 
29 struct scatterlist;
30 
/**
 *	struct skcipher_request - Symmetric key cipher request
 *	@cryptlen: Number of bytes to encrypt or decrypt
 *	@iv: Initialisation Vector
 *	@src: Source SG list
 *	@dst: Destination SG list
 *	@base: Underlying async request
 *	@__ctx: Start of private context data
 */
struct skcipher_request {
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *src;
	struct scatterlist *dst;

	struct crypto_async_request base;

	/*
	 * Per-request implementation context; crypto_skcipher_reqsize()
	 * bytes follow the structure, aligned by CRYPTO_MINALIGN_ATTR.
	 */
	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
52 
/**
 * struct crypto_skcipher - skcipher cipher handle
 * @reqsize: size in bytes of the private request context needed by the
 *	     implementation (reported by crypto_skcipher_reqsize())
 * @base: generic transformation object this handle wraps
 */
struct crypto_skcipher {
	unsigned int reqsize;

	struct crypto_tfm base;
};
58 
/**
 * struct crypto_sync_skcipher - synchronous skcipher handle
 * @base: wrapped skcipher handle
 *
 * Distinct wrapper type so that SYNC_SKCIPHER_REQUEST_ON_STACK() can
 * type-check that on-stack requests are only used with sync tfms.
 */
struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};
62 
/**
 * struct crypto_lskcipher - linear skcipher handle
 * @base: generic transformation object this handle wraps
 */
struct crypto_lskcipher {
	struct crypto_tfm base;
};
66 
/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};
82 
/*
 * Expands to the statistics member of the common skcipher algorithm
 * structure when CONFIG_CRYPTO_STATS is enabled, and to nothing
 * otherwise, so the field only exists in stats-enabled builds.
 */
#ifdef CONFIG_CRYPTO_STATS
#define SKCIPHER_ALG_COMMON_STAT struct crypto_istat_cipher stat;
#else
#define SKCIPHER_ALG_COMMON_STAT
#endif
88 
/*
 * struct skcipher_alg_common - common properties of skcipher_alg
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size, except for stream ciphers such as
 *	       CTR, whose block size is 1; for those it is set to the block
 *	       size of the underlying block cipher, since the IV cannot be
 *	       advanced with sub-block granularity.
 * @statesize: Size of the internal state for the algorithm.
 * @stat: Statistics for cipher algorithm
 * @base: Definition of a generic crypto algorithm.
 *
 * Defined as a macro so struct skcipher_alg can embed the same fields
 * through an anonymous union member (see below).
 */
#define SKCIPHER_ALG_COMMON {		\
	unsigned int min_keysize;	\
	unsigned int max_keysize;	\
	unsigned int ivsize;		\
	unsigned int chunksize;		\
	unsigned int statesize;		\
					\
	SKCIPHER_ALG_COMMON_STAT	\
					\
	struct crypto_alg base;		\
}
struct skcipher_alg_common SKCIPHER_ALG_COMMON;
121 
/**
 * struct skcipher_alg - symmetric key cipher definition
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *	     the supplied scatterlist containing the blocks of data. The crypto
 *	     API consumer is responsible for aligning the entries of the
 *	     scatterlist properly and making sure the chunks are correctly
 *	     sized. In case a software fallback was put in place in the
 *	     @cra_init call, this function might need to use the fallback if
 *	     the algorithm doesn't support all of the key sizes. In case the
 *	     key was stored in transformation context, the key might need to be
 *	     re-programmed into the hardware in this function. This function
 *	     shall not modify the transformation context, as this function may
 *	     be called in parallel with the same transformation object.
 * @decrypt: Decrypt a scatterlist of blocks. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @export: Export partial state of the transformation. This function dumps the
 *	    entire state of the ongoing transformation into a provided block of
 *	    data so it can be @import 'ed back later on. This is useful in case
 *	    you want to save partial result of the transformation after
 *	    processing certain amount of data and reload this partial result
 *	    multiple times later on for multiple re-use. No data processing
 *	    happens at this point.
 * @import: Import partial state of the transformation. This function loads the
 *	    entire state of the ongoing transformation from a provided block of
 *	    data so the transformation can continue from this point onward. No
 *	    data processing happens at this point.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated. In case the
 *	  cryptographic hardware has some special requirements which need to
 *	  be handled by software, this function shall check for the precise
 *	  requirement of the transformation and put any software fallbacks
 *	  in place.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size, except for stream ciphers such as
 *	       CTR, whose block size is 1; for those it is set to the block
 *	       size of the underlying block cipher, since the IV cannot be
 *	       advanced with sub-block granularity.
 * @walksize: Equal to the chunk size except in cases where the algorithm is
 * 	      considerably more efficient if it can operate on multiple chunks
 * 	      in parallel. Should be a multiple of chunksize.
 * @stat: Statistics for cipher algorithm
 * @base: Definition of a generic crypto algorithm.
 * @co: see struct skcipher_alg_common
 *
 * @setkey, @encrypt, @decrypt, @min_keysize and @max_keysize are mandatory
 * and must be filled; the remaining hooks and sizes are optional.
 */
struct skcipher_alg {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);
	int (*export)(struct skcipher_request *req, void *out);
	int (*import)(struct skcipher_request *req, const void *in);
	int (*init)(struct crypto_skcipher *tfm);
	void (*exit)(struct crypto_skcipher *tfm);

	unsigned int walksize;

	/* Common fields, reachable both anonymously and through @co. */
	union {
		struct SKCIPHER_ALG_COMMON;
		struct skcipher_alg_common co;
	};
};
209 
/**
 * struct lskcipher_alg - linear symmetric key cipher definition
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a number of bytes. This function is used to encrypt
 *	     the supplied data.  This function shall not modify
 *	     the transformation context, as this function may be called
 *	     in parallel with the same transformation object.  Data
 *	     may be left over if length is not a multiple of blocks
 *	     and there is more to come, i.e. the CRYPTO_LSKCIPHER_FLAG_FINAL
 *	     bit is clear in the flags argument (final == false).  The
 *	     number of left-over bytes should be returned in case of
 *	     success.
 *	     The siv field shall be as long as ivsize + statesize with
 *	     the IV placed at the front.  The state will be used by the
 *	     algorithm internally.
 * @decrypt: Decrypt a number of bytes. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @co: see struct skcipher_alg_common
 */
struct lskcipher_alg {
	int (*setkey)(struct crypto_lskcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*decrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*init)(struct crypto_lskcipher *tfm);
	void (*exit)(struct crypto_lskcipher *tfm);

	struct skcipher_alg_common co;
};
255 
#define MAX_SYNC_SKCIPHER_REQSIZE      384
/*
 * This performs a type-check against the "tfm" argument to make sure
 * all users have the correct skcipher tfm for doing on-stack requests.
 * The pointer comparison inside sizeof() draws a compiler diagnostic
 * when "tfm" is not a struct crypto_sync_skcipher *; since sizeof() of
 * the comparison result is always non-zero, the !(...) term evaluates
 * to 0 and never changes the size of the on-stack buffer.
 */
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE + \
			     (!(sizeof((struct crypto_sync_skcipher *)1 == \
				       (typeof(tfm))1))) \
			    ] CRYPTO_MINALIGN_ATTR; \
	struct skcipher_request *name = (void *)__##name##_desc
268 
269 /**
270  * DOC: Symmetric Key Cipher API
271  *
272  * Symmetric key cipher API is used with the ciphers of type
273  * CRYPTO_ALG_TYPE_SKCIPHER (listed as type "skcipher" in /proc/crypto).
274  *
275  * Asynchronous cipher operations imply that the function invocation for a
276  * cipher request returns immediately before the completion of the operation.
277  * The cipher request is scheduled as a separate kernel thread and therefore
278  * load-balanced on the different CPUs via the process scheduler. To allow
279  * the kernel crypto API to inform the caller about the completion of a cipher
280  * request, the caller must provide a callback function. That function is
281  * invoked with the cipher handle when the request completes.
282  *
 * To support the asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the skcipher_request data structure.
286  *
287  * For the symmetric key cipher API, the state is maintained with the tfm
288  * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced by the request data structure in
 * addition to the IV used for the cipher request. The maintenance of such
292  * state information would be important for a crypto driver implementer to
293  * have, because when calling the callback function upon completion of the
294  * cipher operation, that callback function may need some information about
295  * which operation just finished if it invoked multiple in parallel. This
296  * state information is unused by the kernel crypto API.
297  */
298 
299 static inline struct crypto_skcipher *__crypto_skcipher_cast(
300 	struct crypto_tfm *tfm)
301 {
302 	return container_of(tfm, struct crypto_skcipher, base);
303 }
304 
305 /**
306  * crypto_alloc_skcipher() - allocate symmetric key cipher handle
307  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
308  *	      skcipher cipher
309  * @type: specifies the type of the cipher
310  * @mask: specifies the mask for the cipher
311  *
312  * Allocate a cipher handle for an skcipher. The returned struct
313  * crypto_skcipher is the cipher handle that is required for any subsequent
314  * API invocation for that skcipher.
315  *
316  * Return: allocated cipher handle in case of success; IS_ERR() is true in case
317  *	   of an error, PTR_ERR() returns the error code.
318  */
319 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
320 					      u32 type, u32 mask);
321 
322 struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(const char *alg_name,
323 					      u32 type, u32 mask);
324 
325 
326 /**
327  * crypto_alloc_lskcipher() - allocate linear symmetric key cipher handle
328  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
329  *	      lskcipher
330  * @type: specifies the type of the cipher
331  * @mask: specifies the mask for the cipher
332  *
333  * Allocate a cipher handle for an lskcipher. The returned struct
334  * crypto_lskcipher is the cipher handle that is required for any subsequent
335  * API invocation for that lskcipher.
336  *
337  * Return: allocated cipher handle in case of success; IS_ERR() is true in case
338  *	   of an error, PTR_ERR() returns the error code.
339  */
340 struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
341 						u32 type, u32 mask);
342 
343 static inline struct crypto_tfm *crypto_skcipher_tfm(
344 	struct crypto_skcipher *tfm)
345 {
346 	return &tfm->base;
347 }
348 
349 static inline struct crypto_tfm *crypto_lskcipher_tfm(
350 	struct crypto_lskcipher *tfm)
351 {
352 	return &tfm->base;
353 }
354 
/**
 * crypto_free_skcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * Passing a NULL or error pointer is harmless; nothing happens then.
 */
static inline void crypto_free_skcipher(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	crypto_destroy_tfm(tfm, base);
}
365 
366 static inline void crypto_free_sync_skcipher(struct crypto_sync_skcipher *tfm)
367 {
368 	crypto_free_skcipher(&tfm->base);
369 }
370 
/**
 * crypto_free_lskcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * Passing a NULL or error pointer is harmless; nothing happens then.
 */
static inline void crypto_free_lskcipher(struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	crypto_destroy_tfm(tfm, base);
}
381 
382 /**
383  * crypto_has_skcipher() - Search for the availability of an skcipher.
384  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
385  *	      skcipher
386  * @type: specifies the type of the skcipher
387  * @mask: specifies the mask for the skcipher
388  *
389  * Return: true when the skcipher is known to the kernel crypto API; false
390  *	   otherwise
391  */
392 int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask);
393 
/* Driver name (cra_driver_name) of the algorithm bound to @tfm. */
static inline const char *crypto_skcipher_driver_name(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_driver_name(base);
}
399 
/* Driver name (cra_driver_name) of the algorithm bound to @tfm. */
static inline const char *crypto_lskcipher_driver_name(struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_driver_name(base);
}
405 
406 static inline struct skcipher_alg_common *crypto_skcipher_alg_common(
407 	struct crypto_skcipher *tfm)
408 {
409 	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
410 			    struct skcipher_alg_common, base);
411 }
412 
413 static inline struct skcipher_alg *crypto_skcipher_alg(
414 	struct crypto_skcipher *tfm)
415 {
416 	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
417 			    struct skcipher_alg, base);
418 }
419 
420 static inline struct lskcipher_alg *crypto_lskcipher_alg(
421 	struct crypto_lskcipher *tfm)
422 {
423 	return container_of(crypto_lskcipher_tfm(tfm)->__crt_alg,
424 			    struct lskcipher_alg, co.base);
425 }
426 
427 /**
428  * crypto_skcipher_ivsize() - obtain IV size
429  * @tfm: cipher handle
430  *
431  * The size of the IV for the skcipher referenced by the cipher handle is
432  * returned. This IV size may be zero if the cipher does not need an IV.
433  *
434  * Return: IV size in bytes
435  */
436 static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
437 {
438 	return crypto_skcipher_alg_common(tfm)->ivsize;
439 }
440 
441 static inline unsigned int crypto_sync_skcipher_ivsize(
442 	struct crypto_sync_skcipher *tfm)
443 {
444 	return crypto_skcipher_ivsize(&tfm->base);
445 }
446 
447 /**
448  * crypto_lskcipher_ivsize() - obtain IV size
449  * @tfm: cipher handle
450  *
451  * The size of the IV for the lskcipher referenced by the cipher handle is
452  * returned. This IV size may be zero if the cipher does not need an IV.
453  *
454  * Return: IV size in bytes
455  */
456 static inline unsigned int crypto_lskcipher_ivsize(
457 	struct crypto_lskcipher *tfm)
458 {
459 	return crypto_lskcipher_alg(tfm)->co.ivsize;
460 }
461 
/**
 * crypto_skcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * Report the block size of the skcipher behind @tfm so the caller can
 * size buffers for encryption or decryption results appropriately.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_skcipher_blocksize(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_blocksize(base);
}
477 
/**
 * crypto_lskcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * Report the block size of the lskcipher behind @tfm so the caller can
 * size buffers for encryption or decryption results appropriately.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_lskcipher_blocksize(struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_blocksize(base);
}
493 
494 /**
495  * crypto_skcipher_chunksize() - obtain chunk size
496  * @tfm: cipher handle
497  *
498  * The block size is set to one for ciphers such as CTR.  However,
499  * you still need to provide incremental updates in multiples of
500  * the underlying block size as the IV does not have sub-block
501  * granularity.  This is known in this API as the chunk size.
502  *
503  * Return: chunk size in bytes
504  */
505 static inline unsigned int crypto_skcipher_chunksize(
506 	struct crypto_skcipher *tfm)
507 {
508 	return crypto_skcipher_alg_common(tfm)->chunksize;
509 }
510 
511 /**
512  * crypto_lskcipher_chunksize() - obtain chunk size
513  * @tfm: cipher handle
514  *
515  * The block size is set to one for ciphers such as CTR.  However,
516  * you still need to provide incremental updates in multiples of
517  * the underlying block size as the IV does not have sub-block
518  * granularity.  This is known in this API as the chunk size.
519  *
520  * Return: chunk size in bytes
521  */
522 static inline unsigned int crypto_lskcipher_chunksize(
523 	struct crypto_lskcipher *tfm)
524 {
525 	return crypto_lskcipher_alg(tfm)->co.chunksize;
526 }
527 
528 /**
529  * crypto_skcipher_statesize() - obtain state size
530  * @tfm: cipher handle
531  *
532  * Some algorithms cannot be chained with the IV alone.  They carry
533  * internal state which must be replicated if data is to be processed
534  * incrementally.  The size of that state can be obtained with this
535  * function.
536  *
537  * Return: state size in bytes
538  */
539 static inline unsigned int crypto_skcipher_statesize(
540 	struct crypto_skcipher *tfm)
541 {
542 	return crypto_skcipher_alg_common(tfm)->statesize;
543 }
544 
545 /**
546  * crypto_lskcipher_statesize() - obtain state size
547  * @tfm: cipher handle
548  *
549  * Some algorithms cannot be chained with the IV alone.  They carry
550  * internal state which must be replicated if data is to be processed
551  * incrementally.  The size of that state can be obtained with this
552  * function.
553  *
554  * Return: state size in bytes
555  */
556 static inline unsigned int crypto_lskcipher_statesize(
557 	struct crypto_lskcipher *tfm)
558 {
559 	return crypto_lskcipher_alg(tfm)->co.statesize;
560 }
561 
562 static inline unsigned int crypto_sync_skcipher_blocksize(
563 	struct crypto_sync_skcipher *tfm)
564 {
565 	return crypto_skcipher_blocksize(&tfm->base);
566 }
567 
/* Alignment mask the algorithm behind @tfm requires for its buffers. */
static inline unsigned int crypto_skcipher_alignmask(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_alignmask(base);
}
573 
/* Alignment mask the algorithm behind @tfm requires for its buffers. */
static inline unsigned int crypto_lskcipher_alignmask(struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_alignmask(base);
}
579 
580 static inline u32 crypto_skcipher_get_flags(struct crypto_skcipher *tfm)
581 {
582 	return crypto_tfm_get_flags(crypto_skcipher_tfm(tfm));
583 }
584 
585 static inline void crypto_skcipher_set_flags(struct crypto_skcipher *tfm,
586 					       u32 flags)
587 {
588 	crypto_tfm_set_flags(crypto_skcipher_tfm(tfm), flags);
589 }
590 
591 static inline void crypto_skcipher_clear_flags(struct crypto_skcipher *tfm,
592 						 u32 flags)
593 {
594 	crypto_tfm_clear_flags(crypto_skcipher_tfm(tfm), flags);
595 }
596 
597 static inline u32 crypto_sync_skcipher_get_flags(
598 	struct crypto_sync_skcipher *tfm)
599 {
600 	return crypto_skcipher_get_flags(&tfm->base);
601 }
602 
603 static inline void crypto_sync_skcipher_set_flags(
604 	struct crypto_sync_skcipher *tfm, u32 flags)
605 {
606 	crypto_skcipher_set_flags(&tfm->base, flags);
607 }
608 
609 static inline void crypto_sync_skcipher_clear_flags(
610 	struct crypto_sync_skcipher *tfm, u32 flags)
611 {
612 	crypto_skcipher_clear_flags(&tfm->base, flags);
613 }
614 
615 static inline u32 crypto_lskcipher_get_flags(struct crypto_lskcipher *tfm)
616 {
617 	return crypto_tfm_get_flags(crypto_lskcipher_tfm(tfm));
618 }
619 
620 static inline void crypto_lskcipher_set_flags(struct crypto_lskcipher *tfm,
621 					       u32 flags)
622 {
623 	crypto_tfm_set_flags(crypto_lskcipher_tfm(tfm), flags);
624 }
625 
626 static inline void crypto_lskcipher_clear_flags(struct crypto_lskcipher *tfm,
627 						 u32 flags)
628 {
629 	crypto_tfm_clear_flags(crypto_lskcipher_tfm(tfm), flags);
630 }
631 
632 /**
633  * crypto_skcipher_setkey() - set key for cipher
634  * @tfm: cipher handle
635  * @key: buffer holding the key
636  * @keylen: length of the key in bytes
637  *
638  * The caller provided key is set for the skcipher referenced by the cipher
639  * handle.
640  *
641  * Note, the key length determines the cipher type. Many block ciphers implement
642  * different cipher modes depending on the key size, such as AES-128 vs AES-192
643  * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
644  * is performed.
645  *
646  * Return: 0 if the setting of the key was successful; < 0 if an error occurred
647  */
648 int crypto_skcipher_setkey(struct crypto_skcipher *tfm,
649 			   const u8 *key, unsigned int keylen);
650 
651 static inline int crypto_sync_skcipher_setkey(struct crypto_sync_skcipher *tfm,
652 					 const u8 *key, unsigned int keylen)
653 {
654 	return crypto_skcipher_setkey(&tfm->base, key, keylen);
655 }
656 
657 /**
658  * crypto_lskcipher_setkey() - set key for cipher
659  * @tfm: cipher handle
660  * @key: buffer holding the key
661  * @keylen: length of the key in bytes
662  *
663  * The caller provided key is set for the lskcipher referenced by the cipher
664  * handle.
665  *
666  * Note, the key length determines the cipher type. Many block ciphers implement
667  * different cipher modes depending on the key size, such as AES-128 vs AES-192
668  * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
669  * is performed.
670  *
671  * Return: 0 if the setting of the key was successful; < 0 if an error occurred
672  */
673 int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm,
674 			    const u8 *key, unsigned int keylen);
675 
676 static inline unsigned int crypto_skcipher_min_keysize(
677 	struct crypto_skcipher *tfm)
678 {
679 	return crypto_skcipher_alg_common(tfm)->min_keysize;
680 }
681 
682 static inline unsigned int crypto_skcipher_max_keysize(
683 	struct crypto_skcipher *tfm)
684 {
685 	return crypto_skcipher_alg_common(tfm)->max_keysize;
686 }
687 
688 static inline unsigned int crypto_lskcipher_min_keysize(
689 	struct crypto_lskcipher *tfm)
690 {
691 	return crypto_lskcipher_alg(tfm)->co.min_keysize;
692 }
693 
694 static inline unsigned int crypto_lskcipher_max_keysize(
695 	struct crypto_lskcipher *tfm)
696 {
697 	return crypto_lskcipher_alg(tfm)->co.max_keysize;
698 }
699 
700 /**
701  * crypto_skcipher_reqtfm() - obtain cipher handle from request
702  * @req: skcipher_request out of which the cipher handle is to be obtained
703  *
704  * Return the crypto_skcipher handle when furnishing an skcipher_request
705  * data structure.
706  *
707  * Return: crypto_skcipher handle
708  */
709 static inline struct crypto_skcipher *crypto_skcipher_reqtfm(
710 	struct skcipher_request *req)
711 {
712 	return __crypto_skcipher_cast(req->base.tfm);
713 }
714 
715 static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
716 	struct skcipher_request *req)
717 {
718 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
719 
720 	return container_of(tfm, struct crypto_sync_skcipher, base);
721 }
722 
723 /**
724  * crypto_skcipher_encrypt() - encrypt plaintext
725  * @req: reference to the skcipher_request handle that holds all information
726  *	 needed to perform the cipher operation
727  *
728  * Encrypt plaintext data using the skcipher_request handle. That data
729  * structure and how it is filled with data is discussed with the
730  * skcipher_request_* functions.
731  *
732  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
733  */
734 int crypto_skcipher_encrypt(struct skcipher_request *req);
735 
736 /**
737  * crypto_skcipher_decrypt() - decrypt ciphertext
738  * @req: reference to the skcipher_request handle that holds all information
739  *	 needed to perform the cipher operation
740  *
741  * Decrypt ciphertext data using the skcipher_request handle. That data
742  * structure and how it is filled with data is discussed with the
743  * skcipher_request_* functions.
744  *
745  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
746  */
747 int crypto_skcipher_decrypt(struct skcipher_request *req);
748 
749 /**
750  * crypto_skcipher_export() - export partial state
751  * @req: reference to the skcipher_request handle that holds all information
752  *	 needed to perform the operation
753  * @out: output buffer of sufficient size that can hold the state
754  *
755  * Export partial state of the transformation. This function dumps the
756  * entire state of the ongoing transformation into a provided block of
757  * data so it can be @import 'ed back later on. This is useful in case
758  * you want to save partial result of the transformation after
759  * processing certain amount of data and reload this partial result
760  * multiple times later on for multiple re-use. No data processing
761  * happens at this point.
762  *
763  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
764  */
765 int crypto_skcipher_export(struct skcipher_request *req, void *out);
766 
767 /**
768  * crypto_skcipher_import() - import partial state
769  * @req: reference to the skcipher_request handle that holds all information
770  *	 needed to perform the operation
771  * @in: buffer holding the state
772  *
773  * Import partial state of the transformation. This function loads the
774  * entire state of the ongoing transformation from a provided block of
775  * data so the transformation can continue from this point onward. No
776  * data processing happens at this point.
777  *
778  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
779  */
780 int crypto_skcipher_import(struct skcipher_request *req, const void *in);
781 
782 /**
783  * crypto_lskcipher_encrypt() - encrypt plaintext
784  * @tfm: lskcipher handle
785  * @src: source buffer
786  * @dst: destination buffer
787  * @len: number of bytes to process
788  * @siv: IV + state for the cipher operation.  The length of the IV must
789  *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
790  *	 IV is then followed with a buffer with the length as specified by
791  *	 crypto_lskcipher_statesize.
 *
 * Encrypt plaintext data using the lskcipher handle.
793  *
794  * Return: >=0 if the cipher operation was successful, if positive
795  *	   then this many bytes have been left unprocessed;
796  *	   < 0 if an error occurred
797  */
798 int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
799 			     u8 *dst, unsigned len, u8 *siv);
800 
801 /**
802  * crypto_lskcipher_decrypt() - decrypt ciphertext
803  * @tfm: lskcipher handle
804  * @src: source buffer
805  * @dst: destination buffer
806  * @len: number of bytes to process
807  * @siv: IV + state for the cipher operation.  The length of the IV must
808  *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
809  *	 IV is then followed with a buffer with the length as specified by
810  *	 crypto_lskcipher_statesize.
811  *
812  * Decrypt ciphertext data using the lskcipher handle.
813  *
814  * Return: >=0 if the cipher operation was successful, if positive
815  *	   then this many bytes have been left unprocessed;
816  *	   < 0 if an error occurred
817  */
818 int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
819 			     u8 *dst, unsigned len, u8 *siv);
820 
821 /**
822  * DOC: Symmetric Key Cipher Request Handle
823  *
824  * The skcipher_request data structure contains all pointers to data
825  * required for the symmetric key cipher operation. This includes the cipher
826  * handle (which can be used by multiple skcipher_request instances), pointer
827  * to plaintext and ciphertext, asynchronous callback function, etc. It acts
 * as a handle to the skcipher_request_* API calls in a similar way as the
 * skcipher handle does to the crypto_skcipher_* API calls.
830  */
831 
/**
 * crypto_skcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * The value is the number of bytes of private context that must follow a
 * struct skcipher_request for this transform, i.e. the extra space that
 * skcipher_request_alloc() allocates after the request structure itself.
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_skcipher_reqsize(struct crypto_skcipher *tfm)
{
	return tfm->reqsize;
}
842 
/**
 * skcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing skcipher handle in the request
 * data structure with a different one.
 */
static inline void skcipher_request_set_tfm(struct skcipher_request *req,
					    struct crypto_skcipher *tfm)
{
	/* Only the underlying crypto_tfm pointer is stored in the request. */
	req->base.tfm = crypto_skcipher_tfm(tfm);
}
856 
/**
 * skcipher_request_set_sync_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: sync cipher handle that shall be added to the request handle
 *
 * Same as skcipher_request_set_tfm(), but takes a crypto_sync_skcipher
 * handle; its embedded crypto_skcipher base is stored in the request.
 */
static inline void skcipher_request_set_sync_tfm(struct skcipher_request *req,
					    struct crypto_sync_skcipher *tfm)
{
	skcipher_request_set_tfm(req, &tfm->base);
}
862 
/*
 * skcipher_request_cast() - obtain the skcipher_request that contains a
 * given crypto_async_request (i.e. the request's @base member).
 */
static inline struct skcipher_request *skcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct skcipher_request, base);
}
868 
869 /**
870  * skcipher_request_alloc() - allocate request data structure
871  * @tfm: cipher handle to be registered with the request
872  * @gfp: memory allocation flag that is handed to kmalloc by the API call.
873  *
874  * Allocate the request data structure that must be used with the skcipher
875  * encrypt and decrypt API calls. During the allocation, the provided skcipher
876  * handle is registered in the request data structure.
877  *
878  * Return: allocated request handle in case of success, or NULL if out of memory
879  */
880 static inline struct skcipher_request *skcipher_request_alloc(
881 	struct crypto_skcipher *tfm, gfp_t gfp)
882 {
883 	struct skcipher_request *req;
884 
885 	req = kmalloc(sizeof(struct skcipher_request) +
886 		      crypto_skcipher_reqsize(tfm), gfp);
887 
888 	if (likely(req))
889 		skcipher_request_set_tfm(req, tfm);
890 
891 	return req;
892 }
893 
/**
 * skcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 *
 * The request memory is cleared before being released so that key or
 * plaintext material held in the private context does not linger.
 */
static inline void skcipher_request_free(struct skcipher_request *req)
{
	kfree_sensitive(req);
}
902 
/*
 * skcipher_request_zero() - clear a request without freeing it.  Wipes both
 * the request structure and its trailing private context
 * (crypto_skcipher_reqsize() bytes) using memzero_explicit() so the
 * clearing cannot be optimized away.
 */
static inline void skcipher_request_zero(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	memzero_explicit(req, sizeof(*req) + crypto_skcipher_reqsize(tfm));
}
909 
910 /**
911  * skcipher_request_set_callback() - set asynchronous callback function
912  * @req: request handle
913  * @flags: specify zero or an ORing of the flags
914  *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
915  *	   increase the wait queue beyond the initial maximum size;
916  *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
917  * @compl: callback function pointer to be registered with the request handle
918  * @data: The data pointer refers to memory that is not used by the kernel
919  *	  crypto API, but provided to the callback function for it to use. Here,
920  *	  the caller can provide a reference to memory the callback function can
921  *	  operate on. As the callback function is invoked asynchronously to the
922  *	  related functionality, it may need to access data structures of the
923  *	  related functionality which can be referenced using this pointer. The
924  *	  callback function can access the memory via the "data" field in the
925  *	  crypto_async_request data structure provided to the callback function.
926  *
927  * This function allows setting the callback function that is triggered once the
928  * cipher operation completes.
929  *
930  * The callback function is registered with the skcipher_request handle and
931  * must comply with the following template::
932  *
933  *	void callback_function(struct crypto_async_request *req, int error)
934  */
935 static inline void skcipher_request_set_callback(struct skcipher_request *req,
936 						 u32 flags,
937 						 crypto_completion_t compl,
938 						 void *data)
939 {
940 	req->base.complete = compl;
941 	req->base.data = data;
942 	req->base.flags = flags;
943 }
944 
945 /**
946  * skcipher_request_set_crypt() - set data buffers
947  * @req: request handle
948  * @src: source scatter / gather list
949  * @dst: destination scatter / gather list
950  * @cryptlen: number of bytes to process from @src
951  * @iv: IV for the cipher operation which must comply with the IV size defined
952  *      by crypto_skcipher_ivsize
953  *
954  * This function allows setting of the source data and destination data
955  * scatter / gather lists.
956  *
957  * For encryption, the source is treated as the plaintext and the
958  * destination is the ciphertext. For a decryption operation, the use is
959  * reversed - the source is the ciphertext and the destination is the plaintext.
960  */
961 static inline void skcipher_request_set_crypt(
962 	struct skcipher_request *req,
963 	struct scatterlist *src, struct scatterlist *dst,
964 	unsigned int cryptlen, void *iv)
965 {
966 	req->src = src;
967 	req->dst = dst;
968 	req->cryptlen = cryptlen;
969 	req->iv = iv;
970 }
971 
972 #endif	/* _CRYPTO_SKCIPHER_H */
973 
974