xref: /linux/include/crypto/cryptd.h (revision 4ab5a5d2a4a2289c2af07accbec7170ca5671f41)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Software async crypto daemon
4  *
5  * Added AEAD support to cryptd.
6  *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
7  *             Adrian Hoban <adrian.hoban@intel.com>
8  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
9  *             Aidan O'Mahony (aidan.o.mahony@intel.com)
10  *    Copyright (c) 2010, Intel Corporation.
11  */
12 
13 #ifndef _CRYPTO_CRYPT_H
14 #define _CRYPTO_CRYPT_H
15 
16 #include <linux/kernel.h>
17 #include <crypto/aead.h>
18 #include <crypto/hash.h>
19 #include <crypto/skcipher.h>
20 
/*
 * cryptd wrapper around an async blkcipher transform.  The base tfm is
 * the first (and only) member, so wrapper and base share an address —
 * __cryptd_ablkcipher_cast() relies on this layout.
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};
24 
/*
 * Reinterpret a base crypto_ablkcipher handle as its cryptd wrapper.
 * Safe because struct cryptd_ablkcipher embeds the base tfm as its
 * first member, so both objects start at the same address.
 */
static inline struct cryptd_ablkcipher *
__cryptd_ablkcipher_cast(struct crypto_ablkcipher *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
30 
/*
 * alg_name should be the algorithm to be cryptd-ed.  An instance
 * returned by cryptd_alloc_ablkcipher() must be released with
 * cryptd_free_ablkcipher().
 */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/*
 * NOTE(review): returns the underlying synchronous crypto_blkcipher
 * child (not a crypto_ablkcipher) — confirm against cryptd.c.
 */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/*
 * Presumably must be called without moving CPUs, like the other
 * *_queued() helpers below — confirm.
 */
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
37 
/*
 * cryptd wrapper around a skcipher transform.  The base tfm is the
 * sole member, so the wrapper is address-compatible with the base.
 */
struct cryptd_skcipher {
	struct crypto_skcipher base;
};
41 
/*
 * alg_name is the skcipher algorithm to be cryptd-ed.  An instance
 * returned by cryptd_alloc_skcipher() must be released with
 * cryptd_free_skcipher().
 */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
/* Returns the wrapped (child) skcipher transform. */
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
48 
/*
 * cryptd wrapper around an async hash transform.  The base tfm is the
 * sole member, so wrapper and base share an address —
 * __cryptd_ahash_cast() relies on this layout.
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};
52 
/*
 * Reinterpret a base crypto_ahash handle as its cryptd wrapper.
 * Safe because struct cryptd_ahash embeds the base tfm as its first
 * member, so both objects start at the same address.
 */
static inline struct cryptd_ahash *
__cryptd_ahash_cast(struct crypto_ahash *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
58 
/*
 * alg_name should be the algorithm to be cryptd-ed.  An instance
 * returned by cryptd_alloc_ahash() must be released with
 * cryptd_free_ahash().
 */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/*
 * NOTE(review): returns the underlying synchronous crypto_shash child
 * (not a crypto_ahash) — confirm against cryptd.c.
 */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Access the shash_desc embedded in a cryptd-backed ahash request. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
67 
/*
 * cryptd wrapper around an AEAD transform.  The base tfm is the sole
 * member, so wrapper and base share an address — __cryptd_aead_cast()
 * relies on this layout.
 */
struct cryptd_aead {
	struct crypto_aead base;
};
71 
/*
 * Reinterpret a base crypto_aead handle as its cryptd wrapper.
 * Safe because struct cryptd_aead embeds the base tfm as its first
 * member, so both objects start at the same address.
 */
static inline struct cryptd_aead *
__cryptd_aead_cast(struct crypto_aead *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
77 
/*
 * alg_name is the AEAD algorithm to be cryptd-ed.  An instance
 * returned by cryptd_alloc_aead() must be released with
 * cryptd_free_aead().
 */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
					  u32 type, u32 mask);

/* Returns the wrapped (child) AEAD transform. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);
86 
87 #endif
88