/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		127
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

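/*
 * Illustrative sketch (not taken from this file; "buf" and "alignmask" are
 * made-up names): the limits above allow generic code to use a fixed-size
 * on-stack buffer for one block of any conforming cipher and align a
 * pointer into it at run time.
 *
 *	u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 *	u8 *tmp = PTR_ALIGN(buf, alignmask + 1);
 */
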
#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, distinct base names
 * (alias_userspace and alias_crypto) keep the two expansions from colliding.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)

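/*
 * Example (illustrative): a module providing "aes" declares both aliases
 * with one line, so it can be autoloaded via the prefixed "crypto-aes"
 * name while the plain "aes" alias keeps working for userspace:
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 */
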
struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

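/*
 * Minimal registration sketch (illustrative; names and values are made up,
 * and the type-specific flags and operations a real algorithm must set are
 * omitted): fill in a struct crypto_alg and register it from module init,
 * unregistering on exit.
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "foo",
 *		.cra_driver_name	= "foo-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct my_ctx),
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init my_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 */
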
void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

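/*
 * Enqueue sketch (illustrative; my_dev, its lock, queue and workqueue are
 * made up): a driver's request entry point typically queues the request
 * under a lock, kicks a worker, and propagates the return value of
 * crypto_enqueue_request() (-EINPROGRESS when queued, -EBUSY when the
 * request went onto the backlog, -ENOSPC when the queue is full and
 * backlogging was not requested).
 *
 *	static int my_queue_req(struct my_dev *dev,
 *				struct crypto_async_request *req)
 *	{
 *		int ret;
 *
 *		spin_lock_bh(&dev->lock);
 *		ret = crypto_enqueue_request(&dev->queue, req);
 *		spin_unlock_bh(&dev->lock);
 *
 *		queue_work(dev->wq, &dev->work);
 *		return ret;
 *	}
 */
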
void crypto_inc(u8 *a, unsigned int size);

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}

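/*
 * DMA-aligned context sketch (illustrative; struct my_ctx is made up): a
 * driver that needs its transform context aligned for DMA reserves the
 * worst-case padding in cra_ctxsize and then uses crypto_tfm_ctx_dma()
 * instead of crypto_tfm_ctx() to obtain the aligned pointer.
 *
 *	.cra_ctxsize = sizeof(struct my_ctx) + CRYPTO_DMA_PADDING,
 *
 *	struct my_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 */
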
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

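/*
 * Dequeue sketch (illustrative; dev and my_process_one() are made up): a
 * driver's worker grabs the backlog entry before dequeueing and completes
 * it with -EINPROGRESS, which tells its submitter that the request has
 * left the backlog and is now being processed.
 *
 *	spin_lock_bh(&dev->lock);
 *	backlog = crypto_get_backlog(&dev->queue);
 *	req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 *	if (req)
 *		my_process_one(dev, req);
 */
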
static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

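/*
 * Template create() sketch (illustrative; instance setup and error handling
 * are heavily abridged and the names are made up): pass the mask derived
 * from the user's request into crypto_grab_*() with type 0 so that any
 * inherited flags the user asked to have clear stay clear in the instance.
 *
 *	static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct crypto_attr_type *algt;
 *		u32 mask;
 *
 *		algt = crypto_get_attr_type(tb);
 *		if (IS_ERR(algt))
 *			return PTR_ERR(algt);
 *		mask = crypto_algt_inherited_mask(algt);
 *
 *		...
 *		err = crypto_grab_spawn(spawn, inst,
 *					crypto_attr_alg_name(tb[1]), 0, mask);
 *		...
 *	}
 */
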
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline bool crypto_request_chained(struct crypto_async_request *req)
{
	return !list_empty(&req->list);
}

static inline bool crypto_tfm_req_chain(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_CHAIN;
}

#endif	/* _CRYPTO_ALGAPI_H */