/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	127
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15
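
/*
 * Usage sketch (illustrative, not part of the original header): these maxima
 * let generic code keep an aligned bounce buffer on the stack that fits any
 * cipher block; crypto/cipher.c handles unaligned data with this pattern.
 * The function name and the copy-in/copy-out processing are hypothetical.
 */
#if 0
static void example_bounce_block(u8 *dst, const u8 *src,
				 unsigned int bs, unsigned int alignmask)
{
	u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	/* Oversized by the worst-case alignmask, so the aligned pointer
	 * always has MAX_CIPHER_BLOCKSIZE usable bytes behind it. */
	u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

	memcpy(tmp, src, bs);	/* copy in to aligned scratch */
	/* ... transform tmp in place ... */
	memcpy(dst, tmp, bs);	/* copy the result back out */
}
#endif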

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so this macro retains those aliases as well.
 * This historically used __MODULE_INFO directly instead of MODULE_ALIAS
 * because pre-4.3 gcc (e.g. the avr32 toolchain) used __LINE__ for
 * uniqueness, and this macro expands twice on the same line; a separate
 * base name per alias kept the generated symbols unique.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		MODULE_INFO(alias, name);	\
		MODULE_INFO(alias, "crypto-" name)
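
/*
 * For example (hypothetical module), a driver implementing "aes" would add:
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 *
 * creating both the plain "aes" alias for userspace module requests and the
 * "crypto-aes" alias used by the API's prefixed autoloading.
 */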

struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;
union crypto_no_such_thing;

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct hlist_head dead;
	struct module *module;

	struct work_struct free_work;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	/* Must be the first member, see struct skcipher_walk. */
	union {
		void *const addr;

		/* Private API field, do not touch. */
		union crypto_no_such_thing *__addr;
	};
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);
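
/*
 * Usage sketch (illustrative): the algorithm below is hypothetical, and the
 * type-specific .cra_flags and callbacks are elided. Drivers typically
 * register their algorithms from module init and unregister them on exit.
 */
#if 0
static struct crypto_alg example_alg = {
	.cra_name		= "example",		/* generic name used for lookup */
	.cra_driver_name	= "example-generic",	/* this particular implementation */
	.cra_priority		= 100,			/* higher priority wins at lookup */
	.cra_blocksize		= 16,
	.cra_module		= THIS_MODULE,
};

static int __init example_mod_init(void)
{
	return crypto_register_alg(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_alg(&example_alg);
}
#endif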

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int __crypto_inst_setname(struct crypto_instance *inst, const char *name,
			  const char *driver, struct crypto_alg *alg);

#define crypto_inst_setname(inst, name, ...) \
	CONCATENATE(crypto_inst_setname_, COUNT_ARGS(__VA_ARGS__))( \
		inst, name, ##__VA_ARGS__)
#define crypto_inst_setname_1(inst, name, alg) \
	__crypto_inst_setname(inst, name, name, alg)
#define crypto_inst_setname_2(inst, name, driver, alg) \
	__crypto_inst_setname(inst, name, driver, alg)
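
/*
 * For example (illustrative, assuming the inner algorithm has cra_name "aes"
 * and cra_driver_name "aes-generic"):
 *
 *	crypto_inst_setname(inst, "cbc", alg);
 *
 * sets the instance's cra_name to "cbc(aes)" and cra_driver_name to
 * "cbc(aes-generic)". The three-argument variant, e.g. with a hypothetical
 * "cbc-sw" driver prefix, builds the driver name from "cbc-sw" instead of
 * reusing "cbc".
 */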

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
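
/*
 * Usage sketch (illustrative; the queue depth, lock and function names are
 * hypothetical): a driver bounds its queue with crypto_init_queue() at probe
 * time, then feeds requests through crypto_enqueue_request(), which returns
 * -EINPROGRESS on success, -EBUSY when the request was backlogged (caller
 * set CRYPTO_TFM_REQ_MAY_BACKLOG), or -ENOSPC when the queue is full.
 */
#if 0
static struct crypto_queue example_queue;	/* crypto_init_queue(&example_queue, 32) at probe */
static DEFINE_SPINLOCK(example_lock);		/* serializes queue access */

static int example_queue_req(struct crypto_async_request *req)
{
	int ret;

	spin_lock_bh(&example_lock);
	ret = crypto_enqueue_request(&example_queue, req);
	spin_unlock_bh(&example_lock);

	return ret;
}
#endif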

void crypto_inc(u8 *a, unsigned int size);
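
/*
 * crypto_inc() treats the buffer as one big-endian integer, carrying from
 * the last byte toward the first. For example (illustrative), stepping a
 * 16-byte CTR-mode counter block:
 *
 *	u8 ctr[16] = { };		// counter == 0
 *	crypto_inc(ctr, sizeof(ctr));	// now ctr[15] == 0x01
 */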
184
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}
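
/*
 * Usage sketch (illustrative; struct and field names are hypothetical): a
 * driver that lets hardware DMA into its tfm context reserves worst-case
 * padding when declaring the context size:
 *
 *	.cra_ctxsize = sizeof(struct example_ctx) + CRYPTO_DMA_PADDING,
 *
 * and then always reaches the context through the DMA-aligned accessor:
 *
 *	struct example_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 */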

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
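
/*
 * Usage sketch (illustrative; example_alloc_instance() and the other names
 * below are hypothetical, and registration plus error unwinding are elided):
 * a template's ->create() checks the requested type, derives the inherited
 * mask, and grabs the inner algorithm with it.
 */
#if 0
static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	u32 mask;
	int err;

	/* Rejects unsupported type bits; returns the inherited mask. */
	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
	if (err)
		return err;

	inst = example_alloc_instance();	/* hypothetical helper */
	spawn = crypto_instance_ctx(inst);

	/* type == 0: the mask alone honors any inherited-flag requests. */
	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
				0, mask);
	if (err)
		return err;

	return crypto_inst_setname(inst, tmpl->name, spawn->alg);
}
#endif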

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};
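
/*
 * Usage sketch (illustrative; the callback and its action are hypothetical):
 * these events are delivered through a standard notifier chain.
 */
#if 0
static int example_notify(struct notifier_block *nb, unsigned long msg,
			  void *data)
{
	if (msg == CRYPTO_MSG_ALG_LOADED)
		pr_info("an algorithm finished loading\n");

	return NOTIFY_DONE;
}

static struct notifier_block example_nb = {
	.notifier_call = example_notify,
};

/* Paired calls: crypto_register_notifier(&example_nb) at init,
 * crypto_unregister_notifier(&example_nb) at teardown. */
#endif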

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}
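
/*
 * Usage sketch (illustrative; example_lock is the same hypothetical queue
 * lock as in the enqueue sketch above, and the actual processing is elided):
 * a dequeue path pops the backlog entry first and signals its submitter with
 * -EINPROGRESS before running the next request to completion.
 */
#if 0
static void example_do_one_request(struct crypto_queue *queue)
{
	struct crypto_async_request *req, *backlog;

	spin_lock_bh(&example_lock);
	backlog = crypto_get_backlog(queue);
	req = crypto_dequeue_request(queue);
	spin_unlock_bh(&example_lock);

	if (!req)
		return;

	/* Tell its submitter the backlogged request is now in progress. */
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* ... process req, then report its final status ... */
	crypto_request_complete(req, 0);
}
#endif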

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline bool crypto_tfm_req_virt(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_VIRT;
}

static inline u32 crypto_request_flags(struct crypto_async_request *req)
{
	return req->flags & ~CRYPTO_TFM_REQ_ON_STACK;
}

#endif	/* _CRYPTO_ALGAPI_H */