/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

/*
 * Set this if your algorithm is sync but needs a reqsize larger
 * than MAX_SYNC_SKCIPHER_REQSIZE.
 *
 * This reuses a bit that is specific to hash algorithms.
 */
#define CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE CRYPTO_ALG_OPTIONAL_KEY
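
/*
 * For example, a synchronous driver whose request context cannot fit
 * within MAX_SYNC_SKCIPHER_REQSIZE would advertise this in cra_flags
 * (a minimal sketch; the "example-cbc-aes" driver is hypothetical):
 *
 *	static struct skcipher_alg example_alg = {
 *		.base.cra_name		= "cbc(aes)",
 *		.base.cra_driver_name	= "example-cbc-aes",
 *		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 *		...
 *	};
 *
 * The sync allocation path masks this flag out, so such an algorithm
 * will not be selected for on-stack sync requests.
 */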

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct lskcipher_instance {
	void (*free)(struct lskcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct lskcipher_alg, co.base)];
			struct crypto_instance base;
		} s;
		struct lskcipher_alg alg;
	};
};
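
/*
 * In both instance types the anonymous union overlays s.base with the
 * base member of alg: the head padding is sized so that the struct
 * crypto_instance sits exactly where the algorithm's base object lives.
 * The same memory can thus be viewed either as a bare crypto_instance
 * or as the full algorithm, which is what the helpers below rely on.
 */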

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct crypto_lskcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		/* page/offset of the current chunk (phys walk) */
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		/* mapped address of the current chunk (virt walk) */
		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;		/* walk over the source scatterlist */
	unsigned int nbytes;		/* bytes mapped for the current step */

	struct scatter_walk out;	/* walk over the destination scatterlist */
	unsigned int total;		/* bytes remaining overall */

	struct list_head buffers;

	u8 *page;			/* scratch page */
	u8 *buffer;			/* allocated bounce buffer, if any */
	u8 *oiv;			/* original IV location */
	void *iv;			/* working IV (possibly an aligned copy) */

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;		/* granularity the walk advances in */
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct crypto_instance *lskcipher_crypto_instance(
	struct lskcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline struct lskcipher_instance *lskcipher_alg_instance(
	struct crypto_lskcipher *lskcipher)
{
	return container_of(crypto_lskcipher_alg(lskcipher),
			    struct lskcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void *lskcipher_instance_ctx(struct lskcipher_instance *inst)
{
	return crypto_instance_ctx(lskcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
			  struct crypto_instance *inst,
			  const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline void crypto_drop_lskcipher(struct crypto_lskcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
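
/*
 * A template's instance ->free() callback normally drops the spawn that
 * ->create() grabbed before freeing the instance (a minimal sketch;
 * example_free() is hypothetical):
 *
 *	static void example_free(struct skcipher_instance *inst)
 *	{
 *		crypto_drop_skcipher(skcipher_instance_ctx(inst));
 *		kfree(inst);
 *	}
 */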

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct lskcipher_alg *crypto_lskcipher_spawn_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct lskcipher_alg, co.base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct lskcipher_alg *crypto_spawn_lskcipher_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_lskcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline struct crypto_lskcipher *crypto_spawn_lskcipher(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}
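
/*
 * An instance's ->init() typically instantiates the child transform
 * from the spawn and sizes the request context on top of the child's
 * (a minimal sketch; the example_* types are hypothetical):
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn =
 *			skcipher_instance_ctx(inst);
 *		struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child = crypto_spawn_skcipher(spawn);
 *
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm,
 *					    sizeof(struct example_req_ctx) +
 *					    crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */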

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

static inline void crypto_skcipher_set_reqsize_dma(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	skcipher->reqsize = reqsize;
}

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

int crypto_register_lskcipher(struct lskcipher_alg *alg);
void crypto_unregister_lskcipher(struct lskcipher_alg *alg);
int crypto_register_lskciphers(struct lskcipher_alg *algs, int count);
void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count);
int lskcipher_register_instance(struct crypto_template *tmpl,
				struct lskcipher_instance *inst);
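
/*
 * A template's ->create() ties the pieces above together: grab the
 * underlying skcipher into a spawn kept in the instance context, fill
 * in inst->alg, then register the instance (a condensed sketch with
 * abbreviated setup; the example_* names are hypothetical and
 * example_free() is the callback sketched further up):
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = skcipher_instance_ctx(inst);
 *
 *		err = crypto_grab_skcipher(spawn,
 *					   skcipher_crypto_instance(inst),
 *					   crypto_attr_alg_name(tb[1]),
 *					   0, mask);
 *		if (err)
 *			goto err_free_inst;
 *
 *		... derive inst->alg from crypto_spawn_skcipher_alg(spawn)
 *		    and set inst->free = example_free ...
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err) {
 *	err_free_inst:
 *			example_free(inst);
 *		}
 *		return err;
 *	}
 */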

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}
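
/*
 * A synchronous ->encrypt() handler typically drives the walk like
 * this, rounding each step down to the walk stride except on the
 * final chunk (a minimal sketch; example_do_crypt() stands in for the
 * cipher-specific processing):
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct skcipher_walk walk;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *		while (walk.nbytes) {
 *			unsigned int n = walk.nbytes;
 *
 *			if (n < walk.total)
 *				n = round_down(n, walk.stride);
 *
 *			example_do_crypt(walk.dst.virt.addr,
 *					 walk.src.virt.addr, n, walk.iv);
 *			err = skcipher_walk_done(&walk, walk.nbytes - n);
 *		}
 *		return err;
 *	}
 */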

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_lskcipher_ctx(struct crypto_lskcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_skcipher_ctx_dma(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline void *skcipher_request_ctx_dma(struct skcipher_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(skcipher_request_ctx(req), align);
}
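
/*
 * The _dma helpers pair up: a driver that lets hardware DMA into the
 * request context reserves DMA-aligned room at init time and fetches
 * an aligned pointer per request (a minimal sketch; struct
 * example_req_ctx is hypothetical):
 *
 *	crypto_skcipher_set_reqsize_dma(tfm,
 *					sizeof(struct example_req_ctx));
 *	...
 *	struct example_req_ctx *rctx = skcipher_request_ctx_dma(req);
 */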

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

static inline unsigned int crypto_lskcipher_alg_min_keysize(
	struct lskcipher_alg *alg)
{
	return alg->co.min_keysize;
}

static inline unsigned int crypto_lskcipher_alg_max_keysize(
	struct lskcipher_alg *alg)
{
	return alg->co.max_keysize;
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);
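
/*
 * A mode template built on a bare block cipher can be wired up almost
 * entirely by the simple helpers, in the style of crypto/cbc.c (a
 * minimal sketch; the example_* handlers are hypothetical):
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 *
 * The handlers then reach the underlying block cipher via
 * skcipher_cipher_simple(tfm).
 */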

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

static inline struct crypto_lskcipher *lskcipher_cipher_simple(
	struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	return *ctx;
}

struct lskcipher_instance *lskcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct lskcipher_alg *lskcipher_ialg_simple(
	struct lskcipher_instance *inst)
{
	struct crypto_lskcipher_spawn *spawn = lskcipher_instance_ctx(inst);

	return crypto_lskcipher_spawn_alg(spawn);
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */