// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
 *                derived from authenc.c
 *
 * Copyright (C) 2010 secunet Security Networks AG
 * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

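/*
 * Instance, transform and request contexts.  The instance context holds
 * spawns for the underlying ahash and skcipher; each transform holds the
 * instantiated sub-algorithms; the per-request context provides scratch
 * scatterlists plus a variable-length tail that stores the computed and
 * received digests followed by the inner ahash/skcipher request (located
 * at ctx->reqoff).
 */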
struct authenc_esn_instance_ctx {
	struct crypto_ahash_spawn auth;
	struct crypto_skcipher_spawn enc;
};

struct crypto_authenc_esn_ctx {
	unsigned int reqoff;
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
};

struct authenc_esn_request_ctx {
	struct scatterlist src[2];
	struct scatterlist dst[2];
	char tail[];
};

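/*
 * Propagate completion of a sub-request, ignoring the -EINPROGRESS
 * notification sent when a backlogged request is started.
 */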
static void authenc_esn_request_complete(struct aead_request *req, int err)
{
	if (err != -EINPROGRESS)
		aead_request_complete(req, err);
}

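/* An authsize of 0 disables the ICV entirely; otherwise require at least 32 bits. */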
static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
					  unsigned int authsize)
{
	if (authsize > 0 && authsize < 4)
		return -EINVAL;

	return 0;
}

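/*
 * Split the combined authenc key blob (an rtattr-encoded enckeylen followed
 * by the authentication and encryption keys) and key both sub-algorithms,
 * forwarding the request flags to each of them.
 */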
static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
				     unsigned int keylen)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	struct crypto_skcipher *enc = ctx->enc;
	struct crypto_authenc_keys keys;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto out;

	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
	if (err)
		goto out;

	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
					CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

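/*
 * Second half of ICV generation: put the ESN high bits back at offset 4 of
 * the associated data and copy the freshly computed digest to its place
 * behind the ciphertext.
 */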
static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
					  unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	u8 *hash = areq_ctx->tail;
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
	return 0;
}

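/* Async completion for the digest started in crypto_authenc_esn_genicv(). */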
static void authenc_esn_geniv_ahash_done(void *data, int err)
{
	struct aead_request *req = data;

	err = err ?: crypto_authenc_esn_genicv_tail(req, 0);
	aead_request_complete(req, err);
}

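/*
 * Compute the ICV over associated data and ciphertext.  RFC 4303 requires
 * the ESN digest to cover SPI || seq_lo || payload || seq_hi, while the ESP
 * layer hands us the associated data as SPI || seq_hi || seq_lo.  The four
 * bytes at offset 4 (seq_hi) are therefore parked behind the ciphertext and
 * the SPI duplicated into their place before hashing from offset 4 onwards;
 * crypto_authenc_esn_genicv_tail() undoes the shuffle and stores the ICV.
 */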
static int crypto_authenc_esn_genicv(struct aead_request *req,
				     unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *hash = areq_ctx->tail;
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	if (!authsize)
		return 0;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, flags,
				   authenc_esn_geniv_ahash_done, req);

	return crypto_ahash_digest(ahreq) ?:
	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
}


static void crypto_authenc_esn_encrypt_done(void *data, int err)
{
	struct aead_request *areq = data;

	if (!err)
		err = crypto_authenc_esn_genicv(areq, 0);

	authenc_esn_request_complete(areq, err);
}

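/*
 * Encrypt-then-MAC: skip over the associated data, run the skcipher over
 * the plaintext (copying the associated data to the destination first if
 * the request is not in place), then generate the ICV over associated data
 * and ciphertext.
 */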
static int crypto_authenc_esn_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_skcipher *enc = ctx->enc;
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *src, *dst;
	int err;

	if (assoclen < 8)
		return -EINVAL;

	sg_init_table(areq_ctx->src, 2);
	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
	dst = src;

	if (req->src != req->dst) {
		memcpy_sglist(req->dst, req->src, assoclen);
		sg_init_table(areq_ctx->dst, 2);
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
	}

	skcipher_request_set_tfm(skreq, enc);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      crypto_authenc_esn_encrypt_done, req);
	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);

	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
}

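/*
 * Second half of decryption: with the sequence number layout restored,
 * compare the computed digest against the received ICV in constant time
 * and only then decrypt the ciphertext in place in the destination buffer.
 */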
static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
					   unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = areq_ctx->tail;
	unsigned int cryptlen = req->cryptlen - authsize;
	unsigned int assoclen = req->assoclen;
	struct scatterlist *dst = req->dst;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	u32 tmp[2];

	if (!authsize)
		goto decrypt;

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	if (crypto_memneq(ihash, ohash, authsize))
		return -EBADMSG;

decrypt:

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);

	skcipher_request_set_tfm(skreq, ctx->enc);
	skcipher_request_set_callback(skreq, flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);

	return crypto_skcipher_decrypt(skreq);
}

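/* Async completion for the digest started in crypto_authenc_esn_decrypt(). */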
static void authenc_esn_verify_ahash_done(void *data, int err)
{
	struct aead_request *req = data;

	err = err ?: crypto_authenc_esn_decrypt_tail(req, 0);
	authenc_esn_request_complete(req, err);
}

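/*
 * MAC-then-decrypt: stash the received ICV in the request tail, apply the
 * same ESN shuffle as on the encryption side and digest associated data
 * plus ciphertext; crypto_authenc_esn_decrypt_tail() then verifies the ICV
 * and performs the actual decryption.
 */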
static int crypto_authenc_esn_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = areq_ctx->tail;
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	struct scatterlist *dst = req->dst;
	u32 tmp[2];
	int err;

	if (assoclen < 8)
		return -EINVAL;

	cryptlen -= authsize;

	if (req->src != dst)
		memcpy_sglist(dst, req->src, assoclen + cryptlen);

	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
				 authsize, 0);

	if (!authsize)
		goto tail;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   authenc_esn_verify_ahash_done, req);

	err = crypto_ahash_digest(ahreq);
	if (err)
		return err;

tail:
	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
}

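/*
 * Per-transform setup: instantiate the spawned ahash and skcipher and size
 * the AEAD request context so that the tail can hold two digests (computed
 * and received ICV) followed by the larger of the two sub-requests.
 */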
static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	int err;

	auth = crypto_spawn_ahash(&ictx->auth);
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	enc = crypto_spawn_skcipher(&ictx->enc);
	err = PTR_ERR(enc);
	if (IS_ERR(enc))
		goto err_free_ahash;

	ctx->auth = auth;
	ctx->enc = enc;

	ctx->reqoff = 2 * crypto_ahash_digestsize(auth);

	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct authenc_esn_request_ctx) +
		ctx->reqoff +
		max_t(unsigned int,
		      crypto_ahash_reqsize(auth) +
		      sizeof(struct ahash_request),
		      sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(enc)));

	return 0;

err_free_ahash:
	crypto_free_ahash(auth);
	return err;
}

static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->auth);
	crypto_free_skcipher(ctx->enc);
}

static void crypto_authenc_esn_free(struct aead_instance *inst)
{
	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_ahash(&ctx->auth);
	kfree(inst);
}

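/*
 * Instantiate "authencesn(<ahash>,<skcipher>)", e.g. the
 * "authencesn(hmac(sha1),cbc(aes))" combination requested by esp4/esp6 for
 * SAs with extended sequence numbers enabled.
 */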
static int crypto_authenc_esn_create(struct crypto_template *tmpl,
				     struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct authenc_esn_instance_ctx *ctx;
	struct skcipher_alg_common *enc;
	struct hash_alg_common *auth;
	struct crypto_alg *auth_base;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ctx = aead_instance_ctx(inst);

	err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	auth = crypto_spawn_ahash_alg(&ctx->auth);
	auth_base = &auth->base;

	err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
				   crypto_attr_alg_name(tb[2]), 0, mask);
	if (err)
		goto err_free_inst;
	enc = crypto_spawn_skcipher_alg_common(&ctx->enc);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_name,
		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_driver_name,
		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
				      auth_base->cra_priority;
	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
	inst->alg.base.cra_alignmask = enc->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);

	inst->alg.ivsize = enc->ivsize;
	inst->alg.chunksize = enc->chunksize;
	inst->alg.maxauthsize = auth->digestsize;

	inst->alg.init = crypto_authenc_esn_init_tfm;
	inst->alg.exit = crypto_authenc_esn_exit_tfm;

	inst->alg.setkey = crypto_authenc_esn_setkey;
	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
	inst->alg.encrypt = crypto_authenc_esn_encrypt;
	inst->alg.decrypt = crypto_authenc_esn_decrypt;

	inst->free = crypto_authenc_esn_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_authenc_esn_free(inst);
	}
	return err;
}

static struct crypto_template crypto_authenc_esn_tmpl = {
	.name = "authencesn",
	.create = crypto_authenc_esn_create,
	.module = THIS_MODULE,
};

static int __init crypto_authenc_esn_module_init(void)
{
	return crypto_register_template(&crypto_authenc_esn_tmpl);
}

static void __exit crypto_authenc_esn_module_exit(void)
{
	crypto_unregister_template(&crypto_authenc_esn_tmpl);
}

module_init(crypto_authenc_esn_module_init);
module_exit(crypto_authenc_esn_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
MODULE_ALIAS_CRYPTO("authencesn");