// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
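
/*
 * Illustrative sketch (not part of this file): how a caller such as
 * dm-crypt might allocate and drive the 'essiv(cbc(aes),sha256)' skcipher.
 * The names 'sector', 'sg', 'len' and the error handling are hypothetical
 * placeholders; only the crypto API calls themselves are real.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	u8 iv[16] = {};
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	put_unaligned_le64(sector, iv);		// sector number as input IV
 *	skcipher_request_set_crypt(req, sg, sg, len, iv);
 *	crypto_skcipher_encrypt(req);		// IV is ESSIV-encrypted internally
 */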

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;
};

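/*
 * Per-request context for the AEAD flavor: sg[] carries the rewritten
 * associated data (original AAD, then the ESSIV-encrypted IV, then the
 * remainder of the source scatterlist), 'assoc' is a bounce buffer that is
 * only allocated when the AAD spans more than one scatterlist entry, and
 * aead_req is the subrequest handed to the inner authenc() AEAD. The
 * encrypted IV itself lives past the end of this struct, at 'ivoffset'
 * bytes into the request context (see essiv_aead_init_tfm()).
 */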
struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;
	struct aead_request		aead_req;
};

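/*
 * Key the inner skcipher with the user-provided key, then derive the ESSIV
 * key by hashing that same key and use the digest to key the single-block
 * cipher that encrypts the IV: essiv_key = Hash(key).
 */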
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

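/*
 * The AEAD flavor takes the authenc() key blob, as parsed by
 * crypto_authenc_extractkeys(). The blob is passed through to the inner
 * authenc() AEAD unchanged, while the ESSIV block cipher key is derived by
 * hashing the encryption key followed by the authentication key.
 */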
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(void *data, int err)
{
	struct skcipher_request *req = data;

	skcipher_request_complete(req, err);
}

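/*
 * Encrypt the caller-provided IV in place with the ESSIV block cipher and
 * hand the request to the inner skcipher using that encrypted IV. The same
 * path serves encryption and decryption; only the final call differs.
 */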
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(void *data, int err)
{
	struct aead_request *req = data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

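/*
 * The AEAD path mirrors dm-crypt's request layout: the last ivsize bytes of
 * the associated data are reserved for the encrypted IV. After encrypting
 * req->iv in place, the IV is either copied straight into the destination
 * AAD (in-place or decrypt case) or a fresh scatterlist is assembled as
 * [original AAD | encrypted IV | remainder of req->src] so that the source
 * buffer is never modified on an out-of-place encryption.
 */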
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	int ivsize = crypto_aead_ivsize(tfm);
	int ssize = req->assoclen - ivsize;
	struct scatterlist *src = req->src;
	int err;

	if (ssize < 0)
		return -EINVAL;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst, ssize, ivsize, 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		struct scatterlist *sg;
		int nents;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

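/*
 * The AEAD request context is laid out as
 *
 *	[ essiv_aead_request_ctx | inner AEAD reqsize | encrypted IV copy ]
 *
 * with 'ivoffset' recording where the IV copy lives, so that
 * essiv_aead_crypt() can point a scatterlist entry at it.
 */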
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

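/*
 * Extract the name of the raw block cipher from the cra_name of the inner
 * algorithm by taking whatever sits between the last '(' and the following
 * ')'. For example, "cbc(aes)" and "authenc(hmac(sha256),cbc(aes))" both
 * yield "aes", which is the cipher used to encrypt the IV.
 */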
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	strscpy(essiv_cipher_name, p, len + 1);
	return true;
}

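/*
 * Sanity-check the essiv(cipher, hash) combination: the hash digest must be
 * a valid key size for the raw cipher (it becomes the ESSIV key), the IV
 * size of the wrapped algorithm must match the cipher's block size, and the
 * hash must not itself require a key.
 */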
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

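/*
 * Template constructor for essiv(inner, hash). Depending on whether the
 * inner algorithm is an skcipher (e.g. cbc(aes)) or an authenc() AEAD, a
 * matching instance type is created; the raw IV cipher name is parsed out
 * of the inner cra_name, the shash is looked up and validated, and the
 * resulting instance inherits the inner algorithm's sizes and priority.
 */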
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_alg_common *skcipher_alg = NULL;
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_LSKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg_common(
			&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = skcipher_alg->ivsize;
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strscpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags        |= (hash_alg->base.cra_flags &
				   CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_LSKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= skcipher_alg->min_keysize;
		skcipher_inst->alg.max_keysize	= skcipher_alg->max_keysize;
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= skcipher_alg->chunksize;

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_LSKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

module_init(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");
649