xref: /linux/arch/s390/crypto/des_s390.c (revision b85d45947951d23cb22d90caecf4c1eb81342c96)
/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003, 2011
 * Author(s): Thomas Spatzier
 *	      Jan Glauber (jan.glauber@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include "crypt_s390.h"

#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)

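/*
 * Page-sized counter block shared by all CTR users; guarded by
 * ctrblk_lock. Callers that cannot take the lock fall back to an
 * on-stack counter instead of blocking.
 */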
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

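/* Per-tfm context; the key buffer is sized for the largest (3DES) key. */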
struct s390_des_ctx {
	u8 iv[DES_BLOCK_SIZE];
	u8 key[DES3_KEY_SIZE];
};

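/*
 * Copy the DES key into the context. A weak key is only rejected when
 * the caller asked for that via CRYPTO_TFM_REQ_WEAK_KEY.
 */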
static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	u32 tmp[DES_EXPKEY_WORDS];

	/* check for weak keys */
	if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	memcpy(ctx->key, key, key_len);
	return 0;
}

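/* Single-block DES en-/decryption via the CPACF KM-DEA function. */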
static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
	.cra_name		=	"des",
	.cra_driver_name	=	"des-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	DES_KEY_SIZE,
			.cia_max_keysize	=	DES_KEY_SIZE,
			.cia_setkey		=	des_setkey,
			.cia_encrypt		=	des_encrypt,
			.cia_decrypt		=	des_decrypt,
		}
	}
};

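/*
 * Common ECB helper: walk the scatterlists and pass only complete
 * DES blocks to the KM function; a short result from CPACF is treated
 * as an I/O error.
 */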
static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *key, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, key, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

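/*
 * Common CBC helper: KMC takes a parameter block consisting of the IV
 * followed by the key, so build it on the stack and copy the final IV
 * back into the walk when all blocks have been processed.
 */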
static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct blkcipher_walk *walk)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[DES_BLOCK_SIZE];
		u8 key[DES3_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
	memcpy(param.key, ctx->key, DES3_KEY_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);

out:
	return ret;
}

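/* ecb(des) front-ends: initialize the scatterlist walk and call the helper. */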
static int ecb_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des_alg = {
	.cra_name		=	"ecb(des)",
	.cra_driver_name	=	"ecb-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES_KEY_SIZE,
			.max_keysize		=	DES_KEY_SIZE,
			.setkey			=	des_setkey,
			.encrypt		=	ecb_des_encrypt,
			.decrypt		=	ecb_des_decrypt,
		}
	}
};

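/* cbc(des) front-ends: the IV is carried in the blkcipher walk. */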
static int cbc_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, &walk);
}

static struct crypto_alg cbc_des_alg = {
	.cra_name		=	"cbc(des)",
	.cra_driver_name	=	"cbc-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES_KEY_SIZE,
			.max_keysize		=	DES_KEY_SIZE,
			.ivsize			=	DES_BLOCK_SIZE,
			.setkey			=	des_setkey,
			.encrypt		=	cbc_des_encrypt,
			.decrypt		=	cbc_des_decrypt,
		}
	}
};

/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys.  Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES.  Implementers MUST reject keys that exhibit this
 *   property.
 *
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	if (!(crypto_memneq(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	    crypto_memneq(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
			  DES_KEY_SIZE)) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}
	memcpy(ctx->key, key, key_len);
	return 0;
}

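/* Single-block 3DES-EDE en-/decryption via the KM TDEA-192 function. */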
static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
	.cra_name		=	"des3_ede",
	.cra_driver_name	=	"des3_ede-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	DES3_KEY_SIZE,
			.cia_max_keysize	=	DES3_KEY_SIZE,
			.cia_setkey		=	des3_setkey,
			.cia_encrypt		=	des3_encrypt,
			.cia_decrypt		=	des3_decrypt,
		}
	}
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des3_alg = {
	.cra_name		=	"ecb(des3_ede)",
	.cra_driver_name	=	"ecb-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES3_KEY_SIZE,
			.max_keysize		=	DES3_KEY_SIZE,
			.setkey			=	des3_setkey,
			.encrypt		=	ecb_des3_encrypt,
			.decrypt		=	ecb_des3_decrypt,
		}
	}
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, &walk);
}

static struct crypto_alg cbc_des3_alg = {
	.cra_name		=	"cbc(des3_ede)",
	.cra_driver_name	=	"cbc-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	DES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES3_KEY_SIZE,
			.max_keysize		=	DES3_KEY_SIZE,
			.ivsize			=	DES_BLOCK_SIZE,
			.setkey			=	des3_setkey,
			.encrypt		=	cbc_des3_encrypt,
			.decrypt		=	cbc_des3_decrypt,
		}
	}
};

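/*
 * Fill the counter page with consecutive counter values, starting from
 * the value already stored in the first block. Returns the number of
 * bytes prepared (a multiple of the block size, at most PAGE_SIZE).
 */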
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* align to block size, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);
	for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - DES_BLOCK_SIZE, DES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, DES_BLOCK_SIZE);
	}
	return n;
}

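/*
 * CTR helper: use the shared counter page when the lock can be taken,
 * so KMCTR can process several blocks per call; otherwise fall back to
 * a single on-stack counter block. A trailing partial block is
 * encrypted into a temporary buffer and copied out byte-wise.
 */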
static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct s390_des_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[DES_BLOCK_SIZE], ctrbuf[DES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, DES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= DES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = DES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, ctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > DES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
			crypto_inc(ctrptr, DES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, DES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, DES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, DES_BLOCK_SIZE);
	}
	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
				       DES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != DES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, DES_BLOCK_SIZE);
	}
	return ret;
}

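/* ctr(des) front-ends: set up the walk and dispatch to the CTR helper. */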
static int ctr_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des_alg = {
	.cra_name		=	"ctr(des)",
	.cra_driver_name	=	"ctr-des-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES_KEY_SIZE,
			.max_keysize		=	DES_KEY_SIZE,
			.ivsize			=	DES_BLOCK_SIZE,
			.setkey			=	des_setkey,
			.encrypt		=	ctr_des_encrypt,
			.decrypt		=	ctr_des_decrypt,
		}
	}
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des3_alg = {
	.cra_name		=	"ctr(des3_ede)",
	.cra_driver_name	=	"ctr-des3_ede-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_des_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	DES3_KEY_SIZE,
			.max_keysize		=	DES3_KEY_SIZE,
			.ivsize			=	DES_BLOCK_SIZE,
			.setkey			=	des3_setkey,
			.encrypt		=	ctr_des3_encrypt,
			.decrypt		=	ctr_des3_decrypt,
		}
	}
};

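/*
 * Check that the DEA and TDEA-192 cipher functions are available and
 * register the algorithms. The CTR modes are only registered when the
 * KMCTR functions are present as well; in that case a page for
 * precomputed counter blocks is also allocated.
 */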
static int __init des_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&ecb_des_alg);
	if (ret)
		goto ecb_des_err;
	ret = crypto_register_alg(&cbc_des_alg);
	if (ret)
		goto cbc_des_err;
	ret = crypto_register_alg(&des3_alg);
	if (ret)
		goto des3_err;
	ret = crypto_register_alg(&ecb_des3_alg);
	if (ret)
		goto ecb_des3_err;
	ret = crypto_register_alg(&cbc_des3_alg);
	if (ret)
		goto cbc_des3_err;

	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&ctr_des_alg);
		if (ret)
			goto ctr_des_err;
		ret = crypto_register_alg(&ctr_des3_alg);
		if (ret)
			goto ctr_des3_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_mem_err;
		}
	}
out:
	return ret;

ctr_mem_err:
	crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
	crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
	crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
	crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
	crypto_unregister_alg(&des3_alg);
des3_err:
	crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
	crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
	crypto_unregister_alg(&des_alg);
des_err:
	goto out;
}

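/* Undo everything des_s390_init() set up. */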
static void __exit des_s390_exit(void)
{
	if (ctrblk) {
		crypto_unregister_alg(&ctr_des_alg);
		crypto_unregister_alg(&ctr_des3_alg);
		free_page((unsigned long) ctrblk);
	}
	crypto_unregister_alg(&cbc_des3_alg);
	crypto_unregister_alg(&ecb_des3_alg);
	crypto_unregister_alg(&des3_alg);
	crypto_unregister_alg(&cbc_des_alg);
	crypto_unregister_alg(&ecb_des_alg);
	crypto_unregister_alg(&des_alg);
}

module_cpu_feature_match(MSA, des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS_CRYPTO("des");
MODULE_ALIAS_CRYPTO("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");