xref: /linux/arch/s390/crypto/paes_s390.c (revision 96ac6d435100450f0565708d9b885ea2a7400e0a)
// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

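/*
 * The paes ciphers work on pkey key blobs rather than clear AES keys:
 * setkey() stores the opaque blob and converts it into a CPACF
 * protected key through the pkey layer, so no clear key material is
 * kept in memory. A protected key can become unusable at runtime (for
 * example after the wrapping key changed); the crypt routines detect
 * this by a partially completed CPACF operation, re-convert the blob
 * and retry.
 */

/* Shared page of pre-computed counter blocks for CTR mode, serialized by ctrblk_lock */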
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

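/* An opaque pkey key blob as passed to setkey(), stored verbatim */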
struct key_blob {
	__u8 key[MAXKEYBLOBSIZE];
	unsigned int keylen;
};

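/*
 * Per-tfm contexts: the stored key blob(s), the derived protected
 * key(s) and the CPACF function code selected for the key type.
 * The XTS variant keeps two of each, one per XTS key half.
 */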
struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

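/*
 * Convert a key blob into a CPACF protected key via the pkey layer.
 * The conversion typically involves a crypto adapter and may fail
 * transiently, hence the small retry loop below.
 */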
static inline int __paes_convert_key(struct key_blob *kb,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

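/*
 * (Re-)derive the protected key from the stored key blob and pick the
 * KM function code matching the resulting key type. Also called from
 * the crypt path when a protected key has become invalid.
 */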
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

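/*
 * cpacf_km() returns the number of bytes it actually processed. A
 * short count (k < n) indicates that the protected key is no longer
 * usable, e.g. because the wrapping key changed; the key blob is then
 * converted again and the loop retries with the fresh protected key.
 */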
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		=	"ecb(paes)",
	.cra_driver_name	=	"ecb-paes-s390",
	.cra_priority		=	401,	/* combo: aes + ecb + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	MINKEYBLOBSIZE,
			.max_keysize		=	MAXKEYBLOBSIZE,
			.setkey			=	ecb_paes_set_key,
			.encrypt		=	ecb_paes_encrypt,
			.decrypt		=	ecb_paes_decrypt,
		}
	}
};

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

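/*
 * KMC takes a parameter block holding the chaining value (IV) followed
 * by the protected key. The instruction updates the chaining value in
 * place, so it is copied back into walk->iv once the walk is finished.
 * On a short completion the protected key is refreshed and the new key
 * is copied into the parameter block before the loop continues.
 */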
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		=	"cbc(paes)",
	.cra_driver_name	=	"cbc-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	MINKEYBLOBSIZE,
			.max_keysize		=	MAXKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_paes_set_key,
			.encrypt		=	cbc_paes_encrypt,
			.decrypt		=	cbc_paes_decrypt,
		}
	}
};

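/*
 * XTS works with two keys. Both key blobs are converted and must yield
 * protected keys of the same type; only 128- and 256-bit keys are
 * supported by the KM-PXTS function codes.
 */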
static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, keytok_len;

	if (key_len % 2)
		return -EINVAL;

	keytok_len = key_len / 2;
	memcpy(ctx->kb[0].key, in_key, keytok_len);
	ctx->kb[0].keylen = keytok_len;
	memcpy(ctx->kb[1].key, in_key + keytok_len, keytok_len);
	ctx->kb[1].keylen = keytok_len;
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

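/*
 * The PCC instruction computes the initial XTS tweak from the IV and
 * the second protected key; the result seeds the parameter block used
 * for KM with the first protected key. If the protected keys turn out
 * to be invalid (short KM completion), both keys are re-derived and
 * the tweak is recomputed via the retry label.
 */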
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		=	"xts(paes)",
	.cra_driver_name	=	"xts-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_pxts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * MINKEYBLOBSIZE,
			.max_keysize		=	2 * MAXKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_paes_set_key,
			.encrypt		=	xts_paes_encrypt,
			.decrypt		=	xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

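/*
 * Fill the shared counter page with consecutive counter values derived
 * from the IV, so that a single KMCTR invocation can process up to
 * PAGE_SIZE bytes worth of blocks.
 */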
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

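/*
 * The shared counter page is taken opportunistically: if another CPU
 * already holds ctrblk_lock, processing falls back to one block per
 * KMCTR call using walk->iv directly. A trailing partial block (CTR is
 * a stream mode) is encrypted into a stack buffer and only the needed
 * bytes are copied out. Short completions refresh the protected key as
 * in the other modes.
 */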
static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		=	"ctr(paes)",
	.cra_driver_name	=	"ctr-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	MINKEYBLOBSIZE,
			.max_keysize		=	MAXKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_paes_set_key,
			.encrypt		=	ctr_paes_encrypt,
			.decrypt		=	ctr_paes_decrypt,
		}
	}
};

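/*
 * Only unregister algorithms that were actually registered (cra_list
 * is linked into the crypto core's list on registration). This lets
 * paes_s390_fini() double as the error path of the init function.
 */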
static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

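/*
 * Query the CPACF facility masks once and register each cipher mode
 * only if at least one protected-key function code is available for
 * it. The counter block page is allocated only when CTR mode is
 * usable.
 */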
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");