xref: /linux/arch/s390/crypto/aes_s390.c (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

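/*
 * Module-global state: ctrblk is a page-sized buffer used to batch
 * counter blocks for KMCTR (serialized by ctrblk_lock); keylen_flag
 * records which AES key lengths the CPU supports (set in aes_s390_init()).
 */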
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

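/*
 * Parameter block for the PCC (perform cryptographic computation)
 * instruction as used for XTS: the computed initial tweak is returned
 * in the xts field (see xts_aes_crypt()).
 */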
struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the hardware.
 * Returns 0 if it is, a positive number if a software fallback is
 * required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

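/*
 * Forward the key to the software fallback tfm, propagating the request
 * flags to it and copying any result flags back to the caller's tfm.
 */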
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

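/*
 * Single-block en-/decryption via the KM (cipher message) instruction;
 * key lengths the hardware cannot handle go through the cipher fallback.
 */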
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};
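
/*
 * Usage sketch (not part of this driver): kernel users reach this
 * implementation through the generic crypto API, for example:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, AES_MIN_KEY_SIZE);
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *		crypto_free_cipher(tfm);
 *	}
 *
 * Because cra_priority is raised via CRYPT_S390_PRIORITY, a lookup of
 * "aes" prefers aes-s390 over the generic C implementation.
 */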

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

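/*
 * The two helpers below temporarily swap desc->tfm to the allocated
 * software blkcipher, run the operation there, and restore the original
 * tfm before returning.
 */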
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

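/*
 * Walk the scatterlists and hand each chunk of complete blocks to KM,
 * which processes all n bytes in one call; a short or failed return is
 * reported as -EIO.
 */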
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

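/*
 * KMC (cipher message with chaining) takes a parameter block of IV
 * followed by key and updates the chaining value itself, so the final
 * IV only needs to be copied back to walk->iv once the walk is done.
 */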
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
				   unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_ctx->key_len = key_len;
		/* propagate a fallback setkey failure to the caller */
		return xts_fallback_setkey(tfm, in_key, key_len);
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

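/*
 * PCC first derives the initial XTS tweak from the tweak key and the IV;
 * the result seeds the KM parameter block. The offset (16 for a 32-byte
 * key, 0 for a 64-byte key) selects the key slot within the fixed-size
 * parameter blocks.
 */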
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

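/*
 * Fill the shared counter page with consecutive counter values so that
 * a single KMCTR call can process up to PAGE_SIZE bytes; returns how
 * many bytes' worth of counters were prepared.
 */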
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
	}
	return n;
}

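/*
 * If the shared counter page is busy (spin_trylock() fails), fall back
 * to a single on-stack counter block and process one block per KMCTR
 * call rather than blocking on the lock.
 */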
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = AES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}

	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_aes_set_key,
			.encrypt		=	ctr_aes_encrypt,
			.decrypt		=	ctr_aes_decrypt,
		}
	}
};

static int ctr_aes_alg_reg;

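/*
 * Probe the MSA facilities for each function code and register only
 * what the machine accelerates; the XTS and CTR function codes are
 * additionally probed against the MSA4 facility mask.
 */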
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	/* XTS may not have been registered if the facility is missing */
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");