Lines matching +full:aes +full:-cmac in linux/arch/arm64/crypto/aes-glue.c

1 // SPDX-License-Identifier: GPL-2.0-only
3 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
5 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
8 #include <crypto/aes.h>
24 #include "aes-ce-setkey.h"
43 MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 Crypto Extensions");
60 MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 NEON");
63 MODULE_ALIAS_CRYPTO("ecb(aes)");
64 MODULE_ALIAS_CRYPTO("cbc(aes)");
65 MODULE_ALIAS_CRYPTO("ctr(aes)");
66 MODULE_ALIAS_CRYPTO("xts(aes)");
67 MODULE_ALIAS_CRYPTO("xctr(aes)");
69 MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
70 MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
71 MODULE_ALIAS_CRYPTO("cmac(aes)");
72 MODULE_ALIAS_CRYPTO("xcbc(aes)");
73 MODULE_ALIAS_CRYPTO("cbcmac(aes)");
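These MODULE_ALIAS_CRYPTO() entries let the crypto API autoload this module when one of the listed algorithm names is requested. A minimal sketch of a caller (not part of aes-glue.c; demo_cbc_aes_encrypt and its parameters are made up for illustration), using the usual synchronous-wait pattern:

	#include <crypto/aes.h>
	#include <crypto/skcipher.h>
	#include <linux/scatterlist.h>

	static int demo_cbc_aes_encrypt(u8 *buf, unsigned int len,
					const u8 *key, unsigned int keylen, u8 *iv)
	{
		struct crypto_skcipher *tfm;
		struct skcipher_request *req;
		struct scatterlist sg;
		DECLARE_CRYPTO_WAIT(wait);
		int err;

		/* resolved to cbc-aes-ce or cbc-aes-neon via the aliases above */
		tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_skcipher_setkey(tfm, key, keylen);
		if (err)
			goto out_free_tfm;

		req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}

		sg_init_one(&sg, buf, len);	/* in-place, single segment */
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					      crypto_req_done, &wait);
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);
		err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

		skcipher_request_free(req);
	out_free_tfm:
		crypto_free_skcipher(tfm);
		return err;
	}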
78 /* defined in aes-modes.S */
155 ret = aes_expandkey(&ctx->key1, in_key, key_len / 2); in xts_set_key()
157 ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2], in xts_set_key()
170 ret = aes_expandkey(&ctx->key1, in_key, key_len); in essiv_cbc_set_key()
176 return aes_expandkey(&ctx->key2, digest, sizeof(digest)); in essiv_cbc_set_key()
183 int err, rounds = 6 + ctx->key_length / 4; in ecb_encrypt()
192 ctx->key_enc, rounds, blocks); in ecb_encrypt()
202 int err, rounds = 6 + ctx->key_length / 4; in ecb_decrypt()
211 ctx->key_dec, rounds, blocks); in ecb_decrypt()
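The recurring rounds = 6 + key_length / 4 expression is just the FIPS-197 round count recovered from the key size in bytes:

	/* key_length = 16 (AES-128): 6 + 16/4 = 10 rounds
	 * key_length = 24 (AES-192): 6 + 24/4 = 12 rounds
	 * key_length = 32 (AES-256): 6 + 32/4 = 14 rounds
	 */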
222 int err = 0, rounds = 6 + ctx->key_length / 4; in cbc_encrypt_walk()
225 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
227 aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_encrypt_walk()
228 ctx->key_enc, rounds, blocks, walk->iv); in cbc_encrypt_walk()
229 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
250 int err = 0, rounds = 6 + ctx->key_length / 4; in cbc_decrypt_walk()
253 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
255 aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_decrypt_walk()
256 ctx->key_dec, rounds, blocks, walk->iv); in cbc_decrypt_walk()
257 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
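Both CBC walk helpers follow the standard skcipher_walk pattern: process the whole blocks available in the current chunk, then hand the remainder back to skcipher_walk_done(), which presents it again on the next iteration. Schematically (a sketch, assuming the skcipher_walk_virt() setup performed by the elided surrounding lines):

	while ((blocks = walk->nbytes / AES_BLOCK_SIZE)) {
		kernel_neon_begin();
		/* process 'blocks' full blocks, src -> dst, chaining via walk->iv */
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}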
277 int err, rounds = 6 + ctx->key_length / 4; in cts_cbc_encrypt()
278 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
279 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_encrypt()
288 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
289 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
290 return -EINVAL; in cts_cbc_encrypt()
295 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
297 req->iv); in cts_cbc_encrypt()
304 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
307 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
308 if (req->dst != req->src) in cts_cbc_encrypt()
309 dst = scatterwalk_ffwd(sg_dst, req->dst, in cts_cbc_encrypt()
315 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
316 req->iv); in cts_cbc_encrypt()
324 ctx->key_enc, rounds, walk.nbytes, walk.iv); in cts_cbc_encrypt()
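cts_cbc_encrypt() implements CBC with ciphertext stealing: everything up to the last two blocks goes through the bulk CBC path, and the final two (possibly partial) blocks are handled by the dedicated CTS tail routine. The cbc_blocks arithmetic, worked through for two sizes:

	/* cryptlen = 16: DIV_ROUND_UP(16, 16) - 2 = -1 -> no stealing needed;
	 *   the single full block is encrypted as plain CBC by the early path
	 * cryptlen = 36: DIV_ROUND_UP(36, 16) - 2 = 1  -> 16 bytes of bulk CBC,
	 *   then the remaining 20 bytes (one full + one partial block) go
	 *   through the CTS tail
	 */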
333 int err, rounds = 6 + ctx->key_length / 4; in cts_cbc_decrypt()
334 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
335 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_decrypt()
344 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
345 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
346 return -EINVAL; in cts_cbc_decrypt()
351 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_decrypt()
353 req->iv); in cts_cbc_decrypt()
360 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
363 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_decrypt()
364 if (req->dst != req->src) in cts_cbc_decrypt()
365 dst = scatterwalk_ffwd(sg_dst, req->dst, in cts_cbc_decrypt()
371 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
372 req->iv); in cts_cbc_decrypt()
380 ctx->key_dec, rounds, walk.nbytes, walk.iv); in cts_cbc_decrypt()
389 int err, rounds = 6 + ctx->key1.key_length / 4; in essiv_cbc_encrypt()
400 ctx->key1.key_enc, rounds, blocks, in essiv_cbc_encrypt()
401 req->iv, ctx->key2.key_enc); in essiv_cbc_encrypt()
411 int err, rounds = 6 + ctx->key1.key_length / 4; in essiv_cbc_decrypt()
422 ctx->key1.key_dec, rounds, blocks, in essiv_cbc_decrypt()
423 req->iv, ctx->key2.key_enc); in essiv_cbc_decrypt()
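In the ESSIV construction the second key is the SHA-256 digest of the first (see essiv_cbc_set_key() above), and the request IV is encrypted under it to seed the CBC chain; subsequent chunks fall back to the plain CBC walk helpers. As a sketch of the scheme, assuming the aes-modes.S helper performs the IV encryption:

	/* K2  = SHA-256(K1)           (essiv_cbc_set_key)
	 * IV' = AES-Encrypt(K2, IV)   (asm helper, passed ctx->key2.key_enc)
	 * then ordinary CBC with K1 over the data, chained from IV'
	 */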
433 int err, rounds = 6 + ctx->key_length / 4; in xctr_encrypt()
454 src = dst = memcpy(buf + sizeof(buf) - nbytes, in xctr_encrypt()
457 nbytes &= ~(AES_BLOCK_SIZE - 1); in xctr_encrypt()
460 aes_xctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes, in xctr_encrypt()
465 buf + sizeof(buf) - nbytes, nbytes); in xctr_encrypt()
468 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xctr_encrypt()
478 int err, rounds = 6 + ctx->key_length / 4; in ctr_encrypt()
498 src = dst = memcpy(buf + sizeof(buf) - nbytes, in ctr_encrypt()
501 nbytes &= ~(AES_BLOCK_SIZE - 1); in ctr_encrypt()
504 aes_ctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes, in ctr_encrypt()
509 buf + sizeof(buf) - nbytes, nbytes); in ctr_encrypt()
511 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ctr_encrypt()
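Like xctr_encrypt() above it, ctr_encrypt() handles a trailing partial block by bouncing it through a block-sized stack buffer, with the data placed at the end of the buffer rather than the start:

	/* Tail handling: for a final chunk with nbytes < AES_BLOCK_SIZE, the
	 * bytes are copied to buf + sizeof(buf) - nbytes, the block routine
	 * runs over the full 16-byte buffer, and only the last nbytes of the
	 * result are copied back out, so the assembly never reads or writes
	 * out of bounds on the caller's buffers.
	 */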
521 int err, first, rounds = 6 + ctx->key1.key_length / 4; in xts_encrypt()
522 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
528 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
529 return -EINVAL; in xts_encrypt()
534 int xts_blocks = DIV_ROUND_UP(req->cryptlen, in xts_encrypt()
535 AES_BLOCK_SIZE) - 2; in xts_encrypt()
543 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_encrypt()
545 req->iv); in xts_encrypt()
556 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_encrypt()
560 ctx->key1.key_enc, rounds, nbytes, in xts_encrypt()
561 ctx->key2.key_enc, walk.iv, first); in xts_encrypt()
562 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_encrypt()
568 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in xts_encrypt()
569 if (req->dst != req->src) in xts_encrypt()
570 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_encrypt()
573 req->iv); in xts_encrypt()
581 ctx->key1.key_enc, rounds, walk.nbytes, in xts_encrypt()
582 ctx->key2.key_enc, walk.iv, first); in xts_encrypt()
591 int err, first, rounds = 6 + ctx->key1.key_length / 4; in xts_decrypt()
592 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt()
598 if (req->cryptlen < AES_BLOCK_SIZE) in xts_decrypt()
599 return -EINVAL; in xts_decrypt()
604 int xts_blocks = DIV_ROUND_UP(req->cryptlen, in xts_decrypt()
605 AES_BLOCK_SIZE) - 2; in xts_decrypt()
613 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_decrypt()
615 req->iv); in xts_decrypt()
626 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_decrypt()
630 ctx->key1.key_dec, rounds, nbytes, in xts_decrypt()
631 ctx->key2.key_enc, walk.iv, first); in xts_decrypt()
632 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_decrypt()
638 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in xts_decrypt()
639 if (req->dst != req->src) in xts_decrypt()
640 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_decrypt()
643 req->iv); in xts_decrypt()
652 ctx->key1.key_dec, rounds, walk.nbytes, in xts_decrypt()
653 ctx->key2.key_enc, walk.iv, first); in xts_decrypt()
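Note that both XTS paths hand the assembly ctx->key2.key_enc, even though decryption uses ctx->key1.key_dec: the XTS tweak is always computed in the encrypt direction. Schematically, per IEEE P1619:

	/* T_0 = AES-Encrypt(key2, iv)
	 * encrypt: C_i = AES-Encrypt(key1, P_i ^ T_i) ^ T_i
	 * decrypt: P_i = AES-Decrypt(key1, C_i ^ T_i) ^ T_i
	 * T_{i+1} = T_i * x in GF(2^128)
	 * key2 is only ever used to encrypt, hence key2.key_enc on both paths.
	 */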
661 .cra_name = "ecb(aes)",
662 .cra_driver_name = "ecb-aes-" MODE,
675 .cra_name = "cbc(aes)",
676 .cra_driver_name = "cbc-aes-" MODE,
690 .cra_name = "ctr(aes)",
691 .cra_driver_name = "ctr-aes-" MODE,
706 .cra_name = "xctr(aes)",
707 .cra_driver_name = "xctr-aes-" MODE,
722 .cra_name = "xts(aes)",
723 .cra_driver_name = "xts-aes-" MODE,
739 .cra_name = "cts(cbc(aes))",
740 .cra_driver_name = "cts-cbc-aes-" MODE,
755 .cra_name = "essiv(cbc(aes),sha256)",
756 .cra_driver_name = "essiv-cbc-aes-sha256-" MODE,
775 return aes_expandkey(&ctx->key, in_key, key_len); in cbcmac_setkey()
780 u64 a = be64_to_cpu(x->a); in cmac_gf128_mul_by_x()
781 u64 b = be64_to_cpu(x->b); in cmac_gf128_mul_by_x()
783 y->a = cpu_to_be64((a << 1) | (b >> 63)); in cmac_gf128_mul_by_x()
784 y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0)); in cmac_gf128_mul_by_x()
791 be128 *consts = (be128 *)ctx->consts; in cmac_setkey()
801 aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, in cmac_setkey()
802 ctx->key.key_enc, rounds, 1); in cmac_setkey()
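cmac_gf128_mul_by_x() is doubling in GF(2^128) with reduction polynomial x^128 + x^7 + x^2 + x + 1 (the 0x87 constant); cmac_setkey() applies it twice to L = AES_K(0^128) to derive the RFC 4493 subkeys K1 and K2. A standalone userspace equivalent of the doubling (hypothetical names, for illustration only):

	#include <stdint.h>

	struct u128 { uint64_t hi, lo; };	/* big-endian halves, like be128 */

	static void gf128_mul_by_x(struct u128 *v)
	{
		uint64_t carry = v->hi >> 63;	/* bit shifted out at the top */

		v->hi = (v->hi << 1) | (v->lo >> 63);
		v->lo = (v->lo << 1) ^ (carry ? 0x87 : 0);	/* reduce by x^7+x^2+x+1 */
	}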
814 { [0 ... AES_BLOCK_SIZE - 1] = 0x1 }, in xcbc_setkey()
815 { [0 ... AES_BLOCK_SIZE - 1] = 0x2 }, in xcbc_setkey()
816 { [0 ... AES_BLOCK_SIZE - 1] = 0x3 }, in xcbc_setkey()
829 aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1); in xcbc_setkey()
830 aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2); in xcbc_setkey()
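xcbc_setkey() performs the RFC 3566 key derivation: three constant blocks are encrypted under the user key, the first result becoming the actual CBC-MAC key and the other two the finalization masks:

	/* RFC 3566: with user key K,
	 *   K1 = AES_K(0x01..01)  (from ks[0]) -> re-installed as the CBC-MAC key
	 *   K2 = AES_K(0x02..02)  (from ks[1]) -> mask for a complete final block
	 *   K3 = AES_K(0x03..03)  (from ks[2]) -> mask for a padded final block
	 */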
840 memset(ctx->dg, 0, AES_BLOCK_SIZE); in mac_init()
847 int rounds = 6 + ctx->key_length / 4; in mac_do_update()
852 rem = aes_mac_update(in, ctx->key_enc, rounds, blocks, in mac_do_update()
854 in += (blocks - rem) * AES_BLOCK_SIZE; in mac_do_update()
861 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in mac_update()
866 mac_do_update(&tctx->key, p, blocks, ctx->dg, 0); in mac_update()
873 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in cbcmac_finup()
877 crypto_xor(ctx->dg, src, len); in cbcmac_finup()
878 mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1); in cbcmac_finup()
880 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cbcmac_finup()
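cbcmac_finup() folds any remaining bytes into the digest block and forces one last encryption (the final argument to mac_do_update()), since a plain CBC-MAC tag is simply the last CBC ciphertext block:

	/* tag = E_K(dg ^ (remaining bytes, zero-extended to a block)) */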
887 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in cmac_finup()
889 u8 *consts = tctx->consts; in cmac_finup()
891 crypto_xor(ctx->dg, src, len); in cmac_finup()
893 ctx->dg[len] ^= 0x80; in cmac_finup()
896 mac_do_update(&tctx->key, consts, 1, ctx->dg, 0); in cmac_finup()
897 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cmac_finup()
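cmac_finup() selects between the two derived constants: a complete final block is masked with K1 (consts), while a short one gets 10* padding, the 0x80 byte followed by zeroes, and K2 (consts + AES_BLOCK_SIZE). Per RFC 4493:

	/* len == AES_BLOCK_SIZE: M_last = M_n ^ K1
	 * len <  AES_BLOCK_SIZE: M_last = (M_n || 0x80 || 0...0) ^ K2
	 * one final CBC-MAC step over M_last then yields the tag.
	 */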
902 .base.cra_name = "cmac(aes)",
903 .base.cra_driver_name = "cmac-aes-" MODE,
919 .base.cra_name = "xcbc(aes)",
920 .base.cra_driver_name = "xcbc-aes-" MODE,
936 .base.cra_name = "cbcmac(aes)",
937 .base.cra_driver_name = "cbcmac-aes-" MODE,
978 module_cpu_feature_match(AES, aes_init);
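For the Crypto Extensions build, module_cpu_feature_match(AES, aes_init) takes the place of a plain module_init(): it registers a CPU-feature module alias so the module is loaded automatically on cores that advertise the AES capability, and aes_init() runs only when that feature is present.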