xref: /linux/lib/crypto/arm64/aes.h (revision c17ee635fd3a482b2ad2bf5e269755c2eae5f25e)
1*2b1ef7aeSEric Biggers /* SPDX-License-Identifier: GPL-2.0-only */
2*2b1ef7aeSEric Biggers /*
3*2b1ef7aeSEric Biggers  * AES block cipher, optimized for ARM64
4*2b1ef7aeSEric Biggers  *
5*2b1ef7aeSEric Biggers  * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
6*2b1ef7aeSEric Biggers  * Copyright 2026 Google LLC
7*2b1ef7aeSEric Biggers  */
8*2b1ef7aeSEric Biggers 
9*2b1ef7aeSEric Biggers #include <asm/neon.h>
10*2b1ef7aeSEric Biggers #include <asm/simd.h>
11*2b1ef7aeSEric Biggers #include <linux/unaligned.h>
12*2b1ef7aeSEric Biggers #include <linux/cpufeature.h>
13*2b1ef7aeSEric Biggers 
/* Set at init time when the CPU implements the ARMv8 AES Crypto Extensions. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_aes);
15*2b1ef7aeSEric Biggers 
/*
 * One 16-byte AES block.  Used below for typed, block-granular access to the
 * expanded round keys when deriving the decryption key schedule.
 */
struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};
19*2b1ef7aeSEric Biggers 
/*
 * Assembly routines.  __aes_arm64_{en,de}crypt are the generic scalar
 * implementations, used as the fallback when SIMD can't be used.  The
 * __aes_ce_* routines use the ARMv8 Crypto Extensions; in this file they are
 * only ever invoked inside a scoped_ksimd() section.
 */
asmlinkage void __aes_arm64_encrypt(const u32 rk[], u8 out[AES_BLOCK_SIZE],
				    const u8 in[AES_BLOCK_SIZE], int rounds);
asmlinkage void __aes_arm64_decrypt(const u32 inv_rk[], u8 out[AES_BLOCK_SIZE],
				    const u8 in[AES_BLOCK_SIZE], int rounds);
asmlinkage void __aes_ce_encrypt(const u32 rk[], u8 out[AES_BLOCK_SIZE],
				 const u8 in[AES_BLOCK_SIZE], int rounds);
asmlinkage void __aes_ce_decrypt(const u32 inv_rk[], u8 out[AES_BLOCK_SIZE],
				 const u8 in[AES_BLOCK_SIZE], int rounds);
/* SubBytes applied to each byte of @l (key schedule SubWord helper). */
asmlinkage u32 __aes_ce_sub(u32 l);
/* Inverse MixColumns of one round key block, for the decryption schedule. */
asmlinkage void __aes_ce_invert(struct aes_block *out,
				const struct aes_block *in);
31*2b1ef7aeSEric Biggers 
/*
 * Expand an AES key using the crypto extensions if supported and usable or
 * generic code otherwise.  The expanded key format is compatible between the
 * two cases.  The outputs are @rndkeys (required) and @inv_rndkeys (optional).
 */
static void aes_expandkey_arm64(u32 rndkeys[], u32 *inv_rndkeys,
				const u8 *in_key, int key_len, int nrounds)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	/* Number of 32-bit words in the raw key: 4, 6, or 8. */
	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	/*
	 * Fall back to the generic implementation when the Crypto Extensions
	 * aren't present or SIMD can't be used in the current context.
	 */
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) ||
	    !static_branch_likely(&have_aes) || unlikely(!may_use_simd())) {
		aes_expandkey_generic(rndkeys, inv_rndkeys, in_key, key_len);
		return;
	}

	/* The first round key is the raw key itself, loaded as LE words. */
	for (i = 0; i < kwords; i++)
		rndkeys[i] = get_unaligned_le32(&in_key[i * sizeof(u32)]);

	scoped_ksimd() {
		for (i = 0; i < sizeof(rcon); i++) {
			u32 *rki = &rndkeys[i * kwords];	/* current round key */
			u32 *rko = rki + kwords;		/* next round key */

			/*
			 * First word of the next round key: SubWord (via
			 * __aes_ce_sub) and RotWord (via the rotate) of the
			 * last word of the current round key, XORed with the
			 * round constant and the current key's first word.
			 */
			rko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^
				 rcon[i] ^ rki[0];
			rko[1] = rko[0] ^ rki[1];
			rko[2] = rko[1] ^ rki[2];
			rko[3] = rko[2] ^ rki[3];

			if (key_len == AES_KEYSIZE_192) {
				if (i >= 7)
					break;
				rko[4] = rko[3] ^ rki[4];
				rko[5] = rko[4] ^ rki[5];
			} else if (key_len == AES_KEYSIZE_256) {
				if (i >= 6)
					break;
				/* AES-256 also applies SubWord mid-schedule. */
				rko[4] = __aes_ce_sub(rko[3]) ^ rki[4];
				rko[5] = rko[4] ^ rki[5];
				rko[6] = rko[5] ^ rki[6];
				rko[7] = rko[6] ^ rki[7];
			}
		}

		/*
		 * Generate the decryption keys for the Equivalent Inverse
		 * Cipher.  This involves reversing the order of the round
		 * keys, and applying the Inverse Mix Columns transformation on
		 * all but the first and the last one.
		 */
		if (inv_rndkeys) {
			key_enc = (struct aes_block *)rndkeys;
			key_dec = (struct aes_block *)inv_rndkeys;
			j = nrounds;

			key_dec[0] = key_enc[j];
			for (i = 1, j--; j > 0; i++, j--)
				__aes_ce_invert(key_dec + i, key_enc + j);
			key_dec[i] = key_enc[0];
		}
	}
}
104*2b1ef7aeSEric Biggers 
105*2b1ef7aeSEric Biggers static void aes_preparekey_arch(union aes_enckey_arch *k,
106*2b1ef7aeSEric Biggers 				union aes_invkey_arch *inv_k,
107*2b1ef7aeSEric Biggers 				const u8 *in_key, int key_len, int nrounds)
108*2b1ef7aeSEric Biggers {
109*2b1ef7aeSEric Biggers 	aes_expandkey_arm64(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
110*2b1ef7aeSEric Biggers 			    in_key, key_len, nrounds);
111*2b1ef7aeSEric Biggers }
112*2b1ef7aeSEric Biggers 
113*2b1ef7aeSEric Biggers /*
114*2b1ef7aeSEric Biggers  * This is here temporarily until the remaining AES mode implementations are
115*2b1ef7aeSEric Biggers  * migrated from arch/arm64/crypto/ to lib/crypto/arm64/.
116*2b1ef7aeSEric Biggers  */
117*2b1ef7aeSEric Biggers int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
118*2b1ef7aeSEric Biggers 		     unsigned int key_len)
119*2b1ef7aeSEric Biggers {
120*2b1ef7aeSEric Biggers 	if (aes_check_keylen(key_len) != 0)
121*2b1ef7aeSEric Biggers 		return -EINVAL;
122*2b1ef7aeSEric Biggers 	ctx->key_length = key_len;
123*2b1ef7aeSEric Biggers 	aes_expandkey_arm64(ctx->key_enc, ctx->key_dec, in_key, key_len,
124*2b1ef7aeSEric Biggers 			    6 + key_len / 4);
125*2b1ef7aeSEric Biggers 	return 0;
126*2b1ef7aeSEric Biggers }
127*2b1ef7aeSEric Biggers EXPORT_SYMBOL(ce_aes_expandkey);
128*2b1ef7aeSEric Biggers 
129*2b1ef7aeSEric Biggers static void aes_encrypt_arch(const struct aes_enckey *key,
130*2b1ef7aeSEric Biggers 			     u8 out[AES_BLOCK_SIZE],
131*2b1ef7aeSEric Biggers 			     const u8 in[AES_BLOCK_SIZE])
132*2b1ef7aeSEric Biggers {
133*2b1ef7aeSEric Biggers 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
134*2b1ef7aeSEric Biggers 	    static_branch_likely(&have_aes) && likely(may_use_simd())) {
135*2b1ef7aeSEric Biggers 		scoped_ksimd()
136*2b1ef7aeSEric Biggers 			__aes_ce_encrypt(key->k.rndkeys, out, in, key->nrounds);
137*2b1ef7aeSEric Biggers 	} else {
138*2b1ef7aeSEric Biggers 		__aes_arm64_encrypt(key->k.rndkeys, out, in, key->nrounds);
139*2b1ef7aeSEric Biggers 	}
140*2b1ef7aeSEric Biggers }
141*2b1ef7aeSEric Biggers 
142*2b1ef7aeSEric Biggers static void aes_decrypt_arch(const struct aes_key *key,
143*2b1ef7aeSEric Biggers 			     u8 out[AES_BLOCK_SIZE],
144*2b1ef7aeSEric Biggers 			     const u8 in[AES_BLOCK_SIZE])
145*2b1ef7aeSEric Biggers {
146*2b1ef7aeSEric Biggers 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
147*2b1ef7aeSEric Biggers 	    static_branch_likely(&have_aes) && likely(may_use_simd())) {
148*2b1ef7aeSEric Biggers 		scoped_ksimd()
149*2b1ef7aeSEric Biggers 			__aes_ce_decrypt(key->inv_k.inv_rndkeys, out, in,
150*2b1ef7aeSEric Biggers 					 key->nrounds);
151*2b1ef7aeSEric Biggers 	} else {
152*2b1ef7aeSEric Biggers 		__aes_arm64_decrypt(key->inv_k.inv_rndkeys, out, in,
153*2b1ef7aeSEric Biggers 				    key->nrounds);
154*2b1ef7aeSEric Biggers 	}
155*2b1ef7aeSEric Biggers }
156*2b1ef7aeSEric Biggers 
#ifdef CONFIG_KERNEL_MODE_NEON
#define aes_mod_init_arch aes_mod_init_arch
/* Enable the Crypto Extensions fast paths when the CPU supports AES. */
static void aes_mod_init_arch(void)
{
	if (!cpu_have_named_feature(AES))
		return;

	static_branch_enable(&have_aes);
}
#endif /* CONFIG_KERNEL_MODE_NEON */
165