xref: /linux/lib/crypto/arm/aes.h (revision c17ee635fd3a482b2ad2bf5e269755c2eae5f25e)
1*fa229775SEric Biggers /* SPDX-License-Identifier: GPL-2.0-only */
2*fa229775SEric Biggers /*
3*fa229775SEric Biggers  * AES block cipher, optimized for ARM
4*fa229775SEric Biggers  *
5*fa229775SEric Biggers  * Copyright (C) 2017 Linaro Ltd.
6*fa229775SEric Biggers  * Copyright 2026 Google LLC
7*fa229775SEric Biggers  */
8*fa229775SEric Biggers 
/*
 * Scalar AES single-block primitives, implemented in assembly (see the
 * accompanying .S file — not visible here).
 *
 * @rk / @inv_rk: the expanded (forward / inverse) round keys
 * @rounds:       number of AES rounds for the key size (10, 12, or 14)
 * @in, @out:     one 16-byte block; NOTE(review): the asm appears to require
 *                at least 4-byte alignment of @in/@out unless
 *                CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS, given the bounce
 *                buffers used by the callers below — confirm against the asm.
 */
asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
				  const u8 in[AES_BLOCK_SIZE],
				  u8 out[AES_BLOCK_SIZE]);
asmlinkage void __aes_arm_decrypt(const u32 inv_rk[], int rounds,
				  const u8 in[AES_BLOCK_SIZE],
				  u8 out[AES_BLOCK_SIZE]);
15*fa229775SEric Biggers 
16*fa229775SEric Biggers static void aes_preparekey_arch(union aes_enckey_arch *k,
17*fa229775SEric Biggers 				union aes_invkey_arch *inv_k,
18*fa229775SEric Biggers 				const u8 *in_key, int key_len, int nrounds)
19*fa229775SEric Biggers {
20*fa229775SEric Biggers 	aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
21*fa229775SEric Biggers 			      in_key, key_len);
22*fa229775SEric Biggers }
23*fa229775SEric Biggers 
24*fa229775SEric Biggers static void aes_encrypt_arch(const struct aes_enckey *key,
25*fa229775SEric Biggers 			     u8 out[AES_BLOCK_SIZE],
26*fa229775SEric Biggers 			     const u8 in[AES_BLOCK_SIZE])
27*fa229775SEric Biggers {
28*fa229775SEric Biggers 	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
29*fa229775SEric Biggers 	    !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
30*fa229775SEric Biggers 		u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
31*fa229775SEric Biggers 
32*fa229775SEric Biggers 		memcpy(bounce_buf, in, AES_BLOCK_SIZE);
33*fa229775SEric Biggers 		__aes_arm_encrypt(key->k.rndkeys, key->nrounds, bounce_buf,
34*fa229775SEric Biggers 				  bounce_buf);
35*fa229775SEric Biggers 		memcpy(out, bounce_buf, AES_BLOCK_SIZE);
36*fa229775SEric Biggers 		return;
37*fa229775SEric Biggers 	}
38*fa229775SEric Biggers 	__aes_arm_encrypt(key->k.rndkeys, key->nrounds, in, out);
39*fa229775SEric Biggers }
40*fa229775SEric Biggers 
41*fa229775SEric Biggers static void aes_decrypt_arch(const struct aes_key *key,
42*fa229775SEric Biggers 			     u8 out[AES_BLOCK_SIZE],
43*fa229775SEric Biggers 			     const u8 in[AES_BLOCK_SIZE])
44*fa229775SEric Biggers {
45*fa229775SEric Biggers 	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
46*fa229775SEric Biggers 	    !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
47*fa229775SEric Biggers 		u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
48*fa229775SEric Biggers 
49*fa229775SEric Biggers 		memcpy(bounce_buf, in, AES_BLOCK_SIZE);
50*fa229775SEric Biggers 		__aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds,
51*fa229775SEric Biggers 				  bounce_buf, bounce_buf);
52*fa229775SEric Biggers 		memcpy(out, bounce_buf, AES_BLOCK_SIZE);
53*fa229775SEric Biggers 		return;
54*fa229775SEric Biggers 	}
55*fa229775SEric Biggers 	__aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds, in, out);
56*fa229775SEric Biggers }
57