/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AES block cipher, optimized for ARM
 *
 * Copyright (C) 2017 Linaro Ltd.
 * Copyright 2025 Google LLC
 */

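/*
 * Scalar AES encryption and decryption, implemented in ARM assembly.
 * rk holds the expanded round keys (inv_rk the inverse round keys), and
 * rounds is the number of AES rounds: 10, 12, or 14 for AES-128, AES-192,
 * or AES-256 respectively.
 */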
asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
				  const u8 in[AES_BLOCK_SIZE],
				  u8 out[AES_BLOCK_SIZE]);
asmlinkage void __aes_arm_decrypt(const u32 inv_rk[], int rounds,
				  const u8 in[AES_BLOCK_SIZE],
				  u8 out[AES_BLOCK_SIZE]);

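/*
 * There is no ARM-specific key expansion; this just wraps the generic C
 * implementation.  aes_expandkey_generic() derives the round count from
 * key_len, so nrounds is unused here.  inv_k may be NULL when the key will
 * be used only for encryption, in which case the inverse round keys are
 * not computed.
 */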
static void aes_preparekey_arch(union aes_enckey_arch *k,
				union aes_invkey_arch *inv_k,
				const u8 *in_key, int key_len, int nrounds)
{
	aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
			      in_key, key_len);
}

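/*
 * The assembly accesses the data blocks as 32-bit words, so unless the CPU
 * supports efficient unaligned accesses, misaligned blocks must be bounced
 * through a 4-byte-aligned buffer on the stack.
 */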
static void aes_encrypt_arch(const struct aes_enckey *key,
			     u8 out[AES_BLOCK_SIZE],
			     const u8 in[AES_BLOCK_SIZE])
{
	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
		u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);

		memcpy(bounce_buf, in, AES_BLOCK_SIZE);
		__aes_arm_encrypt(key->k.rndkeys, key->nrounds, bounce_buf,
				  bounce_buf);
		memcpy(out, bounce_buf, AES_BLOCK_SIZE);
		return;
	}
	__aes_arm_encrypt(key->k.rndkeys, key->nrounds, in, out);
}

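/*
 * Same as aes_encrypt_arch(), except this takes a struct aes_key, since
 * decryption requires the inverse round keys.
 */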
static void aes_decrypt_arch(const struct aes_key *key,
			     u8 out[AES_BLOCK_SIZE],
			     const u8 in[AES_BLOCK_SIZE])
{
	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
		u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);

		memcpy(bounce_buf, in, AES_BLOCK_SIZE);
		__aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds,
				  bounce_buf, bounce_buf);
		memcpy(out, bounce_buf, AES_BLOCK_SIZE);
		return;
	}
	__aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds, in, out);
}