xref: /linux/lib/crypto/s390/aes.h (revision 6f7e6393d1ce636bb7ec77a7fe7b77458fddf701)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * AES optimized using the CP Assist for Cryptographic Functions (CPACF)
4  *
5  * Copyright 2026 Google LLC
6  */
7 #include <asm/cpacf.h>
8 #include <linux/cpufeature.h>
9 
/*
 * One static key per supported AES key size.  Each is enabled during module
 * init (aes_mod_init_arch) iff the CPACF KM facility reports the matching
 * AES function code, letting the hot paths branch with near-zero cost.
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes128);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes192);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes256);
13 
14 /*
15  * When the CPU supports CPACF AES for the requested key length, we need only
16  * save a copy of the raw AES key, as that's what the CPACF instructions need.
17  *
18  * When unsupported, fall back to the generic key expansion and en/decryption.
19  */
20 static void aes_preparekey_arch(union aes_enckey_arch *k,
21 				union aes_invkey_arch *inv_k,
22 				const u8 *in_key, int key_len, int nrounds)
23 {
24 	if (key_len == AES_KEYSIZE_128) {
25 		if (static_branch_likely(&have_cpacf_aes128)) {
26 			memcpy(k->raw_key, in_key, AES_KEYSIZE_128);
27 			return;
28 		}
29 	} else if (key_len == AES_KEYSIZE_192) {
30 		if (static_branch_likely(&have_cpacf_aes192)) {
31 			memcpy(k->raw_key, in_key, AES_KEYSIZE_192);
32 			return;
33 		}
34 	} else {
35 		if (static_branch_likely(&have_cpacf_aes256)) {
36 			memcpy(k->raw_key, in_key, AES_KEYSIZE_256);
37 			return;
38 		}
39 	}
40 	aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
41 			      in_key, key_len);
42 }
43 
44 static inline bool aes_crypt_s390(const struct aes_enckey *key,
45 				  u8 out[AES_BLOCK_SIZE],
46 				  const u8 in[AES_BLOCK_SIZE], int decrypt)
47 {
48 	if (key->len == AES_KEYSIZE_128) {
49 		if (static_branch_likely(&have_cpacf_aes128)) {
50 			cpacf_km(CPACF_KM_AES_128 | decrypt,
51 				 (void *)key->k.raw_key, out, in,
52 				 AES_BLOCK_SIZE);
53 			return true;
54 		}
55 	} else if (key->len == AES_KEYSIZE_192) {
56 		if (static_branch_likely(&have_cpacf_aes192)) {
57 			cpacf_km(CPACF_KM_AES_192 | decrypt,
58 				 (void *)key->k.raw_key, out, in,
59 				 AES_BLOCK_SIZE);
60 			return true;
61 		}
62 	} else {
63 		if (static_branch_likely(&have_cpacf_aes256)) {
64 			cpacf_km(CPACF_KM_AES_256 | decrypt,
65 				 (void *)key->k.raw_key, out, in,
66 				 AES_BLOCK_SIZE);
67 			return true;
68 		}
69 	}
70 	return false;
71 }
72 
73 static void aes_encrypt_arch(const struct aes_enckey *key,
74 			     u8 out[AES_BLOCK_SIZE],
75 			     const u8 in[AES_BLOCK_SIZE])
76 {
77 	if (likely(aes_crypt_s390(key, out, in, 0)))
78 		return;
79 	aes_encrypt_generic(key->k.rndkeys, key->nrounds, out, in);
80 }
81 
82 static void aes_decrypt_arch(const struct aes_key *key,
83 			     u8 out[AES_BLOCK_SIZE],
84 			     const u8 in[AES_BLOCK_SIZE])
85 {
86 	if (likely(aes_crypt_s390((const struct aes_enckey *)key, out, in,
87 				  CPACF_DECRYPT)))
88 		return;
89 	aes_decrypt_generic(key->inv_k.inv_rndkeys, key->nrounds, out, in);
90 }
91 
92 #define aes_mod_init_arch aes_mod_init_arch
93 static void aes_mod_init_arch(void)
94 {
95 	if (cpu_have_feature(S390_CPU_FEATURE_MSA)) {
96 		cpacf_mask_t km_functions;
97 
98 		cpacf_query(CPACF_KM, &km_functions);
99 		if (cpacf_test_func(&km_functions, CPACF_KM_AES_128))
100 			static_branch_enable(&have_cpacf_aes128);
101 		if (cpacf_test_func(&km_functions, CPACF_KM_AES_192))
102 			static_branch_enable(&have_cpacf_aes192);
103 		if (cpacf_test_func(&km_functions, CPACF_KM_AES_256))
104 			static_branch_enable(&have_cpacf_aes256);
105 	}
106 }
107