// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

asmlinkage u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				    u32 macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

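/*
 * Fold the associated data into the MAC. Per the CCM spec, the AAD is
 * prefixed with an encoding of its length and then absorbed one
 * scatterlist entry at a time, in chunks of at most 4k so the NEON unit
 * can be yielded periodically.
 */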
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
				    ctx->key_enc, num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		n = min_t(u32, n, SZ_4K); /* yield NEON at least every 4k */
		p = scatterwalk_map(&walk);

		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
					    num_rounds(ctx));

		if (len / SZ_4K > (len - n) / SZ_4K) {
			kernel_neon_end();
			kernel_neon_begin();
		}
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		bool final = walk.nbytes == walk.total;

		if (final)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (!final)
			kernel_neon_end();
		err = skcipher_walk_done(&walk, tail);
		if (!final)
			kernel_neon_begin();
	}

	ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return err;
}

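/*
 * Decryption mirrors encryption: the tag is recomputed over the AAD and
 * the decrypted plaintext, then compared in constant time against the
 * tag stored at the end of the source buffer.
 */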
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		bool final = walk.nbytes == walk.total;

		if (final)
			tail = 0;

		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (!final)
			kernel_neon_end();
		err = skcipher_walk_done(&walk, tail);
		if (!final)
			kernel_neon_begin();
	}

	ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	if (unlikely(err))
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");