// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE	16
#define AEGIS128_STATE_BLOCKS	5
#define AEGIS128_KEY_SIZE	16
#define AEGIS128_MIN_AUTH_SIZE	8
#define AEGIS128_MAX_AUTH_SIZE	16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}
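
/*
 * Absorb a run of full associated-data blocks into the state. Only whole
 * blocks are consumed here; the caller buffers any partial block, and the
 * aligned path lets the update helpers avoid unaligned accesses.
 */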

static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}
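
/*
 * Decryption mirrors crypto_aegis128_encrypt_chunk(), except that the
 * state is updated with the decrypted plaintext rather than the
 * ciphertext, and a partial final block is zero-padded before it is
 * absorbed so that stray keystream bytes never enter the state.
 */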

static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst,
						const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}

static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}
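
/*
 * Scalar AEAD entry points. crypto_aegis128_final() XORs the final state
 * blocks into its tag_xor argument, so encryption passes in a zeroed
 * block to obtain the tag, while decryption passes in the received tag
 * and expects an all-zero result on success.
 */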

static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
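
/*
 * On a tag mismatch the plaintext written so far is wiped again by
 * re-walking the request with crypto_aegis128_wipe_chunk(), honouring
 * the spec's requirement not to release unauthenticated plaintext;
 * crypto_memneq() keeps the tag comparison constant-time.
 */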

static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 * wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}
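
/*
 * Both variants are registered under the same cra_name; when available,
 * the SIMD one wins selection through its higher cra_priority and falls
 * back to the generic code paths whenever the SIMD unit cannot be used
 * (e.g. in interrupt context).
 */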

static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");