// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE		16
#define AEGIS128_STATE_BLOCKS		5
#define AEGIS128_KEY_SIZE		16
#define AEGIS128_MIN_AUTH_SIZE		8
#define AEGIS128_MAX_AUTH_SIZE		16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}
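
/*
 * Absorb full blocks of associated data into the state, taking the aligned
 * fast path when the source pointer permits block-wise access. Any trailing
 * partial block is left for the caller to buffer.
 */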
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}
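
/*
 * Decrypt a chunk of ciphertext: each plaintext block is recovered by XORing
 * the ciphertext with the keystream derived from state blocks 1-4, and the
 * resulting *plaintext* (not the ciphertext) is then absorbed into the state.
 * A trailing partial block is zero-padded before being absorbed, as the
 * specification requires.
 */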
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size;
		const u8 *mapped = scatterwalk_next(&walk, assoclen, &size);
		unsigned int left = size;
		const u8 *src = mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_done_src(&walk, mapped, size);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst,
						const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}

static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}
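
/*
 * Generic (scalar) encryption path: initialize the state from key and nonce,
 * absorb the associated data, encrypt the plaintext, then append the
 * authentication tag to the destination scatterlist.
 */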
static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 * wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
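
/*
 * SIMD decryption path; falls back to the generic implementation when the
 * SIMD unit is not usable in the current context. The tag comparison happens
 * inside crypto_aegis128_final_simd(), and on failure the already-written
 * plaintext is wiped before returning -EBADMSG.
 */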
static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}

static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");