// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
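
/*
 * Summary of the construction as implemented below (this restates the step
 * comments in hctr2_crypt(); see the paper for the authoritative
 * definition).  The plaintext is split into a first block M and a bulk
 * part N.  For a tweak T:
 *
 *	MM = M ^ H(T || N)
 *	UU = E(MM)
 *	S  = MM ^ UU ^ L
 *	V  = XCTR(S, N)
 *	U  = UU ^ H(T || V)
 *
 * The ciphertext is U || V.  Decryption runs the same flow with E replaced
 * by D and the roles of (M, N) and (U, V) swapped.
 */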

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct hctr2_instance_ctx {
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_skcipher_spawn xctr_spawn;
	struct crypto_shash_spawn polyval_spawn;
};

struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;
	struct crypto_skcipher *xctr;
	struct crypto_shash *polyval;
	u8 L[BLOCKCIPHER_BLOCK_SIZE];
	int hashed_tweak_offset;
	/*
	 * This struct is allocated with extra space for two exported hash
	 * states.  Since the hash state size is not known at compile-time, we
	 * can't add these to the struct directly.
	 *
	 * hashed_tweaklen_divisible;
	 * hashed_tweaklen_remainder;
	 */
};

struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
	struct scatterlist *bulk_part_dst;
	struct scatterlist *bulk_part_src;
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	/*
	 * Sub-request sizes are unknown at compile-time, so they need to go
	 * after the members with known sizes.
	 */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request xctr_req;
	} u;
	/*
	 * This struct is allocated with extra space for one exported hash
	 * state.  Since the hash state size is not known at compile-time, we
	 * can't add it to the struct directly.
	 *
	 * hashed_tweak;
	 */
};

static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
					bool has_remainder)
{
	u8 *p = (u8 *)tctx + sizeof(*tctx);

	if (has_remainder) /* For messages not a multiple of block length */
		p += crypto_shash_statesize(tctx->polyval);
	return p;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
				     struct hctr2_request_ctx *rctx)
{
	return (u8 *)rctx + tctx->hashed_tweak_offset;
}

/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption
 * request.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
	SHASH_DESC_ON_STACK(shash, tctx->polyval);
	__le64 tweak_length_block[2];
	int err;

	shash->tfm = tctx->polyval;
	memset(tweak_length_block, 0, sizeof(tweak_length_block));

	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
	err = crypto_shash_init(shash);
	if (err)
		return err;
	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
				  POLYVAL_BLOCK_SIZE);
	if (err)
		return err;
	return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}

static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
	int err;

	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->xctr,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
	if (err)
		return err;

	/* hbar = E(0), used below as the POLYVAL hash key */
	memset(hbar, 0, sizeof(hbar));
	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

	/* L = E(bin(1)), the mask used when deriving the XCTR IV S */
	memset(tctx->L, 0, sizeof(tctx->L));
	tctx->L[0] = 0x01;
	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

	crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
	if (err)
		return err;
	memzero_explicit(hbar, sizeof(hbar));

	return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}

static int hctr2_hash_tweak(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;
	bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
	if (err)
		return err;
	err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
	if (err)
		return err;

	// Store the hashed tweak, since we need it when computing both
	// H(T || N) and H(T || V).
	return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}
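
/*
 * For each hash step, the complete POLYVAL input is:
 *
 *	length block || tweak || message || padding
 *
 * The length-block hash states are precomputed at setkey time by
 * hctr2_hash_tweaklen() (with TWEAK_SIZE == 32, the length block encodes
 * 32 * 8 * 2 + 2 + has_remainder, i.e. 514 or 515), the tweak hash is
 * computed once per request by hctr2_hash_tweak(), and the message and
 * padding are supplied by hctr2_hash_message() below.
 */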

static int hctr2_hash_message(struct skcipher_request *req,
			      struct scatterlist *sgl,
			      u8 digest[POLYVAL_DIGEST_SIZE])
{
	static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct sg_mapping_iter miter;
	unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
	int i;
	int err = 0;
	int n = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);

	if (err)
		return err;

	if (remainder) {
		err = crypto_shash_update(hash_desc, padding,
					  BLOCKCIPHER_BLOCK_SIZE - remainder);
		if (err)
			return err;
	}
	return crypto_shash_final(hash_desc, digest);
}

static int hctr2_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;

	// U = UU ^ H(T || V)
	// or M = MM ^ H(T || N)
	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
	if (err)
		return err;
	crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

	// Copy U (or M) into dst scatterlist
	scatterwalk_map_and_copy(rctx->first_block, req->dst,
				 0, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void hctr2_xctr_done(struct crypto_async_request *areq,
			    int err)
{
	struct skcipher_request *req = areq->data;

	if (!err)
		err = hctr2_finish(req);

	skcipher_request_complete(req, err);
}

static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	int err;

	// Requests must be at least one block
	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	// Copy M (or U) into a temporary buffer
	scatterwalk_map_and_copy(rctx->first_block, req->src,
				 0, BLOCKCIPHER_BLOCK_SIZE, 0);

	// Create scatterlists for N and V
	rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
					       BLOCKCIPHER_BLOCK_SIZE);
	rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       BLOCKCIPHER_BLOCK_SIZE);

	// MM = M ^ H(T || N)
	// or UU = U ^ H(T || V)
	err = hctr2_hash_tweak(req);
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
	if (err)
		return err;
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

	// UU = E(MM)
	// or MM = D(UU)
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);
	else
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);

	// S = MM ^ UU ^ L
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
	crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

	// V = XCTR(S, N)
	// or N = XCTR(S, V)
	skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
	skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
				   rctx->bulk_part_dst, bulk_len,
				   rctx->xctr_iv);
	skcipher_request_set_callback(&rctx->u.xctr_req,
				      req->base.flags,
				      hctr2_xctr_done, req);
	return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
		hctr2_finish(req);
}
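
/*
 * Encryption and decryption share hctr2_crypt() above: XCTR keystreamed
 * with the same IV S is its own inverse, which is why both directions call
 * crypto_skcipher_encrypt() on the xctr sub-request.  The two directions
 * differ only in whether the single-block step uses the block cipher's
 * encrypt or decrypt operation.
 */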

static int hctr2_encrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, false);
}

static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *xctr;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *polyval;
	unsigned int subreq_size;
	int err;

	xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
	if (IS_ERR(xctr))
		return PTR_ERR(xctr);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_xctr;
	}

	polyval = crypto_spawn_shash(&ictx->polyval_spawn);
	if (IS_ERR(polyval)) {
		err = PTR_ERR(polyval);
		goto err_free_blockcipher;
	}

	tctx->xctr = xctr;
	tctx->blockcipher = blockcipher;
	tctx->polyval = polyval;

	/*
	 * The union must be the last member, since the variable-size
	 * sub-request and exported hash state are stored past the end of it.
	 */
	BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
		     sizeof(struct hctr2_request_ctx));
	subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
			  crypto_shash_descsize(polyval),
			  sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
			  crypto_skcipher_reqsize(xctr));

	tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
				    subreq_size;
	crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
				    crypto_shash_statesize(polyval));
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_xctr:
	crypto_free_skcipher(xctr);
	return err;
}
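
/*
 * Resulting request context layout (sizes finalized above at tfm init time,
 * since they depend on the spawned algorithms):
 *
 *	fixed-size members of struct hctr2_request_ctx, including union u
 *	subreq_size bytes for the larger of hash_desc and xctr_req
 *	exported POLYVAL state of H(length block || T), at
 *	hashed_tweak_offset
 */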

static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(tctx->blockcipher);
	crypto_free_skcipher(tctx->xctr);
	crypto_free_shash(tctx->polyval);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_skcipher(&ictx->xctr_spawn);
	crypto_drop_shash(&ictx->polyval_spawn);
	kfree(inst);
}

static int hctr2_create_common(struct crypto_template *tmpl,
			       struct rtattr **tb,
			       const char *xctr_name,
			       const char *polyval_name)
{
	u32 mask;
	struct skcipher_instance *inst;
	struct hctr2_instance_ctx *ictx;
	struct skcipher_alg *xctr_alg;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *polyval_alg;
	char blockcipher_name[CRYPTO_MAX_ALG_NAME];
	int len;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, xctr(block_cipher) */
	err = crypto_grab_skcipher(&ictx->xctr_spawn,
				   skcipher_crypto_instance(inst),
				   xctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	xctr_alg = crypto_spawn_skcipher_alg(&ictx->xctr_spawn);

	err = -EINVAL;
	if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
		goto err_free_inst;
	len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
		      sizeof(blockcipher_name));
	if (len < 1)
		goto err_free_inst;
	if (blockcipher_name[len - 1] != ')')
		goto err_free_inst;
	blockcipher_name[len - 1] = 0;

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 blockcipher_name, 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* Require blocksize of 16 bytes */
	err = -EINVAL;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		goto err_free_inst;

	/* Polyval ε-∆U hash function */
	err = crypto_grab_shash(&ictx->polyval_spawn,
				skcipher_crypto_instance(inst),
				polyval_name, 0, mask);
	if (err)
		goto err_free_inst;
	polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

	/* Ensure Polyval is being used */
	err = -EINVAL;
	if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
		goto err_free_inst;

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "hctr2_base(%s,%s)",
		     xctr_alg->base.cra_driver_name,
		     polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
				     polyval_alg->statesize * 2;
	inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask |
				       polyval_alg->base.cra_alignmask;
	/*
	 * The hash function is called twice, so it is weighted higher than the
	 * xctr and blockcipher.
	 */
	inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
				       4 * polyval_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = hctr2_setkey;
	inst->alg.encrypt = hctr2_encrypt;
	inst->alg.decrypt = hctr2_decrypt;
	inst->alg.init = hctr2_init_tfm;
	inst->alg.exit = hctr2_exit_tfm;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(xctr_alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(xctr_alg);
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = hctr2_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		hctr2_free_instance(inst);
	}
	return err;
}
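
/*
 * Example of how the two templates below resolve ("aes" is illustrative):
 * instantiating "hctr2(aes)" is equivalent to
 * "hctr2_base(xctr(aes),polyval)".  Both yield cra_name "hctr2(aes)", while
 * the driver name records which specific xctr and polyval implementations
 * were chosen.
 */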
494 */ 495 inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority + 496 4 * polyval_alg->base.cra_priority + 497 blockcipher_alg->cra_priority) / 7; 498 499 inst->alg.setkey = hctr2_setkey; 500 inst->alg.encrypt = hctr2_encrypt; 501 inst->alg.decrypt = hctr2_decrypt; 502 inst->alg.init = hctr2_init_tfm; 503 inst->alg.exit = hctr2_exit_tfm; 504 inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(xctr_alg); 505 inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(xctr_alg); 506 inst->alg.ivsize = TWEAK_SIZE; 507 508 inst->free = hctr2_free_instance; 509 510 err = skcipher_register_instance(tmpl, inst); 511 if (err) { 512 err_free_inst: 513 hctr2_free_instance(inst); 514 } 515 return err; 516 } 517 518 static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb) 519 { 520 const char *xctr_name; 521 const char *polyval_name; 522 523 xctr_name = crypto_attr_alg_name(tb[1]); 524 if (IS_ERR(xctr_name)) 525 return PTR_ERR(xctr_name); 526 527 polyval_name = crypto_attr_alg_name(tb[2]); 528 if (IS_ERR(polyval_name)) 529 return PTR_ERR(polyval_name); 530 531 return hctr2_create_common(tmpl, tb, xctr_name, polyval_name); 532 } 533 534 static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb) 535 { 536 const char *blockcipher_name; 537 char xctr_name[CRYPTO_MAX_ALG_NAME]; 538 539 blockcipher_name = crypto_attr_alg_name(tb[1]); 540 if (IS_ERR(blockcipher_name)) 541 return PTR_ERR(blockcipher_name); 542 543 if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)", 544 blockcipher_name) >= CRYPTO_MAX_ALG_NAME) 545 return -ENAMETOOLONG; 546 547 return hctr2_create_common(tmpl, tb, xctr_name, "polyval"); 548 } 549 550 static struct crypto_template hctr2_tmpls[] = { 551 { 552 /* hctr2_base(xctr_name, polyval_name) */ 553 .name = "hctr2_base", 554 .create = hctr2_create_base, 555 .module = THIS_MODULE, 556 }, { 557 /* hctr2(blockcipher_name) */ 558 .name = "hctr2", 559 .create = hctr2_create, 560 .module = THIS_MODULE, 561 } 562 }; 563 564 static int __init hctr2_module_init(void) 565 { 566 return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls)); 567 } 568 569 static void __exit hctr2_module_exit(void) 570 { 571 return crypto_unregister_templates(hctr2_tmpls, 572 ARRAY_SIZE(hctr2_tmpls)); 573 } 574 575 subsys_initcall(hctr2_module_init); 576 module_exit(hctr2_module_exit); 577 578 MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode"); 579 MODULE_LICENSE("GPL v2"); 580 MODULE_ALIAS_CRYPTO("hctr2"); 581 MODULE_IMPORT_NS(CRYPTO_INTERNAL); 582