1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * AEAD wrapper for Kerberos 5 RFC3961 simplified profile. 4 * 5 * Copyright (C) 2025 Red Hat, Inc. All Rights Reserved. 6 * Written by David Howells (dhowells@redhat.com) 7 * 8 * Derived from authenc: 9 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au> 10 */ 11 12 #include <crypto/internal/aead.h> 13 #include <crypto/internal/hash.h> 14 #include <crypto/internal/skcipher.h> 15 #include <crypto/authenc.h> 16 #include <crypto/scatterwalk.h> 17 #include <linux/err.h> 18 #include <linux/init.h> 19 #include <linux/kernel.h> 20 #include <linux/module.h> 21 #include <linux/rtnetlink.h> 22 #include <linux/slab.h> 23 #include <linux/spinlock.h> 24 25 struct krb5enc_instance_ctx { 26 struct crypto_ahash_spawn auth; 27 struct crypto_skcipher_spawn enc; 28 unsigned int reqoff; 29 }; 30 31 struct krb5enc_ctx { 32 struct crypto_ahash *auth; 33 struct crypto_skcipher *enc; 34 }; 35 36 struct krb5enc_request_ctx { 37 struct scatterlist src[2]; 38 struct scatterlist dst[2]; 39 char tail[]; 40 }; 41 42 /** 43 * crypto_krb5enc_extractkeys - Extract Ke and Ki keys from the key blob. 44 * @keys: Where to put the key sizes and pointers 45 * @key: Encoded key material 46 * @keylen: Amount of key material 47 * 48 * Decode the key blob we're given. It starts with an rtattr that indicates 49 * the format and the length. Format CRYPTO_AUTHENC_KEYA_PARAM is: 50 * 51 * rtattr || __be32 enckeylen || authkey || enckey 52 * 53 * Note that the rtattr is in cpu-endian form, unlike enckeylen. This must be 54 * handled correctly in static testmgr data. 55 */ 56 int crypto_krb5enc_extractkeys(struct crypto_authenc_keys *keys, const u8 *key, 57 unsigned int keylen) 58 { 59 struct rtattr *rta = (struct rtattr *)key; 60 struct crypto_authenc_key_param *param; 61 62 if (!RTA_OK(rta, keylen)) 63 return -EINVAL; 64 if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM) 65 return -EINVAL; 66 67 /* 68 * RTA_OK() didn't align the rtattr's payload when validating that it 69 * fits in the buffer. Yet, the keys should start on the next 4-byte 70 * aligned boundary. To avoid confusion, require that the rtattr 71 * payload be exactly the param struct, which has a 4-byte aligned size. 72 */ 73 if (RTA_PAYLOAD(rta) != sizeof(*param)) 74 return -EINVAL; 75 BUILD_BUG_ON(sizeof(*param) % RTA_ALIGNTO); 76 77 param = RTA_DATA(rta); 78 keys->enckeylen = be32_to_cpu(param->enckeylen); 79 80 key += rta->rta_len; 81 keylen -= rta->rta_len; 82 83 if (keylen < keys->enckeylen) 84 return -EINVAL; 85 86 keys->authkeylen = keylen - keys->enckeylen; 87 keys->authkey = key; 88 keys->enckey = key + keys->authkeylen; 89 return 0; 90 } 91 EXPORT_SYMBOL(crypto_krb5enc_extractkeys); 92 93 static int krb5enc_setkey(struct crypto_aead *krb5enc, const u8 *key, 94 unsigned int keylen) 95 { 96 struct crypto_authenc_keys keys; 97 struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc); 98 struct crypto_skcipher *enc = ctx->enc; 99 struct crypto_ahash *auth = ctx->auth; 100 unsigned int flags = crypto_aead_get_flags(krb5enc); 101 int err = -EINVAL; 102 103 if (crypto_krb5enc_extractkeys(&keys, key, keylen) != 0) 104 goto out; 105 106 crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); 107 crypto_ahash_set_flags(auth, flags & CRYPTO_TFM_REQ_MASK); 108 err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen); 109 if (err) 110 goto out; 111 112 crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK); 113 crypto_skcipher_set_flags(enc, flags & CRYPTO_TFM_REQ_MASK); 114 err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen); 115 out: 116 memzero_explicit(&keys, sizeof(keys)); 117 return err; 118 } 119 120 static void krb5enc_encrypt_done(void *data, int err) 121 { 122 struct aead_request *req = data; 123 124 aead_request_complete(req, err); 125 } 126 127 /* 128 * Start the encryption of the plaintext. We skip over the associated data as 129 * that only gets included in the hash. 130 */ 131 static int krb5enc_dispatch_encrypt(struct aead_request *req, 132 unsigned int flags) 133 { 134 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 135 struct aead_instance *inst = aead_alg_instance(krb5enc); 136 struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc); 137 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 138 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 139 struct crypto_skcipher *enc = ctx->enc; 140 struct skcipher_request *skreq = (void *)(areq_ctx->tail + 141 ictx->reqoff); 142 struct scatterlist *src, *dst; 143 144 src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen); 145 if (req->src == req->dst) 146 dst = src; 147 else 148 dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen); 149 150 skcipher_request_set_tfm(skreq, enc); 151 skcipher_request_set_callback(skreq, flags, 152 krb5enc_encrypt_done, req); 153 skcipher_request_set_crypt(skreq, src, dst, req->cryptlen, req->iv); 154 155 return crypto_skcipher_encrypt(skreq); 156 } 157 158 /* 159 * Insert the hash into the checksum field in the destination buffer directly 160 * after the encrypted region. 161 */ 162 static void krb5enc_insert_checksum(struct aead_request *req, u8 *hash) 163 { 164 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 165 166 scatterwalk_map_and_copy(hash, req->dst, 167 req->assoclen + req->cryptlen, 168 crypto_aead_authsize(krb5enc), 1); 169 } 170 171 /* 172 * Upon completion of an asynchronous digest, transfer the hash to the checksum 173 * field. 174 */ 175 static void krb5enc_encrypt_ahash_done(void *data, int err) 176 { 177 struct aead_request *req = data; 178 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 179 struct aead_instance *inst = aead_alg_instance(krb5enc); 180 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 181 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 182 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); 183 184 if (err) 185 goto out; 186 187 krb5enc_insert_checksum(req, ahreq->result); 188 189 err = krb5enc_dispatch_encrypt(req, 0); 190 if (err == -EINPROGRESS) 191 return; 192 193 out: 194 aead_request_complete(req, err); 195 } 196 197 /* 198 * Start the digest of the plaintext for encryption. In theory, this could be 199 * run in parallel with the encryption, provided the src and dst buffers don't 200 * overlap. 201 */ 202 static int krb5enc_dispatch_encrypt_hash(struct aead_request *req) 203 { 204 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 205 struct aead_instance *inst = aead_alg_instance(krb5enc); 206 struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc); 207 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 208 struct crypto_ahash *auth = ctx->auth; 209 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 210 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); 211 u8 *hash = areq_ctx->tail; 212 int err; 213 214 ahash_request_set_callback(ahreq, aead_request_flags(req), 215 krb5enc_encrypt_ahash_done, req); 216 ahash_request_set_tfm(ahreq, auth); 217 ahash_request_set_crypt(ahreq, req->src, hash, req->assoclen + req->cryptlen); 218 219 err = crypto_ahash_digest(ahreq); 220 if (err) 221 return err; 222 223 krb5enc_insert_checksum(req, hash); 224 return 0; 225 } 226 227 /* 228 * Process an encryption operation. We can perform the cipher and the hash in 229 * parallel, provided the src and dst buffers are separate. 230 */ 231 static int krb5enc_encrypt(struct aead_request *req) 232 { 233 int err; 234 235 err = krb5enc_dispatch_encrypt_hash(req); 236 if (err < 0) 237 return err; 238 239 return krb5enc_dispatch_encrypt(req, aead_request_flags(req)); 240 } 241 242 static int krb5enc_verify_hash(struct aead_request *req) 243 { 244 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 245 struct aead_instance *inst = aead_alg_instance(krb5enc); 246 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 247 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 248 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); 249 unsigned int authsize = crypto_aead_authsize(krb5enc); 250 u8 *calc_hash = areq_ctx->tail; 251 u8 *msg_hash = areq_ctx->tail + authsize; 252 253 scatterwalk_map_and_copy(msg_hash, req->src, ahreq->nbytes, authsize, 0); 254 255 if (crypto_memneq(msg_hash, calc_hash, authsize)) 256 return -EBADMSG; 257 return 0; 258 } 259 260 static void krb5enc_decrypt_hash_done(void *data, int err) 261 { 262 struct aead_request *req = data; 263 264 if (!err) 265 err = krb5enc_verify_hash(req); 266 aead_request_complete(req, err); 267 } 268 269 /* 270 * Dispatch the hashing of the plaintext after we've done the decryption. 271 */ 272 static int krb5enc_dispatch_decrypt_hash(struct aead_request *req, 273 unsigned int flags) 274 { 275 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 276 struct aead_instance *inst = aead_alg_instance(krb5enc); 277 struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc); 278 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 279 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 280 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); 281 struct crypto_ahash *auth = ctx->auth; 282 unsigned int authsize = crypto_aead_authsize(krb5enc); 283 u8 *hash = areq_ctx->tail; 284 int err; 285 286 ahash_request_set_tfm(ahreq, auth); 287 ahash_request_set_crypt(ahreq, req->dst, hash, 288 req->assoclen + req->cryptlen - authsize); 289 ahash_request_set_callback(ahreq, flags, 290 krb5enc_decrypt_hash_done, req); 291 292 err = crypto_ahash_digest(ahreq); 293 if (err < 0) 294 return err; 295 296 return krb5enc_verify_hash(req); 297 } 298 299 static void krb5enc_decrypt_done(void *data, int err) 300 { 301 struct aead_request *req = data; 302 303 if (err) 304 goto out; 305 306 err = krb5enc_dispatch_decrypt_hash(req, 0); 307 if (err == -EINPROGRESS) 308 return; 309 310 out: 311 aead_request_complete(req, err); 312 } 313 314 /* 315 * Dispatch the decryption of the ciphertext. 316 */ 317 static int krb5enc_dispatch_decrypt(struct aead_request *req) 318 { 319 struct crypto_aead *krb5enc = crypto_aead_reqtfm(req); 320 struct aead_instance *inst = aead_alg_instance(krb5enc); 321 struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc); 322 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 323 struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req); 324 struct skcipher_request *skreq = (void *)(areq_ctx->tail + 325 ictx->reqoff); 326 unsigned int authsize = crypto_aead_authsize(krb5enc); 327 struct scatterlist *src, *dst; 328 329 src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen); 330 dst = src; 331 332 if (req->src != req->dst) 333 dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen); 334 335 skcipher_request_set_tfm(skreq, ctx->enc); 336 skcipher_request_set_callback(skreq, aead_request_flags(req), 337 krb5enc_decrypt_done, req); 338 skcipher_request_set_crypt(skreq, src, dst, 339 req->cryptlen - authsize, req->iv); 340 341 return crypto_skcipher_decrypt(skreq); 342 } 343 344 static int krb5enc_decrypt(struct aead_request *req) 345 { 346 int err; 347 348 err = krb5enc_dispatch_decrypt(req); 349 if (err < 0) 350 return err; 351 352 return krb5enc_dispatch_decrypt_hash(req, aead_request_flags(req)); 353 } 354 355 static int krb5enc_init_tfm(struct crypto_aead *tfm) 356 { 357 struct aead_instance *inst = aead_alg_instance(tfm); 358 struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst); 359 struct krb5enc_ctx *ctx = crypto_aead_ctx(tfm); 360 struct crypto_ahash *auth; 361 struct crypto_skcipher *enc; 362 int err; 363 364 auth = crypto_spawn_ahash(&ictx->auth); 365 if (IS_ERR(auth)) 366 return PTR_ERR(auth); 367 368 enc = crypto_spawn_skcipher(&ictx->enc); 369 err = PTR_ERR(enc); 370 if (IS_ERR(enc)) 371 goto err_free_ahash; 372 373 ctx->auth = auth; 374 ctx->enc = enc; 375 376 crypto_aead_set_reqsize( 377 tfm, 378 sizeof(struct krb5enc_request_ctx) + 379 ictx->reqoff + /* Space for two checksums */ 380 umax(sizeof(struct ahash_request) + crypto_ahash_reqsize(auth), 381 sizeof(struct skcipher_request) + crypto_skcipher_reqsize(enc))); 382 383 return 0; 384 385 err_free_ahash: 386 crypto_free_ahash(auth); 387 return err; 388 } 389 390 static void krb5enc_exit_tfm(struct crypto_aead *tfm) 391 { 392 struct krb5enc_ctx *ctx = crypto_aead_ctx(tfm); 393 394 crypto_free_ahash(ctx->auth); 395 crypto_free_skcipher(ctx->enc); 396 } 397 398 static void krb5enc_free(struct aead_instance *inst) 399 { 400 struct krb5enc_instance_ctx *ctx = aead_instance_ctx(inst); 401 402 crypto_drop_skcipher(&ctx->enc); 403 crypto_drop_ahash(&ctx->auth); 404 kfree(inst); 405 } 406 407 /* 408 * Create an instance of a template for a specific hash and cipher pair. 409 */ 410 static int krb5enc_create(struct crypto_template *tmpl, struct rtattr **tb) 411 { 412 struct krb5enc_instance_ctx *ictx; 413 struct skcipher_alg_common *enc; 414 struct hash_alg_common *auth; 415 struct aead_instance *inst; 416 struct crypto_alg *auth_base; 417 u32 mask; 418 int err; 419 420 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 421 if (err) { 422 pr_err("attr_type failed\n"); 423 return err; 424 } 425 426 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); 427 if (!inst) 428 return -ENOMEM; 429 ictx = aead_instance_ctx(inst); 430 431 err = crypto_grab_ahash(&ictx->auth, aead_crypto_instance(inst), 432 crypto_attr_alg_name(tb[1]), 0, mask); 433 if (err) { 434 pr_err("grab ahash failed\n"); 435 goto err_free_inst; 436 } 437 auth = crypto_spawn_ahash_alg(&ictx->auth); 438 auth_base = &auth->base; 439 440 err = crypto_grab_skcipher(&ictx->enc, aead_crypto_instance(inst), 441 crypto_attr_alg_name(tb[2]), 0, mask); 442 if (err) { 443 pr_err("grab skcipher failed\n"); 444 goto err_free_inst; 445 } 446 enc = crypto_spawn_skcipher_alg_common(&ictx->enc); 447 448 ictx->reqoff = 2 * auth->digestsize; 449 450 err = -ENAMETOOLONG; 451 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, 452 "krb5enc(%s,%s)", auth_base->cra_name, 453 enc->base.cra_name) >= 454 CRYPTO_MAX_ALG_NAME) 455 goto err_free_inst; 456 457 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, 458 "krb5enc(%s,%s)", auth_base->cra_driver_name, 459 enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 460 goto err_free_inst; 461 462 inst->alg.base.cra_priority = enc->base.cra_priority * 10 + 463 auth_base->cra_priority; 464 inst->alg.base.cra_blocksize = enc->base.cra_blocksize; 465 inst->alg.base.cra_alignmask = enc->base.cra_alignmask; 466 inst->alg.base.cra_ctxsize = sizeof(struct krb5enc_ctx); 467 468 inst->alg.ivsize = enc->ivsize; 469 inst->alg.chunksize = enc->chunksize; 470 inst->alg.maxauthsize = auth->digestsize; 471 472 inst->alg.init = krb5enc_init_tfm; 473 inst->alg.exit = krb5enc_exit_tfm; 474 475 inst->alg.setkey = krb5enc_setkey; 476 inst->alg.encrypt = krb5enc_encrypt; 477 inst->alg.decrypt = krb5enc_decrypt; 478 479 inst->free = krb5enc_free; 480 481 err = aead_register_instance(tmpl, inst); 482 if (err) { 483 pr_err("ref failed\n"); 484 goto err_free_inst; 485 } 486 487 return 0; 488 489 err_free_inst: 490 krb5enc_free(inst); 491 return err; 492 } 493 494 static struct crypto_template crypto_krb5enc_tmpl = { 495 .name = "krb5enc", 496 .create = krb5enc_create, 497 .module = THIS_MODULE, 498 }; 499 500 static int __init crypto_krb5enc_module_init(void) 501 { 502 return crypto_register_template(&crypto_krb5enc_tmpl); 503 } 504 505 static void __exit crypto_krb5enc_module_exit(void) 506 { 507 crypto_unregister_template(&crypto_krb5enc_tmpl); 508 } 509 510 module_init(crypto_krb5enc_module_init); 511 module_exit(crypto_krb5enc_module_exit); 512 513 MODULE_LICENSE("GPL"); 514 MODULE_DESCRIPTION("Simple AEAD wrapper for Kerberos 5 RFC3961"); 515 MODULE_ALIAS_CRYPTO("krb5enc"); 516