// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AEAD wrapper for Kerberos 5 RFC3961 simplified profile.
 *
 * Copyright (C) 2025 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from authenc:
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/*
 * Per-instance context, set up when the template is instantiated for a
 * particular hash/cipher pair.
 */
struct krb5enc_instance_ctx {
	struct crypto_ahash_spawn auth;		/* Authentication hash spawn */
	struct crypto_skcipher_spawn enc;	/* Encryption cipher spawn */
	unsigned int reqoff;			/* Offset of the sub-request in
						 * the request ctx tail[]; the
						 * space before it holds two
						 * digest-sized checksums */
};

/* Per-transform context holding the instantiated hash and cipher tfms. */
struct krb5enc_ctx {
	struct crypto_ahash *auth;	/* Authentication (hash) tfm */
	struct crypto_skcipher *enc;	/* Encryption (cipher) tfm */
};

/*
 * Per-request context, carried in the AEAD request's private area.  tail[]
 * holds the checksum buffer(s) followed, at reqoff, by the ahash or skcipher
 * sub-request.
 */
struct krb5enc_request_ctx {
	struct scatterlist src[2];	/* Scratch for scatterwalk_ffwd() on src */
	struct scatterlist dst[2];	/* Scratch for scatterwalk_ffwd() on dst */
	char tail[];			/* Checksums then sub-request storage */
};

/*
 * Complete the AEAD request with the given error unless the sub-operation is
 * still in progress, in which case completion will happen later through the
 * sub-operation's own callback.
 */
static void krb5enc_request_complete(struct aead_request *req, int err)
{
	if (err != -EINPROGRESS)
		aead_request_complete(req, err);
}

/**
 * crypto_krb5enc_extractkeys - Extract Ke and Ki keys from the key blob.
 * @keys: Where to put the key sizes and pointers
 * @key: Encoded key material
 * @keylen: Amount of key material
 *
 * Decode the key blob we're given.  It starts with an rtattr that indicates
 * the format and the length.  Format CRYPTO_AUTHENC_KEYA_PARAM is:
 *
 *	rtattr || __be32 enckeylen || authkey || enckey
 *
 * Note that the rtattr is in cpu-endian form, unlike enckeylen.  This must be
 * handled correctly in static testmgr data.
 */
int crypto_krb5enc_extractkeys(struct crypto_authenc_keys *keys, const u8 *key,
			       unsigned int keylen)
{
	struct rtattr *rta = (struct rtattr *)key;
	struct crypto_authenc_key_param *param;

	/* The rtattr header and payload must fit within the supplied blob. */
	if (!RTA_OK(rta, keylen))
		return -EINVAL;
	if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
		return -EINVAL;

	/*
	 * RTA_OK() didn't align the rtattr's payload when validating that it
	 * fits in the buffer.  Yet, the keys should start on the next 4-byte
	 * aligned boundary.  To avoid confusion, require that the rtattr
	 * payload be exactly the param struct, which has a 4-byte aligned size.
	 */
	if (RTA_PAYLOAD(rta) != sizeof(*param))
		return -EINVAL;
	BUILD_BUG_ON(sizeof(*param) % RTA_ALIGNTO);

	param = RTA_DATA(rta);
	keys->enckeylen = be32_to_cpu(param->enckeylen);

	/* Step over the attribute to the concatenated authkey || enckey. */
	key += rta->rta_len;
	keylen -= rta->rta_len;

	if (keylen < keys->enckeylen)
		return -EINVAL;

	/* The auth key occupies whatever precedes the trailing enc key. */
	keys->authkeylen = keylen - keys->enckeylen;
	keys->authkey = key;
	keys->enckey = key + keys->authkeylen;
	return 0;
}
EXPORT_SYMBOL(crypto_krb5enc_extractkeys);

/*
 * Set the key on the AEAD: split the blob into the authentication and
 * encryption keys and set them on the hash and cipher sub-transforms
 * respectively, propagating the request-mask flags from the AEAD to each.
 */
static int krb5enc_setkey(struct crypto_aead *krb5enc, const u8 *key,
			  unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc);
	struct crypto_skcipher *enc = ctx->enc;
	struct crypto_ahash *auth = ctx->auth;
	unsigned int flags = crypto_aead_get_flags(krb5enc);
	int err = -EINVAL;

	if (crypto_krb5enc_extractkeys(&keys, key, keylen) != 0)
		goto out;

	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(auth, flags & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
	if (err)
		goto out;

	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(enc, flags & CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
out:
	/* Don't leave copies of the key material lying around on the stack. */
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

/* Completion callback for the encryption step; finishes the AEAD request. */
static void krb5enc_encrypt_done(void *data, int err)
{
	struct aead_request *req = data;

	krb5enc_request_complete(req, err);
}

/*
 * Start the encryption of the plaintext.  We skip over the associated data as
 * that only gets included in the hash.
 */
static int krb5enc_dispatch_encrypt(struct aead_request *req,
				    unsigned int flags)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_skcipher *enc = ctx->enc;
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ictx->reqoff);
	struct scatterlist *src, *dst;

	/* Fast-forward past the associated data; share dst with src when the
	 * operation is in place. */
	src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
	if (req->src == req->dst)
		dst = src;
	else
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);

	skcipher_request_set_tfm(skreq, enc);
	skcipher_request_set_callback(skreq, flags,
				      krb5enc_encrypt_done, req);
	skcipher_request_set_crypt(skreq, src, dst, req->cryptlen, req->iv);

	return crypto_skcipher_encrypt(skreq);
}

/*
 * Insert the hash into the checksum field in the destination buffer directly
 * after the encrypted region.
 */
static void krb5enc_insert_checksum(struct aead_request *req, u8 *hash)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);

	scatterwalk_map_and_copy(hash, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(krb5enc), 1);
}

/*
 * Upon completion of an asynchronous digest, transfer the hash to the checksum
 * field.
 */
static void krb5enc_encrypt_ahash_done(void *data, int err)
{
	struct aead_request *req = data;
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);

	if (err)
		return krb5enc_request_complete(req, err);

	krb5enc_insert_checksum(req, ahreq->result);

	/* Kick off the cipher with MAY_SLEEP cleared.  NOTE(review):
	 * presumably because this callback may run in atomic context -
	 * confirm. */
	err = krb5enc_dispatch_encrypt(req,
				       aead_request_flags(req) & ~CRYPTO_TFM_REQ_MAY_SLEEP);
	if (err != -EINPROGRESS)
		aead_request_complete(req, err);
}

/*
 * Start the digest of the plaintext for encryption.  In theory, this could be
 * run in parallel with the encryption, provided the src and dst buffers don't
 * overlap.
 */
static int krb5enc_dispatch_encrypt_hash(struct aead_request *req)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ahash *auth = ctx->auth;
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
	u8 *hash = areq_ctx->tail;
	int err;

	/* Digest the associated data plus plaintext from req->src into the
	 * checksum buffer at the start of tail[]. */
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   krb5enc_encrypt_ahash_done, req);
	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, req->src, hash, req->assoclen + req->cryptlen);

	/* A nonzero return includes -EINPROGRESS, in which case the checksum
	 * insertion happens in krb5enc_encrypt_ahash_done() instead. */
	err = crypto_ahash_digest(ahreq);
	if (err)
		return err;

	krb5enc_insert_checksum(req, hash);
	return 0;
}

/*
 * Process an encryption operation.  We can perform the cipher and the hash in
 * parallel, provided the src and dst buffers are separate.
 */
static int krb5enc_encrypt(struct aead_request *req)
{
	int err;

	err = krb5enc_dispatch_encrypt_hash(req);
	if (err < 0)
		return err;

	return krb5enc_dispatch_encrypt(req, aead_request_flags(req));
}

/*
 * Compare the computed checksum (at the start of tail[]) against the checksum
 * carried in the message.  The expected value is read from req->src directly
 * after the region that was hashed (ahreq->nbytes).  Returns -EBADMSG on
 * mismatch.
 */
static int krb5enc_verify_hash(struct aead_request *req)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
	unsigned int authsize = crypto_aead_authsize(krb5enc);
	u8 *calc_hash = areq_ctx->tail;
	u8 *msg_hash = areq_ctx->tail + authsize;

	scatterwalk_map_and_copy(msg_hash, req->src, ahreq->nbytes, authsize, 0);

	/* Constant-time comparison to avoid leaking match position. */
	if (crypto_memneq(msg_hash, calc_hash, authsize))
		return -EBADMSG;
	return 0;
}

/* Completion callback for the decrypt-side digest: verify then complete. */
static void krb5enc_decrypt_hash_done(void *data, int err)
{
	struct aead_request *req = data;

	if (err)
		return krb5enc_request_complete(req, err);

	err = krb5enc_verify_hash(req);
	krb5enc_request_complete(req, err);
}

/*
 * Dispatch the hashing of the plaintext after we've done the decryption.
 */
static int krb5enc_dispatch_decrypt_hash(struct aead_request *req)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
	struct crypto_ahash *auth = ctx->auth;
	unsigned int authsize = crypto_aead_authsize(krb5enc);
	u8 *hash = areq_ctx->tail;
	int err;

	/* Digest req->dst (associated data plus decrypted plaintext,
	 * excluding the trailing checksum) into the buffer at tail[0]. */
	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, req->dst, hash,
				req->assoclen + req->cryptlen - authsize);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   krb5enc_decrypt_hash_done, req);

	/* If the digest went asynchronous, verification happens in
	 * krb5enc_decrypt_hash_done() instead. */
	err = crypto_ahash_digest(ahreq);
	if (err < 0)
		return err;

	return krb5enc_verify_hash(req);
}

/*
 * Dispatch the decryption of the ciphertext.
 */
static int krb5enc_dispatch_decrypt(struct aead_request *req)
{
	struct crypto_aead *krb5enc = crypto_aead_reqtfm(req);
	struct aead_instance *inst = aead_alg_instance(krb5enc);
	struct krb5enc_ctx *ctx = crypto_aead_ctx(krb5enc);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_request_ctx *areq_ctx = aead_request_ctx(req);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ictx->reqoff);
	unsigned int authsize = crypto_aead_authsize(krb5enc);
	struct scatterlist *src, *dst;

	/* Skip past the associated data; it is only covered by the hash. */
	src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
	dst = src;

	if (req->src != req->dst)
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);

	/* The cipher's callback is the original request's completion handler,
	 * so an async cipher completes the AEAD request directly. */
	skcipher_request_set_tfm(skreq, ctx->enc);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, src, dst,
				   req->cryptlen - authsize, req->iv);

	return crypto_skcipher_decrypt(skreq);
}

/*
 * Process a decryption operation: decrypt, then hash the result and verify
 * the trailing checksum.
 */
static int krb5enc_decrypt(struct aead_request *req)
{
	int err;

	/* NOTE(review): if the skcipher operates asynchronously, this returns
	 * -EINPROGRESS and the request is completed by the cipher's callback
	 * without the hash step ever being dispatched or verified - confirm
	 * that only synchronous skciphers are instantiated with this
	 * template. */
	err = krb5enc_dispatch_decrypt(req);
	if (err < 0)
		return err;

	return krb5enc_dispatch_decrypt_hash(req);
}

/*
 * Instantiate the hash and cipher transforms for a new AEAD tfm and size the
 * per-request context to fit the larger of the two sub-requests after the
 * checksum area.
 */
static int krb5enc_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct krb5enc_instance_ctx *ictx = aead_instance_ctx(inst);
	struct krb5enc_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	int err;

	auth = crypto_spawn_ahash(&ictx->auth);
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	enc = crypto_spawn_skcipher(&ictx->enc);
	err = PTR_ERR(enc);
	if (IS_ERR(enc))
		goto err_free_ahash;

	ctx->auth = auth;
	ctx->enc = enc;

	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct krb5enc_request_ctx) +
		ictx->reqoff + /* Space for two checksums */
		umax(sizeof(struct ahash_request) + crypto_ahash_reqsize(auth),
		     sizeof(struct skcipher_request) + crypto_skcipher_reqsize(enc)));

	return 0;

err_free_ahash:
	crypto_free_ahash(auth);
	return err;
}

/* Release the transforms acquired in krb5enc_init_tfm(). */
static void krb5enc_exit_tfm(struct crypto_aead *tfm)
{
	struct krb5enc_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->auth);
	crypto_free_skcipher(ctx->enc);
}

/* Drop the spawns and free an instance (also used on the create error path). */
static void krb5enc_free(struct aead_instance *inst)
{
	struct krb5enc_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_ahash(&ctx->auth);
	kfree(inst);
}

/*
 * Create an instance of a template for a specific hash and cipher pair.
 */
static int krb5enc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct krb5enc_instance_ctx *ictx;
	struct skcipher_alg_common *enc;
	struct hash_alg_common *auth;
	struct aead_instance *inst;
	struct crypto_alg *auth_base;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err) {
		pr_err("attr_type failed\n");
		return err;
	}

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = aead_instance_ctx(inst);

	/* tb[1] names the hash algorithm, tb[2] the cipher. */
	err = crypto_grab_ahash(&ictx->auth, aead_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, mask);
	if (err) {
		pr_err("grab ahash failed\n");
		goto err_free_inst;
	}
	auth = crypto_spawn_ahash_alg(&ictx->auth);
	auth_base = &auth->base;

	err = crypto_grab_skcipher(&ictx->enc, aead_crypto_instance(inst),
				   crypto_attr_alg_name(tb[2]), 0, mask);
	if (err) {
		pr_err("grab skcipher failed\n");
		goto err_free_inst;
	}
	enc = crypto_spawn_skcipher_alg_common(&ictx->enc);

	/* Reserve room in tail[] for the computed and the received checksum
	 * before the sub-request storage. */
	ictx->reqoff = 2 * auth->digestsize;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "krb5enc(%s,%s)", auth_base->cra_name,
		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "krb5enc(%s,%s)", auth_base->cra_driver_name,
		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
				      auth_base->cra_priority;
	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
	inst->alg.base.cra_alignmask = enc->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct krb5enc_ctx);

	inst->alg.ivsize = enc->ivsize;
	inst->alg.chunksize = enc->chunksize;
	inst->alg.maxauthsize = auth->digestsize;

	inst->alg.init = krb5enc_init_tfm;
	inst->alg.exit = krb5enc_exit_tfm;

	inst->alg.setkey = krb5enc_setkey;
	inst->alg.encrypt = krb5enc_encrypt;
	inst->alg.decrypt = krb5enc_decrypt;

	inst->free = krb5enc_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
		pr_err("ref failed\n");
		goto err_free_inst;
	}

	return 0;

err_free_inst:
	krb5enc_free(inst);
	return err;
}

static struct crypto_template crypto_krb5enc_tmpl = {
	.name = "krb5enc",
	.create = krb5enc_create,
	.module = THIS_MODULE,
};

static int __init crypto_krb5enc_module_init(void)
{
	return crypto_register_template(&crypto_krb5enc_tmpl);
}

static void __exit crypto_krb5enc_module_exit(void)
{
	crypto_unregister_template(&crypto_krb5enc_tmpl);
}

module_init(crypto_krb5enc_module_init);
module_exit(crypto_krb5enc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Simple AEAD wrapper for Kerberos 5 RFC3961");
MODULE_ALIAS_CRYPTO("krb5enc");