// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/bug.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>
#include "skcipher.h"

#define CRYPTO_ALG_TYPE_SKCIPHER_MASK	0x0000000e

static const struct crypto_type crypto_skcipher_type;

static inline struct skcipher_alg *__crypto_skcipher_alg(
	struct crypto_alg *alg)
{
	return container_of(alg, struct skcipher_alg, base);
}

int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
		       struct skcipher_request *__restrict req, bool atomic)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg;

	might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);

	alg = crypto_skcipher_alg(tfm);

	walk->total = req->cryptlen;
	walk->nbytes = 0;
	walk->iv = req->iv;
	walk->oiv = req->iv;
	if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		atomic = true;

	if (unlikely(!walk->total))
		return 0;

	scatterwalk_start(&walk->in, req->src);
	scatterwalk_start(&walk->out, req->dst);

	walk->blocksize = crypto_skcipher_blocksize(tfm);
	walk->ivsize = crypto_skcipher_ivsize(tfm);
	walk->alignmask = crypto_skcipher_alignmask(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		walk->stride = alg->co.chunksize;
	else
		walk->stride = alg->walksize;

	return skcipher_walk_first(walk, atomic);
}
EXPORT_SYMBOL_GPL(skcipher_walk_virt);

static int skcipher_walk_aead_common(struct skcipher_walk *__restrict walk,
				     struct aead_request *__restrict req,
				     bool atomic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	walk->nbytes = 0;
	walk->iv = req->iv;
	walk->oiv = req->iv;
	if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		atomic = true;

	if (unlikely(!walk->total))
		return 0;

	scatterwalk_start_at_pos(&walk->in, req->src, req->assoclen);
	scatterwalk_start_at_pos(&walk->out, req->dst, req->assoclen);

	walk->blocksize = crypto_aead_blocksize(tfm);
	walk->stride = crypto_aead_chunksize(tfm);
	walk->ivsize = crypto_aead_ivsize(tfm);
	walk->alignmask = crypto_aead_alignmask(tfm);

	return skcipher_walk_first(walk, atomic);
}

int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic)
{
	walk->total = req->cryptlen;

	return skcipher_walk_aead_common(walk, req, atomic);
}
EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);

int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	walk->total = req->cryptlen - crypto_aead_authsize(tfm);

	return skcipher_walk_aead_common(walk, req, atomic);
}
EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);
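
/*
 * Illustrative sketch (not part of this file): the driver-side loop that is
 * typically written on top of skcipher_walk_virt().  The helper
 * example_crypt_block() is hypothetical; everything else is the real walk
 * API.  Each iteration maps one contiguous span of src/dst, processes the
 * full blocks in it, and passes the number of bytes left unprocessed back
 * to skcipher_walk_done().
 *
 *	static int example_crypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		unsigned int bsize = crypto_skcipher_blocksize(tfm);
 *		struct skcipher_walk walk;
 *		unsigned int nbytes;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *		while ((nbytes = walk.nbytes) != 0) {
 *			// process all full blocks in this span (hypothetical helper)
 *			example_crypt_block(tfm, walk.dst.virt.addr,
 *					    walk.src.virt.addr,
 *					    nbytes - (nbytes % bsize), walk.iv);
 *			err = skcipher_walk_done(&walk, nbytes % bsize);
 *		}
 *		return err;
 *	}
 */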

static void skcipher_set_needkey(struct crypto_skcipher *tfm)
{
	if (crypto_skcipher_max_keysize(tfm) != 0)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
				     const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_skcipher_alignmask(tfm);
	struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
	u8 *buffer, *alignbuffer;
	unsigned long absize;
	int ret;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cipher->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
	unsigned long alignmask = crypto_skcipher_alignmask(tfm);
	int err;

	if (cipher->co.base.cra_type != &crypto_skcipher_type) {
		struct crypto_lskcipher **ctx = crypto_skcipher_ctx(tfm);

		crypto_lskcipher_clear_flags(*ctx, CRYPTO_TFM_REQ_MASK);
		crypto_lskcipher_set_flags(*ctx,
					   crypto_skcipher_get_flags(tfm) &
					   CRYPTO_TFM_REQ_MASK);
		err = crypto_lskcipher_setkey(*ctx, key, keylen);
		goto out;
	}

	if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		err = skcipher_setkey_unaligned(tfm, key, keylen);
	else
		err = cipher->setkey(tfm, key, keylen);

out:
	if (unlikely(err)) {
		skcipher_set_needkey(tfm);
		return err;
	}

	crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);

int crypto_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_encrypt_sg(req);
	return alg->encrypt(req);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);

int crypto_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_decrypt_sg(req);
	return alg->decrypt(req);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);
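
/*
 * Illustrative sketch (not part of this file): the caller-side flow that the
 * crypto_skcipher_encrypt()/crypto_skcipher_decrypt() entry points above are
 * meant for.  Key/buffer setup and error unwinding are elided; "cbc(aes)" is
 * only an example algorithm name.
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int err;
 *
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */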

static int crypto_lskcipher_export(struct skcipher_request *req, void *out)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	u8 *ivs = skcipher_request_ctx(req);

	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(tfm) + 1);

	memcpy(out, ivs + crypto_skcipher_ivsize(tfm),
	       crypto_skcipher_statesize(tfm));

	return 0;
}

static int crypto_lskcipher_import(struct skcipher_request *req, const void *in)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	u8 *ivs = skcipher_request_ctx(req);

	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(tfm) + 1);

	memcpy(ivs + crypto_skcipher_ivsize(tfm), in,
	       crypto_skcipher_statesize(tfm));

	return 0;
}

static int skcipher_noexport(struct skcipher_request *req, void *out)
{
	return 0;
}

static int skcipher_noimport(struct skcipher_request *req, const void *in)
{
	return 0;
}

int crypto_skcipher_export(struct skcipher_request *req, void *out)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_export(req, out);
	return alg->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_export);

int crypto_skcipher_import(struct skcipher_request *req, const void *in)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_import(req, in);
	return alg->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_import);
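
/*
 * Illustrative sketch (not part of this file): saving and restoring the extra
 * per-request state handled by crypto_skcipher_export()/_import() above.  The
 * required buffer size comes from crypto_skcipher_statesize(); "state" and
 * EXAMPLE_MAX_STATESIZE are hypothetical names.
 *
 *	u8 state[EXAMPLE_MAX_STATESIZE];
 *
 *	if (WARN_ON(crypto_skcipher_statesize(tfm) > sizeof(state)))
 *		return -EINVAL;
 *	err = crypto_skcipher_export(req, state);	// save internal state
 *	...
 *	err = crypto_skcipher_import(req, state);	// restore it later
 */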

static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);

	alg->exit(skcipher);
}

static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);

	skcipher_set_needkey(skcipher);

	if (tfm->__crt_alg->cra_type != &crypto_skcipher_type) {
		unsigned am = crypto_skcipher_alignmask(skcipher);
		unsigned reqsize;

		reqsize = am & ~(crypto_tfm_ctx_alignment() - 1);
		reqsize += crypto_skcipher_ivsize(skcipher);
		reqsize += crypto_skcipher_statesize(skcipher);
		crypto_skcipher_set_reqsize(skcipher, reqsize);

		return crypto_init_lskcipher_ops_sg(tfm);
	}

	if (alg->exit)
		skcipher->base.exit = crypto_skcipher_exit_tfm;

	if (alg->init)
		return alg->init(skcipher);

	return 0;
}

static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_skcipher_type)
		return sizeof(struct crypto_lskcipher *);

	return crypto_alg_extsize(alg);
}

static void crypto_skcipher_free_instance(struct crypto_instance *inst)
{
	struct skcipher_instance *skcipher =
		container_of(inst, struct skcipher_instance, s.base);

	skcipher->free(skcipher);
}

static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);

	seq_printf(m, "type         : skcipher\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "min keysize  : %u\n", skcipher->min_keysize);
	seq_printf(m, "max keysize  : %u\n", skcipher->max_keysize);
	seq_printf(m, "ivsize       : %u\n", skcipher->ivsize);
	seq_printf(m, "chunksize    : %u\n", skcipher->chunksize);
	seq_printf(m, "walksize     : %u\n", skcipher->walksize);
	seq_printf(m, "statesize    : %u\n", skcipher->statesize);
}

static int __maybe_unused crypto_skcipher_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);
	struct crypto_report_blkcipher rblkcipher;

	memset(&rblkcipher, 0, sizeof(rblkcipher));

	strscpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type));
	strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));

	rblkcipher.blocksize = alg->cra_blocksize;
	rblkcipher.min_keysize = skcipher->min_keysize;
	rblkcipher.max_keysize = skcipher->max_keysize;
	rblkcipher.ivsize = skcipher->ivsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
		       sizeof(rblkcipher), &rblkcipher);
}

static const struct crypto_type crypto_skcipher_type = {
	.extsize = crypto_skcipher_extsize,
	.init_tfm = crypto_skcipher_init_tfm,
	.free = crypto_skcipher_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_skcipher_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_skcipher_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_SKCIPHER_MASK,
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.tfmsize = offsetof(struct crypto_skcipher, base),
	.algsize = offsetof(struct skcipher_alg, base),
};

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_skcipher_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_skcipher);

struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);

struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
				const char *alg_name, u32 type, u32 mask)
{
	struct crypto_skcipher *tfm;

	/* Only sync algorithms allowed. */
	mask |= CRYPTO_ALG_ASYNC | CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE;
	type &= ~(CRYPTO_ALG_ASYNC | CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE);

	tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);

	/*
	 * Make sure we do not allocate something that might get used with
	 * an on-stack request: check the request size.
	 */
	if (!IS_ERR(tfm) && WARN_ON(crypto_skcipher_reqsize(tfm) >
				    MAX_SYNC_SKCIPHER_REQSIZE)) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	return (struct crypto_sync_skcipher *)tfm;
}
EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);
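
/*
 * Illustrative sketch (not part of this file): using a tfm from
 * crypto_alloc_sync_skcipher() with an on-stack request, which is what the
 * MAX_SYNC_SKCIPHER_REQSIZE check above exists to keep safe.  Key and
 * scatterlist setup are elided; "ctr(aes)" is only an example algorithm name.
 *
 *	struct crypto_sync_skcipher *tfm =
 *		crypto_alloc_sync_skcipher("ctr(aes)", 0, 0);
 *	SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
 *
 *	skcipher_request_set_sync_tfm(req, tfm);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, sg, sg, len, iv);
 *	err = crypto_skcipher_encrypt(req);
 *	skcipher_request_zero(req);
 *	crypto_free_sync_skcipher(tfm);
 */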

int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_skcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_skcipher);

int skcipher_prepare_alg_common(struct skcipher_alg_common *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 2 ||
	    (alg->ivsize + alg->statesize) > PAGE_SIZE / 2)
		return -EINVAL;

	if (!alg->chunksize)
		alg->chunksize = base->cra_blocksize;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	return 0;
}

static int skcipher_prepare_alg(struct skcipher_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = skcipher_prepare_alg_common(&alg->co);
	if (err)
		return err;

	if (alg->walksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (!alg->walksize)
		alg->walksize = alg->chunksize;

	if (!alg->statesize) {
		alg->import = skcipher_noimport;
		alg->export = skcipher_noexport;
	} else if (!(alg->import && alg->export))
		return -EINVAL;

	base->cra_type = &crypto_skcipher_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;

	return 0;
}

int crypto_register_skcipher(struct skcipher_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = skcipher_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_skcipher);

void crypto_unregister_skcipher(struct skcipher_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);

int crypto_register_skciphers(struct skcipher_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_skcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_skcipher(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_skciphers);

void crypto_unregister_skciphers(struct skcipher_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_skcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_skciphers);

int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = skcipher_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, skcipher_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(skcipher_register_instance);
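
/*
 * Illustrative sketch (not part of this file): the shape of a driver-defined
 * struct skcipher_alg registered through crypto_register_skcipher() above.
 * All names prefixed "example_" are hypothetical; the field names are the
 * real ones that skcipher_prepare_alg() validates.
 *
 *	static struct skcipher_alg example_alg = {
 *		.base.cra_name		= "cbc(example)",
 *		.base.cra_driver_name	= "cbc-example-driver",
 *		.base.cra_priority	= 300,
 *		.base.cra_blocksize	= 16,
 *		.base.cra_ctxsize	= sizeof(struct example_ctx),
 *		.base.cra_module	= THIS_MODULE,
 *		.min_keysize		= 16,
 *		.max_keysize		= 32,
 *		.ivsize			= 16,
 *		.setkey			= example_setkey,
 *		.encrypt		= example_encrypt,
 *		.decrypt		= example_decrypt,
 *	};
 *
 *	// module_init/module_exit would then call:
 *	//	crypto_register_skcipher(&example_alg);
 *	//	crypto_unregister_skcipher(&example_alg);
 */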

static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);

	crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(cipher, key, keylen);
}

static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->cipher = cipher;
	return 0;
}

static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->cipher);
}

static void skcipher_free_instance_simple(struct skcipher_instance *inst)
{
	crypto_drop_cipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

/**
 * skcipher_alloc_instance_simple - allocate instance of simple block cipher mode
 *
 * Allocate an skcipher_instance for a simple block cipher mode of operation,
 * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
 * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
 * alignmask, and priority are set from the underlying cipher but can be
 * overridden if needed.  The tfm context defaults to skcipher_ctx_simple, and
 * default ->setkey(), ->init(), and ->exit() methods are installed.
 *
 * @tmpl: the template being instantiated
 * @tb: the template parameters
 *
 * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
 *	   needs to register the instance.
 */
struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	struct skcipher_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *cipher_alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);
	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	cipher_alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
				  cipher_alg);
	if (err)
		goto err_free_inst;

	inst->free = skcipher_free_instance_simple;

	/* Default algorithm properties, can be overridden */
	inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
	inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
	inst->alg.base.cra_priority = cipher_alg->cra_priority;
	inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
	inst->alg.ivsize = cipher_alg->cra_blocksize;

	/* Use skcipher_ctx_simple by default, can be overridden */
	inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
	inst->alg.setkey = skcipher_setkey_simple;
	inst->alg.init = skcipher_init_tfm_simple;
	inst->alg.exit = skcipher_exit_tfm_simple;

	return inst;

err_free_inst:
	skcipher_free_instance_simple(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");
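
/*
 * Illustrative sketch (not part of this file): a template ->create() callback
 * built on skcipher_alloc_instance_simple(), roughly in the style of the
 * ecb/cbc templates.  The example_* names and the encrypt/decrypt handlers
 * are hypothetical.
 *
 *	static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */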