/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_SKCIPHER_H
#define _CRYPTO_SKCIPHER_H

#include <linux/atomic.h>
#include <linux/container_of.h>
#include <linux/crypto.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

struct scatterlist;

/**
 * struct skcipher_request - Symmetric key cipher request
 * @cryptlen: Number of bytes to encrypt or decrypt
 * @iv: Initialisation Vector
 * @src: Source SG list
 * @dst: Destination SG list
 * @base: Underlying async request
 * @__ctx: Start of private context data
 */
struct skcipher_request {
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *src;
	struct scatterlist *dst;

	struct crypto_async_request base;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
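/*
 * Note (illustrative only, no contract beyond what skcipher_request_alloc()
 * below documents): the per-request driver context starts at @__ctx,
 * directly behind the request structure, so a complete request is sized and
 * allocated as a single block, e.g.:
 *
 *	req = kmalloc(sizeof(struct skcipher_request) +
 *		      crypto_skcipher_reqsize(tfm), GFP_KERNEL);
 */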
struct crypto_skcipher {
	unsigned int reqsize;

	struct crypto_tfm base;
};

struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};

struct crypto_lskcipher {
	struct crypto_tfm base;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt: number of encrypt requests
 * @encrypt_tlen: total data size handled by encrypt requests
 * @decrypt_cnt: number of decrypt requests
 * @decrypt_tlen: total data size handled by decrypt requests
 * @err_cnt: number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

#ifdef CONFIG_CRYPTO_STATS
#define SKCIPHER_ALG_COMMON_STAT struct crypto_istat_cipher stat;
#else
#define SKCIPHER_ALG_COMMON_STAT
#endif

/*
 * struct skcipher_alg_common - common properties of skcipher_alg
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size except for stream ciphers such as
 *	       CTR where it is set to the underlying block size.
 * @stat: Statistics for cipher algorithm
 * @base: Definition of a generic crypto algorithm.
 */
#define SKCIPHER_ALG_COMMON {		\
	unsigned int min_keysize;	\
	unsigned int max_keysize;	\
	unsigned int ivsize;		\
	unsigned int chunksize;		\
					\
	SKCIPHER_ALG_COMMON_STAT	\
					\
	struct crypto_alg base;		\
}
struct skcipher_alg_common SKCIPHER_ALG_COMMON;

/**
 * struct skcipher_alg - symmetric key cipher definition
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *	     the supplied scatterlist containing the blocks of data. The crypto
 *	     API consumer is responsible for aligning the entries of the
 *	     scatterlist properly and making sure the chunks are correctly
 *	     sized. In case a software fallback was put in place in the
 *	     @cra_init call, this function might need to use the fallback if
 *	     the algorithm doesn't support all of the key sizes. In case the
 *	     key was stored in transformation context, the key might need to be
 *	     re-programmed into the hardware in this function. This function
 *	     shall not modify the transformation context, as this function may
 *	     be called in parallel with the same transformation object.
 * @decrypt: Decrypt a scatterlist of blocks. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated. In case the
 *	  cryptographic hardware has some special requirements which need to
 *	  be handled by software, this function shall check for the precise
 *	  requirement of the transformation and put any software fallbacks
 *	  in place.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size except for stream ciphers such as
 *	       CTR where it is set to the underlying block size.
 * @walksize: Equal to the chunk size except in cases where the algorithm is
 *	      considerably more efficient if it can operate on multiple chunks
 *	      in parallel. Should be a multiple of chunksize.
 * @stat: Statistics for cipher algorithm
 * @base: Definition of a generic crypto algorithm.
 * @co: see struct skcipher_alg_common
 *
 * All fields except @ivsize are mandatory and must be filled.
 */
struct skcipher_alg {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);
	int (*init)(struct crypto_skcipher *tfm);
	void (*exit)(struct crypto_skcipher *tfm);

	unsigned int walksize;

	union {
		struct SKCIPHER_ALG_COMMON;
		struct skcipher_alg_common co;
	};
};
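/*
 * A minimal, illustrative sketch of how a driver might fill in a
 * struct skcipher_alg. All example_* names and struct example_ctx are
 * hypothetical; the AES_* constants come from <crypto/aes.h>, and
 * registration itself is done with crypto_register_skcipher() from
 * <crypto/internal/skcipher.h>:
 *
 *	static struct skcipher_alg example_alg = {
 *		.setkey		= example_cbc_aes_setkey,
 *		.encrypt	= example_cbc_aes_encrypt,
 *		.decrypt	= example_cbc_aes_decrypt,
 *		.min_keysize	= AES_MIN_KEY_SIZE,
 *		.max_keysize	= AES_MAX_KEY_SIZE,
 *		.ivsize		= AES_BLOCK_SIZE,
 *		.base = {
 *			.cra_name	 = "cbc(aes)",
 *			.cra_driver_name = "cbc-aes-example",
 *			.cra_priority	 = 300,
 *			.cra_blocksize	 = AES_BLOCK_SIZE,
 *			.cra_ctxsize	 = sizeof(struct example_ctx),
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 */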
/**
 * struct lskcipher_alg - linear symmetric key cipher definition
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a number of bytes. This function is used to encrypt
 *	     the supplied data. This function shall not modify
 *	     the transformation context, as this function may be called
 *	     in parallel with the same transformation object. Data
 *	     may be left over if the length is not a multiple of the block
 *	     size and there is more to come (final == false). The number of
 *	     left-over bytes should be returned in case of success.
 * @decrypt: Decrypt a number of bytes. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @co: see struct skcipher_alg_common
 */
struct lskcipher_alg {
	int (*setkey)(struct crypto_lskcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *iv, bool final);
	int (*decrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *iv, bool final);
	int (*init)(struct crypto_lskcipher *tfm);
	void (*exit)(struct crypto_lskcipher *tfm);

	struct skcipher_alg_common co;
};

#define MAX_SYNC_SKCIPHER_REQSIZE	384
/*
 * This performs a type-check against the "tfm" argument to make sure
 * all users have the correct skcipher tfm for doing on-stack requests.
 */
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE + \
			     (!(sizeof((struct crypto_sync_skcipher *)1 == \
				       (typeof(tfm))1))) \
			    ] CRYPTO_MINALIGN_ATTR; \
	struct skcipher_request *name = (void *)__##name##_desc
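/*
 * A minimal usage sketch for on-stack requests (illustrative only; the tfm
 * must have been allocated with crypto_alloc_sync_skcipher() so that its
 * request size is guaranteed to fit into MAX_SYNC_SKCIPHER_REQSIZE, and
 * "src_sg", "dst_sg", "len", "iv" and "err" are assumed caller variables):
 *
 *	SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
 *
 *	skcipher_request_set_sync_tfm(req, tfm);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	err = crypto_skcipher_encrypt(req);
 *	skcipher_request_zero(req);
 */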
/**
 * DOC: Symmetric Key Cipher API
 *
 * The symmetric key cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_SKCIPHER (listed as type "skcipher" in /proc/crypto).
 *
 * Asynchronous cipher operations imply that the function invocation for a
 * cipher request returns immediately, before the operation has completed.
 * The cipher request may be processed in a separate kernel thread and
 * therefore load-balanced on the different CPUs via the process scheduler.
 * To allow the kernel crypto API to inform the caller about the completion
 * of a cipher request, the caller must provide a callback function. That
 * function is invoked with the cipher handle when the request completes.
 *
 * To support the asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the skcipher_request data structure.
 *
 * For the symmetric key cipher API, the state is maintained with the tfm
 * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced in the request data structure
 * in addition to the IV used for the cipher request. Such state information
 * is important for the caller to keep, because the callback function invoked
 * upon completion of the cipher operation may need to know which of several
 * operations issued in parallel just finished. This state information is
 * unused by the kernel crypto API. A minimal usage sketch follows below.
 */
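/*
 * A minimal usage sketch (illustrative only; error handling trimmed, and
 * "data", "datalen", "key", "keylen" and "iv" are assumed to be provided by
 * the caller, with <linux/scatterlist.h> and <linux/err.h> included):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	sg_init_one(&sg, data, datalen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, datalen, iv);
 *
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */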
static inline struct crypto_skcipher *__crypto_skcipher_cast(
	struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_skcipher, base);
}

/**
 * crypto_alloc_skcipher() - allocate symmetric key cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for an skcipher. The returned struct
 * crypto_skcipher is the cipher handle that is required for any subsequent
 * API invocation for that skcipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);

struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(const char *alg_name,
							u32 type, u32 mask);

/**
 * crypto_alloc_lskcipher() - allocate linear symmetric key cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      lskcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for an lskcipher. The returned struct
 * crypto_lskcipher is the cipher handle that is required for any subsequent
 * API invocation for that lskcipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
						u32 type, u32 mask);

static inline struct crypto_tfm *crypto_skcipher_tfm(
	struct crypto_skcipher *tfm)
{
	return &tfm->base;
}

static inline struct crypto_tfm *crypto_lskcipher_tfm(
	struct crypto_lskcipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_skcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * If @tfm is a NULL or error pointer, this function does nothing.
 */
static inline void crypto_free_skcipher(struct crypto_skcipher *tfm)
{
	crypto_destroy_tfm(tfm, crypto_skcipher_tfm(tfm));
}

static inline void crypto_free_sync_skcipher(struct crypto_sync_skcipher *tfm)
{
	crypto_free_skcipher(&tfm->base);
}

/**
 * crypto_free_lskcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * If @tfm is a NULL or error pointer, this function does nothing.
 */
static inline void crypto_free_lskcipher(struct crypto_lskcipher *tfm)
{
	crypto_destroy_tfm(tfm, crypto_lskcipher_tfm(tfm));
}

/**
 * crypto_has_skcipher() - Search for the availability of an skcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher
 * @type: specifies the type of the skcipher
 * @mask: specifies the mask for the skcipher
 *
 * Return: true when the skcipher is known to the kernel crypto API; false
 *	   otherwise
 */
int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask);

static inline const char *crypto_skcipher_driver_name(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
}

static inline const char *crypto_lskcipher_driver_name(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_driver_name(crypto_lskcipher_tfm(tfm));
}

static inline struct skcipher_alg_common *crypto_skcipher_alg_common(
	struct crypto_skcipher *tfm)
{
	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
			    struct skcipher_alg_common, base);
}

static inline struct skcipher_alg *crypto_skcipher_alg(
	struct crypto_skcipher *tfm)
{
	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
			    struct skcipher_alg, base);
}

static inline struct lskcipher_alg *crypto_lskcipher_alg(
	struct crypto_lskcipher *tfm)
{
	return container_of(crypto_lskcipher_tfm(tfm)->__crt_alg,
			    struct lskcipher_alg, co.base);
}

/**
 * crypto_skcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the skcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->ivsize;
}

static inline unsigned int crypto_sync_skcipher_ivsize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_ivsize(&tfm->base);
}
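/*
 * Illustrative only: the IV passed to skcipher_request_set_crypt() must be
 * exactly crypto_skcipher_ivsize() bytes long, so callers typically size
 * their IV buffer from it, e.g. (assuming <linux/random.h> for
 * get_random_bytes(), and a mode for which a random IV is appropriate):
 *
 *	u8 *iv = kmalloc(crypto_skcipher_ivsize(tfm), GFP_KERNEL);
 *
 *	if (iv)
 *		get_random_bytes(iv, crypto_skcipher_ivsize(tfm));
 */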
/**
 * crypto_lskcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the lskcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_lskcipher_ivsize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.ivsize;
}

/**
 * crypto_skcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the skcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_skcipher_blocksize(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_skcipher_tfm(tfm));
}

/**
 * crypto_lskcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the lskcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_lskcipher_blocksize(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_lskcipher_tfm(tfm));
}

/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->chunksize;
}
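/*
 * Illustrative only: for a stream-cipher mode such as "ctr(aes)",
 * crypto_skcipher_blocksize() is 1 while crypto_skcipher_chunksize() is 16
 * (the AES block size), so a caller splitting one message across several
 * requests must keep every fragment except the last a multiple of the chunk
 * size, e.g.:
 *
 *	unsigned int chunk = crypto_skcipher_chunksize(tfm);
 *	unsigned int frag  = rounddown(bytes_available, chunk);
 */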
/**
 * crypto_lskcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_lskcipher_chunksize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.chunksize;
}

static inline unsigned int crypto_sync_skcipher_blocksize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_blocksize(&tfm->base);
}

static inline unsigned int crypto_skcipher_alignmask(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm));
}

static inline unsigned int crypto_lskcipher_alignmask(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_lskcipher_tfm(tfm));
}

static inline u32 crypto_skcipher_get_flags(struct crypto_skcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_skcipher_tfm(tfm));
}

static inline void crypto_skcipher_set_flags(struct crypto_skcipher *tfm,
					     u32 flags)
{
	crypto_tfm_set_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline void crypto_skcipher_clear_flags(struct crypto_skcipher *tfm,
					       u32 flags)
{
	crypto_tfm_clear_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline u32 crypto_sync_skcipher_get_flags(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_get_flags(&tfm->base);
}

static inline void crypto_sync_skcipher_set_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_set_flags(&tfm->base, flags);
}

static inline void crypto_sync_skcipher_clear_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_clear_flags(&tfm->base, flags);
}

static inline u32 crypto_lskcipher_get_flags(struct crypto_lskcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_lskcipher_tfm(tfm));
}

static inline void crypto_lskcipher_set_flags(struct crypto_lskcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_lskcipher_tfm(tfm), flags);
}

static inline void crypto_lskcipher_clear_flags(struct crypto_lskcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_lskcipher_tfm(tfm), flags);
}
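/*
 * Illustrative only: the flag helpers above operate on the CRYPTO_TFM_REQ_*
 * bits of the underlying tfm. For example, a caller that must reject weak
 * keys can set the corresponding flag before calling setkey:
 *
 *	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 */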
/**
 * crypto_skcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the skcipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers come
 * in different variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When providing a 16 byte key for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_skcipher_setkey(struct crypto_skcipher *tfm,
			   const u8 *key, unsigned int keylen);

static inline int crypto_sync_skcipher_setkey(struct crypto_sync_skcipher *tfm,
					      const u8 *key, unsigned int keylen)
{
	return crypto_skcipher_setkey(&tfm->base, key, keylen);
}

/**
 * crypto_lskcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the lskcipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers come
 * in different variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When providing a 16 byte key for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm,
			    const u8 *key, unsigned int keylen);

static inline unsigned int crypto_skcipher_min_keysize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->min_keysize;
}

static inline unsigned int crypto_skcipher_max_keysize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->max_keysize;
}

static inline unsigned int crypto_lskcipher_min_keysize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.min_keysize;
}

static inline unsigned int crypto_lskcipher_max_keysize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.max_keysize;
}

/**
 * crypto_skcipher_reqtfm() - obtain cipher handle from request
 * @req: skcipher_request out of which the cipher handle is to be obtained
 *
 * Return the crypto_skcipher handle when furnishing an skcipher_request
 * data structure.
 *
 * Return: crypto_skcipher handle
 */
static inline struct crypto_skcipher *crypto_skcipher_reqtfm(
	struct skcipher_request *req)
{
	return __crypto_skcipher_cast(req->base.tfm);
}

static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
	struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	return container_of(tfm, struct crypto_sync_skcipher, base);
}

/**
 * crypto_skcipher_encrypt() - encrypt plaintext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Encrypt plaintext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_encrypt(struct skcipher_request *req);

/**
 * crypto_skcipher_decrypt() - decrypt ciphertext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Decrypt ciphertext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_decrypt(struct skcipher_request *req);

/**
 * crypto_lskcipher_encrypt() - encrypt plaintext
 * @tfm: lskcipher handle
 * @src: source buffer
 * @dst: destination buffer
 * @len: number of bytes to process
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *	by crypto_lskcipher_ivsize
 *
 * Encrypt plaintext data using the lskcipher handle.
 *
 * Return: >=0 if the cipher operation was successful; a positive value is
 *	   the number of bytes left unprocessed; < 0 if an error occurred
 */
int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv);

/**
 * crypto_lskcipher_decrypt() - decrypt ciphertext
 * @tfm: lskcipher handle
 * @src: source buffer
 * @dst: destination buffer
 * @len: number of bytes to process
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *	by crypto_lskcipher_ivsize
 *
 * Decrypt ciphertext data using the lskcipher handle.
 *
 * Return: >=0 if the cipher operation was successful; a positive value is
 *	   the number of bytes left unprocessed; < 0 if an error occurred
 */
int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv);
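/*
 * A minimal lskcipher usage sketch (illustrative only; error handling
 * trimmed, and "key", "keylen", "src", "dst", "len" and "iv" are assumed to
 * be provided by the caller):
 *
 *	struct crypto_lskcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_lskcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_lskcipher_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_lskcipher_encrypt(tfm, src, dst, len, iv);
 *
 *	crypto_free_lskcipher(tfm);
 */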
/**
 * DOC: Symmetric Key Cipher Request Handle
 *
 * The skcipher_request data structure contains all pointers to data
 * required for the symmetric key cipher operation. This includes the cipher
 * handle (which can be used by multiple skcipher_request instances), pointers
 * to plaintext and ciphertext, the asynchronous callback function, etc. It
 * acts as a handle to the skcipher_request_* API calls in a similar way as
 * the skcipher handle to the crypto_skcipher_* API calls.
 */

/**
 * crypto_skcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_skcipher_reqsize(struct crypto_skcipher *tfm)
{
	return tfm->reqsize;
}

/**
 * skcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing skcipher handle in the request
 * data structure with a different one.
 */
static inline void skcipher_request_set_tfm(struct skcipher_request *req,
					    struct crypto_skcipher *tfm)
{
	req->base.tfm = crypto_skcipher_tfm(tfm);
}

static inline void skcipher_request_set_sync_tfm(struct skcipher_request *req,
					struct crypto_sync_skcipher *tfm)
{
	skcipher_request_set_tfm(req, &tfm->base);
}
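/*
 * Illustrative only: a common use of on-stack requests together with
 * skcipher_request_set_sync_tfm() is falling back to a software
 * implementation for cases a driver cannot handle itself. "fallback" is a
 * hypothetical struct crypto_sync_skcipher allocated at init time, and
 * "req" is the original asynchronous request being serviced:
 *
 *	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, fallback);
 *
 *	skcipher_request_set_sync_tfm(subreq, fallback);
 *	skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
 *	skcipher_request_set_crypt(subreq, req->src, req->dst,
 *				   req->cryptlen, req->iv);
 *	return crypto_skcipher_encrypt(subreq);
 */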
static inline struct skcipher_request *skcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct skcipher_request, base);
}

/**
 * skcipher_request_alloc() - allocate request data structure
 * @tfm: cipher handle to be registered with the request
 * @gfp: memory allocation flag that is handed to kmalloc by the API call.
 *
 * Allocate the request data structure that must be used with the skcipher
 * encrypt and decrypt API calls. During the allocation, the provided skcipher
 * handle is registered in the request data structure.
 *
 * Return: allocated request handle in case of success, or NULL if out of memory
 */
static inline struct skcipher_request *skcipher_request_alloc(
	struct crypto_skcipher *tfm, gfp_t gfp)
{
	struct skcipher_request *req;

	req = kmalloc(sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(tfm), gfp);

	if (likely(req))
		skcipher_request_set_tfm(req, tfm);

	return req;
}

/**
 * skcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 */
static inline void skcipher_request_free(struct skcipher_request *req)
{
	kfree_sensitive(req);
}

static inline void skcipher_request_zero(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	memzero_explicit(req, sizeof(*req) + crypto_skcipher_reqsize(tfm));
}

/**
 * skcipher_request_set_callback() - set asynchronous callback function
 * @req: request handle
 * @flags: specify zero or an ORing of the flags
 *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
 *	   increase the wait queue beyond the initial maximum size;
 *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
 * @compl: callback function pointer to be registered with the request handle
 * @data: The data pointer refers to memory that is not used by the kernel
 *	  crypto API, but provided to the callback function for it to use. Here,
 *	  the caller can provide a reference to memory the callback function can
 *	  operate on. As the callback function is invoked asynchronously to the
 *	  related functionality, it may need to access data structures of the
 *	  related functionality which can be referenced using this pointer. The
 *	  callback function can access the memory via the "data" field in the
 *	  crypto_async_request data structure provided to the callback function.
 *
 * This function allows setting the callback function that is triggered once
 * the cipher operation completes.
 *
 * The callback function is registered with the skcipher_request handle and
 * must comply with the following template::
 *
 *	void callback_function(struct crypto_async_request *req, int error)
 */
static inline void skcipher_request_set_callback(struct skcipher_request *req,
						 u32 flags,
						 crypto_completion_t compl,
						 void *data)
{
	req->base.complete = compl;
	req->base.data = data;
	req->base.flags = flags;
}
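/*
 * A hypothetical callback matching the template documented above
 * (illustrative only; "struct my_ctx" is an assumed caller-private structure
 * passed via @data). Note that an @error of -EINPROGRESS only signals that a
 * backlogged request has been moved to the queue and will complete later:
 *
 *	static void my_skcipher_done(struct crypto_async_request *areq,
 *				     int error)
 *	{
 *		struct my_ctx *ctx = areq->data;
 *
 *		if (error == -EINPROGRESS)
 *			return;
 *
 *		ctx->err = error;
 *		complete(&ctx->completion);
 *	}
 */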
/**
 * skcipher_request_set_crypt() - set data buffers
 * @req: request handle
 * @src: source scatter / gather list
 * @dst: destination scatter / gather list
 * @cryptlen: number of bytes to process from @src
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *	by crypto_skcipher_ivsize
 *
 * This function allows setting of the source data and destination data
 * scatter / gather lists.
 *
 * For encryption, the source is treated as the plaintext and the
 * destination is the ciphertext. For a decryption operation, the use is
 * reversed - the source is the ciphertext and the destination is the
 * plaintext.
 */
static inline void skcipher_request_set_crypt(
	struct skcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int cryptlen, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

#endif	/* _CRYPTO_SKCIPHER_H */