xref: /linux/crypto/cipher.c (revision aa23aa55166c2865ac430168c4b9d405cf8c6980)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

/*
 * Bounce an unaligned key through a temporary kmalloc() buffer so that
 * cia_setkey() always sees a pointer satisfying the algorithm's alignment
 * mask, then wipe the copy before freeing it.
 */
static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	/* Over-allocate by alignmask bytes so an aligned copy always fits. */
	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(tfm, alignbuffer, keylen);
	/* Clear the key copy before returning the buffer to the allocator. */
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}
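
/*
 * Worked example of the arithmetic above (illustrative, not part of the
 * original file): with alignmask == 15 (16-byte alignment) and
 * keylen == 24, absize is 24 + 15 = 39 bytes.  If kmalloc() returns a
 * pointer ending in 0x...1004, ALIGN() rounds it up to 0x...1010,
 * skipping 12 bytes of slack; 39 - 12 = 27 bytes remain, which is still
 * >= keylen.  Since the slack can never exceed alignmask, the aligned
 * copy always fits inside the over-allocated buffer.
 */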

/*
 * Common setkey entry point: validate the key length against the
 * algorithm's limits and dispatch to the unaligned helper when the
 * caller's key pointer does not satisfy the alignment mask.
 */
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(tfm, key, keylen);
}

/*
 * Encrypt or decrypt a single block through an aligned on-stack bounce
 * buffer when the source or destination pointer is misaligned.
 */
static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
					      const u8 *),
				   struct crypto_tfm *tfm,
				   u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned int size = crypto_tfm_alg_blocksize(tfm);
	u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

	/* Copy in, transform in place in the aligned buffer, copy out. */
	memcpy(tmp, src, size);
	fn(tfm, tmp, tmp);
	memcpy(dst, tmp, size);
}
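
/*
 * Note on the bounce buffer size (illustrative, not part of the original
 * file): single-block ciphers registered with the API are bounded by
 * MAX_CIPHER_BLOCKSIZE and MAX_CIPHER_ALIGNMASK (16 and 15 at the time of
 * this revision), so the worst case above is a 16-byte block placed at a
 * 16-byte boundary inside the 31-byte stack array.
 */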

/*
 * Encrypt wrapper installed when the algorithm declares a non-zero
 * alignment mask: fall back to the bounce-buffer path only when a
 * pointer is actually misaligned.
 */
static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
		return;
	}

	cipher->cia_encrypt(tfm, dst, src);
}

static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
		return;
	}

	cipher->cia_decrypt(tfm, dst, src);
}

/*
 * Wire up the per-transform cipher ops: use the algorithm's handlers
 * directly when no alignment is required, otherwise install the
 * alignment-checking wrappers above.
 */
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	struct cipher_tfm *ops = &tfm->crt_cipher;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	ops->cit_setkey = setkey;
	ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
		cipher_encrypt_unaligned : cipher->cia_encrypt;
	ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
		cipher_decrypt_unaligned : cipher->cia_decrypt;

	return 0;
}
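
/*
 * Illustrative sketch (not part of this file): how a caller typically
 * exercises the ops installed above through the single-block cipher API
 * declared in <linux/crypto.h>.  The algorithm name, key, and buffer
 * contents are placeholders chosen for the example.
 */
#if 0
static int cipher_usage_example(void)
{
	static const u8 key[16] = { 0x00, 0x01, 0x02, 0x03 };	/* AES-128 */
	u8 block[16] = { 0 };
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Ends up in setkey() above via cit_setkey. */
	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	/* One block in, one block out; no chaining mode is applied. */
	crypto_cipher_encrypt_one(tfm, block, block);
	crypto_cipher_decrypt_one(tfm, block, block);

out:
	crypto_free_cipher(tfm);
	return err;
}
#endif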