// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C)2006 USAGI/WIDE Project
 *
 * Author:
 * 	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */
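
/*
 * XCBC-MAC (RFC 3566) implemented as an shash template: "xcbc(cipher)"
 * wraps a 16-byte-block cipher (typically AES) into a keyed hash.  Three
 * subkeys K1, K2 and K3 are derived from the user key; message blocks are
 * chained CBC-style under K1, and the last block is mixed with K2 (if it
 * is complete) or padded and mixed with K3 before the final encryption.
 *
 * A minimal usage sketch from other kernel code (error handling trimmed;
 * key, data and len are placeholders):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
 *	u8 mac[16];
 *
 *	crypto_shash_setkey(tfm, key, 16);
 *	crypto_shash_tfm_digest(tfm, data, len, mac);
 *	crypto_free_shash(tfm);
 */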

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

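/*
 * Key-derivation constants from RFC 3566: encrypting these three 16-byte
 * constants (0x01...01, 0x02...02, 0x03...03) under the user key yields
 * the subkeys K1, K2 and K3 respectively.
 */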
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | xcbc_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct xcbc_tfm_ctx {
	struct crypto_cipher *child;
	u8 ctx[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | xcbc_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct xcbc_desc_ctx {
	unsigned int len;
	u8 ctx[];
};

#define XCBC_BLOCKSIZE	16

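/*
 * Key setup: program the user key into the underlying cipher, derive K2
 * and K3 into the "consts" area of the tfm context, then derive K1 and
 * make it the cipher's working key for the CBC chaining.
 */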
static int crypto_xcbc_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
	u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 key1[XCBC_BLOCKSIZE];
	int bs = sizeof(key1);
	int err;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
	crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
	crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);

	return crypto_cipher_setkey(ctx->child, key1, bs);
}

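/* Begin a new MAC: no buffered bytes and an all-zero chaining value. */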
static int crypto_xcbc_digest_init(struct shash_desc *pdesc)
{
	unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

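/*
 * Absorb message data: buffer bytes in "odds" until a full block is
 * available, then fold each full block into the running CBC value in
 * "prev".  The last (possibly partial) block is always held back so that
 * final() can apply the K2/K3 tweak required by XCBC.
 */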
static int crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;

	/* if the new data still fits in the current block, just buffer it */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* complete the buffered block with new data and encrypt it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* the buffer is empty again */
	ctx->len = 0;

	/* chain in full blocks, always keeping the last block for final() */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* buffer whatever is left as the (possibly partial) last block */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

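/*
 * Finish the MAC as specified by RFC 3566: a complete final block is
 * XORed with K2 (consts[0..bs)), an incomplete one is padded with 0x80
 * and zeroes and XORed with K3 (consts[bs..2*bs)), then the result is
 * encrypted once more under K1 to produce the digest.
 */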
static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

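/* Instantiate the underlying block cipher when an xcbc tfm is created. */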
static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

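/*
 * Template instantiation for "xcbc(cipher)": grab the requested cipher,
 * require a 16-byte block size, and register a synchronous shash whose
 * digest size equals the cipher block size.
 */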
static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	unsigned long alignmask;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	err = -EINVAL;
	if (alg->cra_blocksize != XCBC_BLOCKSIZE)
		goto err_free_inst;

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	alignmask = alg->cra_alignmask | 3;
	inst->alg.base.cra_alignmask = alignmask;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx),
				   crypto_tfm_ctx_alignment()) +
			     (alignmask &
			      ~(crypto_tfm_ctx_alignment() - 1)) +
			     alg->cra_blocksize * 2;

	inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx),
					   alignmask + 1) +
				     alg->cra_blocksize * 2;
	inst->alg.base.cra_init = xcbc_init_tfm;
	inst->alg.base.cra_exit = xcbc_exit_tfm;

	inst->alg.init = crypto_xcbc_digest_init;
	inst->alg.update = crypto_xcbc_digest_update;
	inst->alg.final = crypto_xcbc_digest_final;
	inst->alg.setkey = crypto_xcbc_digest_setkey;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.create = xcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}

subsys_initcall(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("xcbc");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);