// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

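/*
 * rfc3686 instance context: the wrapped "ctr(cipher)" transform plus the
 * nonce that setkey() splits off the tail of the key.  Each request also
 * carries a crypto_rfc3686_req_ctx holding the assembled counter block
 * and the sub-request forwarded to the child transform.
 */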
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

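/*
 * Handle the final, possibly partial block: encrypt the counter once to
 * produce a keystream block, XOR just the remaining nbytes into dst, and
 * bump the counter.  tmp is over-sized so the keystream pointer can be
 * aligned to the cipher's alignmask.
 */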
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

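/*
 * Out-of-place walk: encrypt the counter straight into dst to form the
 * keystream, XOR the plaintext from src on top of it, and advance one
 * full block at a time.  Returns the tail length (< bsize) left for
 * crypto_ctr_crypt_final().
 */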
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

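/*
 * In-place walk: src and dst alias, so the keystream is generated into
 * an aligned stack buffer first and then XORed into the data.  Returns
 * the tail length (< bsize) left for crypto_ctr_crypt_final().
 */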
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

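/*
 * CTR encryption and decryption are the same operation, so this handler
 * serves both.  It walks the request, dispatches full blocks to the
 * in-place or out-of-place helper, and finishes any partial tail.
 */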
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

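/*
 * Template constructor for "ctr(cipher)".  As a minimal usage sketch
 * (illustrative only, not part of this file; "key", "keylen" and the
 * surrounding error handling are placeholders):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 */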
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

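/*
 * RFC 3686 setkey: the supplied key is the underlying cipher key with a
 * CTR_RFC3686_NONCE_SIZE-byte nonce appended.  Split off the nonce, then
 * forward the remainder (and the request flags) to the child transform.
 */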
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	/* The nonce occupies the last CTR_RFC3686_NONCE_SIZE bytes of the key. */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

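/*
 * Assemble the 16-byte counter block laid out as in RFC 3686:
 *
 *	| nonce (4 bytes) | IV (8 bytes) | counter (4 bytes, big endian) |
 *
 * with the counter starting at 1, then run the request through the
 * child "ctr(cipher)" transform.
 */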
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

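/*
 * Reserve request-context space for an alignmask-aligned
 * crypto_rfc3686_req_ctx plus the child transform's own sub-request.
 */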
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

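/*
 * Template constructor for "rfc3686(ctr(cipher))".  The inner algorithm
 * must look like CTR: a stream cipher (blocksize 1) with a 16-byte IV.
 */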
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/*
	 * We only support 16-byte counter blocks; the "ctr" template
	 * reports the underlying block size as its IV size.
	 */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc3686_free(inst);
	}
	return err;
}

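/*
 * Minimal allocation sketch for the rfc3686 wrapper (illustrative only;
 * "key" and "keylen" are placeholders).  Note that setkey() expects the
 * nonce appended to the key, so the length passed in covers both:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
 *	if (!IS_ERR(tfm))
 *		crypto_skcipher_setkey(tfm, key,
 *				       keylen + CTR_RFC3686_NONCE_SIZE);
 */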
static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");