/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * CTR: Counter mode
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_CTR_H
#define _CRYPTO_CTR_H

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>
#include <linux/types.h>

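/*
 * Sizes used by the RFC 3686 (CTR with IPsec ESP) variant: the 16-byte
 * counter block is built from a 4-byte nonce, the 8-byte per-request IV
 * and a 4-byte big-endian block counter.
 */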
#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE 8
#define CTR_RFC3686_BLOCK_SIZE 16

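/**
 * crypto_ctr_encrypt_walk() - perform CTR en/decryption with a single-block
 *	cipher callback
 * @req: skcipher request to process
 * @fn: callback that encrypts one block (the current counter value) with the
 *	underlying cipher and writes the resulting keystream block to its
 *	output buffer
 *
 * Helper for skcipher implementations that only provide a single-block
 * encryption primitive.  Each keystream block is generated by encrypting the
 * counter with @fn and is XORed into the data; the counter is then
 * incremented as a big-endian integer.  CTR mode is symmetric, so the same
 * routine serves for both encryption and decryption.
 *
 * Return: 0 on success or a negative errno from the skcipher walk.
 */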
static inline int crypto_ctr_encrypt_walk(struct skcipher_request *req,
					  void (*fn)(struct crypto_skcipher *,
						     const u8 *, u8 *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int blocksize = crypto_skcipher_chunksize(tfm);
	u8 buf[MAX_CIPHER_BLOCKSIZE];
	struct skcipher_walk walk;
	int err;

	/* avoid integer division due to variable blocksize parameter */
	if (WARN_ON_ONCE(!is_power_of_2(blocksize)))
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

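		/*
		 * If this is not the final chunk, only consume whole blocks
		 * here; the partial tail is handed back to the walk so it is
		 * processed together with the next chunk.
		 */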
		if (nbytes < walk.total) {
			tail = walk.nbytes & (blocksize - 1);
			nbytes -= tail;
		}

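		/*
		 * Encrypt the counter to produce one keystream block, XOR it
		 * into the data and increment the counter.
		 */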
		do {
			int bsize = min(nbytes, blocksize);

			fn(tfm, walk.iv, buf);

			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, blocksize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (nbytes > 0);

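		/* Return any unprocessed tail bytes to the walk. */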
		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}

#endif  /* _CRYPTO_CTR_H */