// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator produces an IV by XORing a sequence number with a
 * salt.  It is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */
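
/*
 * Sketch of the construction: with the caller-supplied 64-bit sequence
 * number seq (passed in via req->iv) and the random salt generated when
 * the transform is initialised, the wire IV is simply
 *
 *	iv = seq ^ salt
 *
 * so the IV cannot repeat under a single key until the sequence number
 * wraps.
 */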

#include <crypto/internal/geniv.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>

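/*
 * Completion helper for the unaligned-IV slow path: once the inner
 * encrypt has finished, copy the IV that was generated in the aligned
 * bounce buffer back into req->iv and free the buffer.
 */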
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS || err == -EBUSY)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kfree_sensitive(subreq->iv);
}

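/*
 * Request callback used on the unaligned-IV path: perform the IV
 * copy-back above, then complete the original request.
 */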
static void seqiv_aead_encrypt_complete(void *data, int err)
{
	struct aead_request *req = data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

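/*
 * Encrypt: the first 8 bytes of req->cryptlen are reserved for the IV.
 * The sequence number in req->iv is XORed with the salt and written
 * into the destination at offset req->assoclen; the inner AEAD then
 * encrypts the remaining cryptlen - 8 bytes, treating the IV as part
 * of the associated data.  The resulting layout is roughly
 *
 *	dst = [ AD | 8-byte IV | ciphertext | auth tag ]
 *
 * If req->iv is not suitably aligned for this algorithm, the IV is
 * staged through a heap bounce buffer and copied back on completion.
 */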
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	bool unaligned_info;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

	if (req->src != req->dst)
		memcpy_sglist(req->dst, req->src,
			      req->assoclen + req->cryptlen);

	unaligned_info = !IS_ALIGNED((unsigned long)info,
				     crypto_aead_alignmask(geniv) + 1);
	if (unlikely(unaligned_info)) {
		info = kmemdup(req->iv, ivsize, req->base.flags &
			       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
			       GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(unaligned_info))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

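/*
 * Decrypt: recover the 8-byte IV from the source buffer at offset
 * req->assoclen into req->iv, then hand the rest to the inner AEAD,
 * which verifies the tag over the IV and associated data as well.
 */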
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}

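/*
 * Template instantiation: build the instance with the shared geniv
 * helpers and require a 64-bit IV, since the generator XORs exactly one
 * u64 sequence number with the salt.  The extra ivsize bytes of context
 * make room for that salt.
 */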
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	int err;

	inst = aead_geniv_alloc(tmpl, tb);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.init = aead_init_geniv;
	inst->alg.exit = aead_exit_geniv;

	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err) {
free_inst:
		inst->free(inst);
	}
	return err;
}

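/*
 * Usage sketch: the template wraps an inner AEAD by name, e.g.
 * "seqiv(rfc4106(gcm(aes)))"; counter-mode AEADs such as rfc4106
 * declare "seqiv" as their IV generator so that, in IPsec ESP, each
 * packet's sequence number yields a fresh IV.
 */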
static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_aead_create,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");