/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <asm/i387.h>

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

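/*
 * GHASH primitives implemented in assembly (ghash-clmulni-intel_asm.S).
 * They use SSE/XMM registers, so callers must bracket them with
 * kernel_fpu_begin()/kernel_fpu_end().
 */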
void clmul_ghash_mul(char *dst, const be128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const be128 *shash);

void clmul_ghash_setkey(be128 *shash, const u8 *key);

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;	/* fallback for !irq_fpu_usable() */
};

struct ghash_ctx {
	be128 shash;	/* hash key, preprocessed for the PCLMULQDQ routines */
};

struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];	/* running digest / partial block */
	u32 bytes;	/* bytes still needed to complete the current block */
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/*
	 * clmul_ghash_setkey() uses SSE registers, so save the FPU state
	 * around it just as the update and final paths do.
	 */
	kernel_fpu_begin();
	clmul_ghash_setkey(&ctx->shash, key);
	kernel_fpu_end();

	return 0;
}

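/*
 * Fold data into the running digest.  Full 16-byte blocks are handled
 * by the PCLMULQDQ assembly under kernel_fpu_begin()/kernel_fpu_end();
 * a trailing partial block is XORed into dctx->buffer and deferred
 * until more data arrives or the hash is finalized.
 */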
static int ghash_update(struct shash_desc *desc,
			 const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		/* Complete the partial block left over from the last call. */
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	/* Hash all remaining full blocks; the assembly ignores the tail. */
	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		/* Buffer the trailing partial block for a later call. */
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}

	return 0;
}

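/*
 * Finish a buffered partial block.  GCM zero-pads the final block, and
 * XOR with zero leaves the buffered digest bytes unchanged, so only
 * the final GF(2^128) multiply remains to be done.
 */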
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes) {
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		/* Zero-pad the block; XOR with zero is a deliberate no-op. */
		while (dctx->bytes--)
			*tmp++ ^= 0;

		kernel_fpu_begin();
		clmul_ghash_mul(dst, &ctx->shash);
		kernel_fpu_end();
	}

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

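/*
 * Internal synchronous shash.  Registered as "__ghash" with priority 0
 * so it is not selected directly; users reach it through the cryptd
 * wrapper registered below as "ghash".
 */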
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
		.cra_list		= LIST_HEAD_INIT(ghash_alg.base.cra_list),
	},
};

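/*
 * Async entry points.  When the FPU is usable the request is served
 * synchronously via the shash; otherwise (e.g. from interrupt context)
 * it is handed to cryptd, whose worker runs in process context where
 * kernel_fpu_begin() is safe.
 */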
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!irq_fpu_usable()) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_init(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return crypto_shash_init(desc);
	}
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);

	if (!irq_fpu_usable()) {
		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);

	if (!irq_fpu_usable()) {
		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!irq_fpu_usable()) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return shash_ahash_digest(req, desc);
	}
}

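/* Keys live in the child shash; mirror the request/result flags both ways. */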
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	/* Propagate the child's result instead of discarding it. */
	return err;
}

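/*
 * Bind the async tfm to a cryptd instance of the internal "__ghash"
 * shash and reserve request space for the nested cryptd request.
 */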
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

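/*
 * The public "ghash" algorithm.  A typical user obtains it with
 * crypto_alloc_ahash("ghash", 0, 0) and drives it through the normal
 * ahash request API.
 */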
static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_type		= &crypto_ahash_type,
			.cra_module		= THIS_MODULE,
			.cra_list		= LIST_HEAD_INIT(ghash_async_alg.halg.base.cra_list),
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

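/* Register only if the CPU advertises the PCLMULQDQ instruction. */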
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!cpu_has_pclmulqdq) {
		printk(KERN_INFO "Intel PCLMULQDQ-NI instructions are not"
		       " detected.\n");
		return -ENODEV;
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm, "
		   "accelerated by PCLMULQDQ-NI");
MODULE_ALIAS("ghash");