// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 */

#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/b128ops.h>
#include <crypto/ghash.h>
#include <crypto/internal/hash.h>
#include <crypto/utils.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/unaligned.h>

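/*
 * The two routines below are implemented in the accompanying assembly file
 * (ghash-clmulni-intel_asm.S).  clmul_ghash_mul() multiplies the 16-byte
 * GHASH state at @dst by the prepared hash key using PCLMULQDQ.
 * clmul_ghash_update() folds whole 16-byte blocks of @src into @dst and
 * returns the number of trailing bytes (< GHASH_BLOCK_SIZE) it did not
 * consume.
 */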
asmlinkage void clmul_ghash_mul(char *dst, const le128 *shash);

asmlinkage int clmul_ghash_update(char *dst, const char *src,
				  unsigned int srclen, const le128 *shash);

struct x86_ghash_ctx {
	le128 shash;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct x86_ghash_ctx *ctx = crypto_shash_ctx(tfm);
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/*
	 * GHASH maps bits to polynomial coefficients backwards, which makes it
	 * hard to implement.  But it can be shown that the GHASH multiplication
	 *
	 *	D * K (mod x^128 + x^7 + x^2 + x + 1)
	 *
	 * (where D is a data block and K is the key) is equivalent to:
	 *
	 *	bitreflect(D) * bitreflect(K) * x^(-127)
	 *		(mod x^128 + x^127 + x^126 + x^121 + 1)
	 *
	 * So, the code below precomputes:
	 *
	 *	bitreflect(K) * x^(-127) (mod x^128 + x^127 + x^126 + x^121 + 1)
	 *
	 * ... but in Montgomery form (so that Montgomery multiplication can be
	 * used), i.e. with an extra x^128 factor, which means actually:
	 *
	 *	bitreflect(K) * x (mod x^128 + x^127 + x^126 + x^121 + 1)
	 *
	 * The within-a-byte part of bitreflect() cancels out GHASH's built-in
	 * reflection, and thus bitreflect() is actually a byteswap.
	 */
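	/*
	 * Concretely: bitreflect(K) is loaded into (a, b), with a holding the
	 * coefficients of x^64..x^127 and b those of x^0..x^63.  The shift
	 * below is the multiplication by x; if the x^127 coefficient is
	 * shifted out, it is reduced as x^128 = x^127 + x^126 + x^121 + 1:
	 * the carry of a into b supplies the "+ 1" term, and the conditional
	 * XOR of 0xc2 << 56 supplies the other three terms.
	 */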
	a = get_unaligned_be64(key);
	b = get_unaligned_be64(key + 8);
	ctx->shash.a = cpu_to_le64((a << 1) | (b >> 63));
	ctx->shash.b = cpu_to_le64((b << 1) | (a >> 63));
	if (a >> 63)
		ctx->shash.a ^= cpu_to_le64((u64)0xc2 << 56);
	return 0;
}

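/*
 * Process whole 16-byte blocks of @src into the accumulator in
 * dctx->buffer.  With CRYPTO_AHASH_ALG_BLOCK_ONLY, ->update() only needs
 * to handle full blocks: the number of leftover bytes is returned here,
 * and the shash layer buffers them until ghash_finup().
 */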
static int ghash_update(struct shash_desc *desc,
			 const u8 *src, unsigned int srclen)
{
	struct x86_ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	u8 *dst = dctx->buffer;
	int remain;

	kernel_fpu_begin();
	remain = clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();
	return remain;
}

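/* Fold the final partial block (if any) into the accumulator. */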
static void ghash_flush(struct x86_ghash_ctx *ctx, struct ghash_desc_ctx *dctx,
			const u8 *src, unsigned int len)
{
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (len) {
		crypto_xor(dst, src, len);
		clmul_ghash_mul(dst, &ctx->shash);
	}
	kernel_fpu_end();
}

static int ghash_finup(struct shash_desc *desc, const u8 *src,
		       unsigned int len, u8 *dst)
{
	struct x86_ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx, src, len);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

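/*
 * Registered at a higher priority than ghash-generic, so this
 * implementation is preferred when both are available.  A minimal usage
 * sketch (error handling omitted; in practice GHASH is normally reached
 * through GCM rather than used directly):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("ghash", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[GHASH_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_setkey(tfm, key, GHASH_BLOCK_SIZE);
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 */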
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.finup		= ghash_finup,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "ghash",
		.cra_driver_name	= "ghash-pclmulqdqni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct x86_ghash_ctx),
		.cra_module		= THIS_MODULE,
	},
};

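/*
 * Match on the PCLMULQDQ CPU feature so the module can be auto-loaded on
 * capable CPUs; registration is refused with -ENODEV otherwise.
 */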
static const struct x86_cpu_id pcmul_cpu_id[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PCLMULQDQ, NULL), /* Pickle-Mickle-Duck */
	{}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

static int __init ghash_pclmulqdqni_mod_init(void)
{
	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	return crypto_register_shash(&ghash_alg);
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");