xref: /freebsd/sys/crypto/openssl/amd64/ossl_aes_gcm_avx512.c (revision 5daf8ed625af70ebb7e4740ab98a6054e9e52329)
1 /*
2  * Copyright 2010-2022 The OpenSSL Project Authors. All Rights Reserved.
3  * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
4  *
5  * Licensed under the Apache License 2.0 (the "License").  You may not use
6  * this file except in compliance with the License.  You can obtain a copy
7  * in the file LICENSE in the source distribution or at
8  * https://www.openssl.org/source/license.html
9  */
10 
11 /*
12  * This file contains an AES-GCM wrapper implementation from OpenSSL, using
13  * VAES extensions. It was ported from cipher_aes_gcm_hw_vaes_avx512.inc.
14  */
15 
16 #include <sys/endian.h>
17 #include <sys/systm.h>
18 
19 #include <crypto/openssl/ossl.h>
20 #include <crypto/openssl/ossl_aes_gcm.h>
21 #include <crypto/openssl/ossl_cipher.h>
22 
23 #include <opencrypto/cryptodev.h>
24 
/*
 * The GCM state is carried inside the generic ossl_cipher_context blob;
 * fail the build if it no longer fits.
 */
_Static_assert(
    sizeof(struct ossl_gcm_context) <= sizeof(struct ossl_cipher_context),
    "ossl_gcm_context too large");

/*
 * AES-NI key schedule setup from OpenSSL's assembly code.  "bits" is the
 * key size in bits (128/192/256, see the KASSERT in gcm_init() below).
 */
void aesni_set_encrypt_key(const void *key, int bits, void *ctx);
30 
31 static void
gcm_init(struct ossl_gcm_context * ctx,const void * key,size_t keylen)32 gcm_init(struct ossl_gcm_context *ctx, const void *key, size_t keylen)
33 {
34 	KASSERT(keylen == 128 || keylen == 192 || keylen == 256,
35 	    ("%s: invalid key length %zu", __func__, keylen));
36 
37 	memset(&ctx->gcm, 0, sizeof(ctx->gcm));
38 	memset(&ctx->aes_ks, 0, sizeof(ctx->aes_ks));
39 	aesni_set_encrypt_key(key, keylen, &ctx->aes_ks);
40 	ctx->ops->init(ctx, key, keylen);
41 }
42 
43 static void
gcm_tag(struct ossl_gcm_context * ctx,unsigned char * tag,size_t len)44 gcm_tag(struct ossl_gcm_context *ctx, unsigned char *tag, size_t len)
45 {
46 	(void)ctx->ops->finish(ctx, NULL, 0);
47 	memcpy(tag, ctx->gcm.Xi.c, len);
48 }
49 
/*
 * Prototypes for the VAES/AVX512 assembly routines generated from
 * OpenSSL.  "gcm128ctx" is the embedded GCM state; "ks" is the expanded
 * AES key schedule; "pblocklen" tracks the number of bytes buffered in a
 * partial cipher block across calls.
 */
void ossl_gcm_gmult_avx512(uint64_t Xi[2], void *gcm128ctx);
void ossl_aes_gcm_init_avx512(const void *ks, void *gcm128ctx);
void ossl_aes_gcm_setiv_avx512(const void *ks, void *gcm128ctx,
    const unsigned char *iv, size_t ivlen);
void ossl_aes_gcm_update_aad_avx512(void *gcm128ctx, const unsigned char *aad,
    size_t len);
void ossl_aes_gcm_encrypt_avx512(const void *ks, void *gcm128ctx,
    unsigned int *pblocklen, const unsigned char *in, size_t len,
    unsigned char *out);
void ossl_aes_gcm_decrypt_avx512(const void *ks, void *gcm128ctx,
    unsigned int *pblocklen, const unsigned char *in, size_t len,
    unsigned char *out);
void ossl_aes_gcm_finalize_avx512(void *gcm128ctx, unsigned int pblocklen);
63 
64 static void
gcm_init_avx512(struct ossl_gcm_context * ctx,const void * key,size_t keylen)65 gcm_init_avx512(struct ossl_gcm_context *ctx, const void *key, size_t keylen)
66 {
67 	ossl_aes_gcm_init_avx512(&ctx->aes_ks, &ctx->gcm);
68 }
69 
70 static void
gcm_setiv_avx512(struct ossl_gcm_context * ctx,const unsigned char * iv,size_t len)71 gcm_setiv_avx512(struct ossl_gcm_context *ctx, const unsigned char *iv,
72     size_t len)
73 {
74 	KASSERT(len == AES_GCM_IV_LEN,
75 	    ("%s: invalid IV length %zu", __func__, len));
76 
77 	ctx->gcm.Yi.u[0] = 0;		/* Current counter */
78 	ctx->gcm.Yi.u[1] = 0;
79 	ctx->gcm.Xi.u[0] = 0;		/* AAD hash */
80 	ctx->gcm.Xi.u[1] = 0;
81 	ctx->gcm.len.u[0] = 0;		/* AAD length */
82 	ctx->gcm.len.u[1] = 0;		/* Message length */
83 	ctx->gcm.ares = 0;
84 	ctx->gcm.mres = 0;
85 
86 	ossl_aes_gcm_setiv_avx512(&ctx->aes_ks, ctx, iv, len);
87 }
88 
/*
 * Accumulate additional authenticated data into the GHASH state.
 *
 * Returns 0 on success, -1 if the total AAD length would exceed the GCM
 * limit (2^61 bytes) or overflows, and -2 on a bad call sequence (AAD
 * supplied after message data has already been processed).
 *
 * gcm.ares counts the bytes of a partially filled AAD block buffered in
 * Xi between calls; the hash is stored byte-reflected, hence the
 * Xi.c[15 - i] indexing throughout.
 */
static int
gcm_aad_avx512(struct ossl_gcm_context *ctx, const unsigned char *aad,
    size_t len)
{
	uint64_t alen = ctx->gcm.len.u[0];
	size_t lenblks;
	unsigned int ares;

	/* Bad sequence: call of AAD update after message processing */
	if (ctx->gcm.len.u[1])
		return -2;

	alen += len;
	/* AAD is limited by 2^64 bits, thus 2^61 bytes */
	if (alen > (1ull << 61) || (sizeof(len) == 8 && alen < len))
		return -1;
	ctx->gcm.len.u[0] = alen;

	ares = ctx->gcm.ares;
	/* Partial AAD block left from previous AAD update calls */
	if (ares > 0) {
		/*
		 * Fill partial block buffer till full block
		 * (note, the hash is stored reflected)
		 */
		while (ares > 0 && len > 0) {
			ctx->gcm.Xi.c[15 - ares] ^= *(aad++);
			--len;
			ares = (ares + 1) % AES_BLOCK_LEN;
		}
		/* Full block gathered */
		if (ares == 0) {
			ossl_gcm_gmult_avx512(ctx->gcm.Xi.u, ctx);
		} else { /* no more AAD */
			ctx->gcm.ares = ares;
			return 0;
		}
	}

	/* Bulk AAD processing: round len down to a whole number of blocks */
	lenblks = len & ((size_t)(-AES_BLOCK_LEN));
	if (lenblks > 0) {
		ossl_aes_gcm_update_aad_avx512(ctx, aad, lenblks);
		aad += lenblks;
		len -= lenblks;
	}

	/* Add remaining AAD to the hash (note, the hash is stored reflected) */
	if (len > 0) {
		ares = (unsigned int)len;
		for (size_t i = 0; i < len; ++i)
			ctx->gcm.Xi.c[15 - i] ^= aad[i];
	}

	/* Stash the residue count (0 if we ended on a block boundary). */
	ctx->gcm.ares = ares;

	return 0;
}
147 
148 static int
_gcm_encrypt_avx512(struct ossl_gcm_context * ctx,const unsigned char * in,unsigned char * out,size_t len,bool encrypt)149 _gcm_encrypt_avx512(struct ossl_gcm_context *ctx, const unsigned char *in,
150     unsigned char *out, size_t len, bool encrypt)
151 {
152 	uint64_t mlen = ctx->gcm.len.u[1];
153 
154 	mlen += len;
155 	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
156 		return -1;
157 
158 	ctx->gcm.len.u[1] = mlen;
159 
160 	/* Finalize GHASH(AAD) if AAD partial blocks left unprocessed */
161 	if (ctx->gcm.ares > 0) {
162 		ossl_gcm_gmult_avx512(ctx->gcm.Xi.u, ctx);
163 		ctx->gcm.ares = 0;
164 	}
165 
166 	if (encrypt) {
167 		ossl_aes_gcm_encrypt_avx512(&ctx->aes_ks, ctx, &ctx->gcm.mres,
168 		    in, len, out);
169 	} else {
170 		ossl_aes_gcm_decrypt_avx512(&ctx->aes_ks, ctx, &ctx->gcm.mres,
171 		    in, len, out);
172 	}
173 
174 	return 0;
175 }
176 
177 static int
gcm_encrypt_avx512(struct ossl_gcm_context * ctx,const unsigned char * in,unsigned char * out,size_t len)178 gcm_encrypt_avx512(struct ossl_gcm_context *ctx, const unsigned char *in,
179     unsigned char *out, size_t len)
180 {
181 	return _gcm_encrypt_avx512(ctx, in, out, len, true);
182 }
183 
184 static int
gcm_decrypt_avx512(struct ossl_gcm_context * ctx,const unsigned char * in,unsigned char * out,size_t len)185 gcm_decrypt_avx512(struct ossl_gcm_context *ctx, const unsigned char *in,
186     unsigned char *out, size_t len)
187 {
188 	return _gcm_encrypt_avx512(ctx, in, out, len, false);
189 }
190 
191 static int
gcm_finish_avx512(struct ossl_gcm_context * ctx,const unsigned char * tag,size_t len)192 gcm_finish_avx512(struct ossl_gcm_context *ctx, const unsigned char *tag,
193     size_t len)
194 {
195 	unsigned int *res = &ctx->gcm.mres;
196 
197 	/* Finalize AAD processing */
198 	if (ctx->gcm.ares > 0)
199 		res = &ctx->gcm.ares;
200 
201 	ossl_aes_gcm_finalize_avx512(ctx, *res);
202 
203 	ctx->gcm.ares = ctx->gcm.mres = 0;
204 
205 	if (tag != NULL)
206 		return timingsafe_bcmp(ctx->gcm.Xi.c, tag, len);
207 	return 0;
208 }
209 
/* Dispatch table binding the generic GCM entry points to the AVX512 code. */
static const struct ossl_aes_gcm_ops gcm_ops_avx512 = {
	.init = gcm_init_avx512,
	.setiv = gcm_setiv_avx512,
	.aad = gcm_aad_avx512,
	.encrypt = gcm_encrypt_avx512,
	.decrypt = gcm_decrypt_avx512,
	.finish = gcm_finish_avx512,
	.tag = gcm_tag,
};
219 
220 int ossl_aes_gcm_setkey_avx512(const unsigned char *key, int klen, void *_ctx);
221 
222 int
ossl_aes_gcm_setkey_avx512(const unsigned char * key,int klen,void * _ctx)223 ossl_aes_gcm_setkey_avx512(const unsigned char *key, int klen,
224     void *_ctx)
225 {
226 	struct ossl_gcm_context *ctx;
227 
228 	ctx = _ctx;
229 	ctx->ops = &gcm_ops_avx512;
230 	gcm_init(ctx, key, klen);
231 	return (0);
232 }
233