/*
 * Copyright 2010-2022 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
 * Copyright (c) 2023, Raptor Engineering, LLC. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * This file contains an AES-GCM wrapper implementation from OpenSSL, using
 * AES-NI (x86) or the POWER8 Crypto Extensions (ppc).  It was ported from
 * cipher_aes_gcm_hw_aesni.inc and makes use of a generic C implementation
 * for partial blocks, ported from gcm128.c with OPENSSL_SMALL_FOOTPRINT
 * defined.
 */

#include <sys/endian.h>
#include <sys/systm.h>

#include <crypto/openssl/ossl.h>
#include <crypto/openssl/ossl_aes_gcm.h>
#include <crypto/openssl/ossl_cipher.h>

#include <opencrypto/cryptodev.h>

_Static_assert(
    sizeof(struct ossl_gcm_context) <= sizeof(struct ossl_cipher_context),
    "ossl_gcm_context too large");

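/*
 * Map the generic AES and GHASH entry points used below onto the
 * accelerated assembly routines for the current architecture.
 */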
#if defined(__amd64__) || defined(__i386__)
#define	AES_set_encrypt_key	aesni_set_encrypt_key
#define	AES_gcm_encrypt	aesni_gcm_encrypt
#define	AES_gcm_decrypt	aesni_gcm_decrypt
#define	AES_encrypt	aesni_encrypt
#define	AES_ctr32_encrypt_blocks	aesni_ctr32_encrypt_blocks
#define	GCM_init	gcm_init_avx
#define	GCM_gmult	gcm_gmult_avx
#define	GCM_ghash	gcm_ghash_avx

void AES_set_encrypt_key(const void *key, int bits, void *ctx);
size_t AES_gcm_encrypt(const unsigned char *in, unsigned char *out, size_t len,
    const void *key, unsigned char ivec[16], uint64_t *Xi);
size_t AES_gcm_decrypt(const unsigned char *in, unsigned char *out, size_t len,
    const void *key, unsigned char ivec[16], uint64_t *Xi);
void AES_encrypt(const unsigned char *in, unsigned char *out, void *ks);
void AES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
    size_t blocks, void *ks, const unsigned char *iv);

void GCM_init(__uint128_t Htable[16], uint64_t Xi[2]);
void GCM_gmult(uint64_t Xi[2], const __uint128_t Htable[16]);
void GCM_ghash(uint64_t Xi[2], const __uint128_t Htable[16], const void *in,
    size_t len);

#elif defined(__powerpc64__)
#define	AES_set_encrypt_key	aes_p8_set_encrypt_key
#define	AES_gcm_encrypt(i,o,l,k,v,x)	ppc_aes_gcm_crypt(i,o,l,k,v,x,1)
#define	AES_gcm_decrypt(i,o,l,k,v,x)	ppc_aes_gcm_crypt(i,o,l,k,v,x,0)
#define	AES_encrypt	aes_p8_encrypt
#define	AES_ctr32_encrypt_blocks	aes_p8_ctr32_encrypt_blocks
#define	GCM_init	gcm_init_p8
#define	GCM_gmult	gcm_gmult_p8
#define	GCM_ghash	gcm_ghash_p8

size_t ppc_aes_gcm_encrypt(const unsigned char *in, unsigned char *out,
    size_t len, const void *key, unsigned char ivec[16], uint64_t *Xi);
size_t ppc_aes_gcm_decrypt(const unsigned char *in, unsigned char *out,
    size_t len, const void *key, unsigned char ivec[16], uint64_t *Xi);

void AES_set_encrypt_key(const void *key, int bits, void *ctx);
void AES_encrypt(const unsigned char *in, unsigned char *out, void *ks);
void AES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
    size_t blocks, void *ks, const unsigned char *iv);

void GCM_init(__uint128_t Htable[16], uint64_t Xi[2]);
void GCM_gmult(uint64_t Xi[2], const __uint128_t Htable[16]);
void GCM_ghash(uint64_t Xi[2], const __uint128_t Htable[16], const void *in,
    size_t len);

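/*
 * The POWER8 assembly routines increment only the low 32 bits of the
 * counter block, so split large requests at the points where that word
 * would wrap and let it roll over to zero between calls.
 */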
static size_t
ppc_aes_gcm_crypt(const unsigned char *in, unsigned char *out,
    size_t len, const void *key, unsigned char ivec_[16], uint64_t *Xi,
    int encrypt)
{
	union {
		uint32_t d[4];
		uint8_t c[16];
	} *ivec = (void *)ivec_;
	int s = 0;
	int ndone = 0;
	int ctr_reset = 0;
	uint32_t ivec_val;
	uint64_t blocks_unused;
	uint64_t nb = len / 16;
	uint64_t next_ctr = 0;
	unsigned char ctr_saved[12];

	memcpy(ctr_saved, ivec, 12);

	while (nb) {
		ivec_val = ivec->d[3];
#if BYTE_ORDER == LITTLE_ENDIAN
		ivec_val = bswap32(ivec_val);
#endif

		blocks_unused = (uint64_t)0xffffffffU + 1 - (uint64_t)ivec_val;
		if (nb > blocks_unused) {
			len = blocks_unused * 16;
			nb -= blocks_unused;
			next_ctr = blocks_unused;
			ctr_reset = 1;
		} else {
			len = nb * 16;
			next_ctr = nb;
			nb = 0;
		}

		s = encrypt ? ppc_aes_gcm_encrypt(in, out, len, key, ivec->c, Xi) :
		    ppc_aes_gcm_decrypt(in, out, len, key, ivec->c, Xi);

		/* add counter to ivec */
#if BYTE_ORDER == LITTLE_ENDIAN
		ivec->d[3] = bswap32(ivec_val + next_ctr);
#else
		ivec->d[3] += next_ctr;
#endif
		if (ctr_reset) {
			ctr_reset = 0;
			in += len;
			out += len;
		}
		memcpy(ivec, ctr_saved, 12);
		ndone += s;
	}

	return ndone;
}

#else
#error "Unsupported architecture!"
#endif

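/*
 * Schedule the AES round keys and let the per-architecture init hook
 * derive the GHASH hash key and its precomputed table.
 */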
static void
gcm_init(struct ossl_gcm_context *ctx, const void *key, size_t keylen)
{
	KASSERT(keylen == 128 || keylen == 192 || keylen == 256,
	    ("%s: invalid key length %zu", __func__, keylen));

	memset(&ctx->gcm, 0, sizeof(ctx->gcm));
	memset(&ctx->aes_ks, 0, sizeof(ctx->aes_ks));
	AES_set_encrypt_key(key, keylen, &ctx->aes_ks);
	ctx->ops->init(ctx, key, keylen);
}

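/*
 * Finish the GHASH computation and copy out the first 'len' bytes of
 * the resulting tag.
 */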
static void
gcm_tag_op(struct ossl_gcm_context *ctx, unsigned char *tag, size_t len)
{
	(void)ctx->ops->finish(ctx, NULL, 0);
	memcpy(tag, ctx->gcm.Xi.c, len);
}

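/*
 * Derive the hash key H = E_K(0^128) (gcm.H was zeroed by gcm_init()),
 * convert it to host byte order and precompute the key-dependent table
 * used by GCM_gmult()/GCM_ghash().
 */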
static void
gcm_init_op(struct ossl_gcm_context *ctx, const void *key, size_t keylen)
{
	AES_encrypt(ctx->gcm.H.c, ctx->gcm.H.c, &ctx->aes_ks);

#if BYTE_ORDER == LITTLE_ENDIAN
	ctx->gcm.H.u[0] = bswap64(ctx->gcm.H.u[0]);
	ctx->gcm.H.u[1] = bswap64(ctx->gcm.H.u[1]);
#endif

	GCM_init(ctx->gcm.Htable, ctx->gcm.H.u);
}

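/*
 * Load a 96-bit IV: the initial counter block is Y0 = IV || 0^31 || 1.
 * E_K(Y0) is saved in EK0 for the final tag computation before the
 * counter is advanced for the first block of payload.
 */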
static void
gcm_setiv_op(struct ossl_gcm_context *ctx, const unsigned char *iv,
    size_t len)
{
	uint32_t ctr;

	KASSERT(len == AES_GCM_IV_LEN,
	    ("%s: invalid IV length %zu", __func__, len));

	ctx->gcm.len.u[0] = 0;
	ctx->gcm.len.u[1] = 0;
	ctx->gcm.ares = ctx->gcm.mres = 0;

	memcpy(ctx->gcm.Yi.c, iv, len);
	ctx->gcm.Yi.c[12] = 0;
	ctx->gcm.Yi.c[13] = 0;
	ctx->gcm.Yi.c[14] = 0;
	ctx->gcm.Yi.c[15] = 1;
	ctr = 1;

	ctx->gcm.Xi.u[0] = 0;
	ctx->gcm.Xi.u[1] = 0;

	AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c, &ctx->aes_ks);
	ctr++;

#if BYTE_ORDER == LITTLE_ENDIAN
	ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
	ctx->gcm.Yi.d[3] = ctr;
#endif
}

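/*
 * Absorb AAD into the GHASH state.  A partial trailing block is carried
 * in gcm.ares so that the AAD may be supplied in arbitrary chunks; all
 * AAD must be processed before any payload.
 */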
static int
gcm_aad_op(struct ossl_gcm_context *ctx, const unsigned char *aad,
    size_t len)
{
	size_t i;
	unsigned int n;
	uint64_t alen = ctx->gcm.len.u[0];

	if (ctx->gcm.len.u[1])
		return -2;

	alen += len;
	if (alen > (1ull << 61) || (sizeof(len) == 8 && alen < len))
		return -1;
	ctx->gcm.len.u[0] = alen;

	n = ctx->gcm.ares;
	if (n) {
		while (n && len) {
			ctx->gcm.Xi.c[n] ^= *(aad++);
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0)
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
		else {
			ctx->gcm.ares = n;
			return 0;
		}
	}
	if ((i = (len & (size_t)-AES_BLOCK_LEN))) {
		GCM_ghash(ctx->gcm.Xi.u, ctx->gcm.Htable, aad, i);
		aad += i;
		len -= i;
	}
	if (len) {
		n = (unsigned int)len;
		for (i = 0; i < len; ++i)
			ctx->gcm.Xi.c[i] ^= aad[i];
	}

	ctx->gcm.ares = n;
	return 0;
}

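/*
 * Generic byte-at-a-time CTR encryption with on-the-fly GHASH, ported
 * from the OPENSSL_SMALL_FOOTPRINT code in gcm128.c.  Used by
 * gcm_encrypt_op() to bring the input up to a block boundary.
 */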
static int
gcm_encrypt(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to encrypt finalizes GHASH(AAD) */
		GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	for (i = 0; i < len; ++i) {
		if (n == 0) {
			AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c,
			    &ctx->aes_ks);
			++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
			ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
			ctx->gcm.Yi.d[3] = ctr;
#endif
		}
		ctx->gcm.Xi.c[n] ^= out[i] = in[i] ^ ctx->gcm.EKi.c[n];
		mres = n = (n + 1) % 16;
		if (n == 0)
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
	}

	ctx->gcm.mres = mres;
	return 0;
}

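/*
 * Block-oriented variant: whole blocks are encrypted with
 * AES_ctr32_encrypt_blocks() and the resulting ciphertext is then fed
 * through GHASH sixteen bytes at a time.
 */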
static int
gcm_encrypt_ctr32(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to encrypt finalizes GHASH(AAD) */
		GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	if (n) {
		while (n && len) {
			ctx->gcm.Xi.c[n] ^= *(out++) = *(in++) ^ ctx->gcm.EKi.c[n];
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0) {
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
			mres = 0;
		} else {
			ctx->gcm.mres = n;
			return 0;
		}
	}
	if ((i = (len & (size_t)-16))) {
		size_t j = i / 16;

		AES_ctr32_encrypt_blocks(in, out, j, &ctx->aes_ks, ctx->gcm.Yi.c);
		ctr += (unsigned int)j;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		in += i;
		len -= i;
		while (j--) {
			for (i = 0; i < 16; ++i)
				ctx->gcm.Xi.c[i] ^= out[i];
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
			out += 16;
		}
	}
	if (len) {
		AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c, &ctx->aes_ks);
		++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		while (len--) {
			ctx->gcm.Xi.c[mres++] ^= out[n] = in[n] ^ ctx->gcm.EKi.c[n];
			++n;
		}
	}

	ctx->gcm.mres = mres;
	return 0;
}

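/*
 * Encrypt: consume a leading partial block, hand as much as possible to
 * the fused AES-GCM assembly loop, then mop up the remainder with the
 * ctr32 path above.
 */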
static int
gcm_encrypt_op(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	size_t bulk = 0, res;
	int error;

	res = MIN(len, (AES_BLOCK_LEN - ctx->gcm.mres) % AES_BLOCK_LEN);
	if ((error = gcm_encrypt(ctx, in, out, res)) != 0)
		return error;

	bulk = AES_gcm_encrypt(in + res, out + res, len - res,
	    &ctx->aes_ks, ctx->gcm.Yi.c, ctx->gcm.Xi.u);
	ctx->gcm.len.u[1] += bulk;
	bulk += res;

	if ((error = gcm_encrypt_ctr32(ctx, in + bulk, out + bulk,
	    len - bulk)) != 0)
		return error;

	return 0;
}

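/*
 * Generic byte-at-a-time CTR decryption; unlike encryption, GHASH
 * absorbs the ciphertext before it is XORed with the key stream.
 */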
static int
gcm_decrypt(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to decrypt finalizes GHASH(AAD) */
		GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	for (i = 0; i < len; ++i) {
		uint8_t c;
		if (n == 0) {
			AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c,
			    &ctx->aes_ks);
			++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
			ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
			ctx->gcm.Yi.d[3] = ctr;
#endif
		}
		c = in[i];
		out[i] = c ^ ctx->gcm.EKi.c[n];
		ctx->gcm.Xi.c[n] ^= c;
		mres = n = (n + 1) % 16;
		if (n == 0)
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
	}

	ctx->gcm.mres = mres;
	return 0;
}

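/*
 * Block-oriented decrypt variant: GHASH the ciphertext first, then
 * decrypt the same blocks in bulk with AES_ctr32_encrypt_blocks().
 */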
static int
gcm_decrypt_ctr32(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to decrypt finalizes GHASH(AAD) */
		GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	if (n) {
		while (n && len) {
			uint8_t c = *(in++);
			*(out++) = c ^ ctx->gcm.EKi.c[n];
			ctx->gcm.Xi.c[n] ^= c;
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0) {
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
			mres = 0;
		} else {
			ctx->gcm.mres = n;
			return 0;
		}
	}
	if ((i = (len & (size_t)-16))) {
		size_t j = i / 16;

		while (j--) {
			size_t k;
			for (k = 0; k < 16; ++k)
				ctx->gcm.Xi.c[k] ^= in[k];
			GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);
			in += 16;
		}
		j = i / 16;
		in -= i;
		AES_ctr32_encrypt_blocks(in, out, j, &ctx->aes_ks, ctx->gcm.Yi.c);
		ctr += (unsigned int)j;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		out += i;
		in += i;
		len -= i;
	}
	if (len) {
		AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c, &ctx->aes_ks);
		++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		while (len--) {
			uint8_t c = in[n];
			ctx->gcm.Xi.c[mres++] ^= c;
			out[n] = c ^ ctx->gcm.EKi.c[n];
			++n;
		}
	}

	ctx->gcm.mres = mres;
	return 0;
}

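/*
 * Decrypt: mirror image of gcm_encrypt_op().
 */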
static int
gcm_decrypt_op(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	size_t bulk = 0, res;
	int error;

	res = MIN(len, (AES_BLOCK_LEN - ctx->gcm.mres) % AES_BLOCK_LEN);
	if ((error = gcm_decrypt(ctx, in, out, res)) != 0)
		return error;

	bulk = AES_gcm_decrypt(in + res, out + res, len - res, &ctx->aes_ks,
	    ctx->gcm.Yi.c, ctx->gcm.Xi.u);
	ctx->gcm.len.u[1] += bulk;
	bulk += res;

	if ((error = gcm_decrypt_ctr32(ctx, in + bulk, out + bulk, len - bulk)) != 0)
		return error;

	return 0;
}

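/*
 * Complete the GHASH over len(AAD) || len(C), XOR with E_K(Y0) to form
 * the tag and, if an expected tag was supplied, compare the two in
 * constant time.
 */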
static int
gcm_finish_op(struct ossl_gcm_context *ctx, const unsigned char *tag,
    size_t len)
{
	uint64_t alen = ctx->gcm.len.u[0] << 3;
	uint64_t clen = ctx->gcm.len.u[1] << 3;

	if (ctx->gcm.mres || ctx->gcm.ares)
		GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);

#if BYTE_ORDER == LITTLE_ENDIAN
	alen = bswap64(alen);
	clen = bswap64(clen);
#endif

	ctx->gcm.Xi.u[0] ^= alen;
	ctx->gcm.Xi.u[1] ^= clen;
	GCM_gmult(ctx->gcm.Xi.u, ctx->gcm.Htable);

	ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
	ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

	if (tag != NULL)
		return timingsafe_bcmp(ctx->gcm.Xi.c, tag, len);
	return 0;
}

static const struct ossl_aes_gcm_ops gcm_ops = {
	.init = gcm_init_op,
	.setiv = gcm_setiv_op,
	.aad = gcm_aad_op,
	.encrypt = gcm_encrypt_op,
	.decrypt = gcm_decrypt_op,
	.finish = gcm_finish_op,
	.tag = gcm_tag_op,
};

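/*
 * Entry point used to bind this implementation to an ossl(4) cipher
 * context; klen is the key length in bits.  An illustrative call
 * sequence through the ops vector (error handling omitted):
 *
 *	ossl_aes_gcm_setkey(key, 256, ctx);
 *	ctx->ops->setiv(ctx, iv, AES_GCM_IV_LEN);
 *	ctx->ops->aad(ctx, aad, aadlen);
 *	ctx->ops->encrypt(ctx, in, out, len);
 *	ctx->ops->tag(ctx, tag, 16);
 */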
int ossl_aes_gcm_setkey(const unsigned char *key, int klen, void *_ctx);

int
ossl_aes_gcm_setkey(const unsigned char *key, int klen,
    void *_ctx)
{
	struct ossl_gcm_context *ctx;

	ctx = _ctx;
	ctx->ops = &gcm_ops;
	gcm_init(ctx, key, klen);
	return (0);
}