/*
 * Copyright 2010-2022 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <sys/types.h>
#include <sys/endian.h>
#include <sys/systm.h>

#include <crypto/openssl/ossl.h>
#include <crypto/openssl/ossl_arm.h>
#include <crypto/openssl/ossl_aes_gcm.h>
#include <crypto/openssl/ossl_cipher.h>
#include <crypto/openssl/arm/arm_arch.h>

#include <opencrypto/cryptodev.h>

_Static_assert(
    sizeof(struct ossl_gcm_context) <= sizeof(struct ossl_cipher_context),
    "ossl_gcm_context too large");

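/*
 * The routines below are implemented in ARM assembly elsewhere in this
 * tree; only their prototypes appear here.  AES_set_encrypt_key and
 * AES_encrypt provide the scalar key schedule and single-block cipher,
 * the gcm_*_neon routines implement GHASH with NEON, and
 * ossl_bsaes_ctr32_encrypt_blocks is OpenSSL's bit-sliced AES-CTR bulk
 * routine (32-bit big-endian counter).
 */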
void AES_encrypt(const void *in, void *out, const void *ks);
void AES_set_encrypt_key(const void *key, int keylen, void *ks);

void gcm_init_neon(__uint128_t Htable[16], const uint64_t Xi[2]);
void gcm_gmult_neon(uint64_t Xi[2], const __uint128_t Htable[16]);
void gcm_ghash_neon(uint64_t Xi[2], const __uint128_t Htable[16],
    const void *in, size_t len);

void ossl_bsaes_ctr32_encrypt_blocks(const unsigned char *in,
    unsigned char *out, size_t blocks, void *ks, const unsigned char *iv);

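/*
 * Schedule the AES key and derive the GHASH hash key H = E_K(0^128).
 * The two 64-bit halves of H are byte-swapped to host order on
 * little-endian machines before gcm_init_neon expands them into the
 * multiplication table.
 */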
static void
gcm_init(struct ossl_gcm_context *ctx, const void *key, size_t keylen)
{
	memset(&ctx->gcm, 0, sizeof(ctx->gcm));
	memset(&ctx->aes_ks, 0, sizeof(ctx->aes_ks));

	AES_set_encrypt_key(key, keylen, &ctx->aes_ks);
	AES_encrypt(ctx->gcm.H.c, ctx->gcm.H.c, &ctx->aes_ks);

#if BYTE_ORDER == LITTLE_ENDIAN
	ctx->gcm.H.u[0] = bswap64(ctx->gcm.H.u[0]);
	ctx->gcm.H.u[1] = bswap64(ctx->gcm.H.u[1]);
#endif

	gcm_init_neon(ctx->gcm.Htable, ctx->gcm.H.u);
}

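/*
 * Load a 96-bit IV: the initial counter block is Y0 = IV || 0^31 || 1.
 * EK0 = E_K(Y0) is stashed for the final tag computation, and the
 * counter is advanced to 2, the value used for the first data block.
 */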
static void
gcm_setiv(struct ossl_gcm_context *ctx, const unsigned char *iv, size_t len)
{
	uint32_t ctr;

	KASSERT(len == AES_GCM_IV_LEN,
	    ("%s: invalid IV length %zu", __func__, len));

	ctx->gcm.len.u[0] = 0;
	ctx->gcm.len.u[1] = 0;
	ctx->gcm.ares = ctx->gcm.mres = 0;

	memcpy(ctx->gcm.Yi.c, iv, len);
	ctx->gcm.Yi.c[12] = 0;
	ctx->gcm.Yi.c[13] = 0;
	ctx->gcm.Yi.c[14] = 0;
	ctx->gcm.Yi.c[15] = 1;
	ctr = 1;

	ctx->gcm.Xi.u[0] = 0;
	ctx->gcm.Xi.u[1] = 0;

	AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c, &ctx->aes_ks);
	ctr++;

#if BYTE_ORDER == LITTLE_ENDIAN
	ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
	ctx->gcm.Yi.d[3] = ctr;
#endif
}

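/*
 * Finalize GHASH: flush any buffered AAD or ciphertext bytes, fold in
 * the AAD and ciphertext lengths in bits, and XOR with EK0 to form the
 * tag.  When a tag is supplied, compare it in constant time.
 */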
static int
gcm_finish(struct ossl_gcm_context *ctx, const unsigned char *tag, size_t len)
{
	uint64_t alen = ctx->gcm.len.u[0] << 3;
	uint64_t clen = ctx->gcm.len.u[1] << 3;

	if (ctx->gcm.mres || ctx->gcm.ares)
		gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);

#if BYTE_ORDER == LITTLE_ENDIAN
	alen = bswap64(alen);
	clen = bswap64(clen);
#endif

	ctx->gcm.Xi.u[0] ^= alen;
	ctx->gcm.Xi.u[1] ^= clen;
	gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);

	ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
	ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

	if (tag != NULL)
		return timingsafe_bcmp(ctx->gcm.Xi.c, tag, len);
	return 0;
}

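/*
 * Absorb AAD into the GHASH state.  A partial trailing block is
 * buffered in Xi and its length kept in ares, so AAD may arrive in
 * arbitrarily sized chunks.  The length check enforces GCM's cap of
 * 2^64 bits of AAD, i.e. 2^61 bytes.
 */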
static int
gcm_aad(struct ossl_gcm_context *ctx, const unsigned char *aad, size_t len)
{
	size_t i;
	unsigned int n;
	uint64_t alen = ctx->gcm.len.u[0];

	if (ctx->gcm.len.u[1])
		return -2;

	alen += len;
	if (alen > ((uint64_t)1 << 61) || (sizeof(len) == 8 && alen < len))
		return -1;
	ctx->gcm.len.u[0] = alen;

	n = ctx->gcm.ares;
	if (n) {
		while (n && len) {
			ctx->gcm.Xi.c[n] ^= *(aad++);
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0)
			gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
		else {
			ctx->gcm.ares = n;
			return 0;
		}
	}
	if ((i = (len & (size_t)-AES_BLOCK_LEN))) {
		gcm_ghash_neon(ctx->gcm.Xi.u, ctx->gcm.Htable, aad, i);
		aad += i;
		len -= i;
	}
	if (len) {
		n = (unsigned int)len;
		for (i = 0; i < len; ++i)
			ctx->gcm.Xi.c[i] ^= aad[i];
	}

	ctx->gcm.ares = n;
	return 0;
}

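/*
 * Encrypt len bytes in CTR mode, accumulating the ciphertext into the
 * GHASH state.  Runs of full blocks go through the bit-sliced AES CTR
 * routine in bulk; leading and trailing fragments are handled one
 * counter block at a time with AES_encrypt.  GCM caps a single message
 * at 2^39 - 256 bits, i.e. 2^36 - 32 bytes.
 */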
static int
gcm_encrypt(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	struct bsaes_key bsks;
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to encrypt finalizes GHASH(AAD) */
		gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	if (n) {
		while (n && len) {
			ctx->gcm.Xi.c[n] ^= *(out++) = *(in++) ^ ctx->gcm.EKi.c[n];
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0) {
			gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
			mres = 0;
		} else {
			ctx->gcm.mres = n;
			return 0;
		}
	}
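	/*
	 * Bulk path: encrypt all remaining full blocks with the CTR32
	 * routine, then GHASH the ciphertext just produced, one block
	 * at a time.
	 */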
	if ((i = (len & (size_t)-16))) {
		size_t j = i / 16;

		memcpy(&bsks.ks, &ctx->aes_ks, sizeof(bsks.ks));
		bsks.converted = 0;
		ossl_bsaes_ctr32_encrypt_blocks(in, out, j, &bsks,
		    ctx->gcm.Yi.c);
		ctr += (unsigned int)j;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		in += i;
		len -= i;
		while (j--) {
			for (i = 0; i < 16; ++i)
				ctx->gcm.Xi.c[i] ^= out[i];
			gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
			out += 16;
		}
	}
	if (len) {
		AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c, &ctx->aes_ks);
		++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		while (len--) {
			ctx->gcm.Xi.c[mres++] ^= out[n] = in[n] ^ ctx->gcm.EKi.c[n];
			++n;
		}
	}

	ctx->gcm.mres = mres;
	return 0;
}

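/*
 * Decrypt len bytes: the mirror image of gcm_encrypt, except that GHASH
 * is computed over the input (ciphertext) rather than the output.
 */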
static int
gcm_decrypt(struct ossl_gcm_context *ctx, const unsigned char *in,
    unsigned char *out, size_t len)
{
	struct bsaes_key bsks;
	unsigned int n, ctr, mres;
	size_t i;
	uint64_t mlen = ctx->gcm.len.u[1];

	mlen += len;
	if (mlen > ((1ull << 36) - 32) || (sizeof(len) == 8 && mlen < len))
		return -1;
	ctx->gcm.len.u[1] = mlen;

	mres = ctx->gcm.mres;

	if (ctx->gcm.ares) {
		/* First call to decrypt finalizes GHASH(AAD) */
		gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
		ctx->gcm.ares = 0;
	}

#if BYTE_ORDER == LITTLE_ENDIAN
	ctr = bswap32(ctx->gcm.Yi.d[3]);
#else
	ctr = ctx->gcm.Yi.d[3];
#endif

	n = mres % 16;
	if (n) {
		while (n && len) {
			uint8_t c = *(in++);
			*(out++) = c ^ ctx->gcm.EKi.c[n];
			ctx->gcm.Xi.c[n] ^= c;
			--len;
			n = (n + 1) % 16;
		}
		if (n == 0) {
			gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
			mres = 0;
		} else {
			ctx->gcm.mres = n;
			return 0;
		}
	}
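	/*
	 * Bulk path: GHASH the full ciphertext blocks before decrypting
	 * them, so the hash input is still intact if the operation is
	 * in place (in == out), then run the same blocks through the
	 * CTR32 routine.
	 */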
	if ((i = (len & (size_t)-16))) {
		size_t j = i / 16;

		while (j--) {
			size_t k;
			for (k = 0; k < 16; ++k)
				ctx->gcm.Xi.c[k] ^= in[k];
			gcm_gmult_neon(ctx->gcm.Xi.u, ctx->gcm.Htable);
			in += 16;
		}
		j = i / 16;
		in -= i;
		memcpy(&bsks.ks, &ctx->aes_ks, sizeof(bsks.ks));
		bsks.converted = 0;
		ossl_bsaes_ctr32_encrypt_blocks(in, out, j, &bsks,
		    ctx->gcm.Yi.c);
		ctr += (unsigned int)j;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		out += i;
		in += i;
		len -= i;
	}
	if (len) {
		AES_encrypt(ctx->gcm.Yi.c, ctx->gcm.EKi.c, &ctx->aes_ks);
		++ctr;
#if BYTE_ORDER == LITTLE_ENDIAN
		ctx->gcm.Yi.d[3] = bswap32(ctr);
#else
		ctx->gcm.Yi.d[3] = ctr;
#endif
		while (len--) {
			uint8_t c = in[n];
			ctx->gcm.Xi.c[mres++] ^= c;
			out[n] = c ^ ctx->gcm.EKi.c[n];
			++n;
		}
	}

	ctx->gcm.mres = mres;
	return 0;
}

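/*
 * Copy out the tag for a completed operation.  gcm_finish with a NULL
 * tag only finalizes Xi, whose leading len bytes are the tag.
 */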
static void
gcm_tag(struct ossl_gcm_context *ctx, unsigned char *tag, size_t len)
{
	gcm_finish(ctx, NULL, 0);
	memcpy(tag, ctx->gcm.Xi.c, len);
}

static const struct ossl_aes_gcm_ops gcm_ops_neon = {
	.init = gcm_init,
	.setiv = gcm_setiv,
	.aad = gcm_aad,
	.encrypt = gcm_encrypt,
	.decrypt = gcm_decrypt,
	.finish = gcm_finish,
	.tag = gcm_tag,
};

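/*
 * Entry point used by the ossl(4) glue to bind an AES-GCM session to
 * this NEON implementation.  A caller then drives the ops table per
 * request: setiv, aad for any additional authenticated data, encrypt
 * or decrypt over the payload, and finally tag (encrypt) or finish
 * with the received tag (decrypt).
 */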
int ossl_aes_gcm_setkey(const unsigned char *key, int klen, void *_ctx);

int
ossl_aes_gcm_setkey(const unsigned char *key, int klen, void *_ctx)
{
	struct ossl_gcm_context *ctx;

	ctx = _ctx;
	ctx->ops = &gcm_ops_neon;
	gcm_init(ctx, key, klen);
	return (0);
}