/*-
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2010 Pawel Jakub Dawidek <pjd@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/libkern.h>
#include <sys/malloc.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <crypto/aesni/aesni.h>

MALLOC_DECLARE(M_AESNI);

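/*
 * CBC-encrypt len bytes (assumed to be a multiple of AES_BLOCK_LEN) from
 * 'from' into 'to': each block is XORed with the previous ciphertext
 * block (the IV for the first block) before being encrypted.
 */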
void
aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
    const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
{
	const uint8_t *ivp;
	size_t i;

	len /= AES_BLOCK_LEN;
	ivp = iv;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, ivp);
		ivp = to;
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

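/*
 * ECB-encrypt len bytes block by block; there is no chaining, so each
 * 16-byte block is encrypted independently and no IV is consumed.
 */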
void
aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

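/*
 * ECB-decrypt len bytes block by block, the inverse of
 * aesni_encrypt_ecb().
 */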
void
aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_dec(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

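/*
 * AES-XTS (IEEE P1619): each 16-byte block is processed as
 * C = E_k1(P ^ T) ^ T, where the tweak T starts as E_k2(block number)
 * and is multiplied by alpha in GF(2^128) for each successive block.
 */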
#define	AES_XTS_BLOCKSIZE	16
#define	AES_XTS_IVSIZE		8
#define	AES_XTS_ALPHA		0x87	/* low byte of the GF(2^128) reduction polynomial */

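/*
 * Encrypt or decrypt a single block with the current tweak, then advance
 * the tweak by one multiplication by alpha for the next block.
 */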
static void
aesni_crypt_xts_block(int rounds, const void *key_schedule, uint8_t *tweak,
    const uint8_t *from, uint8_t *to, int do_encrypt)
{
	uint8_t block[AES_XTS_BLOCKSIZE];
	u_int i, carry_in, carry_out;

	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
		block[i] = from[i] ^ tweak[i];

	if (do_encrypt)
		aesni_enc(rounds - 1, key_schedule, block, to, NULL);
	else
		aesni_dec(rounds - 1, key_schedule, block, to, NULL);

	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
		to[i] ^= tweak[i];

	/*
	 * Multiply tweak by alpha: shift the 128-bit value left by one
	 * bit (little-endian, byte by byte) and, if a bit falls off the
	 * top, reduce by XORing AES_XTS_ALPHA into the low byte.
	 */
	carry_in = 0;
	for (i = 0; i < AES_XTS_BLOCKSIZE; i++) {
		carry_out = tweak[i] & 0x80;
		tweak[i] = (tweak[i] << 1) | (carry_in ? 1 : 0);
		carry_in = carry_out;
	}
	if (carry_in)
		tweak[0] ^= AES_XTS_ALPHA;
	bzero(block, sizeof(block));
}

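/*
 * Compute the initial tweak E_k2(IV) from the 64-bit little-endian block
 * number, then run every block of the request through
 * aesni_crypt_xts_block(), erasing the tweak before returning.
 */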
static void
aesni_crypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
{
	uint8_t tweak[AES_XTS_BLOCKSIZE];
	uint64_t blocknum;
	size_t i;

	/*
	 * Prepare tweak as E_k2(IV). IV is specified as LE representation
	 * of a 64-bit block number which we allow to be passed in directly.
	 */
	bcopy(iv, &blocknum, AES_XTS_IVSIZE);
	for (i = 0; i < AES_XTS_IVSIZE; i++) {
		tweak[i] = blocknum & 0xff;
		blocknum >>= 8;
	}
	/* Last 64 bits of IV are always zero. */
	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
	aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);

	len /= AES_XTS_BLOCKSIZE;
	for (i = 0; i < len; i++) {
		aesni_crypt_xts_block(rounds, data_schedule, tweak, from, to,
		    do_encrypt);
		from += AES_XTS_BLOCKSIZE;
		to += AES_XTS_BLOCKSIZE;
	}

	bzero(tweak, sizeof(tweak));
}

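/* Thin wrappers selecting the encrypt or decrypt path of aesni_crypt_xts(). */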
static void
aesni_encrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 1);
}

static void
aesni_decrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 0);
}

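/*
 * Validate the key length for the session's algorithm, derive the number
 * of AES rounds, and expand the encryption, decryption and (for XTS)
 * tweak-key schedules.  Key lengths are given in bits; an XTS key is the
 * concatenation of the data key and the tweak key.
 */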
static int
aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
    int keylen)
{

	switch (ses->algo) {
	case CRYPTO_AES_CBC:
		switch (keylen) {
		case 128:
			ses->rounds = AES128_ROUNDS;
			break;
		case 192:
			ses->rounds = AES192_ROUNDS;
			break;
		case 256:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	case CRYPTO_AES_XTS:
		switch (keylen) {
		case 256:
			ses->rounds = AES128_ROUNDS;
			break;
		case 512:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}

	aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
	aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
	if (ses->algo == CRYPTO_AES_CBC)
		arc4rand(ses->iv, sizeof(ses->iv), 0);
	else /* if (ses->algo == CRYPTO_AES_XTS) */ {
		/* keylen is in bits; the tweak key is the second half. */
		aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
		    ses->rounds);
	}

	return (0);
}

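/*
 * Session setup entry point.  Key expansion uses FPU/SSE state, so a
 * kernel FPU context is entered with fpu_kern_enter() unless this is
 * already a dedicated FPU kernel thread.
 */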
int
aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
{
	struct thread *td;
	int error, saved_ctx;

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
		saved_ctx = 1;
	} else {
		error = 0;
		saved_ctx = 0;
	}
	if (error == 0) {
		error = aesni_cipher_setup_common(ses, encini->cri_key,
		    encini->cri_klen);
		if (saved_ctx)
			fpu_kern_leave(td, &ses->fpu_ctx);
	}
	return (error);
}

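/*
 * Process one request: gather the payload into a contiguous buffer if
 * necessary, enter FPU context, honor an explicit key and/or IV from
 * the descriptor, run the cipher in place, and copy the result (and,
 * for encryption, the next chaining IV) back into the request.
 */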
int
aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
    struct cryptop *crp)
{
	struct thread *td;
	uint8_t *buf;
	int error, allocated, saved_ctx;

	buf = aesni_cipher_alloc(enccrd, crp, &allocated);
	if (buf == NULL)
		return (ENOMEM);

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
		if (error != 0)
			goto out;
		saved_ctx = 1;
	} else {
		saved_ctx = 0;
		error = 0;
	}

	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
		error = aesni_cipher_setup_common(ses, enccrd->crd_key,
		    enccrd->crd_klen);
		if (error != 0)
			goto out;
	}

	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
			crypto_copyback(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
			    enccrd->crd_len, buf, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	} else {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		else
			crypto_copydata(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
			    enccrd->crd_len, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	}
	if (saved_ctx)
		fpu_kern_leave(td, &ses->fpu_ctx);
	if (allocated)
		crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
		    enccrd->crd_len, buf);
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
		/* Save the last ciphertext block as the IV for the next request. */
		crypto_copydata(crp->crp_flags, crp->crp_buf,
		    enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
		    AES_BLOCK_LEN, ses->iv);
out:
	if (allocated) {
		bzero(buf, enccrd->crd_len);
		free(buf, M_AESNI);
	}
	return (error);
}
319