1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or https://opensource.org/licenses/CDDL-1.0.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21 /*
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 */
24
25 /*
26 * AES provider for the Kernel Cryptographic Framework (KCF)
27 */
28
29 #include <sys/zfs_context.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/icp.h>
34 #include <modes/modes.h>
35 #define _AES_IMPL
36 #include <aes/aes_impl.h>
37 #include <modes/gcm_impl.h>
38
39 /*
40 * Mechanism info structure passed to KCF during registration.
41 */
static const crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_CCM: {mechanism name, mechanism type, supported function flags} */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GCM: only single-shot (atomic) operations are offered */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
};
50
/* Shared context setup used by both atomic entry points below. */
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);

/* Single-shot AEAD encrypt/decrypt entry points registered with KCF. */
static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
59
/* Cipher operation vector exported to KCF; atomic operations only. */
static const crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_atomic = aes_decrypt_atomic
};
64
static int aes_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int aes_free_context(crypto_ctx_t *);

/* Context-template management vector exported to KCF. */
static const crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};
73
/*
 * Aggregate operation table for the provider.  Positional initializers:
 * cipher ops, an unused middle slot (NULL here), and context ops — see
 * crypto_ops_t in the SPI headers for the field layout.
 */
static const crypto_ops_t aes_crypto_ops = {
	&aes_cipher_ops,
	NULL,
	&aes_ctx_ops,
};
79
/* Provider description handed to KCF by aes_mod_init() at registration. */
static const crypto_provider_info_t aes_prov_info = {
	"AES Software Provider",
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};
86
/* Handle returned by crypto_register_provider(); 0 means not registered. */
static crypto_kcf_provider_handle_t aes_prov_handle = 0;
88
89 int
aes_mod_init(void)90 aes_mod_init(void)
91 {
92 /* Determine the fastest available implementation. */
93 aes_impl_init();
94 gcm_impl_init();
95
96 /* Register with KCF. If the registration fails, remove the module. */
97 if (crypto_register_provider(&aes_prov_info, &aes_prov_handle))
98 return (EACCES);
99
100 return (0);
101 }
102
103 int
aes_mod_fini(void)104 aes_mod_fini(void)
105 {
106 /* Unregister from KCF if module is registered */
107 if (aes_prov_handle != 0) {
108 if (crypto_unregister_provider(aes_prov_handle))
109 return (EBUSY);
110
111 aes_prov_handle = 0;
112 }
113
114 return (0);
115 }
116
117 static int
aes_check_mech_param(crypto_mechanism_t * mechanism,aes_ctx_t ** ctx)118 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx)
119 {
120 void *p = NULL;
121 boolean_t param_required = B_TRUE;
122 size_t param_len;
123 void *(*alloc_fun)(int);
124 int rv = CRYPTO_SUCCESS;
125
126 switch (mechanism->cm_type) {
127 case AES_CCM_MECH_INFO_TYPE:
128 param_len = sizeof (CK_AES_CCM_PARAMS);
129 alloc_fun = ccm_alloc_ctx;
130 break;
131 case AES_GCM_MECH_INFO_TYPE:
132 param_len = sizeof (CK_AES_GCM_PARAMS);
133 alloc_fun = gcm_alloc_ctx;
134 break;
135 default:
136 __builtin_unreachable();
137 }
138 if (param_required && mechanism->cm_param != NULL &&
139 mechanism->cm_param_len != param_len) {
140 rv = CRYPTO_MECHANISM_PARAM_INVALID;
141 }
142 if (ctx != NULL) {
143 p = (alloc_fun)(KM_SLEEP);
144 *ctx = p;
145 }
146 return (rv);
147 }
148
149 /*
150 * Initialize key schedules for AES
151 */
152 static int
init_keysched(crypto_key_t * key,void * newbie)153 init_keysched(crypto_key_t *key, void *newbie)
154 {
155 if (key->ck_length < AES_MINBITS ||
156 key->ck_length > AES_MAXBITS) {
157 return (CRYPTO_KEY_SIZE_RANGE);
158 }
159
160 /* key length must be either 128, 192, or 256 */
161 if ((key->ck_length & 63) != 0)
162 return (CRYPTO_KEY_SIZE_RANGE);
163
164 aes_init_keysched(key->ck_data, key->ck_length, newbie);
165 return (CRYPTO_SUCCESS);
166 }
167
168 /*
169 * KCF software provider encrypt entry points.
170 */
171 static int
aes_encrypt_atomic(crypto_mechanism_t * mechanism,crypto_key_t * key,crypto_data_t * plaintext,crypto_data_t * ciphertext,crypto_spi_ctx_template_t template)172 aes_encrypt_atomic(crypto_mechanism_t *mechanism,
173 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
174 crypto_spi_ctx_template_t template)
175 {
176 aes_ctx_t aes_ctx;
177 off_t saved_offset;
178 size_t saved_length;
179 size_t length_needed;
180 int ret;
181
182 memset(&aes_ctx, 0, sizeof (aes_ctx_t));
183
184 ASSERT(ciphertext != NULL);
185
186 if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
187 return (ret);
188
189 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
190 KM_SLEEP, B_TRUE);
191 if (ret != CRYPTO_SUCCESS)
192 return (ret);
193
194 switch (mechanism->cm_type) {
195 case AES_CCM_MECH_INFO_TYPE:
196 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
197 break;
198 case AES_GCM_MECH_INFO_TYPE:
199 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
200 break;
201 default:
202 __builtin_unreachable();
203 }
204
205 /* return size of buffer needed to store output */
206 if (ciphertext->cd_length < length_needed) {
207 ciphertext->cd_length = length_needed;
208 ret = CRYPTO_BUFFER_TOO_SMALL;
209 goto out;
210 }
211
212 saved_offset = ciphertext->cd_offset;
213 saved_length = ciphertext->cd_length;
214
215 /*
216 * Do an update on the specified input data.
217 */
218 switch (plaintext->cd_format) {
219 case CRYPTO_DATA_RAW:
220 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
221 aes_encrypt_contiguous_blocks);
222 break;
223 case CRYPTO_DATA_UIO:
224 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
225 aes_encrypt_contiguous_blocks);
226 break;
227 default:
228 ret = CRYPTO_ARGUMENTS_BAD;
229 }
230
231 if (ret == CRYPTO_SUCCESS) {
232 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
233 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
234 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
235 aes_xor_block);
236 if (ret != CRYPTO_SUCCESS)
237 goto out;
238 ASSERT(aes_ctx.ac_remainder_len == 0);
239 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) {
240 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
241 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
242 aes_copy_block, aes_xor_block);
243 if (ret != CRYPTO_SUCCESS)
244 goto out;
245 ASSERT(aes_ctx.ac_remainder_len == 0);
246 } else {
247 ASSERT(aes_ctx.ac_remainder_len == 0);
248 }
249
250 if (plaintext != ciphertext) {
251 ciphertext->cd_length =
252 ciphertext->cd_offset - saved_offset;
253 }
254 } else {
255 ciphertext->cd_length = saved_length;
256 }
257 ciphertext->cd_offset = saved_offset;
258
259 out:
260 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
261 memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
262 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
263 }
264 if (aes_ctx.ac_flags & GCM_MODE) {
265 gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
266 }
267 return (ret);
268 }
269
/*
 * KCF software provider decrypt entry point (atomic/single-shot).
 *
 * Decrypts `ciphertext` into `plaintext` with AES-CCM or AES-GCM in a
 * single pass.  The mode's final step (ccm/gcm_decrypt_final) performs
 * tag verification; the produced plaintext length is only reported when
 * that step succeeds.  `template`, if non-NULL, is a pre-expanded key
 * schedule from aes_create_ctx_template(); otherwise one is allocated
 * here, used, scrubbed, and freed before returning.
 *
 * Returns CRYPTO_SUCCESS, CRYPTO_BUFFER_TOO_SMALL (with
 * plaintext->cd_length set to the required size), or another CRYPTO_*
 * error from parameter validation or the mode implementation.
 */
static int
aes_decrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx;
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	memset(&aes_ctx, 0, sizeof (aes_ctx_t));

	ASSERT(plaintext != NULL);

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	/* Output size is the payload with the MAC/tag stripped off. */
	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	default:
		__builtin_unreachable();
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			/* Verify the CCM MAC; only then report a length. */
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) {
			/* Verify the GCM tag; only then report a length. */
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else
			__builtin_unreachable();
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	/* Scrub and free a key schedule we allocated ourselves. */
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	/* Free CCM's internal plaintext buffer, if one was allocated. */
	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & GCM_MODE) {
		gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
	}

	return (ret);
}
382
383 /*
384 * KCF software provider context template entry points.
385 */
386 static int
aes_create_ctx_template(crypto_mechanism_t * mechanism,crypto_key_t * key,crypto_spi_ctx_template_t * tmpl,size_t * tmpl_size)387 aes_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
388 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size)
389 {
390 void *keysched;
391 size_t size;
392 int rv;
393
394 if (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
395 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE)
396 return (CRYPTO_MECHANISM_INVALID);
397
398 if ((keysched = aes_alloc_keysched(&size, KM_SLEEP)) == NULL) {
399 return (CRYPTO_HOST_MEMORY);
400 }
401
402 /*
403 * Initialize key schedule. Key length information is stored
404 * in the key.
405 */
406 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
407 memset(keysched, 0, size);
408 kmem_free(keysched, size);
409 return (rv);
410 }
411
412 *tmpl = keysched;
413 *tmpl_size = size;
414
415 return (CRYPTO_SUCCESS);
416 }
417
418
419 static int
aes_free_context(crypto_ctx_t * ctx)420 aes_free_context(crypto_ctx_t *ctx)
421 {
422 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
423
424 if (aes_ctx != NULL) {
425 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
426 ASSERT(aes_ctx->ac_keysched_len != 0);
427 memset(aes_ctx->ac_keysched, 0,
428 aes_ctx->ac_keysched_len);
429 kmem_free(aes_ctx->ac_keysched,
430 aes_ctx->ac_keysched_len);
431 }
432 crypto_free_mode_ctx(aes_ctx);
433 ctx->cc_provider_private = NULL;
434 }
435
436 return (CRYPTO_SUCCESS);
437 }
438
439
440 static int
aes_common_init_ctx(aes_ctx_t * aes_ctx,crypto_spi_ctx_template_t * template,crypto_mechanism_t * mechanism,crypto_key_t * key,int kmflag,boolean_t is_encrypt_init)441 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
442 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
443 boolean_t is_encrypt_init)
444 {
445 int rv = CRYPTO_SUCCESS;
446 void *keysched;
447 size_t size = 0;
448
449 if (template == NULL) {
450 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
451 return (CRYPTO_HOST_MEMORY);
452 /*
453 * Initialize key schedule.
454 * Key length is stored in the key.
455 */
456 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
457 kmem_free(keysched, size);
458 return (rv);
459 }
460
461 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
462 aes_ctx->ac_keysched_len = size;
463 } else {
464 keysched = template;
465 }
466 aes_ctx->ac_keysched = keysched;
467
468 switch (mechanism->cm_type) {
469 case AES_CCM_MECH_INFO_TYPE:
470 if (mechanism->cm_param == NULL ||
471 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
472 return (CRYPTO_MECHANISM_PARAM_INVALID);
473 }
474 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
475 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
476 aes_xor_block);
477 break;
478 case AES_GCM_MECH_INFO_TYPE:
479 if (mechanism->cm_param == NULL ||
480 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
481 return (CRYPTO_MECHANISM_PARAM_INVALID);
482 }
483 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
484 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
485 aes_xor_block);
486 break;
487 }
488
489 if (rv != CRYPTO_SUCCESS) {
490 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
491 memset(keysched, 0, size);
492 kmem_free(keysched, size);
493 }
494 }
495
496 return (rv);
497 }
498