// SPDX-License-Identifier: CDDL-1.0
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */
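/*
 * Consumer-side sketch (illustrative only; the authoritative caller-facing
 * API lives in the KCF api/ layer, not in this file).  A consumer roughly
 * does:
 *
 *	crypto_mechanism_t mech = {
 *		.cm_type = crypto_mech2id(SUN_CKM_AES_GCM),
 *		.cm_param = (char *)&gcm_params,
 *		.cm_param_len = sizeof (CK_AES_GCM_PARAMS),
 *	};
 *	ret = crypto_encrypt(&mech, &plaintext, &key, tmpl, &ciphertext);
 *
 * KCF then routes the request to aes_encrypt_atomic()/aes_decrypt_atomic()
 * below, based on the mechanism table this provider registers.
 */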

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>

/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
};
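/*
 * Only single-shot (atomic) operations are advertised: the
 * CRYPTO_FG_ENCRYPT_ATOMIC/CRYPTO_FG_DECRYPT_ATOMIC flags above mean KCF
 * never calls multi-part init/update/final entry points on this provider.
 */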

static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);

static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_atomic = aes_decrypt_atomic
};

static int aes_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int aes_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};

static const crypto_ops_t aes_crypto_ops = {
	&aes_cipher_ops,
	NULL,
	&aes_ctx_ops,
};

static const crypto_provider_info_t aes_prov_info = {
	"AES Software Provider",
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;

int
aes_mod_init(void)
{
	/* Determine the fastest available implementation. */
	aes_impl_init();
	gcm_impl_init();

	/* Register with KCF. If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle))
		return (EACCES);

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (0);
}

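/*
 * Validate the mechanism parameter length for the requested AES mode and,
 * when a ctx pointer is supplied, allocate the matching mode context
 * (CCM or GCM).  A NULL cm_param is not rejected here; the stricter check
 * happens later in aes_common_init_ctx().
 */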
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	default:
		__builtin_unreachable();
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(KM_SLEEP);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
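/*
 * Note: crypto_key_t lengths are expressed in bits, so the checks below
 * accept only 128-, 192- or 256-bit keys before expanding the schedule.
 */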
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	if (key->ck_length < AES_MINBITS ||
	    key->ck_length > AES_MAXBITS) {
		return (CRYPTO_KEY_SIZE_RANGE);
	}

	/* key length must be either 128, 192, or 256 */
	if ((key->ck_length & 63) != 0)
		return (CRYPTO_KEY_SIZE_RANGE);

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/*
 * KCF software provider encrypt entry points.
 */
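/*
 * Single-shot encryption.  The flow is: validate the mechanism parameter,
 * build a temporary aes_ctx_t on the stack, verify the output buffer is
 * large enough (returning CRYPTO_BUFFER_TOO_SMALL with the required size in
 * ciphertext->cd_length otherwise), run the mode update and final steps,
 * then scrub any key material before returning.
 */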
static int
aes_encrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx;
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	memset(&aes_ctx, 0, sizeof (aes_ctx_t));

	ASSERT(ciphertext != NULL);

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

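	/*
	 * Both modes append authentication data to the ciphertext: CCM adds
	 * the MAC (ac_mac_len bytes), GCM adds the tag (ac_tag_len bytes).
	 */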
	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		__builtin_unreachable();
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
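	/*
	 * Scrub the expanded key schedule before freeing it so no key
	 * material is left behind in kernel memory; gcm_clear_ctx() does
	 * the equivalent for GCM-specific state.
	 */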
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}
	if (aes_ctx.ac_flags & GCM_MODE) {
		gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
	}
	return (ret);
}

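/*
 * Single-shot decryption.  Both CCM and GCM are counter-based modes, so
 * decryption still uses the forward cipher (aes_encrypt_block) to generate
 * the keystream; only the final tag/MAC verification step differs.
 */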
static int
aes_decrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx;
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	memset(&aes_ctx, 0, sizeof (aes_ctx_t));

	ASSERT(plaintext != NULL);

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

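	/*
	 * For CCM the plaintext size is carried in the mechanism parameters
	 * (ac_data_len); for GCM it is the ciphertext minus the appended tag.
	 */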
	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	default:
		__builtin_unreachable();
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else
			__builtin_unreachable();
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & GCM_MODE) {
		gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
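/*
 * A context template is simply a pre-expanded AES key schedule.  Callers
 * that issue many operations with the same key can create one up front and
 * pass it to the atomic entry points, skipping the per-call key expansion
 * in aes_common_init_ctx().
 */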
static int
aes_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size, KM_SLEEP)) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule. Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		memset(keysched, 0, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}


static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			memset(aes_ctx->ac_keysched, 0,
			    aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}


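/*
 * Shared context setup: install the AES key schedule (either the supplied
 * template or a freshly allocated, provider-owned one) and hand off to the
 * CCM/GCM mode-specific init with the mechanism parameters.
 */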
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	}

	/*
	 * On any failure above, free the provider-owned key schedule we
	 * allocated; a caller-supplied template is left untouched.  Setting
	 * rv and breaking (rather than returning early) on a bad parameter
	 * routes those cases through this cleanup as well.
	 */
	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			memset(keysched, 0, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}