xref: /freebsd/sys/contrib/openzfs/module/icp/io/sha2_mod.c (revision dd41de95a84d979615a2ef11df6850622bf6184e)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/zfs_context.h>
28 #include <sys/modctl.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/crypto/icp.h>
32 #define	_SHA2_IMPL
33 #include <sys/sha2.h>
34 #include <sha2/sha2_impl.h>
35 
36 /*
 * The sha2 module is created with a single modlinkage:
 * - a modlcrypto that allows the module to register with the Kernel
 *   Cryptographic Framework (KCF) as a software provider for the SHA2
 *   mechanisms.
 * Consumers can also call the SHA2Init, SHA2Update, and SHA2Final entry
 * points directly.
43  */
44 
45 static struct modlcrypto modlcrypto = {
46 	&mod_cryptoops,
47 	"SHA2 Kernel SW Provider"
48 };
49 
50 static struct modlinkage modlinkage = {
51 	MODREV_1, {&modlcrypto, NULL}
52 };
53 
54 /*
55  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
56  * by KCF to one of the entry points.
57  */
58 
59 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
60 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
61 
62 /* to extract the digest length passed as mechanism parameter */
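/* (cm_param may be misaligned for a direct ulong_t load, hence the bcopy) */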
63 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
64 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
65 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
66 	else {								\
67 		ulong_t tmp_ulong;					\
68 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
69 		(len) = (uint32_t)tmp_ulong;				\
70 	}								\
71 }
72 
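/*
 * Digest a key with SHA2; used by the HMAC entry points to shrink keys
 * longer than the HMAC block size (RFC 2104) before the inner and outer
 * contexts are initialized.
 */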
73 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
74 	SHA2Init(mech, ctx);				\
75 	SHA2Update(ctx, key, len);			\
76 	SHA2Final(digest, ctx);				\
77 }
78 
79 /*
80  * Mechanism info structure passed to KCF during registration.
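 *
 * The mechanisms below are registered in sha2_mech_type_t order; the
 * cm_type % 3 and cm_type / 3 arithmetic used by the MAC entry points and
 * by sha2_free_context() relies on that enumeration order (plain digest,
 * HMAC, HMAC_GENERAL for each algorithm).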
81  */
82 static crypto_mech_info_t sha2_mech_info_tab[] = {
83 	/* SHA256 */
84 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
85 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
86 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
87 	/* SHA256-HMAC */
88 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
89 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
90 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
91 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
92 	/* SHA256-HMAC GENERAL */
93 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
94 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
95 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
96 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
97 	/* SHA384 */
98 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
99 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
100 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
101 	/* SHA384-HMAC */
102 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
103 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
104 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
105 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
106 	/* SHA384-HMAC GENERAL */
107 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
108 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
109 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
110 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
111 	/* SHA512 */
112 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
113 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
114 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
115 	/* SHA512-HMAC */
116 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
117 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
118 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
119 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
120 	/* SHA512-HMAC GENERAL */
121 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
122 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
123 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
124 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
125 };
126 
127 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
128 
129 static crypto_control_ops_t sha2_control_ops = {
130 	sha2_provider_status
131 };
132 
133 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
134     crypto_req_handle_t);
135 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
136     crypto_req_handle_t);
137 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
138     crypto_req_handle_t);
139 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
140     crypto_req_handle_t);
141 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
142     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
143     crypto_req_handle_t);
144 
145 static crypto_digest_ops_t sha2_digest_ops = {
146 	.digest_init = sha2_digest_init,
147 	.digest = sha2_digest,
148 	.digest_update = sha2_digest_update,
149 	.digest_key = NULL,
150 	.digest_final = sha2_digest_final,
151 	.digest_atomic = sha2_digest_atomic
152 };
153 
154 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
155     crypto_spi_ctx_template_t, crypto_req_handle_t);
156 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
157     crypto_req_handle_t);
158 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
159 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
160     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
161     crypto_spi_ctx_template_t, crypto_req_handle_t);
162 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
163     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
164     crypto_spi_ctx_template_t, crypto_req_handle_t);
165 
166 static crypto_mac_ops_t sha2_mac_ops = {
167 	.mac_init = sha2_mac_init,
168 	.mac = NULL,
169 	.mac_update = sha2_mac_update,
170 	.mac_final = sha2_mac_final,
171 	.mac_atomic = sha2_mac_atomic,
172 	.mac_verify_atomic = sha2_mac_verify_atomic
173 };
174 
175 static int sha2_create_ctx_template(crypto_provider_handle_t,
176     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
177     size_t *, crypto_req_handle_t);
178 static int sha2_free_context(crypto_ctx_t *);
179 
180 static crypto_ctx_ops_t sha2_ctx_ops = {
181 	.create_ctx_template = sha2_create_ctx_template,
182 	.free_context = sha2_free_context
183 };
184 
185 static crypto_ops_t sha2_crypto_ops = {{{{{
186 	&sha2_control_ops,
187 	&sha2_digest_ops,
188 	NULL,
189 	&sha2_mac_ops,
190 	NULL,
191 	NULL,
192 	NULL,
193 	NULL,
194 	NULL,
195 	NULL,
196 	NULL,
197 	NULL,
198 	NULL,
199 	&sha2_ctx_ops
200 }}}}};
201 
202 static crypto_provider_info_t sha2_prov_info = {{{{
203 	CRYPTO_SPI_VERSION_1,
204 	"SHA2 Software Provider",
205 	CRYPTO_SW_PROVIDER,
206 	NULL,
207 	&sha2_crypto_ops,
208 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
209 	sha2_mech_info_tab
210 }}}};
211 
212 static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
213 
214 int
215 sha2_mod_init(void)
216 {
217 	int ret;
218 
219 	if ((ret = mod_install(&modlinkage)) != 0)
220 		return (ret);
221 
222 	/*
223 	 * Register with KCF. If the registration fails, log an
224 	 * error but do not uninstall the module, since the functionality
225 	 * provided by misc/sha2 should still be available.
226 	 */
227 	if ((ret = crypto_register_provider(&sha2_prov_info,
228 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
229 		cmn_err(CE_WARN, "sha2 _init: "
230 		    "crypto_register_provider() failed (0x%x)", ret);
231 
232 	return (0);
233 }
234 
235 int
236 sha2_mod_fini(void)
237 {
238 	int ret;
239 
240 	if (sha2_prov_handle != 0) {
241 		if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
242 		    CRYPTO_SUCCESS) {
243 			cmn_err(CE_WARN,
244 			    "sha2 _fini: crypto_unregister_provider() "
245 			    "failed (0x%x)", ret);
246 			return (EBUSY);
247 		}
248 		sha2_prov_handle = 0;
249 	}
250 
251 	return (mod_remove(&modlinkage));
252 }
253 
254 /*
255  * KCF software provider control entry points.
256  */
257 /* ARGSUSED */
258 static void
259 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
260 {
261 	*status = CRYPTO_PROVIDER_READY;
262 }
263 
264 /*
265  * KCF software provider digest entry points.
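 *
 * sha2_digest_init(), sha2_digest_update() and sha2_digest_final() implement
 * the multi-part digest interface, sha2_digest() performs a single-part
 * digest on an initialized context, and sha2_digest_atomic() is the atomic
 * variant that needs no context at all.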
266  */
267 
268 static int
269 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
270     crypto_req_handle_t req)
271 {
272 
273 	/*
274 	 * Allocate and initialize SHA2 context.
275 	 */
276 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
277 	    crypto_kmflag(req));
278 	if (ctx->cc_provider_private == NULL)
279 		return (CRYPTO_HOST_MEMORY);
280 
281 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
282 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
283 
284 	return (CRYPTO_SUCCESS);
285 }
286 
287 /*
288  * Helper SHA2 digest update function for uio data.
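 * Walks the iovecs starting at data->cd_offset and feeds data->cd_length
 * bytes to SHA2Update(); only UIO_SYSSPACE (kernel) uios are supported.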
289  */
290 static int
291 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
292 {
293 	off_t offset = data->cd_offset;
294 	size_t length = data->cd_length;
295 	uint_t vec_idx = 0;
296 	size_t cur_len;
297 
	/* we support only kernel buffers */
299 	if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
300 		return (CRYPTO_ARGUMENTS_BAD);
301 
302 	/*
303 	 * Jump to the first iovec containing data to be
304 	 * digested.
305 	 */
306 	offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
307 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
308 		/*
309 		 * The caller specified an offset that is larger than the
310 		 * total size of the buffers it provided.
311 		 */
312 		return (CRYPTO_DATA_LEN_RANGE);
313 	}
314 
315 	/*
316 	 * Now do the digesting on the iovecs.
317 	 */
318 	while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
319 		cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
320 		    offset, length);
321 
322 		SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
323 		    vec_idx) + offset, cur_len);
324 		length -= cur_len;
325 		vec_idx++;
326 		offset = 0;
327 	}
328 
329 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
330 		/*
		 * The end of the specified iovecs was reached but the
		 * requested length could not be processed; i.e. the caller
		 * requested to digest more data than it provided.
334 		 */
335 		return (CRYPTO_DATA_LEN_RANGE);
336 	}
337 
338 	return (CRYPTO_SUCCESS);
339 }
340 
341 /*
342  * Helper SHA2 digest final function for uio data.
 * digest_len is the length of the desired digest. If digest_len
 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must
 * be at least as large as the algorithm's digest length.
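 *
 * For example (illustrative sizes), a SHA512_HMAC_GENERAL request for a
 * 20-byte MAC is finalized into digest_scratch (SHA512_DIGEST_LENGTH bytes)
 * and only the first 20 bytes are copied out to the caller's uio.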
347  */
348 static int
349 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
350     ulong_t digest_len, uchar_t *digest_scratch)
351 {
352 	off_t offset = digest->cd_offset;
353 	uint_t vec_idx = 0;
354 
	/* we support only kernel buffers */
356 	if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
357 		return (CRYPTO_ARGUMENTS_BAD);
358 
359 	/*
	 * Jump to the first iovec containing the destination for the
	 * digest to be returned.
362 	 */
363 	offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
364 	if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
365 		/*
366 		 * The caller specified an offset that is
367 		 * larger than the total size of the buffers
368 		 * it provided.
369 		 */
370 		return (CRYPTO_DATA_LEN_RANGE);
371 	}
372 
373 	if (offset + digest_len <=
374 	    zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
375 		/*
376 		 * The computed SHA2 digest will fit in the current
377 		 * iovec.
378 		 */
379 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
380 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
381 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
382 		    (digest_len != SHA512_DIGEST_LENGTH))) {
383 			/*
384 			 * The caller requested a short digest. Digest
385 			 * into a scratch buffer and return to
386 			 * the user only what was requested.
387 			 */
388 			SHA2Final(digest_scratch, sha2_ctx);
389 
390 			bcopy(digest_scratch, (uchar_t *)
391 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
392 			    digest_len);
393 		} else {
			SHA2Final((uchar_t *)zfs_uio_iovbase(digest->cd_uio,
			    vec_idx) + offset, sha2_ctx);
		}
399 	} else {
400 		/*
		 * The computed digest will cross one or more iovecs.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovecs.
405 		 */
406 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
407 		off_t scratch_offset = 0;
408 		size_t length = digest_len;
409 		size_t cur_len;
410 
411 		SHA2Final(digest_tmp, sha2_ctx);
412 
413 		while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
414 			cur_len =
415 			    MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
416 			    offset, length);
417 			bcopy(digest_tmp + scratch_offset,
418 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
419 			    cur_len);
420 
421 			length -= cur_len;
422 			vec_idx++;
423 			scratch_offset += cur_len;
424 			offset = 0;
425 		}
426 
427 		if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
428 			/*
			 * The end of the specified iovecs was reached but
			 * the requested length could not be processed; i.e.
			 * the caller requested to digest more data than it
			 * provided.
433 			 */
434 			return (CRYPTO_DATA_LEN_RANGE);
435 		}
436 	}
437 
438 	return (CRYPTO_SUCCESS);
439 }
440 
441 /* ARGSUSED */
442 static int
443 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
444     crypto_req_handle_t req)
445 {
446 	int ret = CRYPTO_SUCCESS;
447 	uint_t sha_digest_len;
448 
449 	ASSERT(ctx->cc_provider_private != NULL);
450 
451 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
452 	case SHA256_MECH_INFO_TYPE:
453 		sha_digest_len = SHA256_DIGEST_LENGTH;
454 		break;
455 	case SHA384_MECH_INFO_TYPE:
456 		sha_digest_len = SHA384_DIGEST_LENGTH;
457 		break;
458 	case SHA512_MECH_INFO_TYPE:
459 		sha_digest_len = SHA512_DIGEST_LENGTH;
460 		break;
461 	default:
462 		return (CRYPTO_MECHANISM_INVALID);
463 	}
464 
465 	/*
	 * If the output buffer is absent or too small, just return the
	 * length needed to store the digest and do not destroy the context.
468 	 */
469 	if ((digest->cd_length == 0) ||
470 	    (digest->cd_length < sha_digest_len)) {
471 		digest->cd_length = sha_digest_len;
472 		return (CRYPTO_BUFFER_TOO_SMALL);
473 	}
474 
475 	/*
476 	 * Do the SHA2 update on the specified input data.
477 	 */
478 	switch (data->cd_format) {
479 	case CRYPTO_DATA_RAW:
480 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
481 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
482 		    data->cd_length);
483 		break;
484 	case CRYPTO_DATA_UIO:
485 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
486 		    data);
487 		break;
488 	default:
489 		ret = CRYPTO_ARGUMENTS_BAD;
490 	}
491 
492 	if (ret != CRYPTO_SUCCESS) {
493 		/* the update failed, free context and bail */
494 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
495 		ctx->cc_provider_private = NULL;
496 		digest->cd_length = 0;
497 		return (ret);
498 	}
499 
500 	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * data format can differ from that of the input data.
503 	 */
504 	switch (digest->cd_format) {
505 	case CRYPTO_DATA_RAW:
506 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
507 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
508 		break;
509 	case CRYPTO_DATA_UIO:
510 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
511 		    digest, sha_digest_len, NULL);
512 		break;
513 	default:
514 		ret = CRYPTO_ARGUMENTS_BAD;
515 	}
516 
517 	/* all done, free context and return */
518 
519 	if (ret == CRYPTO_SUCCESS)
520 		digest->cd_length = sha_digest_len;
521 	else
522 		digest->cd_length = 0;
523 
524 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
525 	ctx->cc_provider_private = NULL;
526 	return (ret);
527 }
528 
529 /* ARGSUSED */
530 static int
531 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
532     crypto_req_handle_t req)
533 {
534 	int ret = CRYPTO_SUCCESS;
535 
536 	ASSERT(ctx->cc_provider_private != NULL);
537 
538 	/*
539 	 * Do the SHA2 update on the specified input data.
540 	 */
541 	switch (data->cd_format) {
542 	case CRYPTO_DATA_RAW:
543 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
544 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
545 		    data->cd_length);
546 		break;
547 	case CRYPTO_DATA_UIO:
548 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
549 		    data);
550 		break;
551 	default:
552 		ret = CRYPTO_ARGUMENTS_BAD;
553 	}
554 
555 	return (ret);
556 }
557 
558 /* ARGSUSED */
559 static int
560 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
561     crypto_req_handle_t req)
562 {
563 	int ret = CRYPTO_SUCCESS;
564 	uint_t sha_digest_len;
565 
566 	ASSERT(ctx->cc_provider_private != NULL);
567 
568 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
569 	case SHA256_MECH_INFO_TYPE:
570 		sha_digest_len = SHA256_DIGEST_LENGTH;
571 		break;
572 	case SHA384_MECH_INFO_TYPE:
573 		sha_digest_len = SHA384_DIGEST_LENGTH;
574 		break;
575 	case SHA512_MECH_INFO_TYPE:
576 		sha_digest_len = SHA512_DIGEST_LENGTH;
577 		break;
578 	default:
579 		return (CRYPTO_MECHANISM_INVALID);
580 	}
581 
582 	/*
	 * If the output buffer is absent or too small, just return the
	 * length needed to store the digest and do not destroy the context.
585 	 */
586 	if ((digest->cd_length == 0) ||
587 	    (digest->cd_length < sha_digest_len)) {
588 		digest->cd_length = sha_digest_len;
589 		return (CRYPTO_BUFFER_TOO_SMALL);
590 	}
591 
592 	/*
593 	 * Do a SHA2 final.
594 	 */
595 	switch (digest->cd_format) {
596 	case CRYPTO_DATA_RAW:
597 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
598 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
599 		break;
600 	case CRYPTO_DATA_UIO:
601 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
602 		    digest, sha_digest_len, NULL);
603 		break;
604 	default:
605 		ret = CRYPTO_ARGUMENTS_BAD;
606 	}
607 
608 	/* all done, free context and return */
609 
610 	if (ret == CRYPTO_SUCCESS)
611 		digest->cd_length = sha_digest_len;
612 	else
613 		digest->cd_length = 0;
614 
615 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
616 	ctx->cc_provider_private = NULL;
617 
618 	return (ret);
619 }
620 
621 /* ARGSUSED */
622 static int
623 sha2_digest_atomic(crypto_provider_handle_t provider,
624     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
625     crypto_data_t *data, crypto_data_t *digest,
626     crypto_req_handle_t req)
627 {
628 	int ret = CRYPTO_SUCCESS;
629 	SHA2_CTX sha2_ctx;
630 	uint32_t sha_digest_len;
631 
632 	/*
	 * Do the SHA2 init.
634 	 */
635 
636 	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
638 	switch (data->cd_format) {
639 	case CRYPTO_DATA_RAW:
640 		SHA2Update(&sha2_ctx, (uint8_t *)data->
641 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
642 		break;
643 	case CRYPTO_DATA_UIO:
644 		ret = sha2_digest_update_uio(&sha2_ctx, data);
645 		break;
646 	default:
647 		ret = CRYPTO_ARGUMENTS_BAD;
648 	}
649 
654 	if (ret != CRYPTO_SUCCESS) {
655 		/* the update failed, bail */
656 		digest->cd_length = 0;
657 		return (ret);
658 	}
659 
660 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
661 		sha_digest_len = SHA256_DIGEST_LENGTH;
662 	else
663 		sha_digest_len = SHA512_DIGEST_LENGTH;
664 
665 	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * data format can differ from that of the input data.
668 	 */
669 	switch (digest->cd_format) {
670 	case CRYPTO_DATA_RAW:
671 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
672 		    digest->cd_offset, &sha2_ctx);
673 		break;
674 	case CRYPTO_DATA_UIO:
675 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
676 		    sha_digest_len, NULL);
677 		break;
678 	default:
679 		ret = CRYPTO_ARGUMENTS_BAD;
680 	}
681 
682 	if (ret == CRYPTO_SUCCESS)
683 		digest->cd_length = sha_digest_len;
684 	else
685 		digest->cd_length = 0;
686 
687 	return (ret);
688 }
689 
690 /*
691  * KCF software provider mac entry points.
692  *
693  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
694  *
695  * Init:
696  * The initialization routine initializes what we denote
697  * as the inner and outer contexts by doing
698  * - for inner context: SHA2(key XOR ipad)
699  * - for outer context: SHA2(key XOR opad)
700  *
701  * Update:
702  * Each subsequent SHA2 HMAC update will result in an
703  * update of the inner context with the specified data.
704  *
705  * Final:
706  * The SHA2 HMAC final will do a SHA2 final operation on the
707  * inner context, and the resulting digest will be used
708  * as the data for an update on the outer context. Last
709  * but not least, a SHA2 final on the outer context will
710  * be performed to obtain the SHA2 HMAC digest to return
711  * to the user.
712  */
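
/*
 * In outline, the multi-part MAC entry points below perform the following
 * sequence on a sha2_hmac_ctx_t (illustrative pseudo-code; it assumes a key
 * no longer than the block size, longer keys being shrunk first with
 * PROV_SHA2_DIGEST_KEY()):
 *
 *	sha2_mac_init_ctx(&hctx, key, keylen);		  (sha2_mac_init)
 *	SHA2Update(&hctx.hc_icontext, text, textlen);	  (sha2_mac_update)
 *	SHA2Final(digest, &hctx.hc_icontext);		  (sha2_mac_final)
 *	SHA2Update(&hctx.hc_ocontext, digest, digestlen);
 *	SHA2Final(mac, &hctx.hc_ocontext);
 *
 * "hctx", "text", "digest" and "mac" are illustrative names only.
 */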
713 
714 /*
715  * Initialize a SHA2-HMAC context.
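 * The key is zero-padded to the mechanism's block size, XORed with the
 * ipad and opad constants, and the two results are fed to the inner and
 * outer contexts respectively (RFC 2104).  Callers shrink keys longer than
 * the block size with PROV_SHA2_DIGEST_KEY() before calling this.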
716  */
717 static void
718 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
719 {
720 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
721 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
722 	int i, block_size, blocks_per_int64;
723 
724 	/* Determine the block size */
725 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
726 		block_size = SHA256_HMAC_BLOCK_SIZE;
727 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
728 	} else {
729 		block_size = SHA512_HMAC_BLOCK_SIZE;
730 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
731 	}
732 
733 	(void) bzero(ipad, block_size);
734 	(void) bzero(opad, block_size);
735 	(void) bcopy(keyval, ipad, length_in_bytes);
736 	(void) bcopy(keyval, opad, length_in_bytes);
737 
738 	/* XOR key with ipad (0x36) and opad (0x5c) */
739 	for (i = 0; i < blocks_per_int64; i ++) {
740 		ipad[i] ^= 0x3636363636363636;
741 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
742 	}
743 
744 	/* perform SHA2 on ipad */
745 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
746 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
747 
748 	/* perform SHA2 on opad */
749 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
750 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
751 
752 }
753 
754 /*
/*
 * Initialize a SHA2-HMAC operation: set up the inner and outer contexts
 * from the key (or from a pre-computed context template) and validate the
 * mechanism parameters.
 */
757 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
758     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
759     crypto_req_handle_t req)
760 {
761 	int ret = CRYPTO_SUCCESS;
762 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
763 	uint_t sha_digest_len, sha_hmac_block_size;
764 
765 	/*
766 	 * Set the digest length and block size to values appropriate to the
767 	 * mechanism
768 	 */
769 	switch (mechanism->cm_type) {
770 	case SHA256_HMAC_MECH_INFO_TYPE:
771 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
772 		sha_digest_len = SHA256_DIGEST_LENGTH;
773 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
774 		break;
775 	case SHA384_HMAC_MECH_INFO_TYPE:
776 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
777 	case SHA512_HMAC_MECH_INFO_TYPE:
778 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
779 		sha_digest_len = SHA512_DIGEST_LENGTH;
780 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
781 		break;
782 	default:
783 		return (CRYPTO_MECHANISM_INVALID);
784 	}
785 
786 	if (key->ck_format != CRYPTO_KEY_RAW)
787 		return (CRYPTO_ARGUMENTS_BAD);
788 
789 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
790 	    crypto_kmflag(req));
791 	if (ctx->cc_provider_private == NULL)
792 		return (CRYPTO_HOST_MEMORY);
793 
794 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
795 	if (ctx_template != NULL) {
796 		/* reuse context template */
797 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
798 		    sizeof (sha2_hmac_ctx_t));
799 	} else {
800 		/* no context template, compute context */
801 		if (keylen_in_bytes > sha_hmac_block_size) {
802 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
803 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
804 
805 			/*
806 			 * Hash the passed-in key to get a smaller key.
807 			 * The inner context is used since it hasn't been
808 			 * initialized yet.
809 			 */
810 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
811 			    &hmac_ctx->hc_icontext,
812 			    key->ck_data, keylen_in_bytes, digested_key);
813 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
814 			    digested_key, sha_digest_len);
815 		} else {
816 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
817 			    key->ck_data, keylen_in_bytes);
818 		}
819 	}
820 
821 	/*
822 	 * Get the mechanism parameters, if applicable.
823 	 */
824 	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			/* only read cm_param when it is actually present */
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
832 	}
833 
834 	if (ret != CRYPTO_SUCCESS) {
835 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
836 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
837 		ctx->cc_provider_private = NULL;
838 	}
839 
840 	return (ret);
841 }
842 
843 /* ARGSUSED */
844 static int
845 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
846     crypto_req_handle_t req)
847 {
848 	int ret = CRYPTO_SUCCESS;
849 
850 	ASSERT(ctx->cc_provider_private != NULL);
851 
852 	/*
853 	 * Do a SHA2 update of the inner context using the specified
854 	 * data.
855 	 */
856 	switch (data->cd_format) {
857 	case CRYPTO_DATA_RAW:
858 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
859 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
860 		    data->cd_length);
861 		break;
862 	case CRYPTO_DATA_UIO:
863 		ret = sha2_digest_update_uio(
864 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
865 		break;
866 	default:
867 		ret = CRYPTO_ARGUMENTS_BAD;
868 	}
869 
870 	return (ret);
871 }
872 
873 /* ARGSUSED */
874 static int
875 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
876 {
877 	int ret = CRYPTO_SUCCESS;
878 	uchar_t digest[SHA512_DIGEST_LENGTH];
879 	uint32_t digest_len, sha_digest_len;
880 
881 	ASSERT(ctx->cc_provider_private != NULL);
882 
883 	/* Set the digest lengths to values appropriate to the mechanism */
884 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
885 	case SHA256_HMAC_MECH_INFO_TYPE:
886 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
887 		break;
888 	case SHA384_HMAC_MECH_INFO_TYPE:
889 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
890 		break;
891 	case SHA512_HMAC_MECH_INFO_TYPE:
892 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
893 		break;
894 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
895 		sha_digest_len = SHA256_DIGEST_LENGTH;
896 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
897 		break;
898 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
899 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
900 		sha_digest_len = SHA512_DIGEST_LENGTH;
901 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
902 		break;
903 	default:
904 		return (CRYPTO_ARGUMENTS_BAD);
905 	}
906 
907 	/*
	 * If the output buffer is absent or too small, just return the
	 * length needed to store the MAC and do not destroy the context.
910 	 */
911 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
912 		mac->cd_length = digest_len;
913 		return (CRYPTO_BUFFER_TOO_SMALL);
914 	}
915 
916 	/*
917 	 * Do a SHA2 final on the inner context.
918 	 */
919 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
920 
921 	/*
922 	 * Do a SHA2 update on the outer context, feeding the inner
923 	 * digest as data.
924 	 */
925 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
926 	    sha_digest_len);
927 
928 	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
931 	 */
932 	switch (mac->cd_format) {
933 	case CRYPTO_DATA_RAW:
934 		if (digest_len != sha_digest_len) {
935 			/*
936 			 * The caller requested a short digest. Digest
937 			 * into a scratch buffer and return to
938 			 * the user only what was requested.
939 			 */
940 			SHA2Final(digest,
941 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
942 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
943 			    mac->cd_offset, digest_len);
944 		} else {
945 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
946 			    mac->cd_offset,
947 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
948 		}
949 		break;
950 	case CRYPTO_DATA_UIO:
951 		ret = sha2_digest_final_uio(
952 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
953 		    digest_len, digest);
954 		break;
955 	default:
956 		ret = CRYPTO_ARGUMENTS_BAD;
957 	}
958 
959 	if (ret == CRYPTO_SUCCESS)
960 		mac->cd_length = digest_len;
961 	else
962 		mac->cd_length = 0;
963 
964 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
965 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
966 	ctx->cc_provider_private = NULL;
967 
968 	return (ret);
969 }
970 
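/*
 * Feed "data" (raw or uio form) to the inner context of a SHA2-HMAC
 * operation, storing any error in "ret".
 */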
971 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
972 	switch (data->cd_format) {					\
973 	case CRYPTO_DATA_RAW:						\
974 		SHA2Update(&(ctx).hc_icontext,				\
975 		    (uint8_t *)data->cd_raw.iov_base +			\
976 		    data->cd_offset, data->cd_length);			\
977 		break;							\
978 	case CRYPTO_DATA_UIO:						\
979 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
980 		break;							\
981 	default:							\
982 		ret = CRYPTO_ARGUMENTS_BAD;				\
983 	}								\
984 }
985 
986 /* ARGSUSED */
987 static int
988 sha2_mac_atomic(crypto_provider_handle_t provider,
989     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
990     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
991     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
992 {
993 	int ret = CRYPTO_SUCCESS;
994 	uchar_t digest[SHA512_DIGEST_LENGTH];
995 	sha2_hmac_ctx_t sha2_hmac_ctx;
996 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
997 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
998 
999 	/*
1000 	 * Set the digest length and block size to values appropriate to the
1001 	 * mechanism
1002 	 */
1003 	switch (mechanism->cm_type) {
1004 	case SHA256_HMAC_MECH_INFO_TYPE:
1005 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1006 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1007 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1008 		break;
1009 	case SHA384_HMAC_MECH_INFO_TYPE:
1010 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1011 	case SHA512_HMAC_MECH_INFO_TYPE:
1012 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1013 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1014 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1015 		break;
1016 	default:
1017 		return (CRYPTO_MECHANISM_INVALID);
1018 	}
1019 
1020 	/* Add support for key by attributes (RFE 4706552) */
1021 	if (key->ck_format != CRYPTO_KEY_RAW)
1022 		return (CRYPTO_ARGUMENTS_BAD);
1023 
1024 	if (ctx_template != NULL) {
1025 		/* reuse context template */
1026 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1027 	} else {
1028 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1029 		/* no context template, initialize context */
1030 		if (keylen_in_bytes > sha_hmac_block_size) {
1031 			/*
1032 			 * Hash the passed-in key to get a smaller key.
1033 			 * The inner context is used since it hasn't been
1034 			 * initialized yet.
1035 			 */
1036 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1037 			    &sha2_hmac_ctx.hc_icontext,
1038 			    key->ck_data, keylen_in_bytes, digest);
1039 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1040 			    sha_digest_len);
1041 		} else {
1042 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1043 			    keylen_in_bytes);
1044 		}
1045 	}
1046 
1047 	/* get the mechanism parameters, if applicable */
1048 	if ((mechanism->cm_type % 3) == 2) {
1049 		if (mechanism->cm_param == NULL ||
1050 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1051 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1052 			goto bail;
1053 		}
1054 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1055 		if (digest_len > sha_digest_len) {
1056 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1057 			goto bail;
1058 		}
1059 	}
1060 
1061 	/* do a SHA2 update of the inner context using the specified data */
1062 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1063 	if (ret != CRYPTO_SUCCESS)
1064 		/* the update failed, free context and bail */
1065 		goto bail;
1066 
1067 	/*
1068 	 * Do a SHA2 final on the inner context.
1069 	 */
1070 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1071 
1072 	/*
1073 	 * Do an SHA2 update on the outer context, feeding the inner
1074 	 * digest as data.
1075 	 *
1076 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1077 	 * bytes of the inner hash value.
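	 * (sha_digest_len above is SHA512_DIGEST_LENGTH for the SHA384
	 * mechanisms, while SHA2Final() on a SHA384 inner context produces
	 * only SHA384_DIGEST_LENGTH bytes, so just those bytes are fed to
	 * the outer context.)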
1078 	 */
1079 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1080 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1081 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1082 		    SHA384_DIGEST_LENGTH);
1083 	else
1084 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1085 
1086 	/*
1087 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1089 	 */
1090 	switch (mac->cd_format) {
1091 	case CRYPTO_DATA_RAW:
1092 		if (digest_len != sha_digest_len) {
1093 			/*
1094 			 * The caller requested a short digest. Digest
1095 			 * into a scratch buffer and return to
1096 			 * the user only what was requested.
1097 			 */
1098 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1099 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1100 			    mac->cd_offset, digest_len);
1101 		} else {
1102 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1103 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1104 		}
1105 		break;
1106 	case CRYPTO_DATA_UIO:
1107 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1108 		    digest_len, digest);
1109 		break;
1110 	default:
1111 		ret = CRYPTO_ARGUMENTS_BAD;
1112 	}
1113 
1114 	if (ret == CRYPTO_SUCCESS) {
1115 		mac->cd_length = digest_len;
1116 		return (CRYPTO_SUCCESS);
1117 	}
1118 bail:
1119 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1120 	mac->cd_length = 0;
1121 	return (ret);
1122 }
1123 
1124 /* ARGSUSED */
1125 static int
1126 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1127     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1128     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1129     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1130 {
1131 	int ret = CRYPTO_SUCCESS;
1132 	uchar_t digest[SHA512_DIGEST_LENGTH];
1133 	sha2_hmac_ctx_t sha2_hmac_ctx;
1134 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1135 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1136 
1137 	/*
1138 	 * Set the digest length and block size to values appropriate to the
1139 	 * mechanism
1140 	 */
1141 	switch (mechanism->cm_type) {
1142 	case SHA256_HMAC_MECH_INFO_TYPE:
1143 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1144 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1145 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1146 		break;
1147 	case SHA384_HMAC_MECH_INFO_TYPE:
1148 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1149 	case SHA512_HMAC_MECH_INFO_TYPE:
1150 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1151 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1152 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1153 		break;
1154 	default:
1155 		return (CRYPTO_MECHANISM_INVALID);
1156 	}
1157 
1158 	/* Add support for key by attributes (RFE 4706552) */
1159 	if (key->ck_format != CRYPTO_KEY_RAW)
1160 		return (CRYPTO_ARGUMENTS_BAD);
1161 
1162 	if (ctx_template != NULL) {
1163 		/* reuse context template */
1164 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1165 	} else {
1166 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1167 		/* no context template, initialize context */
1168 		if (keylen_in_bytes > sha_hmac_block_size) {
1169 			/*
1170 			 * Hash the passed-in key to get a smaller key.
1171 			 * The inner context is used since it hasn't been
1172 			 * initialized yet.
1173 			 */
1174 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1175 			    &sha2_hmac_ctx.hc_icontext,
1176 			    key->ck_data, keylen_in_bytes, digest);
1177 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1178 			    sha_digest_len);
1179 		} else {
1180 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1181 			    keylen_in_bytes);
1182 		}
1183 	}
1184 
1185 	/* get the mechanism parameters, if applicable */
1186 	if (mechanism->cm_type % 3 == 2) {
1187 		if (mechanism->cm_param == NULL ||
1188 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1189 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1190 			goto bail;
1191 		}
1192 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1193 		if (digest_len > sha_digest_len) {
1194 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1195 			goto bail;
1196 		}
1197 	}
1198 
1199 	if (mac->cd_length != digest_len) {
1200 		ret = CRYPTO_INVALID_MAC;
1201 		goto bail;
1202 	}
1203 
1204 	/* do a SHA2 update of the inner context using the specified data */
1205 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1206 	if (ret != CRYPTO_SUCCESS)
1207 		/* the update failed, free context and bail */
1208 		goto bail;
1209 
1210 	/* do a SHA2 final on the inner context */
1211 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1212 
1213 	/*
1214 	 * Do an SHA2 update on the outer context, feeding the inner
1215 	 * digest as data.
1216 	 *
1217 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1218 	 * bytes of the inner hash value.
1219 	 */
1220 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1221 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1222 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1223 		    SHA384_DIGEST_LENGTH);
1224 	else
1225 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1226 
1227 	/*
1228 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1230 	 */
1231 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1232 
1233 	/*
1234 	 * Compare the computed digest against the expected digest passed
1235 	 * as argument.
1236 	 */
1237 
1238 	switch (mac->cd_format) {
1239 
1240 	case CRYPTO_DATA_RAW:
1241 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1242 		    mac->cd_offset, digest_len) != 0)
1243 			ret = CRYPTO_INVALID_MAC;
1244 		break;
1245 
1246 	case CRYPTO_DATA_UIO: {
1247 		off_t offset = mac->cd_offset;
1248 		uint_t vec_idx = 0;
1249 		off_t scratch_offset = 0;
1250 		size_t length = digest_len;
1251 		size_t cur_len;
1252 
		/* we support only kernel buffers */
1254 		if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
1255 			return (CRYPTO_ARGUMENTS_BAD);
1256 
1257 		/* jump to the first iovec containing the expected digest */
1258 		offset = zfs_uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
1259 		if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
1260 			/*
1261 			 * The caller specified an offset that is
1262 			 * larger than the total size of the buffers
1263 			 * it provided.
1264 			 */
1265 			ret = CRYPTO_DATA_LEN_RANGE;
1266 			break;
1267 		}
1268 
1269 		/* do the comparison of computed digest vs specified one */
1270 		while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
1271 			cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
1272 			    offset, length);
1273 
1274 			if (bcmp(digest + scratch_offset,
1275 			    zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
1276 			    cur_len) != 0) {
1277 				ret = CRYPTO_INVALID_MAC;
1278 				break;
1279 			}
1280 
1281 			length -= cur_len;
1282 			vec_idx++;
1283 			scratch_offset += cur_len;
1284 			offset = 0;
1285 		}
1286 		break;
1287 	}
1288 
1289 	default:
1290 		ret = CRYPTO_ARGUMENTS_BAD;
1291 	}
1292 
1293 	return (ret);
1294 bail:
1295 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1296 	mac->cd_length = 0;
1297 	return (ret);
1298 }
1299 
1300 /*
1301  * KCF software provider context management entry points.
1302  */
1303 
1304 /* ARGSUSED */
1305 static int
1306 sha2_create_ctx_template(crypto_provider_handle_t provider,
1307     crypto_mechanism_t *mechanism, crypto_key_t *key,
1308     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1309     crypto_req_handle_t req)
1310 {
1311 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1312 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1313 	uint32_t sha_digest_len, sha_hmac_block_size;
1314 
1315 	/*
1316 	 * Set the digest length and block size to values appropriate to the
1317 	 * mechanism
1318 	 */
1319 	switch (mechanism->cm_type) {
1320 	case SHA256_HMAC_MECH_INFO_TYPE:
1321 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1322 		sha_digest_len = SHA256_DIGEST_LENGTH;
1323 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1324 		break;
1325 	case SHA384_HMAC_MECH_INFO_TYPE:
1326 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1327 	case SHA512_HMAC_MECH_INFO_TYPE:
1328 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1329 		sha_digest_len = SHA512_DIGEST_LENGTH;
1330 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1331 		break;
1332 	default:
1333 		return (CRYPTO_MECHANISM_INVALID);
1334 	}
1335 
1336 	/* Add support for key by attributes (RFE 4706552) */
1337 	if (key->ck_format != CRYPTO_KEY_RAW)
1338 		return (CRYPTO_ARGUMENTS_BAD);
1339 
1340 	/*
1341 	 * Allocate and initialize SHA2 context.
1342 	 */
1343 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1344 	    crypto_kmflag(req));
1345 	if (sha2_hmac_ctx_tmpl == NULL)
1346 		return (CRYPTO_HOST_MEMORY);
1347 
1348 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1349 
1350 	if (keylen_in_bytes > sha_hmac_block_size) {
1351 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1352 
1353 		/*
1354 		 * Hash the passed-in key to get a smaller key.
1355 		 * The inner context is used since it hasn't been
1356 		 * initialized yet.
1357 		 */
1358 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1359 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1360 		    key->ck_data, keylen_in_bytes, digested_key);
1361 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1362 		    sha_digest_len);
1363 	} else {
1364 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1365 		    keylen_in_bytes);
1366 	}
1367 
1368 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1369 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1370 
1371 	return (CRYPTO_SUCCESS);
1372 }
1373 
1374 static int
1375 sha2_free_context(crypto_ctx_t *ctx)
1376 {
1377 	uint_t ctx_len;
1378 
1379 	if (ctx->cc_provider_private == NULL)
1380 		return (CRYPTO_SUCCESS);
1381 
1382 	/*
1383 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1384 	 * have different lengths.
1385 	 *
1386 	 * Note: Below is dependent on the mechanism ordering.
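	 * Each algorithm contributes three consecutive sha2_mech_type_t
	 * values (plain digest, HMAC, HMAC_GENERAL), so cm_type % 3 == 0
	 * identifies a plain digest mechanism (sha2_ctx_t); the HMAC
	 * variants use the larger sha2_hmac_ctx_t.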
1387 	 */
1388 
1389 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1390 		ctx_len = sizeof (sha2_ctx_t);
1391 	else
1392 		ctx_len = sizeof (sha2_hmac_ctx_t);
1393 
1394 	bzero(ctx->cc_provider_private, ctx_len);
1395 	kmem_free(ctx->cc_provider_private, ctx_len);
1396 	ctx->cc_provider_private = NULL;
1397 
1398 	return (CRYPTO_SUCCESS);
1399 }
1400