xref: /freebsd/sys/contrib/openzfs/module/icp/io/sha2_mod.c (revision b1c5f60ce87cc2f179dfb81de507d9b7bf59564c)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/zfs_context.h>
28 #include <sys/crypto/common.h>
29 #include <sys/crypto/spi.h>
30 #include <sys/crypto/icp.h>
31 #define	_SHA2_IMPL
32 #include <sys/sha2.h>
33 #include <sha2/sha2_impl.h>
34 
35 /*
36  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
37  * by KCF to one of the entry points.
38  */
39 
40 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
41 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
42 
43 /* to extract the digest length passed as mechanism parameter */
/*
 * Extract the ulong_t digest length passed in (m)->cm_param into (len),
 * coping with a possibly misaligned parameter buffer.  Wrapped in
 * do { } while (0) so the macro expands to exactly one statement and is
 * safe in unbraced if/else bodies (CERT PRE10-C); previously it expanded
 * to a bare compound statement.
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) do {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
} while (0)
53 
/*
 * One-shot digest of a key: init the given context for `mech`, absorb
 * `len` bytes of `key`, and finalize into `digest`.  Wrapped in
 * do { } while (0) so the multi-statement macro is a single statement
 * (CERT PRE10-C).
 */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) do {	\
	SHA2Init(mech, ctx);				\
	SHA2Update(ctx, key, len);			\
	SHA2Final(digest, ctx);				\
} while (0)
59 
60 /*
61  * Mechanism info structure passed to KCF during registration.
62  */
static const crypto_mech_info_t sha2_mech_info_tab[] = {
	/*
	 * Plain digest mechanisms advertise digest ops; HMAC mechanisms
	 * advertise mac ops.  The "GENERAL" variants additionally take a
	 * caller-chosen (possibly truncated) output length as a ulong_t
	 * mechanism parameter (see PROV_SHA2_GET_DIGEST_LEN users).
	 */
	/* SHA256 */
	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA256-HMAC */
	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA256-HMAC GENERAL */
	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA384 */
	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA384-HMAC */
	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA384-HMAC GENERAL */
	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA512 */
	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA512-HMAC */
	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA512-HMAC GENERAL */
	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
};
92 
/* Forward declarations for the KCF digest entry points defined below. */
static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_atomic(crypto_mechanism_t *, crypto_data_t *,
    crypto_data_t *);

/* Digest operations vector handed to KCF. */
static const crypto_digest_ops_t sha2_digest_ops = {
	.digest_init = sha2_digest_init,
	.digest = sha2_digest,
	.digest_update = sha2_digest_update,
	.digest_final = sha2_digest_final,
	.digest_atomic = sha2_digest_atomic
};

/* Forward declarations for the KCF HMAC entry points defined below. */
static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int sha2_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

/* MAC operations vector; single-part .mac is not provided. */
static const crypto_mac_ops_t sha2_mac_ops = {
	.mac_init = sha2_mac_init,
	.mac = NULL,
	.mac_update = sha2_mac_update,
	.mac_final = sha2_mac_final,
	.mac_atomic = sha2_mac_atomic,
	.mac_verify_atomic = sha2_mac_verify_atomic
};

/* Context-management entry points (defined elsewhere in this file). */
static int sha2_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int sha2_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t sha2_ctx_ops = {
	.create_ctx_template = sha2_create_ctx_template,
	.free_context = sha2_free_context
};

/* Aggregate ops table: digest, (no cipher), mac, ctx management. */
static const crypto_ops_t sha2_crypto_ops = {
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	&sha2_ctx_ops,
};

/* Provider descriptor registered with KCF in sha2_mod_init(). */
static const crypto_provider_info_t sha2_prov_info = {
	"SHA2 Software Provider",
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab) / sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
};

/* Handle returned by crypto_register_provider(); 0 == not registered. */
static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
150 
151 int
152 sha2_mod_init(void)
153 {
154 	int ret;
155 
156 	/*
157 	 * Register with KCF. If the registration fails, log an
158 	 * error but do not uninstall the module, since the functionality
159 	 * provided by misc/sha2 should still be available.
160 	 */
161 	if ((ret = crypto_register_provider(&sha2_prov_info,
162 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
163 		cmn_err(CE_WARN, "sha2 _init: "
164 		    "crypto_register_provider() failed (0x%x)", ret);
165 
166 	return (0);
167 }
168 
169 int
170 sha2_mod_fini(void)
171 {
172 	int ret = 0;
173 
174 	if (sha2_prov_handle != 0) {
175 		if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
176 		    CRYPTO_SUCCESS) {
177 			cmn_err(CE_WARN,
178 			    "sha2 _fini: crypto_unregister_provider() "
179 			    "failed (0x%x)", ret);
180 			return (EBUSY);
181 		}
182 		sha2_prov_handle = 0;
183 	}
184 
185 	return (ret);
186 }
187 
188 /*
189  * KCF software provider digest entry points.
190  */
191 
192 static int
193 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism)
194 {
195 
196 	/*
197 	 * Allocate and initialize SHA2 context.
198 	 */
199 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t), KM_SLEEP);
200 	if (ctx->cc_provider_private == NULL)
201 		return (CRYPTO_HOST_MEMORY);
202 
203 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
204 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
205 
206 	return (CRYPTO_SUCCESS);
207 }
208 
209 /*
210  * Helper SHA2 digest update function for uio data.
211  */
212 static int
213 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
214 {
215 	off_t offset = data->cd_offset;
216 	size_t length = data->cd_length;
217 	uint_t vec_idx = 0;
218 	size_t cur_len;
219 
220 	/* we support only kernel buffer */
221 	if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
222 		return (CRYPTO_ARGUMENTS_BAD);
223 
224 	/*
225 	 * Jump to the first iovec containing data to be
226 	 * digested.
227 	 */
228 	offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
229 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
230 		/*
231 		 * The caller specified an offset that is larger than the
232 		 * total size of the buffers it provided.
233 		 */
234 		return (CRYPTO_DATA_LEN_RANGE);
235 	}
236 
237 	/*
238 	 * Now do the digesting on the iovecs.
239 	 */
240 	while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
241 		cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
242 		    offset, length);
243 
244 		SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
245 		    vec_idx) + offset, cur_len);
246 		length -= cur_len;
247 		vec_idx++;
248 		offset = 0;
249 	}
250 
251 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
252 		/*
253 		 * The end of the specified iovec's was reached but
254 		 * the length requested could not be processed, i.e.
255 		 * The caller requested to digest more data than it provided.
256 		 */
257 		return (CRYPTO_DATA_LEN_RANGE);
258 	}
259 
260 	return (CRYPTO_SUCCESS);
261 }
262 
263 /*
264  * Helper SHA2 digest final function for uio data.
265  * digest_len is the length of the desired digest. If digest_len
266  * is smaller than the default SHA2 digest length, the caller
267  * must pass a scratch buffer, digest_scratch, which must
268  * be at least the algorithm's digest length bytes.
269  */
270 static int
271 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
272     ulong_t digest_len, uchar_t *digest_scratch)
273 {
274 	off_t offset = digest->cd_offset;
275 	uint_t vec_idx = 0;
276 
277 	/* we support only kernel buffer */
278 	if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
279 		return (CRYPTO_ARGUMENTS_BAD);
280 
281 	/*
282 	 * Jump to the first iovec containing ptr to the digest to
283 	 * be returned.
284 	 */
285 	offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
286 	if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
287 		/*
288 		 * The caller specified an offset that is
289 		 * larger than the total size of the buffers
290 		 * it provided.
291 		 */
292 		return (CRYPTO_DATA_LEN_RANGE);
293 	}
294 
295 	if (offset + digest_len <=
296 	    zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
297 		/*
298 		 * The computed SHA2 digest will fit in the current
299 		 * iovec.
300 		 */
301 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
302 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
303 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
304 		    (digest_len != SHA512_DIGEST_LENGTH))) {
305 			/*
306 			 * The caller requested a short digest. Digest
307 			 * into a scratch buffer and return to
308 			 * the user only what was requested.
309 			 */
310 			SHA2Final(digest_scratch, sha2_ctx);
311 
312 			bcopy(digest_scratch, (uchar_t *)
313 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
314 			    digest_len);
315 		} else {
316 			SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
317 			    cd_uio, vec_idx) + offset,
318 			    sha2_ctx);
319 
320 		}
321 	} else {
322 		/*
323 		 * The computed digest will be crossing one or more iovec's.
324 		 * This is bad performance-wise but we need to support it.
325 		 * Allocate a small scratch buffer on the stack and
326 		 * copy it piece meal to the specified digest iovec's.
327 		 */
328 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
329 		off_t scratch_offset = 0;
330 		size_t length = digest_len;
331 		size_t cur_len;
332 
333 		SHA2Final(digest_tmp, sha2_ctx);
334 
335 		while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
336 			cur_len =
337 			    MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
338 			    offset, length);
339 			bcopy(digest_tmp + scratch_offset,
340 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
341 			    cur_len);
342 
343 			length -= cur_len;
344 			vec_idx++;
345 			scratch_offset += cur_len;
346 			offset = 0;
347 		}
348 
349 		if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
350 			/*
351 			 * The end of the specified iovec's was reached but
352 			 * the length requested could not be processed, i.e.
353 			 * The caller requested to digest more data than it
354 			 * provided.
355 			 */
356 			return (CRYPTO_DATA_LEN_RANGE);
357 		}
358 	}
359 
360 	return (CRYPTO_SUCCESS);
361 }
362 
/*
 * Single-part digest: absorb `data`, finalize into `digest`, and release
 * the per-request context on every path except "output buffer too small".
 */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		/* Full-length digest requested, so no scratch is needed. */
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
448 
449 static int
450 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data)
451 {
452 	int ret = CRYPTO_SUCCESS;
453 
454 	ASSERT(ctx->cc_provider_private != NULL);
455 
456 	/*
457 	 * Do the SHA2 update on the specified input data.
458 	 */
459 	switch (data->cd_format) {
460 	case CRYPTO_DATA_RAW:
461 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
462 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
463 		    data->cd_length);
464 		break;
465 	case CRYPTO_DATA_UIO:
466 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
467 		    data);
468 		break;
469 	default:
470 		ret = CRYPTO_ARGUMENTS_BAD;
471 	}
472 
473 	return (ret);
474 }
475 
/*
 * Finish a multi-part digest into `digest` and release the per-request
 * context on every path except "output buffer too small".
 */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		/* Full-length digest requested, so no scratch is needed. */
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
536 
537 static int
538 sha2_digest_atomic(crypto_mechanism_t *mechanism, crypto_data_t *data,
539     crypto_data_t *digest)
540 {
541 	int ret = CRYPTO_SUCCESS;
542 	SHA2_CTX sha2_ctx;
543 	uint32_t sha_digest_len;
544 
545 	/*
546 	 * Do the SHA inits.
547 	 */
548 
549 	SHA2Init(mechanism->cm_type, &sha2_ctx);
550 
551 	switch (data->cd_format) {
552 	case CRYPTO_DATA_RAW:
553 		SHA2Update(&sha2_ctx, (uint8_t *)data->
554 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
555 		break;
556 	case CRYPTO_DATA_UIO:
557 		ret = sha2_digest_update_uio(&sha2_ctx, data);
558 		break;
559 	default:
560 		ret = CRYPTO_ARGUMENTS_BAD;
561 	}
562 
563 	/*
564 	 * Do the SHA updates on the specified input data.
565 	 */
566 
567 	if (ret != CRYPTO_SUCCESS) {
568 		/* the update failed, bail */
569 		digest->cd_length = 0;
570 		return (ret);
571 	}
572 
573 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
574 		sha_digest_len = SHA256_DIGEST_LENGTH;
575 	else
576 		sha_digest_len = SHA512_DIGEST_LENGTH;
577 
578 	/*
579 	 * Do a SHA2 final, must be done separately since the digest
580 	 * type can be different than the input data type.
581 	 */
582 	switch (digest->cd_format) {
583 	case CRYPTO_DATA_RAW:
584 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
585 		    digest->cd_offset, &sha2_ctx);
586 		break;
587 	case CRYPTO_DATA_UIO:
588 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
589 		    sha_digest_len, NULL);
590 		break;
591 	default:
592 		ret = CRYPTO_ARGUMENTS_BAD;
593 	}
594 
595 	if (ret == CRYPTO_SUCCESS)
596 		digest->cd_length = sha_digest_len;
597 	else
598 		digest->cd_length = 0;
599 
600 	return (ret);
601 }
602 
603 /*
604  * KCF software provider mac entry points.
605  *
606  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
607  *
608  * Init:
609  * The initialization routine initializes what we denote
610  * as the inner and outer contexts by doing
611  * - for inner context: SHA2(key XOR ipad)
612  * - for outer context: SHA2(key XOR opad)
613  *
614  * Update:
615  * Each subsequent SHA2 HMAC update will result in an
616  * update of the inner context with the specified data.
617  *
618  * Final:
619  * The SHA2 HMAC final will do a SHA2 final operation on the
620  * inner context, and the resulting digest will be used
621  * as the data for an update on the outer context. Last
622  * but not least, a SHA2 final on the outer context will
623  * be performed to obtain the SHA2 HMAC digest to return
624  * to the user.
625  */
626 
627 /*
628  * Initialize a SHA2-HMAC context.
629  */
630 static void
631 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
632 {
633 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
634 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
635 	int i, block_size, blocks_per_int64;
636 
637 	/* Determine the block size */
638 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
639 		block_size = SHA256_HMAC_BLOCK_SIZE;
640 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
641 	} else {
642 		block_size = SHA512_HMAC_BLOCK_SIZE;
643 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
644 	}
645 
646 	(void) bzero(ipad, block_size);
647 	(void) bzero(opad, block_size);
648 
649 	if (keyval != NULL) {
650 		(void) bcopy(keyval, ipad, length_in_bytes);
651 		(void) bcopy(keyval, opad, length_in_bytes);
652 	} else {
653 		ASSERT0(length_in_bytes);
654 	}
655 
656 	/* XOR key with ipad (0x36) and opad (0x5c) */
657 	for (i = 0; i < blocks_per_int64; i ++) {
658 		ipad[i] ^= 0x3636363636363636;
659 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
660 	}
661 
662 	/* perform SHA2 on ipad */
663 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
664 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
665 
666 	/* perform SHA2 on opad */
667 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
668 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
669 
670 }
671 
/*
 * Initialize a new SHA2-HMAC KCF context, either from a precomputed
 * context template or directly from the caller's key.
 */
674 static int
675 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
676     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template)
677 {
678 	int ret = CRYPTO_SUCCESS;
679 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
680 	uint_t sha_digest_len, sha_hmac_block_size;
681 
682 	/*
683 	 * Set the digest length and block size to values appropriate to the
684 	 * mechanism
685 	 */
686 	switch (mechanism->cm_type) {
687 	case SHA256_HMAC_MECH_INFO_TYPE:
688 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
689 		sha_digest_len = SHA256_DIGEST_LENGTH;
690 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
691 		break;
692 	case SHA384_HMAC_MECH_INFO_TYPE:
693 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
694 	case SHA512_HMAC_MECH_INFO_TYPE:
695 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
696 		sha_digest_len = SHA512_DIGEST_LENGTH;
697 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
698 		break;
699 	default:
700 		return (CRYPTO_MECHANISM_INVALID);
701 	}
702 
703 	ctx->cc_provider_private =
704 	    kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
705 	if (ctx->cc_provider_private == NULL)
706 		return (CRYPTO_HOST_MEMORY);
707 
708 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
709 	if (ctx_template != NULL) {
710 		/* reuse context template */
711 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
712 		    sizeof (sha2_hmac_ctx_t));
713 	} else {
714 		/* no context template, compute context */
715 		if (keylen_in_bytes > sha_hmac_block_size) {
716 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
717 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
718 
719 			/*
720 			 * Hash the passed-in key to get a smaller key.
721 			 * The inner context is used since it hasn't been
722 			 * initialized yet.
723 			 */
724 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
725 			    &hmac_ctx->hc_icontext,
726 			    key->ck_data, keylen_in_bytes, digested_key);
727 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
728 			    digested_key, sha_digest_len);
729 		} else {
730 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
731 			    key->ck_data, keylen_in_bytes);
732 		}
733 	}
734 
735 	/*
736 	 * Get the mechanism parameters, if applicable.
737 	 */
738 	if (mechanism->cm_type % 3 == 2) {
739 		if (mechanism->cm_param == NULL ||
740 		    mechanism->cm_param_len != sizeof (ulong_t))
741 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
742 		PROV_SHA2_GET_DIGEST_LEN(mechanism,
743 		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
744 		if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
745 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
746 	}
747 
748 	if (ret != CRYPTO_SUCCESS) {
749 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
750 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
751 		ctx->cc_provider_private = NULL;
752 	}
753 
754 	return (ret);
755 }
756 
757 static int
758 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data)
759 {
760 	int ret = CRYPTO_SUCCESS;
761 
762 	ASSERT(ctx->cc_provider_private != NULL);
763 
764 	/*
765 	 * Do a SHA2 update of the inner context using the specified
766 	 * data.
767 	 */
768 	switch (data->cd_format) {
769 	case CRYPTO_DATA_RAW:
770 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
771 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
772 		    data->cd_length);
773 		break;
774 	case CRYPTO_DATA_UIO:
775 		ret = sha2_digest_update_uio(
776 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
777 		break;
778 	default:
779 		ret = CRYPTO_ARGUMENTS_BAD;
780 	}
781 
782 	return (ret);
783 }
784 
785 static int
786 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
787 {
788 	int ret = CRYPTO_SUCCESS;
789 	uchar_t digest[SHA512_DIGEST_LENGTH];
790 	uint32_t digest_len, sha_digest_len;
791 
792 	ASSERT(ctx->cc_provider_private != NULL);
793 
794 	/* Set the digest lengths to values appropriate to the mechanism */
795 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
796 	case SHA256_HMAC_MECH_INFO_TYPE:
797 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
798 		break;
799 	case SHA384_HMAC_MECH_INFO_TYPE:
800 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
801 		break;
802 	case SHA512_HMAC_MECH_INFO_TYPE:
803 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
804 		break;
805 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
806 		sha_digest_len = SHA256_DIGEST_LENGTH;
807 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
808 		break;
809 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
810 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
811 		sha_digest_len = SHA512_DIGEST_LENGTH;
812 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
813 		break;
814 	default:
815 		return (CRYPTO_ARGUMENTS_BAD);
816 	}
817 
818 	/*
819 	 * We need to just return the length needed to store the output.
820 	 * We should not destroy the context for the following cases.
821 	 */
822 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
823 		mac->cd_length = digest_len;
824 		return (CRYPTO_BUFFER_TOO_SMALL);
825 	}
826 
827 	/*
828 	 * Do a SHA2 final on the inner context.
829 	 */
830 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
831 
832 	/*
833 	 * Do a SHA2 update on the outer context, feeding the inner
834 	 * digest as data.
835 	 */
836 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
837 	    sha_digest_len);
838 
839 	/*
840 	 * Do a SHA2 final on the outer context, storing the computing
841 	 * digest in the users buffer.
842 	 */
843 	switch (mac->cd_format) {
844 	case CRYPTO_DATA_RAW:
845 		if (digest_len != sha_digest_len) {
846 			/*
847 			 * The caller requested a short digest. Digest
848 			 * into a scratch buffer and return to
849 			 * the user only what was requested.
850 			 */
851 			SHA2Final(digest,
852 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
853 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
854 			    mac->cd_offset, digest_len);
855 		} else {
856 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
857 			    mac->cd_offset,
858 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
859 		}
860 		break;
861 	case CRYPTO_DATA_UIO:
862 		ret = sha2_digest_final_uio(
863 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
864 		    digest_len, digest);
865 		break;
866 	default:
867 		ret = CRYPTO_ARGUMENTS_BAD;
868 	}
869 
870 	if (ret == CRYPTO_SUCCESS)
871 		mac->cd_length = digest_len;
872 	else
873 		mac->cd_length = 0;
874 
875 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
876 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
877 	ctx->cc_provider_private = NULL;
878 
879 	return (ret);
880 }
881 
/*
 * Feed `data` (raw or uio) into the inner context of the on-stack HMAC
 * context `ctx`, storing the status in `ret`.  Wrapped in
 * do { } while (0) so the multi-statement macro expands to a single
 * statement (CERT PRE10-C).
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) do {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
} while (0)
896 
897 static int
898 sha2_mac_atomic(crypto_mechanism_t *mechanism,
899     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
900     crypto_spi_ctx_template_t ctx_template)
901 {
902 	int ret = CRYPTO_SUCCESS;
903 	uchar_t digest[SHA512_DIGEST_LENGTH];
904 	sha2_hmac_ctx_t sha2_hmac_ctx;
905 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
906 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
907 
908 	/*
909 	 * Set the digest length and block size to values appropriate to the
910 	 * mechanism
911 	 */
912 	switch (mechanism->cm_type) {
913 	case SHA256_HMAC_MECH_INFO_TYPE:
914 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
915 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
916 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
917 		break;
918 	case SHA384_HMAC_MECH_INFO_TYPE:
919 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
920 	case SHA512_HMAC_MECH_INFO_TYPE:
921 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
922 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
923 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
924 		break;
925 	default:
926 		return (CRYPTO_MECHANISM_INVALID);
927 	}
928 
929 	if (ctx_template != NULL) {
930 		/* reuse context template */
931 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
932 	} else {
933 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
934 		/* no context template, initialize context */
935 		if (keylen_in_bytes > sha_hmac_block_size) {
936 			/*
937 			 * Hash the passed-in key to get a smaller key.
938 			 * The inner context is used since it hasn't been
939 			 * initialized yet.
940 			 */
941 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
942 			    &sha2_hmac_ctx.hc_icontext,
943 			    key->ck_data, keylen_in_bytes, digest);
944 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
945 			    sha_digest_len);
946 		} else {
947 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
948 			    keylen_in_bytes);
949 		}
950 	}
951 
952 	/* get the mechanism parameters, if applicable */
953 	if ((mechanism->cm_type % 3) == 2) {
954 		if (mechanism->cm_param == NULL ||
955 		    mechanism->cm_param_len != sizeof (ulong_t)) {
956 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
957 			goto bail;
958 		}
959 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
960 		if (digest_len > sha_digest_len) {
961 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
962 			goto bail;
963 		}
964 	}
965 
966 	/* do a SHA2 update of the inner context using the specified data */
967 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
968 	if (ret != CRYPTO_SUCCESS)
969 		/* the update failed, free context and bail */
970 		goto bail;
971 
972 	/*
973 	 * Do a SHA2 final on the inner context.
974 	 */
975 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
976 
977 	/*
978 	 * Do an SHA2 update on the outer context, feeding the inner
979 	 * digest as data.
980 	 *
981 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
982 	 * bytes of the inner hash value.
983 	 */
984 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
985 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
986 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
987 		    SHA384_DIGEST_LENGTH);
988 	else
989 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
990 
991 	/*
992 	 * Do a SHA2 final on the outer context, storing the computed
993 	 * digest in the users buffer.
994 	 */
995 	switch (mac->cd_format) {
996 	case CRYPTO_DATA_RAW:
997 		if (digest_len != sha_digest_len) {
998 			/*
999 			 * The caller requested a short digest. Digest
1000 			 * into a scratch buffer and return to
1001 			 * the user only what was requested.
1002 			 */
1003 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1004 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1005 			    mac->cd_offset, digest_len);
1006 		} else {
1007 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1008 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1009 		}
1010 		break;
1011 	case CRYPTO_DATA_UIO:
1012 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1013 		    digest_len, digest);
1014 		break;
1015 	default:
1016 		ret = CRYPTO_ARGUMENTS_BAD;
1017 	}
1018 
1019 	if (ret == CRYPTO_SUCCESS) {
1020 		mac->cd_length = digest_len;
1021 		return (CRYPTO_SUCCESS);
1022 	}
1023 bail:
1024 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1025 	mac->cd_length = 0;
1026 	return (ret);
1027 }
1028 
1029 static int
1030 sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
1031     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1032     crypto_spi_ctx_template_t ctx_template)
1033 {
1034 	int ret = CRYPTO_SUCCESS;
1035 	uchar_t digest[SHA512_DIGEST_LENGTH];
1036 	sha2_hmac_ctx_t sha2_hmac_ctx;
1037 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1038 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1039 
1040 	/*
1041 	 * Set the digest length and block size to values appropriate to the
1042 	 * mechanism
1043 	 */
1044 	switch (mechanism->cm_type) {
1045 	case SHA256_HMAC_MECH_INFO_TYPE:
1046 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1047 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1048 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1049 		break;
1050 	case SHA384_HMAC_MECH_INFO_TYPE:
1051 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1052 	case SHA512_HMAC_MECH_INFO_TYPE:
1053 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1054 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1055 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1056 		break;
1057 	default:
1058 		return (CRYPTO_MECHANISM_INVALID);
1059 	}
1060 
1061 	if (ctx_template != NULL) {
1062 		/* reuse context template */
1063 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1064 	} else {
1065 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1066 		/* no context template, initialize context */
1067 		if (keylen_in_bytes > sha_hmac_block_size) {
1068 			/*
1069 			 * Hash the passed-in key to get a smaller key.
1070 			 * The inner context is used since it hasn't been
1071 			 * initialized yet.
1072 			 */
1073 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1074 			    &sha2_hmac_ctx.hc_icontext,
1075 			    key->ck_data, keylen_in_bytes, digest);
1076 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1077 			    sha_digest_len);
1078 		} else {
1079 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1080 			    keylen_in_bytes);
1081 		}
1082 	}
1083 
1084 	/* get the mechanism parameters, if applicable */
1085 	if (mechanism->cm_type % 3 == 2) {
1086 		if (mechanism->cm_param == NULL ||
1087 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1088 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1089 			goto bail;
1090 		}
1091 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1092 		if (digest_len > sha_digest_len) {
1093 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1094 			goto bail;
1095 		}
1096 	}
1097 
1098 	if (mac->cd_length != digest_len) {
1099 		ret = CRYPTO_INVALID_MAC;
1100 		goto bail;
1101 	}
1102 
1103 	/* do a SHA2 update of the inner context using the specified data */
1104 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1105 	if (ret != CRYPTO_SUCCESS)
1106 		/* the update failed, free context and bail */
1107 		goto bail;
1108 
1109 	/* do a SHA2 final on the inner context */
1110 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1111 
1112 	/*
1113 	 * Do an SHA2 update on the outer context, feeding the inner
1114 	 * digest as data.
1115 	 *
1116 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1117 	 * bytes of the inner hash value.
1118 	 */
1119 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1120 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1121 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1122 		    SHA384_DIGEST_LENGTH);
1123 	else
1124 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1125 
1126 	/*
1127 	 * Do a SHA2 final on the outer context, storing the computed
1128 	 * digest in the users buffer.
1129 	 */
1130 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1131 
1132 	/*
1133 	 * Compare the computed digest against the expected digest passed
1134 	 * as argument.
1135 	 */
1136 
1137 	switch (mac->cd_format) {
1138 
1139 	case CRYPTO_DATA_RAW:
1140 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1141 		    mac->cd_offset, digest_len) != 0)
1142 			ret = CRYPTO_INVALID_MAC;
1143 		break;
1144 
1145 	case CRYPTO_DATA_UIO: {
1146 		off_t offset = mac->cd_offset;
1147 		uint_t vec_idx = 0;
1148 		off_t scratch_offset = 0;
1149 		size_t length = digest_len;
1150 		size_t cur_len;
1151 
1152 		/* we support only kernel buffer */
1153 		if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
1154 			return (CRYPTO_ARGUMENTS_BAD);
1155 
1156 		/* jump to the first iovec containing the expected digest */
1157 		offset = zfs_uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
1158 		if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
1159 			/*
1160 			 * The caller specified an offset that is
1161 			 * larger than the total size of the buffers
1162 			 * it provided.
1163 			 */
1164 			ret = CRYPTO_DATA_LEN_RANGE;
1165 			break;
1166 		}
1167 
1168 		/* do the comparison of computed digest vs specified one */
1169 		while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
1170 			cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
1171 			    offset, length);
1172 
1173 			if (bcmp(digest + scratch_offset,
1174 			    zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
1175 			    cur_len) != 0) {
1176 				ret = CRYPTO_INVALID_MAC;
1177 				break;
1178 			}
1179 
1180 			length -= cur_len;
1181 			vec_idx++;
1182 			scratch_offset += cur_len;
1183 			offset = 0;
1184 		}
1185 		break;
1186 	}
1187 
1188 	default:
1189 		ret = CRYPTO_ARGUMENTS_BAD;
1190 	}
1191 
1192 	return (ret);
1193 bail:
1194 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1195 	mac->cd_length = 0;
1196 	return (ret);
1197 }
1198 
1199 /*
1200  * KCF software provider context management entry points.
1201  */
1202 
1203 static int
1204 sha2_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
1205     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size)
1206 {
1207 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1208 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1209 	uint32_t sha_digest_len, sha_hmac_block_size;
1210 
1211 	/*
1212 	 * Set the digest length and block size to values appropriate to the
1213 	 * mechanism
1214 	 */
1215 	switch (mechanism->cm_type) {
1216 	case SHA256_HMAC_MECH_INFO_TYPE:
1217 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1218 		sha_digest_len = SHA256_DIGEST_LENGTH;
1219 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1220 		break;
1221 	case SHA384_HMAC_MECH_INFO_TYPE:
1222 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1223 	case SHA512_HMAC_MECH_INFO_TYPE:
1224 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1225 		sha_digest_len = SHA512_DIGEST_LENGTH;
1226 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1227 		break;
1228 	default:
1229 		return (CRYPTO_MECHANISM_INVALID);
1230 	}
1231 
1232 	/*
1233 	 * Allocate and initialize SHA2 context.
1234 	 */
1235 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
1236 	if (sha2_hmac_ctx_tmpl == NULL)
1237 		return (CRYPTO_HOST_MEMORY);
1238 
1239 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1240 
1241 	if (keylen_in_bytes > sha_hmac_block_size) {
1242 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1243 
1244 		/*
1245 		 * Hash the passed-in key to get a smaller key.
1246 		 * The inner context is used since it hasn't been
1247 		 * initialized yet.
1248 		 */
1249 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1250 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1251 		    key->ck_data, keylen_in_bytes, digested_key);
1252 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1253 		    sha_digest_len);
1254 	} else {
1255 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1256 		    keylen_in_bytes);
1257 	}
1258 
1259 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1260 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1261 
1262 	return (CRYPTO_SUCCESS);
1263 }
1264 
1265 static int
1266 sha2_free_context(crypto_ctx_t *ctx)
1267 {
1268 	uint_t ctx_len;
1269 
1270 	if (ctx->cc_provider_private == NULL)
1271 		return (CRYPTO_SUCCESS);
1272 
1273 	/*
1274 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1275 	 * have different lengths.
1276 	 *
1277 	 * Note: Below is dependent on the mechanism ordering.
1278 	 */
1279 
1280 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1281 		ctx_len = sizeof (sha2_ctx_t);
1282 	else
1283 		ctx_len = sizeof (sha2_hmac_ctx_t);
1284 
1285 	bzero(ctx->cc_provider_private, ctx_len);
1286 	kmem_free(ctx->cc_provider_private, ctx_len);
1287 	ctx->cc_provider_private = NULL;
1288 
1289 	return (CRYPTO_SUCCESS);
1290 }
1291