xref: /freebsd/sys/contrib/openzfs/module/icp/io/sha2_mod.c (revision 9768746ba83efa02837c5b9c66348db6e900208f)
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

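/*
 * KCF software provider for the SHA-256, SHA-384 and SHA-512 digests and
 * their HMAC variants.  The provider registers the mechanisms listed in
 * sha2_mech_info_tab below and implements the digest, MAC and context
 * management entry points declared further down in this file.
 */
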
#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#define	_SHA2_IMPL
#include <sys/sha2.h>
#include <sha2/sha2_impl.h>

/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.
 */

#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)

/* to extract the digest length passed as mechanism parameter */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
	else {								\
		ulong_t tmp_ulong;					\
		memcpy(&tmp_ulong, (m)->cm_param, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
}

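/*
 * Digest a key with the given mechanism's hash.  Used to shrink keys that
 * are longer than the underlying hash's block size, as HMAC requires.
 */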
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
	SHA2Init(mech, ctx);				\
	SHA2Update(ctx, key, len);			\
	SHA2Final(digest, ctx);				\
}

/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t sha2_mech_info_tab[] = {
	/* SHA256 */
	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA256-HMAC */
	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA256-HMAC GENERAL */
	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA384 */
	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA384-HMAC */
	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA384-HMAC GENERAL */
	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA512 */
	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	/* SHA512-HMAC */
	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA512-HMAC GENERAL */
	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
};

static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_atomic(crypto_mechanism_t *, crypto_data_t *,
    crypto_data_t *);

static const crypto_digest_ops_t sha2_digest_ops = {
	.digest_init = sha2_digest_init,
	.digest = sha2_digest,
	.digest_update = sha2_digest_update,
	.digest_final = sha2_digest_final,
	.digest_atomic = sha2_digest_atomic
};

static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int sha2_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_mac_ops_t sha2_mac_ops = {
	.mac_init = sha2_mac_init,
	.mac = NULL,
	.mac_update = sha2_mac_update,
	.mac_final = sha2_mac_final,
	.mac_atomic = sha2_mac_atomic,
	.mac_verify_atomic = sha2_mac_verify_atomic
};

static int sha2_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int sha2_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t sha2_ctx_ops = {
	.create_ctx_template = sha2_create_ctx_template,
	.free_context = sha2_free_context
};

static const crypto_ops_t sha2_crypto_ops = {
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	&sha2_ctx_ops,
};

static const crypto_provider_info_t sha2_prov_info = {
	"SHA2 Software Provider",
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab) / sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
};

static crypto_kcf_provider_handle_t sha2_prov_handle = 0;

int
sha2_mod_init(void)
{
	int ret;

	/*
	 * Register with KCF. If the registration fails, log an
	 * error but do not uninstall the module, since the functionality
	 * provided by misc/sha2 should still be available.
	 */
	if ((ret = crypto_register_provider(&sha2_prov_info,
	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
		cmn_err(CE_WARN, "sha2 _init: "
		    "crypto_register_provider() failed (0x%x)", ret);

	return (0);
}

int
sha2_mod_fini(void)
{
	int ret = 0;

	if (sha2_prov_handle != 0) {
		if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
		    CRYPTO_SUCCESS) {
			cmn_err(CE_WARN,
			    "sha2 _fini: crypto_unregister_provider() "
			    "failed (0x%x)", ret);
			return (EBUSY);
		}
		sha2_prov_handle = 0;
	}

	return (ret);
}

/*
 * KCF software provider digest entry points.
 */

static int
sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism)
{

	/*
	 * Allocate and initialize SHA2 context.
	 */
	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t), KM_SLEEP);
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);

	return (CRYPTO_SUCCESS);
}

/*
 * Helper SHA2 digest update function for uio data.
 */
static int
sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
	off_t offset = data->cd_offset;
	size_t length = data->cd_length;
	uint_t vec_idx = 0;
	size_t cur_len;

	/* we support only kernel buffers */
	if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing data to be
	 * digested.
	 */
	offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
	if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	/*
	 * Now do the digesting on the iovecs.
	 */
	while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
		cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
		    offset, length);

		SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
		    vec_idx) + offset, cur_len);
		length -= cur_len;
		vec_idx++;
		offset = 0;
	}

	if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
		/*
		 * The end of the specified iovecs was reached but
		 * the requested length could not be processed; i.e.,
		 * the caller asked to digest more data than it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	return (CRYPTO_SUCCESS);
}

/*
 * Helper SHA2 digest final function for uio data.
 * digest_len is the length of the desired digest. If digest_len
 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must
 * be at least the algorithm's digest length in bytes.
 */
static int
sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
	off_t offset = digest->cd_offset;
	uint_t vec_idx = 0;

	/* we support only kernel buffers */
	if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing ptr to the digest to
	 * be returned.
	 */
	offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
	if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
		/*
		 * The caller specified an offset that is
		 * larger than the total size of the buffers
		 * it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + digest_len <=
	    zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
		/*
		 * The computed SHA2 digest will fit in the current
		 * iovec.
		 */
		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA256_DIGEST_LENGTH)) ||
		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA512_DIGEST_LENGTH))) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest_scratch, sha2_ctx);

			memcpy((uchar_t *)
			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
			    digest_scratch, digest_len);
		} else {
			SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
			    cd_uio, vec_idx) + offset,
			    sha2_ctx);
		}
	} else {
		/*
		 * The computed digest will cross one or more iovecs.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovecs.
		 */
		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		SHA2Final(digest_tmp, sha2_ctx);

		while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
			cur_len =
			    MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
			    offset, length);
			memcpy(
			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
			    digest_tmp + scratch_offset,
			    cur_len);

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}

		if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
			/*
			 * The end of the specified iovecs was reached but
			 * the requested length could not be processed; i.e.,
			 * the caller asked for more digest data than it
			 * provided room for.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}

static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * If the output buffer is too small, just return the length
	 * needed to store the output and do not destroy the context.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * type can be different from the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}

static int
sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}

static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * If the output buffer is too small, just return the length
	 * needed to store the output and do not destroy the context.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}

static int
sha2_digest_atomic(crypto_mechanism_t *mechanism, crypto_data_t *data,
    crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	SHA2_CTX sha2_ctx;
	uint32_t sha_digest_len;

	/*
	 * Do the SHA2 init.
	 */
	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx, (uint8_t *)data->
		    cd_raw.iov_base + data->cd_offset, data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, bail */
		digest->cd_length = 0;
		return (ret);
	}

	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
		sha_digest_len = SHA256_DIGEST_LENGTH;
	else
		sha_digest_len = SHA512_DIGEST_LENGTH;

	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * type can be different from the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	return (ret);
}

/*
 * KCF software provider mac entry points.
 *
 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
 *
 * Init:
 * The initialization routine initializes what we denote
 * as the inner and outer contexts by doing
 * - for inner context: SHA2(key XOR ipad)
 * - for outer context: SHA2(key XOR opad)
 *
 * Update:
 * Each subsequent SHA2 HMAC update will result in an
 * update of the inner context with the specified data.
 *
 * Final:
 * The SHA2 HMAC final will do a SHA2 final operation on the
 * inner context, and the resulting digest will be used
 * as the data for an update on the outer context. Last
 * but not least, a SHA2 final on the outer context will
 * be performed to obtain the SHA2 HMAC digest to return
 * to the user.
 */
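
/*
 * In terms of the entry points below, the flow is roughly:
 *
 *	sha2_mac_init():	SHA2Update(&hc_icontext, key XOR ipad)
 *				SHA2Update(&hc_ocontext, key XOR opad)
 *	sha2_mac_update():	SHA2Update(&hc_icontext, text)
 *	sha2_mac_final():	d = SHA2Final(&hc_icontext)
 *				SHA2Update(&hc_ocontext, d)
 *				mac = SHA2Final(&hc_ocontext)
 *
 * Keys longer than the hash block size are first digested down to a
 * shorter key before the ipad/opad step (see the sha2_mac_init_ctx()
 * callers).
 */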

/*
 * Initialize a SHA2-HMAC context.
 */
static void
sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
{
	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
	int i, block_size, blocks_per_int64;

	/* Determine the block size */
	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
		block_size = SHA256_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	} else {
		block_size = SHA512_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	}

	(void) memset(ipad, 0, block_size);
	(void) memset(opad, 0, block_size);

	if (keyval != NULL) {
		(void) memcpy(ipad, keyval, length_in_bytes);
		(void) memcpy(opad, keyval, length_in_bytes);
	} else {
		ASSERT0(length_in_bytes);
	}

	/* XOR key with ipad (0x36) and opad (0x5c) */
	for (i = 0; i < blocks_per_int64; i++) {
		ipad[i] ^= 0x3636363636363636;
		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
	}

	/* perform SHA2 on ipad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);

	/* perform SHA2 on opad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
}

/*
 * Initialize a SHA2-HMAC context, either from a precomputed context
 * template or from the given key.
 */
static int
sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	ctx->cc_provider_private =
	    kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(PROV_SHA2_HMAC_CTX(ctx), ctx_template,
		    sizeof (sha2_hmac_ctx_t));
	} else {
		/* no context template, compute context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			uchar_t digested_key[SHA512_DIGEST_LENGTH];
			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;

			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &hmac_ctx->hc_icontext,
			    key->ck_data, keylen_in_bytes, digested_key);
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    digested_key, sha_digest_len);
		} else {
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    key->ck_data, keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.
	 */
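	/*
	 * The mechanism table defines three mechanisms per hash (plain
	 * digest, HMAC, general-length HMAC), so cm_type % 3 == 2 picks
	 * out the general-length HMAC variants, which carry the desired
	 * digest length as their mechanism parameter.
	 */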
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}

	if (ret != CRYPTO_SUCCESS) {
		memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
		ctx->cc_provider_private = NULL;
	}

	return (ret);
}

static int
sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do a SHA2 update of the inner context using the specified
	 * data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}

static int
sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	uint32_t digest_len, sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Set the digest lengths to values appropriate to the mechanism */
	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		break;
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	default:
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * If the output buffer is too small, just return the length
	 * needed to store the output and do not destroy the context.
	 */
	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
		mac->cd_length = digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 */
	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
	    sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		mac->cd_length = digest_len;
	else
		mac->cd_length = 0;

	memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}

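/*
 * Update the inner context of the HMAC context `ctx' with `data' (RAW or
 * UIO); any failure is reported through `ret'.
 */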
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}

static int
sha2_mac_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
	 * bytes of the inner hash value.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
		return (CRYPTO_SUCCESS);
	}
bail:
	memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}

static int
sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	if (mac->cd_length != digest_len) {
		ret = CRYPTO_INVALID_MAC;
		goto bail;
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/* do a SHA2 final on the inner context */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
	 * bytes of the inner hash value.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);

	/*
	 * Compare the computed digest against the expected digest passed
	 * as argument.
	 */

	switch (mac->cd_format) {

	case CRYPTO_DATA_RAW:
		if (memcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
		    mac->cd_offset, digest_len) != 0)
			ret = CRYPTO_INVALID_MAC;
		break;

	case CRYPTO_DATA_UIO: {
		off_t offset = mac->cd_offset;
		uint_t vec_idx = 0;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* we support only kernel buffers */
		if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
			return (CRYPTO_ARGUMENTS_BAD);

		/* jump to the first iovec containing the expected digest */
		offset = zfs_uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
		if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
			/*
			 * The caller specified an offset that is
			 * larger than the total size of the buffers
			 * it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		/* do the comparison of computed digest vs specified one */
		while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
			cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
			    offset, length);

			if (memcmp(digest + scratch_offset,
			    zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
			    cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}

	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
bail:
	memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}

/*
 * KCF software provider context management entry points.
 */

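/*
 * Precompute the inner and outer HMAC contexts for the given key so that
 * subsequent MAC operations can simply copy the template instead of
 * rehashing the key.
 */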
static int
sha2_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size)
{
	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint32_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * Allocate and initialize the SHA2-HMAC context template.
	 */
	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
	if (sha2_hmac_ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);

	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;

	if (keylen_in_bytes > sha_hmac_block_size) {
		uchar_t digested_key[SHA512_DIGEST_LENGTH];

		/*
		 * Hash the passed-in key to get a smaller key.
		 * The inner context is used since it hasn't been
		 * initialized yet.
		 */
		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
		    &sha2_hmac_ctx_tmpl->hc_icontext,
		    key->ck_data, keylen_in_bytes, digested_key);
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
		    sha_digest_len);
	} else {
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
		    keylen_in_bytes);
	}

	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
	*ctx_template_size = sizeof (sha2_hmac_ctx_t);

	return (CRYPTO_SUCCESS);
}

static int
sha2_free_context(crypto_ctx_t *ctx)
{
	uint_t ctx_len;

	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_SUCCESS);

	/*
	 * We have to free either SHA2 or SHA2-HMAC contexts, which
	 * have different lengths.
	 *
	 * Note: Below is dependent on the mechanism ordering.
	 */

	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
		ctx_len = sizeof (sha2_ctx_t);
	else
		ctx_len = sizeof (sha2_hmac_ctx_t);

	memset(ctx->cc_provider_private, 0, ctx_len);
	kmem_free(ctx->cc_provider_private, ctx_len);
	ctx->cc_provider_private = NULL;

	return (CRYPTO_SUCCESS);
}