xref: /freebsd/sys/contrib/openzfs/module/icp/io/sha2_mod.c (revision e92ffd9b626833ebdbf2742c8ffddc6cd94b963e)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/zfs_context.h>
28 #include <sys/crypto/common.h>
29 #include <sys/crypto/spi.h>
30 #include <sys/crypto/icp.h>
31 #define	_SHA2_IMPL
32 #include <sys/sha2.h>
33 #include <sha2/sha2_impl.h>
34 
35 /*
36  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
37  * by KCF to one of the entry points.
38  */
39 
40 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
41 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
42 
/*
 * Extract the digest length passed as a mechanism parameter (a ulong_t).
 * If cm_param is not suitably aligned, the value is copied out with
 * bcopy() to avoid an unaligned access.
 */
44 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
45 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
46 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
47 	else {								\
48 		ulong_t tmp_ulong;					\
49 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
50 		(len) = (uint32_t)tmp_ulong;				\
51 	}								\
52 }
53 
54 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
55 	SHA2Init(mech, ctx);				\
56 	SHA2Update(ctx, key, len);			\
57 	SHA2Final(digest, ctx);				\
58 }
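
/*
 * PROV_SHA2_DIGEST_KEY() is used by the MAC entry points below to shrink
 * an HMAC key that is longer than the hash block size: the key is run
 * through a one-shot SHA2 digest and the resulting digest is then used
 * as the effective key.
 */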
59 
60 /*
61  * Mechanism info structure passed to KCF during registration.
62  */
63 static const crypto_mech_info_t sha2_mech_info_tab[] = {
64 	/* SHA256 */
65 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
66 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
67 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
68 	/* SHA256-HMAC */
69 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
70 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
71 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
72 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
73 	/* SHA256-HMAC GENERAL */
74 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
75 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
76 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
77 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
78 	/* SHA384 */
79 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
80 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
81 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
82 	/* SHA384-HMAC */
83 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
84 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
85 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
86 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
87 	/* SHA384-HMAC GENERAL */
88 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
89 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
90 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
91 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
92 	/* SHA512 */
93 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
94 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
95 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
96 	/* SHA512-HMAC */
97 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
98 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
99 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
100 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
101 	/* SHA512-HMAC GENERAL */
102 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
103 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
104 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
105 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
106 };
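
/*
 * Note that the mechanism numbers used above come in groups of three per
 * digest size (plain digest, HMAC, HMAC_GENERAL).  Several entry points
 * below rely on that grouping, e.g. cm_type % 3 == 2 identifies an
 * HMAC_GENERAL mechanism and cm_type % 3 == 0 a plain digest mechanism.
 */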
107 
108 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
109 
110 static const crypto_control_ops_t sha2_control_ops = {
111 	sha2_provider_status
112 };
113 
114 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
115     crypto_req_handle_t);
116 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
117     crypto_req_handle_t);
118 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
119     crypto_req_handle_t);
120 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
121     crypto_req_handle_t);
122 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
123     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
124     crypto_req_handle_t);
125 
126 static const crypto_digest_ops_t sha2_digest_ops = {
127 	.digest_init = sha2_digest_init,
128 	.digest = sha2_digest,
129 	.digest_update = sha2_digest_update,
130 	.digest_key = NULL,
131 	.digest_final = sha2_digest_final,
132 	.digest_atomic = sha2_digest_atomic
133 };
134 
135 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
136     crypto_spi_ctx_template_t, crypto_req_handle_t);
137 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
138     crypto_req_handle_t);
139 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
140 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
141     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
142     crypto_spi_ctx_template_t, crypto_req_handle_t);
143 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
144     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
145     crypto_spi_ctx_template_t, crypto_req_handle_t);
146 
147 static const crypto_mac_ops_t sha2_mac_ops = {
148 	.mac_init = sha2_mac_init,
149 	.mac = NULL,
150 	.mac_update = sha2_mac_update,
151 	.mac_final = sha2_mac_final,
152 	.mac_atomic = sha2_mac_atomic,
153 	.mac_verify_atomic = sha2_mac_verify_atomic
154 };
155 
156 static int sha2_create_ctx_template(crypto_provider_handle_t,
157     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
158     size_t *, crypto_req_handle_t);
159 static int sha2_free_context(crypto_ctx_t *);
160 
161 static const crypto_ctx_ops_t sha2_ctx_ops = {
162 	.create_ctx_template = sha2_create_ctx_template,
163 	.free_context = sha2_free_context
164 };
165 
166 static const crypto_ops_t sha2_crypto_ops = {{{{{
167 	&sha2_control_ops,
168 	&sha2_digest_ops,
169 	NULL,
170 	&sha2_mac_ops,
171 	NULL,
172 	NULL,
173 	NULL,
174 	NULL,
175 	NULL,
176 	NULL,
177 	NULL,
178 	NULL,
179 	NULL,
180 	&sha2_ctx_ops
181 }}}}};
182 
183 static const crypto_provider_info_t sha2_prov_info = {{{{
184 	CRYPTO_SPI_VERSION_1,
185 	"SHA2 Software Provider",
186 	CRYPTO_SW_PROVIDER,
187 	NULL,
188 	&sha2_crypto_ops,
189 	sizeof (sha2_mech_info_tab) / sizeof (crypto_mech_info_t),
190 	sha2_mech_info_tab
191 }}}};
192 
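/* Handle for our registration with KCF, set by sha2_mod_init(). */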
193 static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
194 
195 int
196 sha2_mod_init(void)
197 {
198 	int ret;
199 
200 	/*
201 	 * Register with KCF. If the registration fails, log an
	 * error but do not fail, since the standalone SHA2 functionality
	 * provided by this module should still be available.
204 	 */
205 	if ((ret = crypto_register_provider(&sha2_prov_info,
206 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
207 		cmn_err(CE_WARN, "sha2 _init: "
208 		    "crypto_register_provider() failed (0x%x)", ret);
209 
210 	return (0);
211 }
212 
213 int
214 sha2_mod_fini(void)
215 {
216 	int ret = 0;
217 
218 	if (sha2_prov_handle != 0) {
219 		if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
220 		    CRYPTO_SUCCESS) {
221 			cmn_err(CE_WARN,
222 			    "sha2 _fini: crypto_unregister_provider() "
223 			    "failed (0x%x)", ret);
224 			return (EBUSY);
225 		}
226 		sha2_prov_handle = 0;
227 	}
228 
229 	return (ret);
230 }
231 
232 /*
233  * KCF software provider control entry points.
234  */
235 static void
236 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
237 {
238 	(void) provider;
239 	*status = CRYPTO_PROVIDER_READY;
240 }
241 
242 /*
243  * KCF software provider digest entry points.
244  */
245 
246 static int
247 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
248     crypto_req_handle_t req)
249 {
250 
251 	/*
252 	 * Allocate and initialize SHA2 context.
253 	 */
254 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
255 	    crypto_kmflag(req));
256 	if (ctx->cc_provider_private == NULL)
257 		return (CRYPTO_HOST_MEMORY);
258 
259 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
260 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
261 
262 	return (CRYPTO_SUCCESS);
263 }
264 
265 /*
266  * Helper SHA2 digest update function for uio data.
267  */
268 static int
269 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
270 {
271 	off_t offset = data->cd_offset;
272 	size_t length = data->cd_length;
273 	uint_t vec_idx = 0;
274 	size_t cur_len;
275 
	/* we only support kernel buffers */
277 	if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
278 		return (CRYPTO_ARGUMENTS_BAD);
279 
280 	/*
281 	 * Jump to the first iovec containing data to be
282 	 * digested.
283 	 */
284 	offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
285 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
286 		/*
287 		 * The caller specified an offset that is larger than the
288 		 * total size of the buffers it provided.
289 		 */
290 		return (CRYPTO_DATA_LEN_RANGE);
291 	}
292 
293 	/*
294 	 * Now do the digesting on the iovecs.
295 	 */
296 	while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
297 		cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
298 		    offset, length);
299 
300 		SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
301 		    vec_idx) + offset, cur_len);
302 		length -= cur_len;
303 		vec_idx++;
304 		offset = 0;
305 	}
306 
307 	if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
308 		/*
		 * The end of the specified iovecs was reached but the
		 * requested length could not be processed; the caller
		 * asked to digest more data than it provided.
312 		 */
313 		return (CRYPTO_DATA_LEN_RANGE);
314 	}
315 
316 	return (CRYPTO_SUCCESS);
317 }
318 
319 /*
320  * Helper SHA2 digest final function for uio data.
321  * digest_len is the length of the desired digest. If digest_len
322  * is smaller than the default SHA2 digest length, the caller
323  * must pass a scratch buffer, digest_scratch, which must
324  * be at least the algorithm's digest length bytes.
325  */
326 static int
327 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
328     ulong_t digest_len, uchar_t *digest_scratch)
329 {
330 	off_t offset = digest->cd_offset;
331 	uint_t vec_idx = 0;
332 
	/* we only support kernel buffers */
334 	if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
335 		return (CRYPTO_ARGUMENTS_BAD);
336 
337 	/*
338 	 * Jump to the first iovec containing ptr to the digest to
339 	 * be returned.
340 	 */
341 	offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
342 	if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
343 		/*
344 		 * The caller specified an offset that is
345 		 * larger than the total size of the buffers
346 		 * it provided.
347 		 */
348 		return (CRYPTO_DATA_LEN_RANGE);
349 	}
350 
351 	if (offset + digest_len <=
352 	    zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
353 		/*
354 		 * The computed SHA2 digest will fit in the current
355 		 * iovec.
356 		 */
357 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
358 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
359 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
360 		    (digest_len != SHA512_DIGEST_LENGTH))) {
361 			/*
362 			 * The caller requested a short digest. Digest
363 			 * into a scratch buffer and return to
364 			 * the user only what was requested.
365 			 */
366 			SHA2Final(digest_scratch, sha2_ctx);
367 
368 			bcopy(digest_scratch, (uchar_t *)
369 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
370 			    digest_len);
371 		} else {
372 			SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
373 			    cd_uio, vec_idx) + offset,
374 			    sha2_ctx);
375 
376 		}
377 	} else {
378 		/*
		 * The computed digest will cross one or more iovecs.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy the digest to the specified iovecs piecemeal.
383 		 */
384 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
385 		off_t scratch_offset = 0;
386 		size_t length = digest_len;
387 		size_t cur_len;
388 
389 		SHA2Final(digest_tmp, sha2_ctx);
390 
391 		while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
392 			cur_len =
393 			    MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
394 			    offset, length);
395 			bcopy(digest_tmp + scratch_offset,
396 			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
397 			    cur_len);
398 
399 			length -= cur_len;
400 			vec_idx++;
401 			scratch_offset += cur_len;
402 			offset = 0;
403 		}
404 
405 		if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
406 			/*
407 			 * The end of the specified iovec's was reached but
			 * The end of the specified iovecs was reached but
			 * the requested length could not be processed; the
			 * caller asked to digest more data than it provided.
412 			return (CRYPTO_DATA_LEN_RANGE);
413 		}
414 	}
415 
416 	return (CRYPTO_SUCCESS);
417 }
418 
419 static int
420 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
421     crypto_req_handle_t req)
422 {
423 	(void) req;
424 	int ret = CRYPTO_SUCCESS;
425 	uint_t sha_digest_len;
426 
427 	ASSERT(ctx->cc_provider_private != NULL);
428 
429 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
430 	case SHA256_MECH_INFO_TYPE:
431 		sha_digest_len = SHA256_DIGEST_LENGTH;
432 		break;
433 	case SHA384_MECH_INFO_TYPE:
434 		sha_digest_len = SHA384_DIGEST_LENGTH;
435 		break;
436 	case SHA512_MECH_INFO_TYPE:
437 		sha_digest_len = SHA512_DIGEST_LENGTH;
438 		break;
439 	default:
440 		return (CRYPTO_MECHANISM_INVALID);
441 	}
442 
443 	/*
	 * For the following cases, just return the length needed to store
	 * the output and do not destroy the context.
446 	 */
447 	if ((digest->cd_length == 0) ||
448 	    (digest->cd_length < sha_digest_len)) {
449 		digest->cd_length = sha_digest_len;
450 		return (CRYPTO_BUFFER_TOO_SMALL);
451 	}
452 
453 	/*
454 	 * Do the SHA2 update on the specified input data.
455 	 */
456 	switch (data->cd_format) {
457 	case CRYPTO_DATA_RAW:
458 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
459 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
460 		    data->cd_length);
461 		break;
462 	case CRYPTO_DATA_UIO:
463 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
464 		    data);
465 		break;
466 	default:
467 		ret = CRYPTO_ARGUMENTS_BAD;
468 	}
469 
470 	if (ret != CRYPTO_SUCCESS) {
471 		/* the update failed, free context and bail */
472 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
473 		ctx->cc_provider_private = NULL;
474 		digest->cd_length = 0;
475 		return (ret);
476 	}
477 
478 	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * data type can be different from the input data type.
481 	 */
482 	switch (digest->cd_format) {
483 	case CRYPTO_DATA_RAW:
484 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
485 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
486 		break;
487 	case CRYPTO_DATA_UIO:
488 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
489 		    digest, sha_digest_len, NULL);
490 		break;
491 	default:
492 		ret = CRYPTO_ARGUMENTS_BAD;
493 	}
494 
495 	/* all done, free context and return */
496 
497 	if (ret == CRYPTO_SUCCESS)
498 		digest->cd_length = sha_digest_len;
499 	else
500 		digest->cd_length = 0;
501 
502 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
503 	ctx->cc_provider_private = NULL;
504 	return (ret);
505 }
506 
507 static int
508 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
509     crypto_req_handle_t req)
510 {
511 	(void) req;
512 	int ret = CRYPTO_SUCCESS;
513 
514 	ASSERT(ctx->cc_provider_private != NULL);
515 
516 	/*
517 	 * Do the SHA2 update on the specified input data.
518 	 */
519 	switch (data->cd_format) {
520 	case CRYPTO_DATA_RAW:
521 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
522 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
523 		    data->cd_length);
524 		break;
525 	case CRYPTO_DATA_UIO:
526 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
527 		    data);
528 		break;
529 	default:
530 		ret = CRYPTO_ARGUMENTS_BAD;
531 	}
532 
533 	return (ret);
534 }
535 
536 static int
537 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
538     crypto_req_handle_t req)
539 {
540 	(void) req;
541 	int ret = CRYPTO_SUCCESS;
542 	uint_t sha_digest_len;
543 
544 	ASSERT(ctx->cc_provider_private != NULL);
545 
546 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
547 	case SHA256_MECH_INFO_TYPE:
548 		sha_digest_len = SHA256_DIGEST_LENGTH;
549 		break;
550 	case SHA384_MECH_INFO_TYPE:
551 		sha_digest_len = SHA384_DIGEST_LENGTH;
552 		break;
553 	case SHA512_MECH_INFO_TYPE:
554 		sha_digest_len = SHA512_DIGEST_LENGTH;
555 		break;
556 	default:
557 		return (CRYPTO_MECHANISM_INVALID);
558 	}
559 
560 	/*
	 * For the following cases, just return the length needed to store
	 * the output and do not destroy the context.
563 	 */
564 	if ((digest->cd_length == 0) ||
565 	    (digest->cd_length < sha_digest_len)) {
566 		digest->cd_length = sha_digest_len;
567 		return (CRYPTO_BUFFER_TOO_SMALL);
568 	}
569 
570 	/*
571 	 * Do a SHA2 final.
572 	 */
573 	switch (digest->cd_format) {
574 	case CRYPTO_DATA_RAW:
575 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
576 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
577 		break;
578 	case CRYPTO_DATA_UIO:
579 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
580 		    digest, sha_digest_len, NULL);
581 		break;
582 	default:
583 		ret = CRYPTO_ARGUMENTS_BAD;
584 	}
585 
586 	/* all done, free context and return */
587 
588 	if (ret == CRYPTO_SUCCESS)
589 		digest->cd_length = sha_digest_len;
590 	else
591 		digest->cd_length = 0;
592 
593 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
594 	ctx->cc_provider_private = NULL;
595 
596 	return (ret);
597 }
598 
599 static int
600 sha2_digest_atomic(crypto_provider_handle_t provider,
601     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
602     crypto_data_t *data, crypto_data_t *digest,
603     crypto_req_handle_t req)
604 {
605 	(void) provider, (void) session_id, (void) req;
606 	int ret = CRYPTO_SUCCESS;
607 	SHA2_CTX sha2_ctx;
608 	uint32_t sha_digest_len;
609 
	/*
	 * Do the SHA2 init.
	 */
	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

632 	if (ret != CRYPTO_SUCCESS) {
633 		/* the update failed, bail */
634 		digest->cd_length = 0;
635 		return (ret);
636 	}
637 
638 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
639 		sha_digest_len = SHA256_DIGEST_LENGTH;
640 	else
641 		sha_digest_len = SHA512_DIGEST_LENGTH;
642 
643 	/*
	 * Do a SHA2 final; this must be done separately since the digest
	 * data type can be different from the input data type.
646 	 */
647 	switch (digest->cd_format) {
648 	case CRYPTO_DATA_RAW:
649 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
650 		    digest->cd_offset, &sha2_ctx);
651 		break;
652 	case CRYPTO_DATA_UIO:
653 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
654 		    sha_digest_len, NULL);
655 		break;
656 	default:
657 		ret = CRYPTO_ARGUMENTS_BAD;
658 	}
659 
660 	if (ret == CRYPTO_SUCCESS)
661 		digest->cd_length = sha_digest_len;
662 	else
663 		digest->cd_length = 0;
664 
665 	return (ret);
666 }
667 
668 /*
669  * KCF software provider mac entry points.
670  *
671  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
672  *
673  * Init:
674  * The initialization routine initializes what we denote
675  * as the inner and outer contexts by doing
676  * - for inner context: SHA2(key XOR ipad)
677  * - for outer context: SHA2(key XOR opad)
678  *
679  * Update:
680  * Each subsequent SHA2 HMAC update will result in an
681  * update of the inner context with the specified data.
682  *
683  * Final:
684  * The SHA2 HMAC final will do a SHA2 final operation on the
685  * inner context, and the resulting digest will be used
686  * as the data for an update on the outer context. Last
687  * but not least, a SHA2 final on the outer context will
688  * be performed to obtain the SHA2 HMAC digest to return
689  * to the user.
690  */
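
/*
 * Illustrative sketch of that flow for HMAC-SHA256 with a key no longer
 * than the block size.  This is not compiled code, it just strings
 * together the routines defined in this file; key/keylen/msg/msglen
 * stand for caller-supplied buffers:
 *
 *	sha2_hmac_ctx_t hctx;
 *	uchar_t mac[SHA256_DIGEST_LENGTH];
 *
 *	hctx.hc_mech_type = SHA256_HMAC_MECH_INFO_TYPE;
 *	sha2_mac_init_ctx(&hctx, key, keylen);		(Init)
 *	SHA2Update(&hctx.hc_icontext, msg, msglen);	(Update)
 *	SHA2Final(mac, &hctx.hc_icontext);		(Final, inner)
 *	SHA2Update(&hctx.hc_ocontext, mac, SHA256_DIGEST_LENGTH);
 *	SHA2Final(mac, &hctx.hc_ocontext);		(Final, outer)
 */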
691 
692 /*
 * Initialize a SHA2-HMAC context from a raw key.  keyval must not be
 * longer than the mechanism's HMAC block size; callers first digest
 * longer keys down to the digest length (see PROV_SHA2_DIGEST_KEY()).
694  */
695 static void
696 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
697 {
698 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
699 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
700 	int i, block_size, blocks_per_int64;
701 
702 	/* Determine the block size */
703 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
704 		block_size = SHA256_HMAC_BLOCK_SIZE;
705 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
706 	} else {
707 		block_size = SHA512_HMAC_BLOCK_SIZE;
708 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
709 	}
710 
711 	(void) bzero(ipad, block_size);
712 	(void) bzero(opad, block_size);
713 	(void) bcopy(keyval, ipad, length_in_bytes);
714 	(void) bcopy(keyval, opad, length_in_bytes);
715 
716 	/* XOR key with ipad (0x36) and opad (0x5c) */
717 	for (i = 0; i < blocks_per_int64; i ++) {
718 		ipad[i] ^= 0x3636363636363636;
719 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
720 	}
721 
722 	/* perform SHA2 on ipad */
723 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
724 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
725 
726 	/* perform SHA2 on opad */
727 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
728 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
729 
730 }
731 
/*
 * KCF software provider mac init entry point.
 */
734 static int
735 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
736     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
737     crypto_req_handle_t req)
738 {
739 	int ret = CRYPTO_SUCCESS;
740 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
741 	uint_t sha_digest_len, sha_hmac_block_size;
742 
743 	/*
744 	 * Set the digest length and block size to values appropriate to the
745 	 * mechanism
746 	 */
747 	switch (mechanism->cm_type) {
748 	case SHA256_HMAC_MECH_INFO_TYPE:
749 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
750 		sha_digest_len = SHA256_DIGEST_LENGTH;
751 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
752 		break;
753 	case SHA384_HMAC_MECH_INFO_TYPE:
754 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
755 	case SHA512_HMAC_MECH_INFO_TYPE:
756 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
757 		sha_digest_len = SHA512_DIGEST_LENGTH;
758 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
759 		break;
760 	default:
761 		return (CRYPTO_MECHANISM_INVALID);
762 	}
763 
764 	if (key->ck_format != CRYPTO_KEY_RAW)
765 		return (CRYPTO_ARGUMENTS_BAD);
766 
767 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
768 	    crypto_kmflag(req));
769 	if (ctx->cc_provider_private == NULL)
770 		return (CRYPTO_HOST_MEMORY);
771 
772 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
773 	if (ctx_template != NULL) {
774 		/* reuse context template */
775 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
776 		    sizeof (sha2_hmac_ctx_t));
777 	} else {
778 		/* no context template, compute context */
779 		if (keylen_in_bytes > sha_hmac_block_size) {
780 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
781 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
782 
783 			/*
784 			 * Hash the passed-in key to get a smaller key.
785 			 * The inner context is used since it hasn't been
786 			 * initialized yet.
787 			 */
788 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
789 			    &hmac_ctx->hc_icontext,
790 			    key->ck_data, keylen_in_bytes, digested_key);
791 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
792 			    digested_key, sha_digest_len);
793 		} else {
794 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
795 			    key->ck_data, keylen_in_bytes);
796 		}
797 	}
798 
799 	/*
800 	 * Get the mechanism parameters, if applicable.
801 	 */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t))
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		else
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
		if (ret == CRYPTO_SUCCESS &&
		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
	}
811 
812 	if (ret != CRYPTO_SUCCESS) {
813 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
814 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
815 		ctx->cc_provider_private = NULL;
816 	}
817 
818 	return (ret);
819 }
820 
821 static int
822 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
823     crypto_req_handle_t req)
824 {
825 	(void) req;
826 	int ret = CRYPTO_SUCCESS;
827 
828 	ASSERT(ctx->cc_provider_private != NULL);
829 
830 	/*
831 	 * Do a SHA2 update of the inner context using the specified
832 	 * data.
833 	 */
834 	switch (data->cd_format) {
835 	case CRYPTO_DATA_RAW:
836 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
837 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
838 		    data->cd_length);
839 		break;
840 	case CRYPTO_DATA_UIO:
841 		ret = sha2_digest_update_uio(
842 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
843 		break;
844 	default:
845 		ret = CRYPTO_ARGUMENTS_BAD;
846 	}
847 
848 	return (ret);
849 }
850 
851 static int
852 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
853 {
854 	(void) req;
855 	int ret = CRYPTO_SUCCESS;
856 	uchar_t digest[SHA512_DIGEST_LENGTH];
857 	uint32_t digest_len, sha_digest_len;
858 
859 	ASSERT(ctx->cc_provider_private != NULL);
860 
861 	/* Set the digest lengths to values appropriate to the mechanism */
862 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
863 	case SHA256_HMAC_MECH_INFO_TYPE:
864 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
865 		break;
866 	case SHA384_HMAC_MECH_INFO_TYPE:
867 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
868 		break;
869 	case SHA512_HMAC_MECH_INFO_TYPE:
870 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
871 		break;
872 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
873 		sha_digest_len = SHA256_DIGEST_LENGTH;
874 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
875 		break;
876 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
877 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
878 		sha_digest_len = SHA512_DIGEST_LENGTH;
879 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
880 		break;
881 	default:
882 		return (CRYPTO_ARGUMENTS_BAD);
883 	}
884 
885 	/*
	 * For the following cases, just return the length needed to store
	 * the output and do not destroy the context.
888 	 */
889 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
890 		mac->cd_length = digest_len;
891 		return (CRYPTO_BUFFER_TOO_SMALL);
892 	}
893 
894 	/*
895 	 * Do a SHA2 final on the inner context.
896 	 */
897 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
898 
899 	/*
900 	 * Do a SHA2 update on the outer context, feeding the inner
901 	 * digest as data.
902 	 */
903 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
904 	    sha_digest_len);
905 
906 	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
909 	 */
910 	switch (mac->cd_format) {
911 	case CRYPTO_DATA_RAW:
912 		if (digest_len != sha_digest_len) {
913 			/*
914 			 * The caller requested a short digest. Digest
915 			 * into a scratch buffer and return to
916 			 * the user only what was requested.
917 			 */
918 			SHA2Final(digest,
919 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
920 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
921 			    mac->cd_offset, digest_len);
922 		} else {
923 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
924 			    mac->cd_offset,
925 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
926 		}
927 		break;
928 	case CRYPTO_DATA_UIO:
929 		ret = sha2_digest_final_uio(
930 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
931 		    digest_len, digest);
932 		break;
933 	default:
934 		ret = CRYPTO_ARGUMENTS_BAD;
935 	}
936 
937 	if (ret == CRYPTO_SUCCESS)
938 		mac->cd_length = digest_len;
939 	else
940 		mac->cd_length = 0;
941 
942 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
943 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
944 	ctx->cc_provider_private = NULL;
945 
946 	return (ret);
947 }
948 
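/*
 * Feed a crypto_data_t of either RAW or UIO form to the inner HMAC
 * context; shared by the two atomic MAC entry points below.
 */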
949 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
950 	switch (data->cd_format) {					\
951 	case CRYPTO_DATA_RAW:						\
952 		SHA2Update(&(ctx).hc_icontext,				\
953 		    (uint8_t *)data->cd_raw.iov_base +			\
954 		    data->cd_offset, data->cd_length);			\
955 		break;							\
956 	case CRYPTO_DATA_UIO:						\
957 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
958 		break;							\
959 	default:							\
960 		ret = CRYPTO_ARGUMENTS_BAD;				\
961 	}								\
962 }
963 
964 static int
965 sha2_mac_atomic(crypto_provider_handle_t provider,
966     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
967     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
968     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
969 {
970 	(void) provider, (void) session_id, (void) req;
971 	int ret = CRYPTO_SUCCESS;
972 	uchar_t digest[SHA512_DIGEST_LENGTH];
973 	sha2_hmac_ctx_t sha2_hmac_ctx;
974 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
975 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
976 
977 	/*
978 	 * Set the digest length and block size to values appropriate to the
979 	 * mechanism
980 	 */
981 	switch (mechanism->cm_type) {
982 	case SHA256_HMAC_MECH_INFO_TYPE:
983 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
984 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
985 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
986 		break;
987 	case SHA384_HMAC_MECH_INFO_TYPE:
988 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
989 	case SHA512_HMAC_MECH_INFO_TYPE:
990 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
991 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
992 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
993 		break;
994 	default:
995 		return (CRYPTO_MECHANISM_INVALID);
996 	}
997 
998 	/* Add support for key by attributes (RFE 4706552) */
999 	if (key->ck_format != CRYPTO_KEY_RAW)
1000 		return (CRYPTO_ARGUMENTS_BAD);
1001 
1002 	if (ctx_template != NULL) {
1003 		/* reuse context template */
1004 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1005 	} else {
1006 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1007 		/* no context template, initialize context */
1008 		if (keylen_in_bytes > sha_hmac_block_size) {
1009 			/*
1010 			 * Hash the passed-in key to get a smaller key.
1011 			 * The inner context is used since it hasn't been
1012 			 * initialized yet.
1013 			 */
1014 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1015 			    &sha2_hmac_ctx.hc_icontext,
1016 			    key->ck_data, keylen_in_bytes, digest);
1017 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1018 			    sha_digest_len);
1019 		} else {
1020 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1021 			    keylen_in_bytes);
1022 		}
1023 	}
1024 
1025 	/* get the mechanism parameters, if applicable */
1026 	if ((mechanism->cm_type % 3) == 2) {
1027 		if (mechanism->cm_param == NULL ||
1028 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1029 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1030 			goto bail;
1031 		}
1032 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1033 		if (digest_len > sha_digest_len) {
1034 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1035 			goto bail;
1036 		}
1037 	}
1038 
1039 	/* do a SHA2 update of the inner context using the specified data */
1040 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1041 	if (ret != CRYPTO_SUCCESS)
		/* the update failed, clear the context and bail */
1043 		goto bail;
1044 
1045 	/*
1046 	 * Do a SHA2 final on the inner context.
1047 	 */
1048 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1049 
1050 	/*
1051 	 * Do an SHA2 update on the outer context, feeding the inner
1052 	 * digest as data.
1053 	 *
1054 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1055 	 * bytes of the inner hash value.
1056 	 */
1057 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1058 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1059 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1060 		    SHA384_DIGEST_LENGTH);
1061 	else
1062 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1063 
1064 	/*
1065 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1067 	 */
1068 	switch (mac->cd_format) {
1069 	case CRYPTO_DATA_RAW:
1070 		if (digest_len != sha_digest_len) {
1071 			/*
1072 			 * The caller requested a short digest. Digest
1073 			 * into a scratch buffer and return to
1074 			 * the user only what was requested.
1075 			 */
1076 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1077 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1078 			    mac->cd_offset, digest_len);
1079 		} else {
1080 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1081 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1082 		}
1083 		break;
1084 	case CRYPTO_DATA_UIO:
1085 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1086 		    digest_len, digest);
1087 		break;
1088 	default:
1089 		ret = CRYPTO_ARGUMENTS_BAD;
1090 	}
1091 
1092 	if (ret == CRYPTO_SUCCESS) {
1093 		mac->cd_length = digest_len;
1094 		return (CRYPTO_SUCCESS);
1095 	}
1096 bail:
1097 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1098 	mac->cd_length = 0;
1099 	return (ret);
1100 }
1101 
1102 static int
1103 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1104     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1105     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1106     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1107 {
1108 	(void) provider, (void) session_id, (void) req;
1109 	int ret = CRYPTO_SUCCESS;
1110 	uchar_t digest[SHA512_DIGEST_LENGTH];
1111 	sha2_hmac_ctx_t sha2_hmac_ctx;
1112 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1113 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1114 
1115 	/*
1116 	 * Set the digest length and block size to values appropriate to the
1117 	 * mechanism
1118 	 */
1119 	switch (mechanism->cm_type) {
1120 	case SHA256_HMAC_MECH_INFO_TYPE:
1121 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1122 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1123 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1124 		break;
1125 	case SHA384_HMAC_MECH_INFO_TYPE:
1126 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1127 	case SHA512_HMAC_MECH_INFO_TYPE:
1128 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1129 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1130 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1131 		break;
1132 	default:
1133 		return (CRYPTO_MECHANISM_INVALID);
1134 	}
1135 
1136 	/* Add support for key by attributes (RFE 4706552) */
1137 	if (key->ck_format != CRYPTO_KEY_RAW)
1138 		return (CRYPTO_ARGUMENTS_BAD);
1139 
1140 	if (ctx_template != NULL) {
1141 		/* reuse context template */
1142 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1143 	} else {
1144 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1145 		/* no context template, initialize context */
1146 		if (keylen_in_bytes > sha_hmac_block_size) {
1147 			/*
1148 			 * Hash the passed-in key to get a smaller key.
1149 			 * The inner context is used since it hasn't been
1150 			 * initialized yet.
1151 			 */
1152 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1153 			    &sha2_hmac_ctx.hc_icontext,
1154 			    key->ck_data, keylen_in_bytes, digest);
1155 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1156 			    sha_digest_len);
1157 		} else {
1158 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1159 			    keylen_in_bytes);
1160 		}
1161 	}
1162 
1163 	/* get the mechanism parameters, if applicable */
1164 	if (mechanism->cm_type % 3 == 2) {
1165 		if (mechanism->cm_param == NULL ||
1166 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1167 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1168 			goto bail;
1169 		}
1170 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1171 		if (digest_len > sha_digest_len) {
1172 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1173 			goto bail;
1174 		}
1175 	}
1176 
1177 	if (mac->cd_length != digest_len) {
1178 		ret = CRYPTO_INVALID_MAC;
1179 		goto bail;
1180 	}
1181 
1182 	/* do a SHA2 update of the inner context using the specified data */
1183 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1184 	if (ret != CRYPTO_SUCCESS)
		/* the update failed, clear the context and bail */
1186 		goto bail;
1187 
1188 	/* do a SHA2 final on the inner context */
1189 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1190 
1191 	/*
1192 	 * Do an SHA2 update on the outer context, feeding the inner
1193 	 * digest as data.
1194 	 *
1195 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1196 	 * bytes of the inner hash value.
1197 	 */
1198 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1199 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1200 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1201 		    SHA384_DIGEST_LENGTH);
1202 	else
1203 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1204 
1205 	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in a local buffer for comparison below.
1208 	 */
1209 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1210 
1211 	/*
1212 	 * Compare the computed digest against the expected digest passed
1213 	 * as argument.
1214 	 */
1215 
1216 	switch (mac->cd_format) {
1217 
1218 	case CRYPTO_DATA_RAW:
1219 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1220 		    mac->cd_offset, digest_len) != 0)
1221 			ret = CRYPTO_INVALID_MAC;
1222 		break;
1223 
1224 	case CRYPTO_DATA_UIO: {
1225 		off_t offset = mac->cd_offset;
1226 		uint_t vec_idx = 0;
1227 		off_t scratch_offset = 0;
1228 		size_t length = digest_len;
1229 		size_t cur_len;
1230 
		/* we only support kernel buffers */
1232 		if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
1233 			return (CRYPTO_ARGUMENTS_BAD);
1234 
1235 		/* jump to the first iovec containing the expected digest */
1236 		offset = zfs_uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
1237 		if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
1238 			/*
1239 			 * The caller specified an offset that is
1240 			 * larger than the total size of the buffers
1241 			 * it provided.
1242 			 */
1243 			ret = CRYPTO_DATA_LEN_RANGE;
1244 			break;
1245 		}
1246 
1247 		/* do the comparison of computed digest vs specified one */
1248 		while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
1249 			cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
1250 			    offset, length);
1251 
1252 			if (bcmp(digest + scratch_offset,
1253 			    zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
1254 			    cur_len) != 0) {
1255 				ret = CRYPTO_INVALID_MAC;
1256 				break;
1257 			}
1258 
1259 			length -= cur_len;
1260 			vec_idx++;
1261 			scratch_offset += cur_len;
1262 			offset = 0;
1263 		}
1264 		break;
1265 	}
1266 
1267 	default:
1268 		ret = CRYPTO_ARGUMENTS_BAD;
1269 	}
1270 
1271 	return (ret);
1272 bail:
1273 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1274 	mac->cd_length = 0;
1275 	return (ret);
1276 }
1277 
1278 /*
1279  * KCF software provider context management entry points.
1280  */
1281 
1282 static int
1283 sha2_create_ctx_template(crypto_provider_handle_t provider,
1284     crypto_mechanism_t *mechanism, crypto_key_t *key,
1285     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1286     crypto_req_handle_t req)
1287 {
1288 	(void) provider;
1289 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1290 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1291 	uint32_t sha_digest_len, sha_hmac_block_size;
1292 
1293 	/*
1294 	 * Set the digest length and block size to values appropriate to the
1295 	 * mechanism
1296 	 */
1297 	switch (mechanism->cm_type) {
1298 	case SHA256_HMAC_MECH_INFO_TYPE:
1299 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1300 		sha_digest_len = SHA256_DIGEST_LENGTH;
1301 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1302 		break;
1303 	case SHA384_HMAC_MECH_INFO_TYPE:
1304 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1305 	case SHA512_HMAC_MECH_INFO_TYPE:
1306 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1307 		sha_digest_len = SHA512_DIGEST_LENGTH;
1308 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1309 		break;
1310 	default:
1311 		return (CRYPTO_MECHANISM_INVALID);
1312 	}
1313 
1314 	/* Add support for key by attributes (RFE 4706552) */
1315 	if (key->ck_format != CRYPTO_KEY_RAW)
1316 		return (CRYPTO_ARGUMENTS_BAD);
1317 
1318 	/*
1319 	 * Allocate and initialize SHA2 context.
1320 	 */
1321 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1322 	    crypto_kmflag(req));
1323 	if (sha2_hmac_ctx_tmpl == NULL)
1324 		return (CRYPTO_HOST_MEMORY);
1325 
1326 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1327 
1328 	if (keylen_in_bytes > sha_hmac_block_size) {
1329 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1330 
1331 		/*
1332 		 * Hash the passed-in key to get a smaller key.
1333 		 * The inner context is used since it hasn't been
1334 		 * initialized yet.
1335 		 */
1336 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1337 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1338 		    key->ck_data, keylen_in_bytes, digested_key);
1339 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1340 		    sha_digest_len);
1341 	} else {
1342 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1343 		    keylen_in_bytes);
1344 	}
1345 
1346 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1347 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1348 
1349 	return (CRYPTO_SUCCESS);
1350 }
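
/*
 * The template created above is consumed by bcopy() in sha2_mac_init()
 * and in the atomic MAC entry points, so one template can be reused for
 * many MAC operations with the same key.
 */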
1351 
1352 static int
1353 sha2_free_context(crypto_ctx_t *ctx)
1354 {
1355 	uint_t ctx_len;
1356 
1357 	if (ctx->cc_provider_private == NULL)
1358 		return (CRYPTO_SUCCESS);
1359 
1360 	/*
1361 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1362 	 * have different lengths.
1363 	 *
	 * Note: the test below relies on the mechanism numbering: plain
	 * digest mechanisms have sc_mech_type % 3 == 0, while the HMAC
	 * mechanisms do not.
1365 	 */
1366 
1367 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1368 		ctx_len = sizeof (sha2_ctx_t);
1369 	else
1370 		ctx_len = sizeof (sha2_hmac_ctx_t);
1371 
1372 	bzero(ctx->cc_provider_private, ctx_len);
1373 	kmem_free(ctx->cc_provider_private, ctx_len);
1374 	ctx->cc_provider_private = NULL;
1375 
1376 	return (CRYPTO_SUCCESS);
1377 }
1378