xref: /titanic_41/usr/src/uts/common/crypto/io/sha2_mod.c (revision b509e89b2befbaa42939abad9da1d7f5a8c6aaae)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define	_SHA2_IMPL
35 #include <sys/sha2.h>
36 
37 /*
38  * The sha2 module is created with one modlinkage carrying two linkages:
39  * - a modlmisc that allows consumers to directly call the entry points
40  *   SHA2Init, SHA2Update, and SHA2Final.
41  * - a modlcrypto that allows the module to register with the Kernel
42  *   Cryptographic Framework (KCF) as a software provider for the SHA2
43  *   mechanisms.
44  */
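
/*
 * Illustrative sketch only (compiled out): the comment above notes that
 * consumers of misc/sha2 may call SHA2Init, SHA2Update, and SHA2Final
 * directly. A one-shot SHA-256 digest could be computed along these lines;
 * the function and buffer names below are hypothetical.
 */
#if 0
static void
example_sha256_digest(const uint8_t *buf, size_t len,
    uint8_t digest[SHA256_DIGEST_LENGTH])
{
	SHA2_CTX ctx;

	/* select the SHA-256 flavor of the shared SHA2 implementation */
	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);

	/* absorb the input; may be called repeatedly for streamed data */
	SHA2Update(&ctx, buf, len);

	/* produce the 32-byte digest */
	SHA2Final(digest, &ctx);
}
#endif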
45 
46 static struct modlmisc modlmisc = {
47 	&mod_miscops,
48 	"SHA2 Message-Digest Algorithm"
49 };
50 
51 static struct modlcrypto modlcrypto = {
52 	&mod_cryptoops,
53 	"SHA2 Kernel SW Provider"
54 };
55 
56 static struct modlinkage modlinkage = {
57 	MODREV_1, &modlmisc, &modlcrypto, NULL
58 };
59 
60 /*
61  * CSPI information (entry points, provider info, etc.)
62  */
63 
64 /*
65  * Context for SHA2 mechanism.
66  */
67 typedef struct sha2_ctx {
68 	sha2_mech_type_t	sc_mech_type;	/* type of context */
69 	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
70 } sha2_ctx_t;
71 
72 /*
73  * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
74  */
75 typedef struct sha2_hmac_ctx {
76 	sha2_mech_type_t	hc_mech_type;	/* type of context */
77 	uint32_t		hc_digest_len;	/* digest len in bytes */
78 	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
79 	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
80 } sha2_hmac_ctx_t;
81 
82 /*
83  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
84  * by KCF to one of the entry points.
85  */
86 
87 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
88 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
89 
90 /* to extract the digest length passed as mechanism parameter */
91 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
92 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
93 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
94 	else {								\
95 		ulong_t tmp_ulong;					\
96 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
97 		(len) = (uint32_t)tmp_ulong;				\
98 	}								\
99 }
100 
101 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
102 	SHA2Init(mech, ctx);				\
103 	SHA2Update(ctx, key, len);			\
104 	SHA2Final(digest, ctx);				\
105 }
106 
107 /*
108  * Mechanism info structure passed to KCF during registration.
109  */
110 static crypto_mech_info_t sha2_mech_info_tab[] = {
111 	/* SHA256 */
112 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
113 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
114 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
115 	/* SHA256-HMAC */
116 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
117 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
118 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
119 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
120 	/* SHA256-HMAC GENERAL */
121 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
122 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
123 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
124 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
125 	/* SHA384 */
126 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
127 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
128 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
129 	/* SHA384-HMAC */
130 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
131 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
132 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
133 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
134 	/* SHA384-HMAC GENERAL */
135 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
136 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
137 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
138 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
139 	/* SHA512 */
140 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
141 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
142 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
143 	/* SHA512-HMAC */
144 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
145 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
146 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
147 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
148 	/* SHA512-HMAC GENERAL */
149 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
150 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
151 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
152 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
153 };
154 
155 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
156 
157 static crypto_control_ops_t sha2_control_ops = {
158 	sha2_provider_status
159 };
160 
161 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
162     crypto_req_handle_t);
163 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
164     crypto_req_handle_t);
165 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
166     crypto_req_handle_t);
167 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
168     crypto_req_handle_t);
169 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
170     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
171     crypto_req_handle_t);
172 
173 static crypto_digest_ops_t sha2_digest_ops = {
174 	sha2_digest_init,
175 	sha2_digest,
176 	sha2_digest_update,
177 	NULL,
178 	sha2_digest_final,
179 	sha2_digest_atomic
180 };
181 
182 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
183     crypto_spi_ctx_template_t, crypto_req_handle_t);
184 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
185     crypto_req_handle_t);
186 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
187 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
188     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
189     crypto_spi_ctx_template_t, crypto_req_handle_t);
190 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
191     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
192     crypto_spi_ctx_template_t, crypto_req_handle_t);
193 
194 static crypto_mac_ops_t sha2_mac_ops = {
195 	sha2_mac_init,
196 	NULL,
197 	sha2_mac_update,
198 	sha2_mac_final,
199 	sha2_mac_atomic,
200 	sha2_mac_verify_atomic
201 };
202 
203 static int sha2_create_ctx_template(crypto_provider_handle_t,
204     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
205     size_t *, crypto_req_handle_t);
206 static int sha2_free_context(crypto_ctx_t *);
207 
208 static crypto_ctx_ops_t sha2_ctx_ops = {
209 	sha2_create_ctx_template,
210 	sha2_free_context
211 };
212 
213 static crypto_ops_t sha2_crypto_ops = {
214 	&sha2_control_ops,
215 	&sha2_digest_ops,
216 	NULL,
217 	&sha2_mac_ops,
218 	NULL,
219 	NULL,
220 	NULL,
221 	NULL,
222 	NULL,
223 	NULL,
224 	NULL,
225 	NULL,
226 	NULL,
227 	&sha2_ctx_ops
228 };
229 
230 static crypto_provider_info_t sha2_prov_info = {
231 	CRYPTO_SPI_VERSION_1,
232 	"SHA2 Software Provider",
233 	CRYPTO_SW_PROVIDER,
234 	{&modlinkage},
235 	NULL,
236 	&sha2_crypto_ops,
237 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
238 	sha2_mech_info_tab
239 };
240 
241 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
242 
243 int
244 _init()
245 {
246 	int ret;
247 
248 	if ((ret = mod_install(&modlinkage)) != 0)
249 		return (ret);
250 
251 	/*
252 	 * Register with KCF. If the registration fails, log an
253 	 * error but do not uninstall the module, since the functionality
254 	 * provided by misc/sha2 should still be available.
255 	 */
256 	if ((ret = crypto_register_provider(&sha2_prov_info,
257 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
258 		cmn_err(CE_WARN, "sha2 _init: "
259 		    "crypto_register_provider() failed (0x%x)", ret);
260 
261 	return (0);
262 }
263 
264 int
265 _info(struct modinfo *modinfop)
266 {
267 	return (mod_info(&modlinkage, modinfop));
268 }
269 
270 /*
271  * KCF software provider control entry points.
272  */
273 /* ARGSUSED */
274 static void
275 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
276 {
277 	*status = CRYPTO_PROVIDER_READY;
278 }
279 
280 /*
281  * KCF software provider digest entry points.
282  */
283 
284 static int
285 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
286     crypto_req_handle_t req)
287 {
288 
289 	/*
290 	 * Allocate and initialize SHA2 context.
291 	 */
292 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
293 	    crypto_kmflag(req));
294 	if (ctx->cc_provider_private == NULL)
295 		return (CRYPTO_HOST_MEMORY);
296 
297 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
298 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
299 
300 	return (CRYPTO_SUCCESS);
301 }
302 
303 /*
304  * Helper SHA2 digest update function for uio data.
305  */
306 static int
307 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
308 {
309 	off_t offset = data->cd_offset;
310 	size_t length = data->cd_length;
311 	uint_t vec_idx;
312 	size_t cur_len;
313 
314 	/* we support only kernel buffer */
315 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
316 		return (CRYPTO_ARGUMENTS_BAD);
317 
318 	/*
319 	 * Jump to the first iovec containing data to be
320 	 * digested.
321 	 */
322 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
323 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
324 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
325 		;
326 	if (vec_idx == data->cd_uio->uio_iovcnt) {
327 		/*
328 		 * The caller specified an offset that is larger than the
329 		 * total size of the buffers it provided.
330 		 */
331 		return (CRYPTO_DATA_LEN_RANGE);
332 	}
333 
334 	/*
335 	 * Now do the digesting on the iovecs.
336 	 */
337 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
338 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
339 		    offset, length);
340 
341 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
342 		    uio_iov[vec_idx].iov_base + offset, cur_len);
343 		length -= cur_len;
344 		vec_idx++;
345 		offset = 0;
346 	}
347 
348 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
349 		/*
350 		 * The end of the specified iovecs was reached but the
351 		 * length requested could not be processed, i.e. the caller
352 		 * requested to digest more data than it provided.
353 		 */
354 		return (CRYPTO_DATA_LEN_RANGE);
355 	}
356 
357 	return (CRYPTO_SUCCESS);
358 }
359 
360 /*
361  * Helper SHA2 digest final function for uio data.
362  * digest_len is the length of the desired digest. If digest_len
363  * is smaller than the default SHA2 digest length, the caller
364  * must pass a scratch buffer, digest_scratch, which must
365  * be at least the algorithm's digest length bytes.
366  */
367 static int
368 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
369     ulong_t digest_len, uchar_t *digest_scratch)
370 {
371 	off_t offset = digest->cd_offset;
372 	uint_t vec_idx;
373 
374 	/* we support only kernel buffer */
375 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
376 		return (CRYPTO_ARGUMENTS_BAD);
377 
378 	/*
379 	 * Jump to the first iovec containing ptr to the digest to
380 	 * be returned.
381 	 */
382 	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
383 	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
384 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
385 		;
386 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
387 		/*
388 		 * The caller specified an offset that is
389 		 * larger than the total size of the buffers
390 		 * it provided.
391 		 */
392 		return (CRYPTO_DATA_LEN_RANGE);
393 	}
394 
395 	if (offset + digest_len <=
396 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
397 		/*
398 		 * The computed SHA2 digest will fit in the current
399 		 * iovec.
400 		 */
401 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
402 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
403 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
404 		    (digest_len != SHA512_DIGEST_LENGTH))) {
405 			/*
406 			 * The caller requested a short digest. Digest
407 			 * into a scratch buffer and return to
408 			 * the user only what was requested.
409 			 */
410 			SHA2Final(digest_scratch, sha2_ctx);
411 
412 			bcopy(digest_scratch, (uchar_t *)digest->
413 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
414 			    digest_len);
415 		} else {
416 			SHA2Final((uchar_t *)digest->
417 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
418 			    sha2_ctx);
419 
421 	} else {
422 		/*
423 		 * The computed digest will be crossing one or more iovec's.
424 		 * The computed digest will cross one or more iovecs.
425 		 * This is bad performance-wise but we need to support it.
426 		 * Allocate a small scratch buffer on the stack and
427 		 * copy it piecemeal to the specified digest iovecs.
428 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
429 		off_t scratch_offset = 0;
430 		size_t length = digest_len;
431 		size_t cur_len;
432 
433 		SHA2Final(digest_tmp, sha2_ctx);
434 
435 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
436 			cur_len =
437 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
438 			    offset, length);
439 			bcopy(digest_tmp + scratch_offset,
440 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
441 			    cur_len);
442 
443 			length -= cur_len;
444 			vec_idx++;
445 			scratch_offset += cur_len;
446 			offset = 0;
447 		}
448 
449 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
450 			/*
451 			 * The end of the specified iovecs was reached but
452 			 * the length requested could not be processed, i.e.
453 			 * the caller requested to digest more data than it
454 			 * provided.
455 			 */
456 			return (CRYPTO_DATA_LEN_RANGE);
457 		}
458 	}
459 
460 	return (CRYPTO_SUCCESS);
461 }
462 
463 /*
464  * Helper SHA2 digest update for mblk's.
465  */
466 static int
467 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
468 {
469 	off_t offset = data->cd_offset;
470 	size_t length = data->cd_length;
471 	mblk_t *mp;
472 	size_t cur_len;
473 
474 	/*
475 	 * Jump to the first mblk_t containing data to be digested.
476 	 */
477 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
478 	    offset -= MBLKL(mp), mp = mp->b_cont)
479 		;
480 	if (mp == NULL) {
481 		/*
482 		 * The caller specified an offset that is larger than the
483 		 * total size of the buffers it provided.
484 		 */
485 		return (CRYPTO_DATA_LEN_RANGE);
486 	}
487 
488 	/*
489 	 * Now do the digesting on the mblk chain.
490 	 */
491 	while (mp != NULL && length > 0) {
492 		cur_len = MIN(MBLKL(mp) - offset, length);
493 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
494 		length -= cur_len;
495 		offset = 0;
496 		mp = mp->b_cont;
497 	}
498 
499 	if (mp == NULL && length > 0) {
500 		/*
501 		 * The end of the mblk chain was reached but the length
502 		 * requested could not be processed, i.e. the caller requested
503 		 * to digest more data than it provided.
504 		 */
505 		return (CRYPTO_DATA_LEN_RANGE);
506 	}
507 
508 	return (CRYPTO_SUCCESS);
509 }
510 
511 /*
512  * Helper SHA2 digest final for mblk's.
513  * digest_len is the length of the desired digest. If digest_len
514  * is smaller than the default SHA2 digest length, the caller
515  * must pass a scratch buffer, digest_scratch, which must
516  * be at least the algorithm's digest length bytes.
517  */
518 static int
519 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
520     ulong_t digest_len, uchar_t *digest_scratch)
521 {
522 	off_t offset = digest->cd_offset;
523 	mblk_t *mp;
524 
525 	/*
526 	 * Jump to the first mblk_t that will be used to store the digest.
527 	 */
528 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
529 	    offset -= MBLKL(mp), mp = mp->b_cont)
530 		;
531 	if (mp == NULL) {
532 		/*
533 		 * The caller specified an offset that is larger than the
534 		 * total size of the buffers it provided.
535 		 */
536 		return (CRYPTO_DATA_LEN_RANGE);
537 	}
538 
539 	if (offset + digest_len <= MBLKL(mp)) {
540 		/*
541 		 * The computed SHA2 digest will fit in the current mblk.
542 		 * Do the SHA2Final() in-place.
543 		 */
544 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
545 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
546 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
547 		    (digest_len != SHA512_DIGEST_LENGTH))) {
548 			/*
549 			 * The caller requested a short digest. Digest
550 			 * into a scratch buffer and return to
551 			 * the user only what was requested.
552 			 */
553 			SHA2Final(digest_scratch, sha2_ctx);
554 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
555 		} else {
556 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
557 		}
558 	} else {
559 		/*
560 		 * The computed digest will cross one or more mblks.
561 		 * This is bad performance-wise but we need to support it.
562 		 * Allocate a small scratch buffer on the stack and
563 		 * copy it piecemeal to the specified digest mblks.
564 		 */
565 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
566 		off_t scratch_offset = 0;
567 		size_t length = digest_len;
568 		size_t cur_len;
569 
570 		SHA2Final(digest_tmp, sha2_ctx);
571 
572 		while (mp != NULL && length > 0) {
573 			cur_len = MIN(MBLKL(mp) - offset, length);
574 			bcopy(digest_tmp + scratch_offset,
575 			    mp->b_rptr + offset, cur_len);
576 
577 			length -= cur_len;
578 			mp = mp->b_cont;
579 			scratch_offset += cur_len;
580 			offset = 0;
581 		}
582 
583 		if (mp == NULL && length > 0) {
584 			/*
585 			 * The end of the specified mblk chain was reached but
586 			 * the length requested could not be processed, i.e.
587 			 * the caller requested to digest more data than it
588 			 * provided.
589 			 */
590 			return (CRYPTO_DATA_LEN_RANGE);
591 		}
592 	}
593 
594 	return (CRYPTO_SUCCESS);
595 }
596 
597 /* ARGSUSED */
598 static int
599 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
600     crypto_req_handle_t req)
601 {
602 	int ret = CRYPTO_SUCCESS;
603 	uint_t sha_digest_len;
604 
605 	ASSERT(ctx->cc_provider_private != NULL);
606 
607 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
608 	case SHA256_MECH_INFO_TYPE:
609 		sha_digest_len = SHA256_DIGEST_LENGTH;
610 		break;
611 	case SHA384_MECH_INFO_TYPE:
612 		sha_digest_len = SHA384_DIGEST_LENGTH;
613 		break;
614 	case SHA512_MECH_INFO_TYPE:
615 		sha_digest_len = SHA512_DIGEST_LENGTH;
616 		break;
617 	default:
618 		return (CRYPTO_MECHANISM_INVALID);
619 	}
620 
621 	/*
622 	 * If the output buffer is missing or too small, just return the
623 	 * length needed to store the output and do not destroy the context.
624 	 */
625 	if ((digest->cd_length == 0) ||
626 	    (digest->cd_length < sha_digest_len)) {
627 		digest->cd_length = sha_digest_len;
628 		return (CRYPTO_BUFFER_TOO_SMALL);
629 	}
630 
631 	/*
632 	 * Do the SHA2 update on the specified input data.
633 	 */
634 	switch (data->cd_format) {
635 	case CRYPTO_DATA_RAW:
636 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
637 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
638 		    data->cd_length);
639 		break;
640 	case CRYPTO_DATA_UIO:
641 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
642 		    data);
643 		break;
644 	case CRYPTO_DATA_MBLK:
645 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
646 		    data);
647 		break;
648 	default:
649 		ret = CRYPTO_ARGUMENTS_BAD;
650 	}
651 
652 	if (ret != CRYPTO_SUCCESS) {
653 		/* the update failed, free context and bail */
654 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
655 		ctx->cc_provider_private = NULL;
656 		digest->cd_length = 0;
657 		return (ret);
658 	}
659 
660 	/*
661 	 * Do a SHA2 final; this must be done separately since the digest
662 	 * data type can differ from the input data type.
663 	 */
664 	switch (digest->cd_format) {
665 	case CRYPTO_DATA_RAW:
666 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
667 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
668 		break;
669 	case CRYPTO_DATA_UIO:
670 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
671 		    digest, sha_digest_len, NULL);
672 		break;
673 	case CRYPTO_DATA_MBLK:
674 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
675 		    digest, sha_digest_len, NULL);
676 		break;
677 	default:
678 		ret = CRYPTO_ARGUMENTS_BAD;
679 	}
680 
681 	/* all done, free context and return */
682 
683 	if (ret == CRYPTO_SUCCESS)
684 		digest->cd_length = sha_digest_len;
685 	else
686 		digest->cd_length = 0;
687 
688 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
689 	ctx->cc_provider_private = NULL;
690 	return (ret);
691 }
692 
693 /* ARGSUSED */
694 static int
695 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
696     crypto_req_handle_t req)
697 {
698 	int ret = CRYPTO_SUCCESS;
699 
700 	ASSERT(ctx->cc_provider_private != NULL);
701 
702 	/*
703 	 * Do the SHA2 update on the specified input data.
704 	 */
705 	switch (data->cd_format) {
706 	case CRYPTO_DATA_RAW:
707 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
708 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
709 		    data->cd_length);
710 		break;
711 	case CRYPTO_DATA_UIO:
712 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
713 		    data);
714 		break;
715 	case CRYPTO_DATA_MBLK:
716 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
717 		    data);
718 		break;
719 	default:
720 		ret = CRYPTO_ARGUMENTS_BAD;
721 	}
722 
723 	return (ret);
724 }
725 
726 /* ARGSUSED */
727 static int
728 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
729     crypto_req_handle_t req)
730 {
731 	int ret = CRYPTO_SUCCESS;
732 	uint_t sha_digest_len;
733 
734 	ASSERT(ctx->cc_provider_private != NULL);
735 
736 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
737 	case SHA256_MECH_INFO_TYPE:
738 		sha_digest_len = SHA256_DIGEST_LENGTH;
739 		break;
740 	case SHA384_MECH_INFO_TYPE:
741 		sha_digest_len = SHA384_DIGEST_LENGTH;
742 		break;
743 	case SHA512_MECH_INFO_TYPE:
744 		sha_digest_len = SHA512_DIGEST_LENGTH;
745 		break;
746 	default:
747 		return (CRYPTO_MECHANISM_INVALID);
748 	}
749 
750 	/*
751 	 * If the output buffer is missing or too small, just return the
752 	 * length needed to store the output and do not destroy the context.
753 	 */
754 	if ((digest->cd_length == 0) ||
755 	    (digest->cd_length < sha_digest_len)) {
756 		digest->cd_length = sha_digest_len;
757 		return (CRYPTO_BUFFER_TOO_SMALL);
758 	}
759 
760 	/*
761 	 * Do a SHA2 final.
762 	 */
763 	switch (digest->cd_format) {
764 	case CRYPTO_DATA_RAW:
765 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
766 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
767 		break;
768 	case CRYPTO_DATA_UIO:
769 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
770 		    digest, sha_digest_len, NULL);
771 		break;
772 	case CRYPTO_DATA_MBLK:
773 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
774 		    digest, sha_digest_len, NULL);
775 		break;
776 	default:
777 		ret = CRYPTO_ARGUMENTS_BAD;
778 	}
779 
780 	/* all done, free context and return */
781 
782 	if (ret == CRYPTO_SUCCESS)
783 		digest->cd_length = sha_digest_len;
784 	else
785 		digest->cd_length = 0;
786 
787 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
788 	ctx->cc_provider_private = NULL;
789 
790 	return (ret);
791 }
792 
793 /* ARGSUSED */
794 static int
795 sha2_digest_atomic(crypto_provider_handle_t provider,
796     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
797     crypto_data_t *data, crypto_data_t *digest,
798     crypto_req_handle_t req)
799 {
800 	int ret = CRYPTO_SUCCESS;
801 	SHA2_CTX sha2_ctx;
802 	uint32_t sha_digest_len;
803 
804 	/*
805 	 * Do the SHA2 init, then the update on the specified input data.
806 	 */
807 
808 	SHA2Init(mechanism->cm_type, &sha2_ctx);
809 
810 	switch (data->cd_format) {
811 	case CRYPTO_DATA_RAW:
812 		SHA2Update(&sha2_ctx, (uint8_t *)data->
813 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
814 		break;
815 	case CRYPTO_DATA_UIO:
816 		ret = sha2_digest_update_uio(&sha2_ctx, data);
817 		break;
818 	case CRYPTO_DATA_MBLK:
819 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
820 		break;
821 	default:
822 		ret = CRYPTO_ARGUMENTS_BAD;
823 	}
824 
829 	if (ret != CRYPTO_SUCCESS) {
830 		/* the update failed, bail */
831 		digest->cd_length = 0;
832 		return (ret);
833 	}
834 
835 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
836 		sha_digest_len = SHA256_DIGEST_LENGTH;
837 	else
838 		sha_digest_len = SHA512_DIGEST_LENGTH;
839 
840 	/*
841 	 * Do a SHA2 final; this must be done separately since the digest
842 	 * data type can differ from the input data type.
843 	 */
844 	switch (digest->cd_format) {
845 	case CRYPTO_DATA_RAW:
846 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
847 		    digest->cd_offset, &sha2_ctx);
848 		break;
849 	case CRYPTO_DATA_UIO:
850 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
851 		    sha_digest_len, NULL);
852 		break;
853 	case CRYPTO_DATA_MBLK:
854 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
855 		    sha_digest_len, NULL);
856 		break;
857 	default:
858 		ret = CRYPTO_ARGUMENTS_BAD;
859 	}
860 
861 	if (ret == CRYPTO_SUCCESS)
862 		digest->cd_length = sha_digest_len;
863 	else
864 		digest->cd_length = 0;
865 
866 	return (ret);
867 }
868 
869 /*
870  * KCF software provider mac entry points.
871  *
872  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
873  *
874  * Init:
875  * The initialization routine initializes what we denote
876  * as the inner and outer contexts by doing
877  * - for inner context: SHA2(key XOR ipad)
878  * - for outer context: SHA2(key XOR opad)
879  *
880  * Update:
881  * Each subsequent SHA2 HMAC update will result in an
882  * update of the inner context with the specified data.
883  *
884  * Final:
885  * The SHA2 HMAC final will do a SHA2 final operation on the
886  * inner context, and the resulting digest will be used
887  * as the data for an update on the outer context. Last
888  * but not least, a SHA2 final on the outer context will
889  * be performed to obtain the SHA2 HMAC digest to return
890  * to the user.
891  */
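
/*
 * Illustrative sketch only (compiled out): the HMAC construction described
 * above, written as straight-line code for SHA-256 and a key no longer than
 * the block size (long keys are first hashed and short digests truncated by
 * the routines below). All names are hypothetical.
 */
#if 0
static void
example_hmac_sha256(const uint8_t *key, size_t keylen,
    const uint8_t *text, size_t textlen, uint8_t mac[SHA256_DIGEST_LENGTH])
{
	uint8_t ipad[SHA256_HMAC_BLOCK_SIZE];
	uint8_t opad[SHA256_HMAC_BLOCK_SIZE];
	uint8_t inner[SHA256_DIGEST_LENGTH];
	SHA2_CTX ctx;
	int i;

	/* zero-pad the key to the block size, then XOR in the pad bytes */
	bzero(ipad, sizeof (ipad));
	bzero(opad, sizeof (opad));
	bcopy(key, ipad, keylen);
	bcopy(key, opad, keylen);
	for (i = 0; i < SHA256_HMAC_BLOCK_SIZE; i++) {
		ipad[i] ^= 0x36;
		opad[i] ^= 0x5c;
	}

	/* inner hash: SHA2(key XOR ipad, text) */
	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
	SHA2Update(&ctx, ipad, sizeof (ipad));
	SHA2Update(&ctx, text, textlen);
	SHA2Final(inner, &ctx);

	/* outer hash: SHA2(key XOR opad, inner digest) */
	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
	SHA2Update(&ctx, opad, sizeof (opad));
	SHA2Update(&ctx, inner, sizeof (inner));
	SHA2Final(mac, &ctx);
}
#endif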
892 
893 /*
894  * Initialize a SHA2-HMAC context.
895  */
896 static void
897 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
898 {
899 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
900 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
901 	int i, block_size, blocks_per_int64;
902 
903 	/* Determine the block size */
904 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
905 		block_size = SHA256_HMAC_BLOCK_SIZE;
906 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
907 	} else {
908 		block_size = SHA512_HMAC_BLOCK_SIZE;
909 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
910 	}
911 
912 	(void) bzero(ipad, block_size);
913 	(void) bzero(opad, block_size);
914 	(void) bcopy(keyval, ipad, length_in_bytes);
915 	(void) bcopy(keyval, opad, length_in_bytes);
916 
917 	/* XOR key with ipad (0x36) and opad (0x5c) */
918 	for (i = 0; i < blocks_per_int64; i++) {
919 		ipad[i] ^= 0x3636363636363636;
920 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
921 	}
922 
923 	/* perform SHA2 on ipad */
924 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
925 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
926 
927 	/* perform SHA2 on opad */
928 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
929 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
931 }
932 
933 /*
934  * Initialize a SHA2-HMAC operation (KCF mac init entry point).
 */
935 static int
936 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
937     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
938     crypto_req_handle_t req)
939 {
940 	int ret = CRYPTO_SUCCESS;
941 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
942 	uint_t sha_digest_len, sha_hmac_block_size;
943 
944 	/*
945 	 * Set the digest length and block size to values appropriate to the
946 	 * mechanism.
947 	 */
948 	switch (mechanism->cm_type) {
949 	case SHA256_HMAC_MECH_INFO_TYPE:
950 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
951 		sha_digest_len = SHA256_DIGEST_LENGTH;
952 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
953 		break;
954 	case SHA384_HMAC_MECH_INFO_TYPE:
955 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
956 	case SHA512_HMAC_MECH_INFO_TYPE:
957 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
958 		sha_digest_len = SHA512_DIGEST_LENGTH;
959 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
960 		break;
961 	default:
962 		return (CRYPTO_MECHANISM_INVALID);
963 	}
964 
965 	if (key->ck_format != CRYPTO_KEY_RAW)
966 		return (CRYPTO_ARGUMENTS_BAD);
967 
968 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
969 	    crypto_kmflag(req));
970 	if (ctx->cc_provider_private == NULL)
971 		return (CRYPTO_HOST_MEMORY);
972 
973 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
974 	if (ctx_template != NULL) {
975 		/* reuse context template */
976 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
977 		    sizeof (sha2_hmac_ctx_t));
978 	} else {
979 		/* no context template, compute context */
980 		if (keylen_in_bytes > sha_hmac_block_size) {
981 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
982 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
983 
984 			/*
985 			 * Hash the passed-in key to get a smaller key.
986 			 * The inner context is used since it hasn't been
987 			 * initialized yet.
988 			 */
989 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
990 			    &hmac_ctx->hc_icontext,
991 			    key->ck_data, keylen_in_bytes, digested_key);
992 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
993 			    digested_key, sha_digest_len);
994 		} else {
995 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
996 			    key->ck_data, keylen_in_bytes);
997 		}
998 	}
999 
1000 	/*
1001 	 * Get the mechanism parameters, if applicable.
1002 	 */
1003 	if (mechanism->cm_type % 3 == 2) {
1004 		if (mechanism->cm_param == NULL ||
1005 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1006 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1007 		} else {
1008 			PROV_SHA2_GET_DIGEST_LEN(mechanism,
1009 			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
1010 			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
1011 			    sha_digest_len)
1012 				ret = CRYPTO_MECHANISM_PARAM_INVALID;
1013 		}
1011 	}
1012 
1013 	if (ret != CRYPTO_SUCCESS) {
1014 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1015 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1016 		ctx->cc_provider_private = NULL;
1017 	}
1018 
1019 	return (ret);
1020 }
1021 
1022 /* ARGSUSED */
1023 static int
1024 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1025     crypto_req_handle_t req)
1026 {
1027 	int ret = CRYPTO_SUCCESS;
1028 
1029 	ASSERT(ctx->cc_provider_private != NULL);
1030 
1031 	/*
1032 	 * Do a SHA2 update of the inner context using the specified
1033 	 * data.
1034 	 */
1035 	switch (data->cd_format) {
1036 	case CRYPTO_DATA_RAW:
1037 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1038 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1039 		    data->cd_length);
1040 		break;
1041 	case CRYPTO_DATA_UIO:
1042 		ret = sha2_digest_update_uio(
1043 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1044 		break;
1045 	case CRYPTO_DATA_MBLK:
1046 		ret = sha2_digest_update_mblk(
1047 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1048 		break;
1049 	default:
1050 		ret = CRYPTO_ARGUMENTS_BAD;
1051 	}
1052 
1053 	return (ret);
1054 }
1055 
1056 /* ARGSUSED */
1057 static int
1058 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1059 {
1060 	int ret = CRYPTO_SUCCESS;
1061 	uchar_t digest[SHA512_DIGEST_LENGTH];
1062 	uint32_t digest_len, sha_digest_len;
1063 
1064 	ASSERT(ctx->cc_provider_private != NULL);
1065 
1066 	/* Set the digest lengths to values appropriate to the mechanism */
1067 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1068 	case SHA256_HMAC_MECH_INFO_TYPE:
1069 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1070 		break;
1071 	case SHA384_HMAC_MECH_INFO_TYPE:
1072 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1073 		break;
1074 	case SHA512_HMAC_MECH_INFO_TYPE:
1075 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1076 		break;
1077 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1078 		sha_digest_len = SHA256_DIGEST_LENGTH;
1079 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1080 		break;
1081 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
		/* the outer hash consumes only 48 bytes of the inner digest */
		sha_digest_len = SHA384_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
1082 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1083 		sha_digest_len = SHA512_DIGEST_LENGTH;
1084 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1085 		break;
1086 	}
1087 
1088 	/*
1089 	 * If the output buffer is missing or too small, just return the
1090 	 * length needed to store the output and do not destroy the context.
1091 	 */
1092 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1093 		mac->cd_length = digest_len;
1094 		return (CRYPTO_BUFFER_TOO_SMALL);
1095 	}
1096 
1097 	/*
1098 	 * Do a SHA2 final on the inner context.
1099 	 */
1100 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1101 
1102 	/*
1103 	 * Do a SHA2 update on the outer context, feeding the inner
1104 	 * digest as data.
1105 	 */
1106 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1107 	    sha_digest_len);
1108 
1109 	/*
1110 	 * Do a SHA2 final on the outer context, storing the computed
1111 	 * digest in the user's buffer.
1112 	 */
1113 	switch (mac->cd_format) {
1114 	case CRYPTO_DATA_RAW:
1115 		if (digest_len != sha_digest_len) {
1116 			/*
1117 			 * The caller requested a short digest. Digest
1118 			 * into a scratch buffer and return to
1119 			 * the user only what was requested.
1120 			 */
1121 			SHA2Final(digest,
1122 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1123 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1124 			    mac->cd_offset, digest_len);
1125 		} else {
1126 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1127 			    mac->cd_offset,
1128 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1129 		}
1130 		break;
1131 	case CRYPTO_DATA_UIO:
1132 		ret = sha2_digest_final_uio(
1133 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1134 		    digest_len, digest);
1135 		break;
1136 	case CRYPTO_DATA_MBLK:
1137 		ret = sha2_digest_final_mblk(
1138 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1139 		    digest_len, digest);
1140 		break;
1141 	default:
1142 		ret = CRYPTO_ARGUMENTS_BAD;
1143 	}
1144 
1145 	if (ret == CRYPTO_SUCCESS)
1146 		mac->cd_length = digest_len;
1147 	else
1148 		mac->cd_length = 0;
1149 
1150 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1151 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1152 	ctx->cc_provider_private = NULL;
1153 
1154 	return (ret);
1155 }
1156 
1157 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1158 	switch (data->cd_format) {					\
1159 	case CRYPTO_DATA_RAW:						\
1160 		SHA2Update(&(ctx).hc_icontext,				\
1161 		    (uint8_t *)data->cd_raw.iov_base +			\
1162 		    data->cd_offset, data->cd_length);			\
1163 		break;							\
1164 	case CRYPTO_DATA_UIO:						\
1165 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1166 		break;							\
1167 	case CRYPTO_DATA_MBLK:						\
1168 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1169 		    data);						\
1170 		break;							\
1171 	default:							\
1172 		ret = CRYPTO_ARGUMENTS_BAD;				\
1173 	}								\
1174 }
1175 
1176 /* ARGSUSED */
1177 static int
1178 sha2_mac_atomic(crypto_provider_handle_t provider,
1179     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1180     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1181     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1182 {
1183 	int ret = CRYPTO_SUCCESS;
1184 	uchar_t digest[SHA512_DIGEST_LENGTH];
1185 	sha2_hmac_ctx_t sha2_hmac_ctx;
1186 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1187 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1188 
1189 	/*
1190 	 * Set the digest length and block size to values appropriate to the
1191 	 * mechanism.
1192 	 */
1193 	switch (mechanism->cm_type) {
1194 	case SHA256_HMAC_MECH_INFO_TYPE:
1195 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1196 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1197 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1198 		break;
1199 	case SHA384_HMAC_MECH_INFO_TYPE:
1200 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1201 	case SHA512_HMAC_MECH_INFO_TYPE:
1202 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1203 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1204 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1205 		break;
1206 	default:
1207 		return (CRYPTO_MECHANISM_INVALID);
1208 	}
1209 
1210 	/* Add support for key by attributes (RFE 4706552) */
1211 	if (key->ck_format != CRYPTO_KEY_RAW)
1212 		return (CRYPTO_ARGUMENTS_BAD);
1213 
1214 	if (ctx_template != NULL) {
1215 		/* reuse context template */
1216 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1217 	} else {
1218 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1219 		/* no context template, initialize context */
1220 		if (keylen_in_bytes > sha_hmac_block_size) {
1221 			/*
1222 			 * Hash the passed-in key to get a smaller key.
1223 			 * The inner context is used since it hasn't been
1224 			 * initialized yet.
1225 			 */
1226 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1227 			    &sha2_hmac_ctx.hc_icontext,
1228 			    key->ck_data, keylen_in_bytes, digest);
1229 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1230 			    sha_digest_len);
1231 		} else {
1232 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1233 			    keylen_in_bytes);
1234 		}
1235 	}
1236 
1237 	/* get the mechanism parameters, if applicable */
1238 	if ((mechanism->cm_type % 3) == 2) {
1239 		if (mechanism->cm_param == NULL ||
1240 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1241 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1242 			goto bail;
1243 		}
1244 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1245 		if (digest_len > sha_digest_len) {
1246 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1247 			goto bail;
1248 		}
1249 	}
1250 
1251 	/* do a SHA2 update of the inner context using the specified data */
1252 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1253 	if (ret != CRYPTO_SUCCESS)
1254 		/* the update failed, zero the context and bail */
1255 		goto bail;
1256 
1257 	/*
1258 	 * Do a SHA2 final on the inner context.
1259 	 */
1260 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1261 
1262 	/*
1263 	 * Do a SHA2 update on the outer context, feeding the inner
1264 	 * digest as data.
1265 	 *
1266 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1267 	 * bytes of the inner hash value.
1268 	 */
1269 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1270 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1271 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1272 		    SHA384_DIGEST_LENGTH);
1273 	else
1274 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1275 
1276 	/*
1277 	 * Do a SHA2 final on the outer context, storing the computed
1278 	 * digest in the user's buffer.
1279 	 */
1280 	switch (mac->cd_format) {
1281 	case CRYPTO_DATA_RAW:
1282 		if (digest_len != sha_digest_len) {
1283 			/*
1284 			 * The caller requested a short digest. Digest
1285 			 * into a scratch buffer and return to
1286 			 * the user only what was requested.
1287 			 */
1288 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1289 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1290 			    mac->cd_offset, digest_len);
1291 		} else {
1292 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1293 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1294 		}
1295 		break;
1296 	case CRYPTO_DATA_UIO:
1297 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1298 		    digest_len, digest);
1299 		break;
1300 	case CRYPTO_DATA_MBLK:
1301 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1302 		    digest_len, digest);
1303 		break;
1304 	default:
1305 		ret = CRYPTO_ARGUMENTS_BAD;
1306 	}
1307 
1308 	if (ret == CRYPTO_SUCCESS) {
1309 		mac->cd_length = digest_len;
1310 		return (CRYPTO_SUCCESS);
1311 	}
1312 bail:
1313 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1314 	mac->cd_length = 0;
1315 	return (ret);
1316 }
1317 
1318 /* ARGSUSED */
1319 static int
1320 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1321     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1322     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1323     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1324 {
1325 	int ret = CRYPTO_SUCCESS;
1326 	uchar_t digest[SHA512_DIGEST_LENGTH];
1327 	sha2_hmac_ctx_t sha2_hmac_ctx;
1328 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1329 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1330 
1331 	/*
1332 	 * Set the digest length and block size to values appropriate to the
1333 	 * mechanism.
1334 	 */
1335 	switch (mechanism->cm_type) {
1336 	case SHA256_HMAC_MECH_INFO_TYPE:
1337 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1338 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1339 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1340 		break;
1341 	case SHA384_HMAC_MECH_INFO_TYPE:
1342 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1343 	case SHA512_HMAC_MECH_INFO_TYPE:
1344 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1345 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1346 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1347 		break;
1348 	default:
1349 		return (CRYPTO_MECHANISM_INVALID);
1350 	}
1351 
1352 	/* Add support for key by attributes (RFE 4706552) */
1353 	if (key->ck_format != CRYPTO_KEY_RAW)
1354 		return (CRYPTO_ARGUMENTS_BAD);
1355 
1356 	if (ctx_template != NULL) {
1357 		/* reuse context template */
1358 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1359 	} else {
1360 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1361 		/* no context template, initialize context */
1362 		if (keylen_in_bytes > sha_hmac_block_size) {
1363 			/*
1364 			 * Hash the passed-in key to get a smaller key.
1365 			 * The inner context is used since it hasn't been
1366 			 * initialized yet.
1367 			 */
1368 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1369 			    &sha2_hmac_ctx.hc_icontext,
1370 			    key->ck_data, keylen_in_bytes, digest);
1371 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1372 			    sha_digest_len);
1373 		} else {
1374 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1375 			    keylen_in_bytes);
1376 		}
1377 	}
1378 
1379 	/* get the mechanism parameters, if applicable */
1380 	if (mechanism->cm_type % 3 == 2) {
1381 		if (mechanism->cm_param == NULL ||
1382 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1383 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1384 			goto bail;
1385 		}
1386 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1387 		if (digest_len > sha_digest_len) {
1388 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1389 			goto bail;
1390 		}
1391 	}
1392 
1393 	if (mac->cd_length != digest_len) {
1394 		ret = CRYPTO_INVALID_MAC;
1395 		goto bail;
1396 	}
1397 
1398 	/* do a SHA2 update of the inner context using the specified data */
1399 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1400 	if (ret != CRYPTO_SUCCESS)
1401 		/* the update failed, zero the context and bail */
1402 		goto bail;
1403 
1404 	/* do a SHA2 final on the inner context */
1405 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1406 
1407 	/*
1408 	 * Do a SHA2 update on the outer context, feeding the inner
1409 	 * digest as data.
1410 	 *
1411 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1412 	 * bytes of the inner hash value.
1413 	 */
1414 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1415 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1416 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1417 		    SHA384_DIGEST_LENGTH);
1418 	else
1419 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1420 
1421 	/*
1422 	 * Do a SHA2 final on the outer context, storing the computed
1423 	 * digest in the user's buffer.
1424 	 */
1425 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1426 
1427 	/*
1428 	 * Compare the computed digest against the expected digest passed
1429 	 * as argument.
1430 	 */
1431 
1432 	switch (mac->cd_format) {
1433 
1434 	case CRYPTO_DATA_RAW:
1435 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1436 		    mac->cd_offset, digest_len) != 0)
1437 			ret = CRYPTO_INVALID_MAC;
1438 		break;
1439 
1440 	case CRYPTO_DATA_UIO: {
1441 		off_t offset = mac->cd_offset;
1442 		uint_t vec_idx;
1443 		off_t scratch_offset = 0;
1444 		size_t length = digest_len;
1445 		size_t cur_len;
1446 
1447 		/* we support only kernel buffer */
1448 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1449 			return (CRYPTO_ARGUMENTS_BAD);
1450 
1451 		/* jump to the first iovec containing the expected digest */
1452 		for (vec_idx = 0;
1453 		    vec_idx < mac->cd_uio->uio_iovcnt &&
1454 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1455 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1456 			;
1457 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1458 			/*
1459 			 * The caller specified an offset that is
1460 			 * larger than the total size of the buffers
1461 			 * it provided.
1462 			 */
1463 			ret = CRYPTO_DATA_LEN_RANGE;
1464 			break;
1465 		}
1466 
1467 		/* do the comparison of computed digest vs specified one */
1468 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1469 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1470 			    offset, length);
1471 
1472 			if (bcmp(digest + scratch_offset,
1473 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1474 			    cur_len) != 0) {
1475 				ret = CRYPTO_INVALID_MAC;
1476 				break;
1477 			}
1478 
1479 			length -= cur_len;
1480 			vec_idx++;
1481 			scratch_offset += cur_len;
1482 			offset = 0;
1483 		}
1484 		break;
1485 	}
1486 
1487 	case CRYPTO_DATA_MBLK: {
1488 		off_t offset = mac->cd_offset;
1489 		mblk_t *mp;
1490 		off_t scratch_offset = 0;
1491 		size_t length = digest_len;
1492 		size_t cur_len;
1493 
1494 		/* jump to the first mblk_t containing the expected digest */
1495 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1496 		    offset -= MBLKL(mp), mp = mp->b_cont)
1497 			;
1498 		if (mp == NULL) {
1499 			/*
1500 			 * The caller specified an offset that is larger than
1501 			 * the total size of the buffers it provided.
1502 			 */
1503 			ret = CRYPTO_DATA_LEN_RANGE;
1504 			break;
1505 		}
1506 
1507 		while (mp != NULL && length > 0) {
1508 			cur_len = MIN(MBLKL(mp) - offset, length);
1509 			if (bcmp(digest + scratch_offset,
1510 			    mp->b_rptr + offset, cur_len) != 0) {
1511 				ret = CRYPTO_INVALID_MAC;
1512 				break;
1513 			}
1514 
1515 			length -= cur_len;
1516 			mp = mp->b_cont;
1517 			scratch_offset += cur_len;
1518 			offset = 0;
1519 		}
1520 		break;
1521 	}
1522 
1523 	default:
1524 		ret = CRYPTO_ARGUMENTS_BAD;
1525 	}
1526 
1527 	return (ret);
1528 bail:
1529 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1530 	mac->cd_length = 0;
1531 	return (ret);
1532 }
1533 
1534 /*
1535  * KCF software provider context management entry points.
1536  */
1537 
1538 /* ARGSUSED */
1539 static int
1540 sha2_create_ctx_template(crypto_provider_handle_t provider,
1541     crypto_mechanism_t *mechanism, crypto_key_t *key,
1542     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1543     crypto_req_handle_t req)
1544 {
1545 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1546 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1547 	uint32_t sha_digest_len, sha_hmac_block_size;
1548 
1549 	/*
1550 	 * Set the digest length and block size to values appropriate to the
1551 	 * mechanism.
1552 	 */
1553 	switch (mechanism->cm_type) {
1554 	case SHA256_HMAC_MECH_INFO_TYPE:
1555 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1556 		sha_digest_len = SHA256_DIGEST_LENGTH;
1557 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1558 		break;
1559 	case SHA384_HMAC_MECH_INFO_TYPE:
1560 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1561 	case SHA512_HMAC_MECH_INFO_TYPE:
1562 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1563 		sha_digest_len = SHA512_DIGEST_LENGTH;
1564 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1565 		break;
1566 	default:
1567 		return (CRYPTO_MECHANISM_INVALID);
1568 	}
1569 
1570 	/* Add support for key by attributes (RFE 4706552) */
1571 	if (key->ck_format != CRYPTO_KEY_RAW)
1572 		return (CRYPTO_ARGUMENTS_BAD);
1573 
1574 	/*
1575 	 * Allocate and initialize SHA2 context.
1576 	 */
1577 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1578 	    crypto_kmflag(req));
1579 	if (sha2_hmac_ctx_tmpl == NULL)
1580 		return (CRYPTO_HOST_MEMORY);
1581 
1582 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1583 
1584 	if (keylen_in_bytes > sha_hmac_block_size) {
1585 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1586 
1587 		/*
1588 		 * Hash the passed-in key to get a smaller key.
1589 		 * The inner context is used since it hasn't been
1590 		 * initialized yet.
1591 		 */
1592 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1593 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1594 		    key->ck_data, keylen_in_bytes, digested_key);
1595 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1596 		    sha_digest_len);
1597 	} else {
1598 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1599 		    keylen_in_bytes);
1600 	}
1601 
1602 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1603 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1604 
1605 	return (CRYPTO_SUCCESS);
1606 }
1607 
1608 static int
1609 sha2_free_context(crypto_ctx_t *ctx)
1610 {
1611 	uint_t ctx_len;
1612 
1613 	if (ctx->cc_provider_private == NULL)
1614 		return (CRYPTO_SUCCESS);
1615 
1616 	/*
1617 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1618 	 * have different lengths.
1619 	 *
1620 	 * Note: Below is dependent on the mechanism ordering.
1621 	 */
1622 
1623 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1624 		ctx_len = sizeof (sha2_ctx_t);
1625 	else
1626 		ctx_len = sizeof (sha2_hmac_ctx_t);
1627 
1628 	bzero(ctx->cc_provider_private, ctx_len);
1629 	kmem_free(ctx->cc_provider_private, ctx_len);
1630 	ctx->cc_provider_private = NULL;
1631 
1632 	return (CRYPTO_SUCCESS);
1633 }
1634