xref: /illumos-gate/usr/src/uts/common/crypto/io/sha2_mod.c (revision 45ede40b2394db7967e59f19288fae9b62efd4aa)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define	_SHA2_IMPL
35 #include <sys/sha2.h>
36 #include <sha2/sha2_impl.h>
37 
38 /*
39  * The sha2 module is created with two modlinkages:
40  * - a modlmisc that allows consumers to directly call the entry points
41  *   SHA2Init, SHA2Update, and SHA2Final.
42  * - a modlcrypto that allows the module to register with the Kernel
43  *   Cryptographic Framework (KCF) as a software provider for the SHA2
44  *   mechanisms.
45  */
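/*
 * Illustrative sketch (not part of this module): a kernel consumer with a
 * dependency on this module can compute a one-shot digest through the
 * direct entry points declared in <sys/sha2.h>:
 *
 *	uint8_t digest[SHA256_DIGEST_LENGTH];
 *	SHA2_CTX ctx;
 *
 *	SHA2Init(SHA256, &ctx);
 *	SHA2Update(&ctx, buf, buflen);
 *	SHA2Final(digest, &ctx);
 *
 * buf/buflen stand for the caller's data; the SHA384/SHA512 variants are
 * selected by passing the corresponding mechanism constant to SHA2Init().
 */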
46 
47 static struct modlmisc modlmisc = {
48 	&mod_miscops,
49 	"SHA2 Message-Digest Algorithm"
50 };
51 
52 static struct modlcrypto modlcrypto = {
53 	&mod_cryptoops,
54 	"SHA2 Kernel SW Provider"
55 };
56 
57 static struct modlinkage modlinkage = {
58 	MODREV_1, &modlmisc, &modlcrypto, NULL
59 };
60 
61 /*
62  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63  * by KCF to one of the entry points.
64  */
65 
66 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
67 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68 
69 /* Extract the digest length passed as a mechanism parameter. */
70 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
71 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
72 		(len) = (uint32_t)*((ulong_t *)(void *)(m)->cm_param);	\
73 	else {								\
74 		ulong_t tmp_ulong;					\
75 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
76 		(len) = (uint32_t)tmp_ulong;				\
77 	}								\
78 }
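/*
 * The bcopy() fallback above matters because cm_param is a caller-supplied
 * byte buffer that need not be naturally aligned for a ulong_t; a direct
 * dereference of a misaligned pointer would fault on strict-alignment
 * architectures such as SPARC.
 */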
79 
80 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
81 	SHA2Init(mech, ctx);				\
82 	SHA2Update(ctx, key, len);			\
83 	SHA2Final(digest, ctx);				\
84 }
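/*
 * PROV_SHA2_DIGEST_KEY() condenses an HMAC key into a single digest.  It is
 * used below whenever the caller supplies a key longer than the mechanism's
 * HMAC block size, as the HMAC definition (RFC 2104) requires such keys to
 * be hashed first.
 */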
85 
86 /*
87  * Mechanism info structure passed to KCF during registration.
88  */
89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 	/* SHA256 */
91 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 	/* SHA256-HMAC */
95 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 	/* SHA256-HMAC GENERAL */
100 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 	/* SHA384 */
105 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 	/* SHA384-HMAC */
109 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 	/* SHA384-HMAC GENERAL */
114 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 	/* SHA512 */
119 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA512-HMAC */
123 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 	/* SHA512-HMAC GENERAL */
128 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
132 	/* SHA512_224 */
133 	{SUN_CKM_SHA512_224, SHA512_224_MECH_INFO_TYPE,
134 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
135 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
136 	/* SHA512_256 */
137 	{SUN_CKM_SHA512_256, SHA512_256_MECH_INFO_TYPE,
138 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
139 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS}
140 };
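/*
 * Illustrative sketch (consumer side, not part of this module): the
 * "_GENERAL" HMAC mechanisms above carry the desired MAC length as a
 * ulong_t mechanism parameter, which PROV_SHA2_GET_DIGEST_LEN() extracts:
 *
 *	ulong_t maclen = 16;
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_SHA256_HMAC_GENERAL);
 *	mech.cm_param = (caddr_t)&maclen;
 *	mech.cm_param_len = sizeof (maclen);
 *
 * The non-GENERAL mechanisms take no parameter and always produce the full
 * digest length.
 */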
141 
142 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
143 
144 static crypto_control_ops_t sha2_control_ops = {
145 	sha2_provider_status
146 };
147 
148 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
149     crypto_req_handle_t);
150 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
151     crypto_req_handle_t);
152 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
153     crypto_req_handle_t);
154 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
155     crypto_req_handle_t);
156 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
157     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
158     crypto_req_handle_t);
159 
160 static crypto_digest_ops_t sha2_digest_ops = {
161 	sha2_digest_init,
162 	sha2_digest,
163 	sha2_digest_update,
164 	NULL,
165 	sha2_digest_final,
166 	sha2_digest_atomic
167 };
168 
169 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
170     crypto_spi_ctx_template_t, crypto_req_handle_t);
171 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
172     crypto_req_handle_t);
173 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
174 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
175     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
176     crypto_spi_ctx_template_t, crypto_req_handle_t);
177 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
178     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
179     crypto_spi_ctx_template_t, crypto_req_handle_t);
180 
181 static crypto_mac_ops_t sha2_mac_ops = {
182 	sha2_mac_init,
183 	NULL,
184 	sha2_mac_update,
185 	sha2_mac_final,
186 	sha2_mac_atomic,
187 	sha2_mac_verify_atomic
188 };
189 
190 static int sha2_create_ctx_template(crypto_provider_handle_t,
191     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
192     size_t *, crypto_req_handle_t);
193 static int sha2_free_context(crypto_ctx_t *);
194 
195 static crypto_ctx_ops_t sha2_ctx_ops = {
196 	sha2_create_ctx_template,
197 	sha2_free_context
198 };
199 
200 static crypto_ops_t sha2_crypto_ops = {
201 	&sha2_control_ops,
202 	&sha2_digest_ops,
203 	NULL,
204 	&sha2_mac_ops,
205 	NULL,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	NULL,
211 	NULL,
212 	NULL,
213 	NULL,
214 	&sha2_ctx_ops,
215 	NULL,
216 	NULL,
217 	NULL,
218 };
219 
220 static crypto_provider_info_t sha2_prov_info = {
221 	CRYPTO_SPI_VERSION_4,
222 	"SHA2 Software Provider",
223 	CRYPTO_SW_PROVIDER,
224 	{&modlinkage},
225 	NULL,
226 	&sha2_crypto_ops,
227 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
228 	sha2_mech_info_tab
229 };
230 
231 static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
232 
233 int
234 _init()
235 {
236 	int ret;
237 
238 	if ((ret = mod_install(&modlinkage)) != 0)
239 		return (ret);
240 
241 	/*
242 	 * Register with KCF. If the registration fails, do not uninstall the
243 	 * module, since the functionality provided by misc/sha2 should still
244 	 * be available.
245 	 */
246 	(void) crypto_register_provider(&sha2_prov_info, &sha2_prov_handle);
247 
248 	return (0);
249 }
250 
251 int
252 _info(struct modinfo *modinfop)
253 {
254 	return (mod_info(&modlinkage, modinfop));
255 }
256 
257 /*
258  * KCF software provider control entry points.
259  */
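/*
 * A software provider has no hardware state that can be busy or failed,
 * so sha2_provider_status() unconditionally reports CRYPTO_PROVIDER_READY.
 */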
260 /* ARGSUSED */
261 static void
262 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
263 {
264 	*status = CRYPTO_PROVIDER_READY;
265 }
266 
267 /*
268  * KCF software provider digest entry points.
269  */
270 
271 static int
272 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
273     crypto_req_handle_t req)
274 {
275 
276 	/*
277 	 * Allocate and initialize SHA2 context.
278 	 */
279 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
280 	    crypto_kmflag(req));
281 	if (ctx->cc_provider_private == NULL)
282 		return (CRYPTO_HOST_MEMORY);
283 
284 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
285 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
286 
287 	return (CRYPTO_SUCCESS);
288 }
289 
290 /*
291  * Helper SHA2 digest update function for uio data.
292  */
293 static int
294 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
295 {
296 	off_t offset = data->cd_offset;
297 	size_t length = data->cd_length;
298 	uint_t vec_idx;
299 	size_t cur_len;
300 
301 	/* we support only kernel buffers */
302 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
303 		return (CRYPTO_ARGUMENTS_BAD);
304 
305 	/*
306 	 * Jump to the first iovec containing data to be
307 	 * digested.
308 	 */
309 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
310 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
311 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
312 		;
313 	if (vec_idx == data->cd_uio->uio_iovcnt) {
314 		/*
315 		 * The caller specified an offset that is larger than the
316 		 * total size of the buffers it provided.
317 		 */
318 		return (CRYPTO_DATA_LEN_RANGE);
319 	}
320 
321 	/*
322 	 * Now do the digesting on the iovecs.
323 	 */
324 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
325 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
326 		    offset, length);
327 
328 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
329 		    uio_iov[vec_idx].iov_base + offset, cur_len);
330 		length -= cur_len;
331 		vec_idx++;
332 		offset = 0;
333 	}
334 
335 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
336 		/*
337 		 * The end of the specified iovecs was reached but the
338 		 * requested length could not be processed, i.e. the
339 		 * caller requested to digest more data than it provided.
340 		 */
341 		return (CRYPTO_DATA_LEN_RANGE);
342 	}
343 
344 	return (CRYPTO_SUCCESS);
345 }
346 
347 /*
348  * Helper SHA2 digest final function for uio data.
349  * digest_len is the length of the desired digest. If digest_len
350  * is smaller than the default SHA2 digest length, the caller
351  * must pass a scratch buffer, digest_scratch, that is at least
352  * as large as the algorithm's full digest length.
353  */
354 static int
355 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
356     ulong_t digest_len, uchar_t *digest_scratch)
357 {
358 	off_t offset = digest->cd_offset;
359 	uint_t vec_idx;
360 
361 	/* we support only kernel buffers */
362 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
363 		return (CRYPTO_ARGUMENTS_BAD);
364 
365 	/*
366 	 * Jump to the first iovec containing ptr to the digest to
367 	 * be returned.
368 	 */
369 	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
370 	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
371 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
372 		;
373 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
374 		/*
375 		 * The caller specified an offset that is
376 		 * larger than the total size of the buffers
377 		 * it provided.
378 		 */
379 		return (CRYPTO_DATA_LEN_RANGE);
380 	}
381 
382 	if (offset + digest_len <=
383 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
384 		/*
385 		 * The computed SHA2 digest will fit in the current
386 		 * iovec.
387 		 */
388 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
389 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
390 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
391 		    (digest_len != SHA512_DIGEST_LENGTH))) {
392 			/*
393 			 * The caller requested a short digest. Digest
394 			 * into a scratch buffer and return to
395 			 * the user only what was requested.
396 			 */
397 			SHA2Final(digest_scratch, sha2_ctx);
398 
399 			bcopy(digest_scratch, (uchar_t *)digest->
400 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
401 			    digest_len);
402 		} else {
403 			SHA2Final((uchar_t *)digest->
404 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
405 			    sha2_ctx);
406 
407 		}
408 	} else {
409 		/*
410 		 * The computed digest will cross one or more iovecs.
411 		 * This is bad performance-wise but we need to support it.
412 		 * Allocate a small scratch buffer on the stack and
413 		 * copy it piecemeal to the specified digest iovecs.
414 		 */
415 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
416 		off_t scratch_offset = 0;
417 		size_t length = digest_len;
418 		size_t cur_len;
419 
420 		SHA2Final(digest_tmp, sha2_ctx);
421 
422 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
423 			cur_len =
424 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
425 			    offset, length);
426 			bcopy(digest_tmp + scratch_offset,
427 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
428 			    cur_len);
429 
430 			length -= cur_len;
431 			vec_idx++;
432 			scratch_offset += cur_len;
433 			offset = 0;
434 		}
435 
436 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
437 			/*
438 			 * The end of the specified iovecs was reached but
439 			 * the requested length could not be processed, i.e.
440 			 * the caller requested to digest more data than it
441 			 * provided.
442 			 */
443 			return (CRYPTO_DATA_LEN_RANGE);
444 		}
445 	}
446 
447 	return (CRYPTO_SUCCESS);
448 }
449 
450 /*
451  * Helper SHA2 digest update function for mblks.
452  */
453 static int
454 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
455 {
456 	off_t offset = data->cd_offset;
457 	size_t length = data->cd_length;
458 	mblk_t *mp;
459 	size_t cur_len;
460 
461 	/*
462 	 * Jump to the first mblk_t containing data to be digested.
463 	 */
464 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
465 	    offset -= MBLKL(mp), mp = mp->b_cont)
466 		;
467 	if (mp == NULL) {
468 		/*
469 		 * The caller specified an offset that is larger than the
470 		 * total size of the buffers it provided.
471 		 */
472 		return (CRYPTO_DATA_LEN_RANGE);
473 	}
474 
475 	/*
476 	 * Now do the digesting on the mblk chain.
477 	 */
478 	while (mp != NULL && length > 0) {
479 		cur_len = MIN(MBLKL(mp) - offset, length);
480 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
481 		length -= cur_len;
482 		offset = 0;
483 		mp = mp->b_cont;
484 	}
485 
486 	if (mp == NULL && length > 0) {
487 		/*
488 		 * The end of the mblk chain was reached but the requested
489 		 * length could not be processed, i.e. the caller requested
490 		 * to digest more data than it provided.
491 		 */
492 		return (CRYPTO_DATA_LEN_RANGE);
493 	}
494 
495 	return (CRYPTO_SUCCESS);
496 }
497 
498 /*
499  * Helper SHA2 digest final function for mblks.
500  * digest_len is the length of the desired digest. If digest_len
501  * is smaller than the default SHA2 digest length, the caller
502  * must pass a scratch buffer, digest_scratch, that is at least
503  * as large as the algorithm's full digest length.
504  */
505 static int
506 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
507     ulong_t digest_len, uchar_t *digest_scratch)
508 {
509 	off_t offset = digest->cd_offset;
510 	mblk_t *mp;
511 
512 	/*
513 	 * Jump to the first mblk_t that will be used to store the digest.
514 	 */
515 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
516 	    offset -= MBLKL(mp), mp = mp->b_cont)
517 		;
518 	if (mp == NULL) {
519 		/*
520 		 * The caller specified an offset that is larger than the
521 		 * total size of the buffers it provided.
522 		 */
523 		return (CRYPTO_DATA_LEN_RANGE);
524 	}
525 
526 	if (offset + digest_len <= MBLKL(mp)) {
527 		/*
528 		 * The computed SHA2 digest will fit in the current mblk.
529 		 * Do the SHA2Final() in-place.
530 		 */
531 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
532 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
533 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
534 		    (digest_len != SHA512_DIGEST_LENGTH))) {
535 			/*
536 			 * The caller requested a short digest. Digest
537 			 * into a scratch buffer and return to
538 			 * the user only what was requested.
539 			 */
540 			SHA2Final(digest_scratch, sha2_ctx);
541 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
542 		} else {
543 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
544 		}
545 	} else {
546 		/*
547 		 * The computed digest will cross one or more mblks.
548 		 * This is bad performance-wise but we need to support it.
549 		 * Allocate a small scratch buffer on the stack and
550 		 * copy it piecemeal to the specified digest mblks.
551 		 */
552 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
553 		off_t scratch_offset = 0;
554 		size_t length = digest_len;
555 		size_t cur_len;
556 
557 		SHA2Final(digest_tmp, sha2_ctx);
558 
559 		while (mp != NULL && length > 0) {
560 			cur_len = MIN(MBLKL(mp) - offset, length);
561 			bcopy(digest_tmp + scratch_offset,
562 			    mp->b_rptr + offset, cur_len);
563 
564 			length -= cur_len;
565 			mp = mp->b_cont;
566 			scratch_offset += cur_len;
567 			offset = 0;
568 		}
569 
570 		if (mp == NULL && length > 0) {
571 			/*
572 			 * The end of the specified mblk chain was reached but
573 			 * the requested length could not be processed, i.e.
574 			 * the caller requested to digest more data than it
575 			 * provided.
576 			 */
577 			return (CRYPTO_DATA_LEN_RANGE);
578 		}
579 	}
580 
581 	return (CRYPTO_SUCCESS);
582 }
583 
584 /* ARGSUSED */
585 static int
586 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
587     crypto_req_handle_t req)
588 {
589 	int ret = CRYPTO_SUCCESS;
590 	uint_t sha_digest_len;
591 
592 	ASSERT(ctx->cc_provider_private != NULL);
593 
594 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
595 	case SHA256_MECH_INFO_TYPE:
596 		sha_digest_len = SHA256_DIGEST_LENGTH;
597 		break;
598 	case SHA384_MECH_INFO_TYPE:
599 		sha_digest_len = SHA384_DIGEST_LENGTH;
600 		break;
601 	case SHA512_MECH_INFO_TYPE:
602 		sha_digest_len = SHA512_DIGEST_LENGTH;
603 		break;
604 	case SHA512_224_MECH_INFO_TYPE:
605 		sha_digest_len = SHA512_224_DIGEST_LENGTH;
606 		break;
607 	case SHA512_256_MECH_INFO_TYPE:
608 		sha_digest_len = SHA512_256_DIGEST_LENGTH;
609 		break;
610 	default:
611 		return (CRYPTO_MECHANISM_INVALID);
612 	}
613 
614 	/*
615 	 * If the output buffer is too small, just return the needed length
616 	 * and do not destroy the context, so the caller can resize and retry.
617 	 */
618 	if ((digest->cd_length == 0) ||
619 	    (digest->cd_length < sha_digest_len)) {
620 		digest->cd_length = sha_digest_len;
621 		return (CRYPTO_BUFFER_TOO_SMALL);
622 	}
623 
624 	/*
625 	 * Do the SHA2 update on the specified input data.
626 	 */
627 	switch (data->cd_format) {
628 	case CRYPTO_DATA_RAW:
629 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
630 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
631 		    data->cd_length);
632 		break;
633 	case CRYPTO_DATA_UIO:
634 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
635 		    data);
636 		break;
637 	case CRYPTO_DATA_MBLK:
638 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
639 		    data);
640 		break;
641 	default:
642 		ret = CRYPTO_ARGUMENTS_BAD;
643 	}
644 
645 	if (ret != CRYPTO_SUCCESS) {
646 		/* the update failed, free context and bail */
647 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
648 		ctx->cc_provider_private = NULL;
649 		digest->cd_length = 0;
650 		return (ret);
651 	}
652 
653 	/*
654 	 * Do a SHA2 final; this must be done separately since the digest
655 	 * data type can be different from the input data type.
656 	 */
657 	switch (digest->cd_format) {
658 	case CRYPTO_DATA_RAW:
659 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
660 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
661 		break;
662 	case CRYPTO_DATA_UIO:
663 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
664 		    digest, sha_digest_len, NULL);
665 		break;
666 	case CRYPTO_DATA_MBLK:
667 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
668 		    digest, sha_digest_len, NULL);
669 		break;
670 	default:
671 		ret = CRYPTO_ARGUMENTS_BAD;
672 	}
673 
674 	/* all done, free context and return */
675 
676 	if (ret == CRYPTO_SUCCESS)
677 		digest->cd_length = sha_digest_len;
678 	else
679 		digest->cd_length = 0;
680 
681 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
682 	ctx->cc_provider_private = NULL;
683 	return (ret);
684 }
685 
686 /* ARGSUSED */
687 static int
688 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
689     crypto_req_handle_t req)
690 {
691 	int ret = CRYPTO_SUCCESS;
692 
693 	ASSERT(ctx->cc_provider_private != NULL);
694 
695 	/*
696 	 * Do the SHA2 update on the specified input data.
697 	 */
698 	switch (data->cd_format) {
699 	case CRYPTO_DATA_RAW:
700 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
701 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
702 		    data->cd_length);
703 		break;
704 	case CRYPTO_DATA_UIO:
705 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
706 		    data);
707 		break;
708 	case CRYPTO_DATA_MBLK:
709 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
710 		    data);
711 		break;
712 	default:
713 		ret = CRYPTO_ARGUMENTS_BAD;
714 	}
715 
716 	return (ret);
717 }
718 
719 /* ARGSUSED */
720 static int
721 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
722     crypto_req_handle_t req)
723 {
724 	int ret = CRYPTO_SUCCESS;
725 	uint_t sha_digest_len;
726 
727 	ASSERT(ctx->cc_provider_private != NULL);
728 
729 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
730 	case SHA256_MECH_INFO_TYPE:
731 		sha_digest_len = SHA256_DIGEST_LENGTH;
732 		break;
733 	case SHA384_MECH_INFO_TYPE:
734 		sha_digest_len = SHA384_DIGEST_LENGTH;
735 		break;
736 	case SHA512_MECH_INFO_TYPE:
737 		sha_digest_len = SHA512_DIGEST_LENGTH;
738 		break;
739 	case SHA512_224_MECH_INFO_TYPE:
740 		sha_digest_len = SHA512_224_DIGEST_LENGTH;
741 		break;
742 	case SHA512_256_MECH_INFO_TYPE:
743 		sha_digest_len = SHA512_256_DIGEST_LENGTH;
744 		break;
745 	default:
746 		return (CRYPTO_MECHANISM_INVALID);
747 	}
748 
749 	/*
750 	 * If the output buffer is too small, just return the needed length
751 	 * and do not destroy the context, so the caller can resize and retry.
752 	 */
753 	if ((digest->cd_length == 0) ||
754 	    (digest->cd_length < sha_digest_len)) {
755 		digest->cd_length = sha_digest_len;
756 		return (CRYPTO_BUFFER_TOO_SMALL);
757 	}
758 
759 	/*
760 	 * Do a SHA2 final.
761 	 */
762 	switch (digest->cd_format) {
763 	case CRYPTO_DATA_RAW:
764 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
765 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
766 		break;
767 	case CRYPTO_DATA_UIO:
768 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
769 		    digest, sha_digest_len, NULL);
770 		break;
771 	case CRYPTO_DATA_MBLK:
772 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
773 		    digest, sha_digest_len, NULL);
774 		break;
775 	default:
776 		ret = CRYPTO_ARGUMENTS_BAD;
777 	}
778 
779 	/* all done, free context and return */
780 
781 	if (ret == CRYPTO_SUCCESS)
782 		digest->cd_length = sha_digest_len;
783 	else
784 		digest->cd_length = 0;
785 
786 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
787 	ctx->cc_provider_private = NULL;
788 
789 	return (ret);
790 }
791 
792 /* ARGSUSED */
793 static int
794 sha2_digest_atomic(crypto_provider_handle_t provider,
795     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
796     crypto_data_t *data, crypto_data_t *digest,
797     crypto_req_handle_t req)
798 {
799 	int ret = CRYPTO_SUCCESS;
800 	SHA2_CTX sha2_ctx;
801 	uint32_t sha_digest_len;
802 
803 	/*
804 	 * Do the SHA2 init, then update with the specified input data.
805 	 */
806 
807 	SHA2Init(mechanism->cm_type, &sha2_ctx);
808 
809 	switch (data->cd_format) {
810 	case CRYPTO_DATA_RAW:
811 		SHA2Update(&sha2_ctx, (uint8_t *)data->
812 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
813 		break;
814 	case CRYPTO_DATA_UIO:
815 		ret = sha2_digest_update_uio(&sha2_ctx, data);
816 		break;
817 	case CRYPTO_DATA_MBLK:
818 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
819 		break;
820 	default:
821 		ret = CRYPTO_ARGUMENTS_BAD;
822 	}
823 
828 	if (ret != CRYPTO_SUCCESS) {
829 		/* the update failed, bail */
830 		digest->cd_length = 0;
831 		return (ret);
832 	}
833 
834 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
835 		sha_digest_len = SHA256_DIGEST_LENGTH;
836 	else
837 		sha_digest_len = SHA512_DIGEST_LENGTH;
838 
839 	/*
840 	 * Do a SHA2 final; this must be done separately since the digest
841 	 * data type can be different from the input data type.
842 	 */
843 	switch (digest->cd_format) {
844 	case CRYPTO_DATA_RAW:
845 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
846 		    digest->cd_offset, &sha2_ctx);
847 		break;
848 	case CRYPTO_DATA_UIO:
849 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
850 		    sha_digest_len, NULL);
851 		break;
852 	case CRYPTO_DATA_MBLK:
853 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
854 		    sha_digest_len, NULL);
855 		break;
856 	default:
857 		ret = CRYPTO_ARGUMENTS_BAD;
858 	}
859 
860 	if (ret == CRYPTO_SUCCESS)
861 		digest->cd_length = sha_digest_len;
862 	else
863 		digest->cd_length = 0;
864 
865 	return (ret);
866 }
867 
868 /*
869  * KCF software provider mac entry points.
870  *
871  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
872  *
873  * Init:
874  * The initialization routine initializes what we denote
875  * as the inner and outer contexts by doing
876  * - for inner context: SHA2(key XOR ipad)
877  * - for outer context: SHA2(key XOR opad)
878  *
879  * Update:
880  * Each subsequent SHA2 HMAC update will result in an
881  * update of the inner context with the specified data.
882  *
883  * Final:
884  * The SHA2 HMAC final will do a SHA2 final operation on the
885  * inner context, and the resulting digest will be used
886  * as the data for an update on the outer context. Last
887  * but not least, a SHA2 final on the outer context will
888  * be performed to obtain the SHA2 HMAC digest to return
889  * to the user.
890  */
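/*
 * In terms of the routines below, one complete MAC operation is, in
 * outline:
 *
 *	sha2_mac_init_ctx(ctx, key, keylen);
 *		(seeds hc_icontext with K^ipad and hc_ocontext with K^opad)
 *	SHA2Update(&ctx->hc_icontext, text, textlen);
 *	SHA2Final(digest, &ctx->hc_icontext);
 *	SHA2Update(&ctx->hc_ocontext, digest, digestlen);
 *	SHA2Final(mac, &ctx->hc_ocontext);
 */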
891 
892 /*
893  * Initialize a SHA2-HMAC context.
894  */
895 static void
896 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
897 {
898 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
899 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
900 	int i, block_size, words_per_block;
901 
902 	/* Determine the block size */
903 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
904 		block_size = SHA256_HMAC_BLOCK_SIZE;
905 		words_per_block = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
906 	} else {
907 		block_size = SHA512_HMAC_BLOCK_SIZE;
908 		words_per_block = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
909 	}
910 
911 	(void) bzero(ipad, block_size);
912 	(void) bzero(opad, block_size);
913 	(void) bcopy(keyval, ipad, length_in_bytes);
914 	(void) bcopy(keyval, opad, length_in_bytes);
915 
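	/*
	 * Note that the 0x36/0x5c HMAC pad bytes are applied a uint64_t at a
	 * time: 0x36 repeated across a 64-bit word is 0x3636363636363636, so
	 * XOR-ing block_size / sizeof (uint64_t) words covers the whole block.
	 */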
916 	/* XOR key with ipad (0x36) and opad (0x5c) */
917 	for (i = 0; i < words_per_block; i++) {
918 		ipad[i] ^= 0x3636363636363636;
919 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
920 	}
921 
922 	/* perform SHA2 on ipad */
923 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
924 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
925 
926 	/* perform SHA2 on opad */
927 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
928 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
929 
930 }
931 
932 static boolean_t
933 sha2_is_general_hmech(const crypto_mechanism_t *mechanism)
934 {
935 	switch (mechanism->cm_type) {
936 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
937 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
938 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
939 		return (B_TRUE);
940 	default:
941 		return (B_FALSE);
942 	}
943 }
944 
947 static int
948 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
949     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
950     crypto_req_handle_t req)
951 {
952 	int ret = CRYPTO_SUCCESS;
953 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
954 	uint_t sha_digest_len, sha_hmac_block_size;
955 
956 	/*
957 	 * Set the digest length and block size to values appropriate to the
958 	 * mechanism
959 	 */
960 	switch (mechanism->cm_type) {
961 	case SHA256_HMAC_MECH_INFO_TYPE:
962 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
963 		sha_digest_len = SHA256_DIGEST_LENGTH;
964 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
965 		break;
966 	case SHA384_HMAC_MECH_INFO_TYPE:
967 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
968 	case SHA512_HMAC_MECH_INFO_TYPE:
969 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
970 		sha_digest_len = SHA512_DIGEST_LENGTH;
971 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
972 		break;
973 	default:
974 		return (CRYPTO_MECHANISM_INVALID);
975 	}
976 
977 	if (key->ck_format != CRYPTO_KEY_RAW)
978 		return (CRYPTO_ARGUMENTS_BAD);
979 
980 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
981 	    crypto_kmflag(req));
982 	if (ctx->cc_provider_private == NULL)
983 		return (CRYPTO_HOST_MEMORY);
984 
985 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
986 	if (ctx_template != NULL) {
987 		/* reuse context template */
988 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
989 		    sizeof (sha2_hmac_ctx_t));
990 	} else {
991 		/* no context template, compute context */
992 		if (keylen_in_bytes > sha_hmac_block_size) {
993 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
994 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
995 
996 			/*
997 			 * Hash the passed-in key to get a smaller key.
998 			 * The inner context is used since it hasn't been
999 			 * initialized yet.
1000 			 */
1001 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1002 			    &hmac_ctx->hc_icontext,
1003 			    key->ck_data, keylen_in_bytes, digested_key);
1004 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1005 			    digested_key, sha_digest_len);
1006 		} else {
1007 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1008 			    key->ck_data, keylen_in_bytes);
1009 		}
1010 	}
1011 
1012 	/*
1013 	 * Get the mechanism parameters, if applicable.
1014 	 */
1015 	if (sha2_is_general_hmech(mechanism)) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
1023 	}
1024 
1025 	if (ret != CRYPTO_SUCCESS) {
1026 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1027 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1028 		ctx->cc_provider_private = NULL;
1029 	}
1030 
1031 	return (ret);
1032 }
1033 
1034 /* ARGSUSED */
1035 static int
1036 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1037     crypto_req_handle_t req)
1038 {
1039 	int ret = CRYPTO_SUCCESS;
1040 
1041 	ASSERT(ctx->cc_provider_private != NULL);
1042 
1043 	/*
1044 	 * Do a SHA2 update of the inner context using the specified
1045 	 * data.
1046 	 */
1047 	switch (data->cd_format) {
1048 	case CRYPTO_DATA_RAW:
1049 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1050 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1051 		    data->cd_length);
1052 		break;
1053 	case CRYPTO_DATA_UIO:
1054 		ret = sha2_digest_update_uio(
1055 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1056 		break;
1057 	case CRYPTO_DATA_MBLK:
1058 		ret = sha2_digest_update_mblk(
1059 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1060 		break;
1061 	default:
1062 		ret = CRYPTO_ARGUMENTS_BAD;
1063 	}
1064 
1065 	return (ret);
1066 }
1067 
1068 /* ARGSUSED */
1069 static int
1070 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1071 {
1072 	int ret = CRYPTO_SUCCESS;
1073 	uchar_t digest[SHA512_DIGEST_LENGTH];
1074 	uint32_t digest_len, sha_digest_len;
1075 
1076 	ASSERT(ctx->cc_provider_private != NULL);
1077 
1078 	/* Set the digest lengths to values appropriate to the mechanism */
1079 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1080 	case SHA256_HMAC_MECH_INFO_TYPE:
1081 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1082 		break;
1083 	case SHA384_HMAC_MECH_INFO_TYPE:
1084 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1085 		break;
1086 	case SHA512_HMAC_MECH_INFO_TYPE:
1087 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1088 		break;
1089 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1090 		sha_digest_len = SHA256_DIGEST_LENGTH;
1091 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1092 		break;
1093 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1094 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1095 		sha_digest_len = SHA512_DIGEST_LENGTH;
1096 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1097 		break;
1098 	}
1099 
1100 	/*
1101 	 * If the output buffer is too small, just return the needed length
1102 	 * and do not destroy the context, so the caller can resize and retry.
1103 	 */
1104 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1105 		mac->cd_length = digest_len;
1106 		return (CRYPTO_BUFFER_TOO_SMALL);
1107 	}
1108 
1109 	/*
1110 	 * Do a SHA2 final on the inner context.
1111 	 */
1112 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1113 
1114 	/*
1115 	 * Do a SHA2 update on the outer context, feeding the inner
1116 	 * digest as data.
1117 	 */
1118 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1119 	    sha_digest_len);
1120 
1121 	/*
1122 	 * Do a SHA2 final on the outer context, storing the computed
1123 	 * digest in the user's buffer.
1124 	 */
1125 	switch (mac->cd_format) {
1126 	case CRYPTO_DATA_RAW:
1127 		if (digest_len != sha_digest_len) {
1128 			/*
1129 			 * The caller requested a short digest. Digest
1130 			 * into a scratch buffer and return to
1131 			 * the user only what was requested.
1132 			 */
1133 			SHA2Final(digest,
1134 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1135 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1136 			    mac->cd_offset, digest_len);
1137 		} else {
1138 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1139 			    mac->cd_offset,
1140 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1141 		}
1142 		break;
1143 	case CRYPTO_DATA_UIO:
1144 		ret = sha2_digest_final_uio(
1145 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1146 		    digest_len, digest);
1147 		break;
1148 	case CRYPTO_DATA_MBLK:
1149 		ret = sha2_digest_final_mblk(
1150 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1151 		    digest_len, digest);
1152 		break;
1153 	default:
1154 		ret = CRYPTO_ARGUMENTS_BAD;
1155 	}
1156 
1157 	if (ret == CRYPTO_SUCCESS)
1158 		mac->cd_length = digest_len;
1159 	else
1160 		mac->cd_length = 0;
1161 
1162 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1163 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1164 	ctx->cc_provider_private = NULL;
1165 
1166 	return (ret);
1167 }
1168 
1169 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1170 	switch (data->cd_format) {					\
1171 	case CRYPTO_DATA_RAW:						\
1172 		SHA2Update(&(ctx).hc_icontext,				\
1173 		    (uint8_t *)data->cd_raw.iov_base +			\
1174 		    data->cd_offset, data->cd_length);			\
1175 		break;							\
1176 	case CRYPTO_DATA_UIO:						\
1177 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1178 		break;							\
1179 	case CRYPTO_DATA_MBLK:						\
1180 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1181 		    data);						\
1182 		break;							\
1183 	default:							\
1184 		ret = CRYPTO_ARGUMENTS_BAD;				\
1185 	}								\
1186 }
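/*
 * SHA2_MAC_UPDATE() factors out the per-format update of the inner context
 * that both sha2_mac_atomic() and sha2_mac_verify_atomic() below perform
 * before finalizing the HMAC.
 */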
1187 
1188 /* ARGSUSED */
1189 static int
1190 sha2_mac_atomic(crypto_provider_handle_t provider,
1191     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1192     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1193     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1194 {
1195 	int ret = CRYPTO_SUCCESS;
1196 	uchar_t digest[SHA512_DIGEST_LENGTH];
1197 	sha2_hmac_ctx_t sha2_hmac_ctx;
1198 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1199 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1200 
1201 	/*
1202 	 * Set the digest length and block size to values appropriate to the
1203 	 * mechanism
1204 	 */
1205 	switch (mechanism->cm_type) {
1206 	case SHA256_HMAC_MECH_INFO_TYPE:
1207 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1208 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1209 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1210 		break;
1211 	case SHA384_HMAC_MECH_INFO_TYPE:
1212 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1213 	case SHA512_HMAC_MECH_INFO_TYPE:
1214 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1215 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1216 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1217 		break;
1218 	default:
1219 		return (CRYPTO_MECHANISM_INVALID);
1220 	}
1221 
1222 	/* Add support for key by attributes (RFE 4706552) */
1223 	if (key->ck_format != CRYPTO_KEY_RAW)
1224 		return (CRYPTO_ARGUMENTS_BAD);
1225 
1226 	if (ctx_template != NULL) {
1227 		/* reuse context template */
1228 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1229 	} else {
1230 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1231 		/* no context template, initialize context */
1232 		if (keylen_in_bytes > sha_hmac_block_size) {
1233 			/*
1234 			 * Hash the passed-in key to get a smaller key.
1235 			 * The inner context is used since it hasn't been
1236 			 * initialized yet.
1237 			 */
1238 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1239 			    &sha2_hmac_ctx.hc_icontext,
1240 			    key->ck_data, keylen_in_bytes, digest);
1241 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1242 			    sha_digest_len);
1243 		} else {
1244 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1245 			    keylen_in_bytes);
1246 		}
1247 	}
1248 
1249 	/* get the mechanism parameters, if applicable */
1250 	if (sha2_is_general_hmech(mechanism)) {
1251 		if (mechanism->cm_param == NULL ||
1252 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1253 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1254 			goto bail;
1255 		}
1256 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1257 		if (digest_len > sha_digest_len) {
1258 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1259 			goto bail;
1260 		}
1261 	}
1262 
1263 	/* do a SHA2 update of the inner context using the specified data */
1264 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1265 	if (ret != CRYPTO_SUCCESS)
1266 		/* the update failed, zero the context and bail */
1267 		goto bail;
1268 
1269 	/*
1270 	 * Do a SHA2 final on the inner context.
1271 	 */
1272 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1273 
1274 	/*
1275 	 * Do an SHA2 update on the outer context, feeding the inner
1276 	 * digest as data.
1277 	 *
1278 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1279 	 * bytes of the inner hash value.
1280 	 */
1281 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1282 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1283 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1284 		    SHA384_DIGEST_LENGTH);
1285 	else
1286 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1287 
1288 	/*
1289 	 * Do a SHA2 final on the outer context, storing the computed
1290 	 * digest in the user's buffer.
1291 	 */
1292 	switch (mac->cd_format) {
1293 	case CRYPTO_DATA_RAW:
1294 		if (digest_len != sha_digest_len) {
1295 			/*
1296 			 * The caller requested a short digest. Digest
1297 			 * into a scratch buffer and return to
1298 			 * the user only what was requested.
1299 			 */
1300 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1301 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1302 			    mac->cd_offset, digest_len);
1303 		} else {
1304 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1305 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1306 		}
1307 		break;
1308 	case CRYPTO_DATA_UIO:
1309 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1310 		    digest_len, digest);
1311 		break;
1312 	case CRYPTO_DATA_MBLK:
1313 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1314 		    digest_len, digest);
1315 		break;
1316 	default:
1317 		ret = CRYPTO_ARGUMENTS_BAD;
1318 	}
1319 
1320 	if (ret == CRYPTO_SUCCESS) {
1321 		mac->cd_length = digest_len;
1322 		return (CRYPTO_SUCCESS);
1323 	}
1324 bail:
1325 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1326 	mac->cd_length = 0;
1327 	return (ret);
1328 }
1329 
1330 /* ARGSUSED */
1331 static int
1332 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1333     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1334     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1335     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1336 {
1337 	int ret = CRYPTO_SUCCESS;
1338 	uchar_t digest[SHA512_DIGEST_LENGTH];
1339 	sha2_hmac_ctx_t sha2_hmac_ctx;
1340 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1341 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1342 
1343 	/*
1344 	 * Set the digest length and block size to values appropriate to the
1345 	 * mechanism
1346 	 */
1347 	switch (mechanism->cm_type) {
1348 	case SHA256_HMAC_MECH_INFO_TYPE:
1349 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1350 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1351 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1352 		break;
1353 	case SHA384_HMAC_MECH_INFO_TYPE:
1354 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1355 	case SHA512_HMAC_MECH_INFO_TYPE:
1356 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1357 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1358 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1359 		break;
1360 	default:
1361 		return (CRYPTO_MECHANISM_INVALID);
1362 	}
1363 
1364 	/* Add support for key by attributes (RFE 4706552) */
1365 	if (key->ck_format != CRYPTO_KEY_RAW)
1366 		return (CRYPTO_ARGUMENTS_BAD);
1367 
1368 	if (ctx_template != NULL) {
1369 		/* reuse context template */
1370 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1371 	} else {
1372 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1373 		/* no context template, initialize context */
1374 		if (keylen_in_bytes > sha_hmac_block_size) {
1375 			/*
1376 			 * Hash the passed-in key to get a smaller key.
1377 			 * The inner context is used since it hasn't been
1378 			 * initialized yet.
1379 			 */
1380 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1381 			    &sha2_hmac_ctx.hc_icontext,
1382 			    key->ck_data, keylen_in_bytes, digest);
1383 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1384 			    sha_digest_len);
1385 		} else {
1386 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1387 			    keylen_in_bytes);
1388 		}
1389 	}
1390 
1391 	/* get the mechanism parameters, if applicable */
1392 	if (sha2_is_general_hmech(mechanism)) {
1393 		if (mechanism->cm_param == NULL ||
1394 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1395 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1396 			goto bail;
1397 		}
1398 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1399 		if (digest_len > sha_digest_len) {
1400 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1401 			goto bail;
1402 		}
1403 	}
1404 
1405 	if (mac->cd_length != digest_len) {
1406 		ret = CRYPTO_INVALID_MAC;
1407 		goto bail;
1408 	}
1409 
1410 	/* do a SHA2 update of the inner context using the specified data */
1411 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1412 	if (ret != CRYPTO_SUCCESS)
1413 		/* the update failed, zero the context and bail */
1414 		goto bail;
1415 
1416 	/* do a SHA2 final on the inner context */
1417 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1418 
1419 	/*
1420 	 * Do an SHA2 update on the outer context, feeding the inner
1421 	 * digest as data.
1422 	 *
1423 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1424 	 * bytes of the inner hash value.
1425 	 */
1426 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1427 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1428 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1429 		    SHA384_DIGEST_LENGTH);
1430 	else
1431 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1432 
1433 	/*
1434 	 * Do a SHA2 final on the outer context, storing the computed
1435 	 * digest in the user's buffer.
1436 	 */
1437 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1438 
1439 	/*
1440 	 * Compare the computed digest against the expected digest passed
1441 	 * as argument.
1442 	 */
1443 
1444 	switch (mac->cd_format) {
1445 
1446 	case CRYPTO_DATA_RAW:
1447 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1448 		    mac->cd_offset, digest_len) != 0)
1449 			ret = CRYPTO_INVALID_MAC;
1450 		break;
1451 
1452 	case CRYPTO_DATA_UIO: {
1453 		off_t offset = mac->cd_offset;
1454 		uint_t vec_idx;
1455 		off_t scratch_offset = 0;
1456 		size_t length = digest_len;
1457 		size_t cur_len;
1458 
1459 		/* we support only kernel buffers */
1460 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1461 			return (CRYPTO_ARGUMENTS_BAD);
1462 
1463 		/* jump to the first iovec containing the expected digest */
1464 		for (vec_idx = 0;
1465 		    vec_idx < mac->cd_uio->uio_iovcnt &&
1466 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1467 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1468 			;
1469 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1470 			/*
1471 			 * The caller specified an offset that is
1472 			 * larger than the total size of the buffers
1473 			 * it provided.
1474 			 */
1475 			ret = CRYPTO_DATA_LEN_RANGE;
1476 			break;
1477 		}
1478 
1479 		/* do the comparison of computed digest vs specified one */
1480 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1481 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1482 			    offset, length);
1483 
1484 			if (bcmp(digest + scratch_offset,
1485 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1486 			    cur_len) != 0) {
1487 				ret = CRYPTO_INVALID_MAC;
1488 				break;
1489 			}
1490 
1491 			length -= cur_len;
1492 			vec_idx++;
1493 			scratch_offset += cur_len;
1494 			offset = 0;
1495 		}
1496 		break;
1497 	}
1498 
1499 	case CRYPTO_DATA_MBLK: {
1500 		off_t offset = mac->cd_offset;
1501 		mblk_t *mp;
1502 		off_t scratch_offset = 0;
1503 		size_t length = digest_len;
1504 		size_t cur_len;
1505 
1506 		/* jump to the first mblk_t containing the expected digest */
1507 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1508 		    offset -= MBLKL(mp), mp = mp->b_cont)
1509 			;
1510 		if (mp == NULL) {
1511 			/*
1512 			 * The caller specified an offset that is larger than
1513 			 * the total size of the buffers it provided.
1514 			 */
1515 			ret = CRYPTO_DATA_LEN_RANGE;
1516 			break;
1517 		}
1518 
1519 		while (mp != NULL && length > 0) {
1520 			cur_len = MIN(MBLKL(mp) - offset, length);
1521 			if (bcmp(digest + scratch_offset,
1522 			    mp->b_rptr + offset, cur_len) != 0) {
1523 				ret = CRYPTO_INVALID_MAC;
1524 				break;
1525 			}
1526 
1527 			length -= cur_len;
1528 			mp = mp->b_cont;
1529 			scratch_offset += cur_len;
1530 			offset = 0;
1531 		}
1532 		break;
1533 	}
1534 
1535 	default:
1536 		ret = CRYPTO_ARGUMENTS_BAD;
1537 	}
1538 
1539 	return (ret);
1540 bail:
1541 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1542 	mac->cd_length = 0;
1543 	return (ret);
1544 }
1545 
1546 /*
1547  * KCF software provider context management entry points.
1548  */
1549 
1550 /* ARGSUSED */
1551 static int
1552 sha2_create_ctx_template(crypto_provider_handle_t provider,
1553     crypto_mechanism_t *mechanism, crypto_key_t *key,
1554     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1555     crypto_req_handle_t req)
1556 {
1557 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1558 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1559 	uint32_t sha_digest_len, sha_hmac_block_size;
1560 
1561 	/*
1562 	 * Set the digest length and block size to values appropriate to the
1563 	 * mechanism
1564 	 */
1565 	switch (mechanism->cm_type) {
1566 	case SHA256_HMAC_MECH_INFO_TYPE:
1567 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1568 		sha_digest_len = SHA256_DIGEST_LENGTH;
1569 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1570 		break;
1571 	case SHA384_HMAC_MECH_INFO_TYPE:
1572 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1573 	case SHA512_HMAC_MECH_INFO_TYPE:
1574 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1575 		sha_digest_len = SHA512_DIGEST_LENGTH;
1576 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1577 		break;
1578 	default:
1579 		return (CRYPTO_MECHANISM_INVALID);
1580 	}
1581 
1582 	/* Add support for key by attributes (RFE 4706552) */
1583 	if (key->ck_format != CRYPTO_KEY_RAW)
1584 		return (CRYPTO_ARGUMENTS_BAD);
1585 
1586 	/*
1587 	 * Allocate and initialize SHA2 context.
1588 	 */
1589 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1590 	    crypto_kmflag(req));
1591 	if (sha2_hmac_ctx_tmpl == NULL)
1592 		return (CRYPTO_HOST_MEMORY);
1593 
1594 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1595 
1596 	if (keylen_in_bytes > sha_hmac_block_size) {
1597 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1598 
1599 		/*
1600 		 * Hash the passed-in key to get a smaller key.
1601 		 * The inner context is used since it hasn't been
1602 		 * initialized yet.
1603 		 */
1604 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1605 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1606 		    key->ck_data, keylen_in_bytes, digested_key);
1607 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1608 		    sha_digest_len);
1609 	} else {
1610 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1611 		    keylen_in_bytes);
1612 	}
1613 
1614 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1615 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1616 
1617 	return (CRYPTO_SUCCESS);
1618 }
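/*
 * A context template produced above already holds the key-dependent inner
 * and outer contexts, so sha2_mac_init() and the atomic MAC entry points
 * only bcopy() it into their working context instead of repeating the key
 * setup on every operation.
 */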
1619 
1620 static int
1621 sha2_free_context(crypto_ctx_t *ctx)
1622 {
1623 	uint_t ctx_len;
1624 
1625 	if (ctx->cc_provider_private == NULL)
1626 		return (CRYPTO_SUCCESS);
1627 
1628 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
1629 	case SHA256_MECH_INFO_TYPE:
1630 	case SHA384_MECH_INFO_TYPE:
1631 	case SHA512_MECH_INFO_TYPE:
1632 	case SHA512_224_MECH_INFO_TYPE:
1633 	case SHA512_256_MECH_INFO_TYPE:
1634 		ctx_len = sizeof (sha2_ctx_t);
1635 		break;
1636 	case SHA256_HMAC_MECH_INFO_TYPE:
1637 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1638 	case SHA384_HMAC_MECH_INFO_TYPE:
1639 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1640 	case SHA512_HMAC_MECH_INFO_TYPE:
1641 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1642 		ctx_len = sizeof (sha2_hmac_ctx_t);
1643 		break;
1644 	default:
1645 		/*
1646 		 * If we get here, someone forgot to update the above list
1647 		 * when adding a new mechanism.  Without the correct ctx_len
1648 		 * we will corrupt the heap when calling kmem_free, so panic
1649 		 * now and make it easier to identify the problem.
1650 		 */
1651 		panic("Unknown SHA2 mechanism %d",
1652 		    PROV_SHA2_CTX(ctx)->sc_mech_type);
1653 	}
1654 
1655 	bzero(ctx->cc_provider_private, ctx_len);
1656 	kmem_free(ctx->cc_provider_private, ctx_len);
1657 	ctx->cc_provider_private = NULL;
1658 
1659 	return (CRYPTO_SUCCESS);
1660 }
1661