1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #pragma ident	"%Z%%M%	%I%	%E% SMI"
28 
29 #include <sys/modctl.h>
30 #include <sys/cmn_err.h>
31 #include <sys/crypto/common.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/strsun.h>
34 #include <sys/systm.h>
35 #include <sys/sysmacros.h>
36 #define	_SHA2_IMPL
37 #include <sys/sha2.h>
38 
39 /*
40  * The sha2 module is created with two modlinkages:
41  * - a modlmisc that allows consumers to directly call the entry points
42  *   SHA2Init, SHA2Update, and SHA2Final.
43  * - a modlcrypto that allows the module to register with the Kernel
44  *   Cryptographic Framework (KCF) as a software provider for the SHA2
45  *   mechanisms.
46  */
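
/*
 * For reference, a modlmisc consumer calls the misc entry points directly,
 * along the lines of the sketch below (illustrative only, not code used by
 * this module; it assumes the SHA256 algorithm constant and
 * SHA256_DIGEST_LENGTH from <sys/sha2.h>):
 *
 *	SHA2_CTX ctx;
 *	uint8_t digest[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256, &ctx);
 *	SHA2Update(&ctx, data, size);
 *	SHA2Final(digest, &ctx);
 *
 * KCF consumers instead go through the framework interfaces, which dispatch
 * to the provider entry points registered below.
 */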
47 
48 static struct modlmisc modlmisc = {
49 	&mod_miscops,
50 	"SHA2 Message-Digest Algorithm"
51 };
52 
53 static struct modlcrypto modlcrypto = {
54 	&mod_cryptoops,
55 	"SHA2 Kernel SW Provider"
56 };
57 
58 static struct modlinkage modlinkage = {
59 	MODREV_1, &modlmisc, &modlcrypto, NULL
60 };
61 
62 /*
63  * CSPI information (entry points, provider info, etc.)
64  */
65 
66 /*
67  * Context for SHA2 mechanism.
68  */
69 typedef struct sha2_ctx {
70 	sha2_mech_type_t	sc_mech_type;	/* type of context */
71 	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
72 } sha2_ctx_t;
73 
74 /*
75  * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
76  */
77 typedef struct sha2_hmac_ctx {
78 	sha2_mech_type_t	hc_mech_type;	/* type of context */
79 	uint32_t		hc_digest_len;	/* digest len in bytes */
80 	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
81 	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
82 } sha2_hmac_ctx_t;
83 
84 /*
85  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
86  * by KCF to one of the entry points.
87  */
88 
89 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
90 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
91 
92 /* to extract the digest length passed as mechanism parameter */
93 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
94 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
95 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
96 	else {								\
97 		ulong_t tmp_ulong;					\
98 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
99 		(len) = (uint32_t)tmp_ulong;				\
100 	}								\
101 }
102 
103 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
104 	SHA2Init(mech, ctx);				\
105 	SHA2Update(ctx, key, len);			\
106 	SHA2Final(digest, ctx);				\
107 }
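
/*
 * The macros above are used by the HMAC entry points below, roughly as in
 * this sketch (illustrative only; hmac_ctx, keylen_in_bytes and
 * sha_hmac_block_size stand for locals declared at the real call sites):
 *
 *	uint32_t digest_len;
 *	uchar_t digested_key[SHA512_DIGEST_LENGTH];
 *
 *	PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
 *	if (keylen_in_bytes > sha_hmac_block_size)
 *		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
 *		    &hmac_ctx->hc_icontext, key->ck_data,
 *		    keylen_in_bytes, digested_key);
 *
 * PROV_SHA2_GET_DIGEST_LEN() applies only to the *_HMAC_GENERAL mechanisms,
 * whose parameter is the requested MAC length; PROV_SHA2_DIGEST_KEY()
 * shrinks keys longer than the HMAC block size by digesting them first.
 */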
108 
109 /*
110  * Mechanism info structure passed to KCF during registration.
111  */
112 static crypto_mech_info_t sha2_mech_info_tab[] = {
113 	/* SHA256 */
114 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
115 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
116 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
117 	/* SHA256-HMAC */
118 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
119 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
120 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
121 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA256-HMAC GENERAL */
123 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
127 	/* SHA384 */
128 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
129 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
130 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
131 	/* SHA384-HMAC */
132 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
133 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
134 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
135 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
136 	/* SHA384-HMAC GENERAL */
137 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
138 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
139 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
140 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
141 	/* SHA512 */
142 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
143 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
144 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
145 	/* SHA512-HMAC */
146 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
147 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
148 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
149 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
150 	/* SHA512-HMAC GENERAL */
151 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
152 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
153 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
154 	    CRYPTO_KEYSIZE_UNIT_IN_BITS}
155 };
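
/*
 * A KCF consumer selects one of the mechanisms above by name, e.g. through
 * crypto_mech2id(9F), and for the *_HMAC_GENERAL variants passes the
 * desired MAC length as a ulong_t mechanism parameter. An illustrative
 * sketch (the caddr_t cast is an assumption about cm_param's declared type;
 * see crypto_mech2id(9F)):
 *
 *	ulong_t maclen = 16;
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_SHA256_HMAC_GENERAL);
 *	mech.cm_param = (caddr_t)&maclen;
 *	mech.cm_param_len = sizeof (maclen);
 */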
156 
157 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
158 
159 static crypto_control_ops_t sha2_control_ops = {
160 	sha2_provider_status
161 };
162 
163 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
164     crypto_req_handle_t);
165 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
166     crypto_req_handle_t);
167 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
168     crypto_req_handle_t);
169 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
170     crypto_req_handle_t);
171 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
172     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
173     crypto_req_handle_t);
174 
175 static crypto_digest_ops_t sha2_digest_ops = {
176 	sha2_digest_init,
177 	sha2_digest,
178 	sha2_digest_update,
179 	NULL,
180 	sha2_digest_final,
181 	sha2_digest_atomic
182 };
183 
184 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
185     crypto_spi_ctx_template_t, crypto_req_handle_t);
186 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
187     crypto_req_handle_t);
188 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
189 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
190     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
191     crypto_spi_ctx_template_t, crypto_req_handle_t);
192 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
193     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
194     crypto_spi_ctx_template_t, crypto_req_handle_t);
195 
196 static crypto_mac_ops_t sha2_mac_ops = {
197 	sha2_mac_init,
198 	NULL,
199 	sha2_mac_update,
200 	sha2_mac_final,
201 	sha2_mac_atomic,
202 	sha2_mac_verify_atomic
203 };
204 
205 static int sha2_create_ctx_template(crypto_provider_handle_t,
206     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
207     size_t *, crypto_req_handle_t);
208 static int sha2_free_context(crypto_ctx_t *);
209 
210 static crypto_ctx_ops_t sha2_ctx_ops = {
211 	sha2_create_ctx_template,
212 	sha2_free_context
213 };
214 
215 static crypto_ops_t sha2_crypto_ops = {
216 	&sha2_control_ops,
217 	&sha2_digest_ops,
218 	NULL,
219 	&sha2_mac_ops,
220 	NULL,
221 	NULL,
222 	NULL,
223 	NULL,
224 	NULL,
225 	NULL,
226 	NULL,
227 	NULL,
228 	NULL,
229 	&sha2_ctx_ops
230 };
231 
232 static crypto_provider_info_t sha2_prov_info = {
233 	CRYPTO_SPI_VERSION_1,
234 	"SHA2 Software Provider",
235 	CRYPTO_SW_PROVIDER,
236 	{&modlinkage},
237 	NULL,
238 	&sha2_crypto_ops,
239 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
240 	sha2_mech_info_tab
241 };
242 
243 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
244 
245 int
246 _init()
247 {
248 	int ret;
249 
250 	if ((ret = mod_install(&modlinkage)) != 0)
251 		return (ret);
252 
253 	/*
254 	 * Register with KCF. If the registration fails, log an
255 	 * error but do not uninstall the module, since the functionality
256 	 * provided by misc/sha2 should still be available.
257 	 */
258 	if ((ret = crypto_register_provider(&sha2_prov_info,
259 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
260 		cmn_err(CE_WARN, "sha2 _init: "
261 		    "crypto_register_provider() failed (0x%x)", ret);
262 
263 	return (0);
264 }
265 
266 int
267 _info(struct modinfo *modinfop)
268 {
269 	return (mod_info(&modlinkage, modinfop));
270 }
271 
272 /*
273  * KCF software provider control entry points.
274  */
275 /* ARGSUSED */
276 static void
277 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
278 {
279 	*status = CRYPTO_PROVIDER_READY;
280 }
281 
282 /*
283  * KCF software provider digest entry points.
284  */
285 
286 static int
287 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
288     crypto_req_handle_t req)
289 {
290 
291 	/*
292 	 * Allocate and initialize SHA2 context.
293 	 */
294 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
295 	    crypto_kmflag(req));
296 	if (ctx->cc_provider_private == NULL)
297 		return (CRYPTO_HOST_MEMORY);
298 
299 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
300 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
301 
302 	return (CRYPTO_SUCCESS);
303 }
304 
305 /*
306  * Helper SHA2 digest update function for uio data.
307  */
308 static int
309 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
310 {
311 	off_t offset = data->cd_offset;
312 	size_t length = data->cd_length;
313 	uint_t vec_idx;
314 	size_t cur_len;
315 
316 	/* we support only kernel buffer */
317 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
318 		return (CRYPTO_ARGUMENTS_BAD);
319 
320 	/*
321 	 * Jump to the first iovec containing data to be
322 	 * digested.
323 	 */
324 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
325 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
326 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
327 		;
328 	if (vec_idx == data->cd_uio->uio_iovcnt) {
329 		/*
330 		 * The caller specified an offset that is larger than the
331 		 * total size of the buffers it provided.
332 		 */
333 		return (CRYPTO_DATA_LEN_RANGE);
334 	}
335 
336 	/*
337 	 * Now do the digesting on the iovecs.
338 	 */
339 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
340 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
341 		    offset, length);
342 
343 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
344 		    uio_iov[vec_idx].iov_base + offset, cur_len);
345 		length -= cur_len;
346 		vec_idx++;
347 		offset = 0;
348 	}
349 
350 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
351 		/*
352 		 * The end of the specified iovec's was reached but
353 		 * the length requested could not be processed, i.e.
		 * the caller requested to digest more data than it provided.
355 		 */
356 		return (CRYPTO_DATA_LEN_RANGE);
357 	}
358 
359 	return (CRYPTO_SUCCESS);
360 }
361 
362 /*
363  * Helper SHA2 digest final function for uio data.
 * digest_len is the length of the desired digest. If digest_len is
 * smaller than the default SHA2 digest length, the caller must pass a
 * scratch buffer, digest_scratch, at least as large as the algorithm's
 * digest length.
368  */
369 static int
370 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
371     ulong_t digest_len, uchar_t *digest_scratch)
372 {
373 	off_t offset = digest->cd_offset;
374 	uint_t vec_idx;
375 
376 	/* we support only kernel buffer */
377 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
378 		return (CRYPTO_ARGUMENTS_BAD);
379 
380 	/*
381 	 * Jump to the first iovec containing ptr to the digest to
382 	 * be returned.
383 	 */
	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
386 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
387 		;
388 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
389 		/*
390 		 * The caller specified an offset that is
391 		 * larger than the total size of the buffers
392 		 * it provided.
393 		 */
394 		return (CRYPTO_DATA_LEN_RANGE);
395 	}
396 
397 	if (offset + digest_len <=
398 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
399 		/*
400 		 * The computed SHA2 digest will fit in the current
401 		 * iovec.
402 		 */
403 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
404 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
405 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
406 		    (digest_len != SHA512_DIGEST_LENGTH))) {
407 			/*
408 			 * The caller requested a short digest. Digest
409 			 * into a scratch buffer and return to
410 			 * the user only what was requested.
411 			 */
412 			SHA2Final(digest_scratch, sha2_ctx);
413 
414 			bcopy(digest_scratch, (uchar_t *)digest->
415 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
416 			    digest_len);
417 		} else {
418 			SHA2Final((uchar_t *)digest->
419 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
420 			    sha2_ctx);
421 
422 		}
423 	} else {
424 		/*
425 		 * The computed digest will be crossing one or more iovec's.
426 		 * This is bad performance-wise but we need to support it.
427 		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovec's.
429 		 */
430 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
431 		off_t scratch_offset = 0;
432 		size_t length = digest_len;
433 		size_t cur_len;
434 
435 		SHA2Final(digest_tmp, sha2_ctx);
436 
437 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
438 			cur_len =
439 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
440 			    offset, length);
441 			bcopy(digest_tmp + scratch_offset,
442 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
443 			    cur_len);
444 
445 			length -= cur_len;
446 			vec_idx++;
447 			scratch_offset += cur_len;
448 			offset = 0;
449 		}
450 
451 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
452 			/*
453 			 * The end of the specified iovec's was reached but
454 			 * the length requested could not be processed, i.e.
			 * the caller requested to digest more data than it
456 			 * provided.
457 			 */
458 			return (CRYPTO_DATA_LEN_RANGE);
459 		}
460 	}
461 
462 	return (CRYPTO_SUCCESS);
463 }
464 
465 /*
466  * Helper SHA2 digest update for mblk's.
467  */
468 static int
469 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
470 {
471 	off_t offset = data->cd_offset;
472 	size_t length = data->cd_length;
473 	mblk_t *mp;
474 	size_t cur_len;
475 
476 	/*
477 	 * Jump to the first mblk_t containing data to be digested.
478 	 */
479 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
480 	    offset -= MBLKL(mp), mp = mp->b_cont)
481 		;
482 	if (mp == NULL) {
483 		/*
484 		 * The caller specified an offset that is larger than the
485 		 * total size of the buffers it provided.
486 		 */
487 		return (CRYPTO_DATA_LEN_RANGE);
488 	}
489 
490 	/*
491 	 * Now do the digesting on the mblk chain.
492 	 */
493 	while (mp != NULL && length > 0) {
494 		cur_len = MIN(MBLKL(mp) - offset, length);
495 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
496 		length -= cur_len;
497 		offset = 0;
498 		mp = mp->b_cont;
499 	}
500 
501 	if (mp == NULL && length > 0) {
502 		/*
503 		 * The end of the mblk was reached but the length requested
		 * could not be processed, i.e. the caller requested
505 		 * to digest more data than it provided.
506 		 */
507 		return (CRYPTO_DATA_LEN_RANGE);
508 	}
509 
510 	return (CRYPTO_SUCCESS);
511 }
512 
513 /*
514  * Helper SHA2 digest final for mblk's.
 * digest_len is the length of the desired digest. If digest_len is
 * smaller than the default SHA2 digest length, the caller must pass a
 * scratch buffer, digest_scratch, at least as large as the algorithm's
 * digest length.
519  */
520 static int
521 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
522     ulong_t digest_len, uchar_t *digest_scratch)
523 {
524 	off_t offset = digest->cd_offset;
525 	mblk_t *mp;
526 
527 	/*
528 	 * Jump to the first mblk_t that will be used to store the digest.
529 	 */
530 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
531 	    offset -= MBLKL(mp), mp = mp->b_cont)
532 		;
533 	if (mp == NULL) {
534 		/*
535 		 * The caller specified an offset that is larger than the
536 		 * total size of the buffers it provided.
537 		 */
538 		return (CRYPTO_DATA_LEN_RANGE);
539 	}
540 
541 	if (offset + digest_len <= MBLKL(mp)) {
542 		/*
543 		 * The computed SHA2 digest will fit in the current mblk.
544 		 * Do the SHA2Final() in-place.
545 		 */
546 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
547 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
548 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
549 		    (digest_len != SHA512_DIGEST_LENGTH))) {
550 			/*
551 			 * The caller requested a short digest. Digest
552 			 * into a scratch buffer and return to
553 			 * the user only what was requested.
554 			 */
555 			SHA2Final(digest_scratch, sha2_ctx);
556 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
557 		} else {
558 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
559 		}
560 	} else {
561 		/*
562 		 * The computed digest will be crossing one or more mblk's.
563 		 * This is bad performance-wise but we need to support it.
564 		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest mblk's.
566 		 */
567 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
568 		off_t scratch_offset = 0;
569 		size_t length = digest_len;
570 		size_t cur_len;
571 
572 		SHA2Final(digest_tmp, sha2_ctx);
573 
574 		while (mp != NULL && length > 0) {
575 			cur_len = MIN(MBLKL(mp) - offset, length);
576 			bcopy(digest_tmp + scratch_offset,
577 			    mp->b_rptr + offset, cur_len);
578 
579 			length -= cur_len;
580 			mp = mp->b_cont;
581 			scratch_offset += cur_len;
582 			offset = 0;
583 		}
584 
585 		if (mp == NULL && length > 0) {
586 			/*
587 			 * The end of the specified mblk was reached but
588 			 * the length requested could not be processed, i.e.
			 * the caller requested to digest more data than it
590 			 * provided.
591 			 */
592 			return (CRYPTO_DATA_LEN_RANGE);
593 		}
594 	}
595 
596 	return (CRYPTO_SUCCESS);
597 }
598 
599 /* ARGSUSED */
600 static int
601 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
602     crypto_req_handle_t req)
603 {
604 	int ret = CRYPTO_SUCCESS;
605 	uint_t sha_digest_len;
606 
607 	ASSERT(ctx->cc_provider_private != NULL);
608 
609 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
610 	case SHA256_MECH_INFO_TYPE:
611 		sha_digest_len = SHA256_DIGEST_LENGTH;
612 		break;
613 	case SHA384_MECH_INFO_TYPE:
614 		sha_digest_len = SHA384_DIGEST_LENGTH;
615 		break;
616 	case SHA512_MECH_INFO_TYPE:
617 		sha_digest_len = SHA512_DIGEST_LENGTH;
618 		break;
619 	default:
620 		return (CRYPTO_MECHANISM_INVALID);
621 	}
622 
623 	/*
	 * If the caller's buffer is zero-length or too small, just return
	 * the length needed to store the output; do not destroy the context.
626 	 */
627 	if ((digest->cd_length == 0) ||
628 	    (digest->cd_length < sha_digest_len)) {
629 		digest->cd_length = sha_digest_len;
630 		return (CRYPTO_BUFFER_TOO_SMALL);
631 	}
632 
633 	/*
634 	 * Do the SHA2 update on the specified input data.
635 	 */
636 	switch (data->cd_format) {
637 	case CRYPTO_DATA_RAW:
638 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
639 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
640 		    data->cd_length);
641 		break;
642 	case CRYPTO_DATA_UIO:
643 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
644 		    data);
645 		break;
646 	case CRYPTO_DATA_MBLK:
647 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
648 		    data);
649 		break;
650 	default:
651 		ret = CRYPTO_ARGUMENTS_BAD;
652 	}
653 
654 	if (ret != CRYPTO_SUCCESS) {
655 		/* the update failed, free context and bail */
656 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
657 		ctx->cc_provider_private = NULL;
658 		digest->cd_length = 0;
659 		return (ret);
660 	}
661 
662 	/*
663 	 * Do a SHA2 final, must be done separately since the digest
664 	 * type can be different than the input data type.
665 	 */
666 	switch (digest->cd_format) {
667 	case CRYPTO_DATA_RAW:
668 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
669 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
670 		break;
671 	case CRYPTO_DATA_UIO:
672 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
673 		    digest, sha_digest_len, NULL);
674 		break;
675 	case CRYPTO_DATA_MBLK:
676 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
677 		    digest, sha_digest_len, NULL);
678 		break;
679 	default:
680 		ret = CRYPTO_ARGUMENTS_BAD;
681 	}
682 
683 	/* all done, free context and return */
684 
685 	if (ret == CRYPTO_SUCCESS)
686 		digest->cd_length = sha_digest_len;
687 	else
688 		digest->cd_length = 0;
689 
690 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
691 	ctx->cc_provider_private = NULL;
692 	return (ret);
693 }
694 
695 /* ARGSUSED */
696 static int
697 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
698     crypto_req_handle_t req)
699 {
700 	int ret = CRYPTO_SUCCESS;
701 
702 	ASSERT(ctx->cc_provider_private != NULL);
703 
704 	/*
705 	 * Do the SHA2 update on the specified input data.
706 	 */
707 	switch (data->cd_format) {
708 	case CRYPTO_DATA_RAW:
709 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
710 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
711 		    data->cd_length);
712 		break;
713 	case CRYPTO_DATA_UIO:
714 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
715 		    data);
716 		break;
717 	case CRYPTO_DATA_MBLK:
718 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
719 		    data);
720 		break;
721 	default:
722 		ret = CRYPTO_ARGUMENTS_BAD;
723 	}
724 
725 	return (ret);
726 }
727 
728 /* ARGSUSED */
729 static int
730 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
731     crypto_req_handle_t req)
732 {
733 	int ret = CRYPTO_SUCCESS;
734 	uint_t sha_digest_len;
735 
736 	ASSERT(ctx->cc_provider_private != NULL);
737 
738 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
739 	case SHA256_MECH_INFO_TYPE:
740 		sha_digest_len = SHA256_DIGEST_LENGTH;
741 		break;
742 	case SHA384_MECH_INFO_TYPE:
743 		sha_digest_len = SHA384_DIGEST_LENGTH;
744 		break;
745 	case SHA512_MECH_INFO_TYPE:
746 		sha_digest_len = SHA512_DIGEST_LENGTH;
747 		break;
748 	default:
749 		return (CRYPTO_MECHANISM_INVALID);
750 	}
751 
752 	/*
	 * If the caller's buffer is zero-length or too small, just return
	 * the length needed to store the output; do not destroy the context.
755 	 */
756 	if ((digest->cd_length == 0) ||
757 	    (digest->cd_length < sha_digest_len)) {
758 		digest->cd_length = sha_digest_len;
759 		return (CRYPTO_BUFFER_TOO_SMALL);
760 	}
761 
762 	/*
763 	 * Do a SHA2 final.
764 	 */
765 	switch (digest->cd_format) {
766 	case CRYPTO_DATA_RAW:
767 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
768 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
769 		break;
770 	case CRYPTO_DATA_UIO:
771 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
772 		    digest, sha_digest_len, NULL);
773 		break;
774 	case CRYPTO_DATA_MBLK:
775 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
776 		    digest, sha_digest_len, NULL);
777 		break;
778 	default:
779 		ret = CRYPTO_ARGUMENTS_BAD;
780 	}
781 
782 	/* all done, free context and return */
783 
784 	if (ret == CRYPTO_SUCCESS)
785 		digest->cd_length = sha_digest_len;
786 	else
787 		digest->cd_length = 0;
788 
789 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
790 	ctx->cc_provider_private = NULL;
791 
792 	return (ret);
793 }
794 
795 /* ARGSUSED */
796 static int
797 sha2_digest_atomic(crypto_provider_handle_t provider,
798     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
799     crypto_data_t *data, crypto_data_t *digest,
800     crypto_req_handle_t req)
801 {
802 	int ret = CRYPTO_SUCCESS;
803 	SHA2_CTX sha2_ctx;
804 	uint32_t sha_digest_len;
805 
	/*
	 * Do the SHA2 init.
	 */

	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
812 	switch (data->cd_format) {
813 	case CRYPTO_DATA_RAW:
814 		SHA2Update(&sha2_ctx, (uint8_t *)data->
815 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
816 		break;
817 	case CRYPTO_DATA_UIO:
818 		ret = sha2_digest_update_uio(&sha2_ctx, data);
819 		break;
820 	case CRYPTO_DATA_MBLK:
821 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
822 		break;
823 	default:
824 		ret = CRYPTO_ARGUMENTS_BAD;
825 	}
830 
831 	if (ret != CRYPTO_SUCCESS) {
832 		/* the update failed, bail */
833 		digest->cd_length = 0;
834 		return (ret);
835 	}
836 
837 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
838 		sha_digest_len = SHA256_DIGEST_LENGTH;
839 	else
840 		sha_digest_len = SHA512_DIGEST_LENGTH;
841 
842 	/*
843 	 * Do a SHA2 final, must be done separately since the digest
844 	 * type can be different than the input data type.
845 	 */
846 	switch (digest->cd_format) {
847 	case CRYPTO_DATA_RAW:
848 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
849 		    digest->cd_offset, &sha2_ctx);
850 		break;
851 	case CRYPTO_DATA_UIO:
852 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
853 		    sha_digest_len, NULL);
854 		break;
855 	case CRYPTO_DATA_MBLK:
856 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
857 		    sha_digest_len, NULL);
858 		break;
859 	default:
860 		ret = CRYPTO_ARGUMENTS_BAD;
861 	}
862 
863 	if (ret == CRYPTO_SUCCESS)
864 		digest->cd_length = sha_digest_len;
865 	else
866 		digest->cd_length = 0;
867 
868 	return (ret);
869 }
870 
871 /*
872  * KCF software provider mac entry points.
873  *
874  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
875  *
876  * Init:
877  * The initialization routine initializes what we denote
878  * as the inner and outer contexts by doing
879  * - for inner context: SHA2(key XOR ipad)
880  * - for outer context: SHA2(key XOR opad)
881  *
882  * Update:
883  * Each subsequent SHA2 HMAC update will result in an
884  * update of the inner context with the specified data.
885  *
886  * Final:
887  * The SHA2 HMAC final will do a SHA2 final operation on the
888  * inner context, and the resulting digest will be used
889  * as the data for an update on the outer context. Last
890  * but not least, a SHA2 final on the outer context will
891  * be performed to obtain the SHA2 HMAC digest to return
892  * to the user.
893  */
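
/*
 * In terms of the two contexts kept in sha2_hmac_ctx_t, the update and
 * final steps described above amount to the following sketch (illustrative
 * only; the real code paths are in the functions below):
 *
 *	SHA2Update(&ctx->hc_icontext, text, text_len);
 *	SHA2Final(inner_digest, &ctx->hc_icontext);
 *	SHA2Update(&ctx->hc_ocontext, inner_digest, digest_len);
 *	SHA2Final(mac, &ctx->hc_ocontext);
 *
 * where hc_icontext was seeded with (key XOR ipad) and hc_ocontext with
 * (key XOR opad) by sha2_mac_init_ctx() below.
 */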
894 
895 /*
896  * Initialize a SHA2-HMAC context.
897  */
898 static void
899 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
900 {
901 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
902 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
903 	int i, block_size, blocks_per_int64;
904 
905 	/* Determine the block size */
906 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
907 		block_size = SHA256_HMAC_BLOCK_SIZE;
908 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
909 	} else {
910 		block_size = SHA512_HMAC_BLOCK_SIZE;
911 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
912 	}
913 
914 	(void) bzero(ipad, block_size);
915 	(void) bzero(opad, block_size);
916 	(void) bcopy(keyval, ipad, length_in_bytes);
917 	(void) bcopy(keyval, opad, length_in_bytes);
918 
919 	/* XOR key with ipad (0x36) and opad (0x5c) */
920 	for (i = 0; i < blocks_per_int64; i ++) {
921 		ipad[i] ^= 0x3636363636363636;
922 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
923 	}
924 
925 	/* perform SHA2 on ipad */
926 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
927 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
928 
929 	/* perform SHA2 on opad */
930 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
931 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
932 
933 }
934 
/*
 * KCF software provider mac init entry point. Initializes the inner and
 * outer SHA2 contexts from the key, or reuses a context template if one
 * is supplied.
 */
937 static int
938 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
939     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
940     crypto_req_handle_t req)
941 {
942 	int ret = CRYPTO_SUCCESS;
943 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
944 	uint_t sha_digest_len, sha_hmac_block_size;
945 
946 	/*
	 * Set the digest length and block size to values appropriate to the
948 	 * mechanism
949 	 */
950 	switch (mechanism->cm_type) {
951 	case SHA256_HMAC_MECH_INFO_TYPE:
952 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
953 		sha_digest_len = SHA256_DIGEST_LENGTH;
954 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
955 		break;
956 	case SHA384_HMAC_MECH_INFO_TYPE:
957 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
958 	case SHA512_HMAC_MECH_INFO_TYPE:
959 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
960 		sha_digest_len = SHA512_DIGEST_LENGTH;
961 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
962 		break;
963 	default:
964 		return (CRYPTO_MECHANISM_INVALID);
965 	}
966 
967 	if (key->ck_format != CRYPTO_KEY_RAW)
968 		return (CRYPTO_ARGUMENTS_BAD);
969 
970 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
971 	    crypto_kmflag(req));
972 	if (ctx->cc_provider_private == NULL)
973 		return (CRYPTO_HOST_MEMORY);
974 
975 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
976 	if (ctx_template != NULL) {
977 		/* reuse context template */
978 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
979 		    sizeof (sha2_hmac_ctx_t));
980 	} else {
981 		/* no context template, compute context */
982 		if (keylen_in_bytes > sha_hmac_block_size) {
983 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
984 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
985 
986 			/*
987 			 * Hash the passed-in key to get a smaller key.
988 			 * The inner context is used since it hasn't been
989 			 * initialized yet.
990 			 */
991 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
992 			    &hmac_ctx->hc_icontext,
993 			    key->ck_data, keylen_in_bytes, digested_key);
994 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
995 			    digested_key, sha_digest_len);
996 		} else {
997 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
998 			    key->ck_data, keylen_in_bytes);
999 		}
1000 	}
1001 
1002 	/*
1003 	 * Get the mechanism parameters, if applicable.
1004 	 */
1005 	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
1013 	}
1014 
1015 	if (ret != CRYPTO_SUCCESS) {
1016 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1017 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1018 		ctx->cc_provider_private = NULL;
1019 	}
1020 
1021 	return (ret);
1022 }
1023 
1024 /* ARGSUSED */
1025 static int
1026 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1027     crypto_req_handle_t req)
1028 {
1029 	int ret = CRYPTO_SUCCESS;
1030 
1031 	ASSERT(ctx->cc_provider_private != NULL);
1032 
1033 	/*
1034 	 * Do a SHA2 update of the inner context using the specified
1035 	 * data.
1036 	 */
1037 	switch (data->cd_format) {
1038 	case CRYPTO_DATA_RAW:
1039 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1040 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1041 		    data->cd_length);
1042 		break;
1043 	case CRYPTO_DATA_UIO:
1044 		ret = sha2_digest_update_uio(
1045 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1046 		break;
1047 	case CRYPTO_DATA_MBLK:
1048 		ret = sha2_digest_update_mblk(
1049 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1050 		break;
1051 	default:
1052 		ret = CRYPTO_ARGUMENTS_BAD;
1053 	}
1054 
1055 	return (ret);
1056 }
1057 
1058 /* ARGSUSED */
1059 static int
1060 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1061 {
1062 	int ret = CRYPTO_SUCCESS;
1063 	uchar_t digest[SHA512_DIGEST_LENGTH];
1064 	uint32_t digest_len, sha_digest_len;
1065 
1066 	ASSERT(ctx->cc_provider_private != NULL);
1067 
	/* Set the digest lengths to values appropriate to the mechanism */
1069 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1070 	case SHA256_HMAC_MECH_INFO_TYPE:
1071 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1072 		break;
1073 	case SHA384_HMAC_MECH_INFO_TYPE:
1074 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1075 		break;
1076 	case SHA512_HMAC_MECH_INFO_TYPE:
1077 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1078 		break;
1079 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1080 		sha_digest_len = SHA256_DIGEST_LENGTH;
1081 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1082 		break;
1083 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1084 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1085 		sha_digest_len = SHA512_DIGEST_LENGTH;
1086 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1087 		break;
1088 	}
1089 
1090 	/*
	 * If the caller's buffer is zero-length or too small, just return
	 * the length needed to store the output; do not destroy the context.
1093 	 */
1094 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1095 		mac->cd_length = digest_len;
1096 		return (CRYPTO_BUFFER_TOO_SMALL);
1097 	}
1098 
1099 	/*
1100 	 * Do a SHA2 final on the inner context.
1101 	 */
1102 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1103 
1104 	/*
1105 	 * Do a SHA2 update on the outer context, feeding the inner
1106 	 * digest as data.
1107 	 */
1108 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1109 	    sha_digest_len);
1110 
1111 	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1114 	 */
1115 	switch (mac->cd_format) {
1116 	case CRYPTO_DATA_RAW:
1117 		if (digest_len != sha_digest_len) {
1118 			/*
1119 			 * The caller requested a short digest. Digest
1120 			 * into a scratch buffer and return to
1121 			 * the user only what was requested.
1122 			 */
1123 			SHA2Final(digest,
1124 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1125 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1126 			    mac->cd_offset, digest_len);
1127 		} else {
1128 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1129 			    mac->cd_offset,
1130 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1131 		}
1132 		break;
1133 	case CRYPTO_DATA_UIO:
1134 		ret = sha2_digest_final_uio(
1135 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1136 		    digest_len, digest);
1137 		break;
1138 	case CRYPTO_DATA_MBLK:
1139 		ret = sha2_digest_final_mblk(
1140 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1141 		    digest_len, digest);
1142 		break;
1143 	default:
1144 		ret = CRYPTO_ARGUMENTS_BAD;
1145 	}
1146 
1147 	if (ret == CRYPTO_SUCCESS)
1148 		mac->cd_length = digest_len;
1149 	else
1150 		mac->cd_length = 0;
1151 
1152 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1153 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1154 	ctx->cc_provider_private = NULL;
1155 
1156 	return (ret);
1157 }
1158 
1159 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1160 	switch (data->cd_format) {					\
1161 	case CRYPTO_DATA_RAW:						\
1162 		SHA2Update(&(ctx).hc_icontext,				\
1163 		    (uint8_t *)data->cd_raw.iov_base +			\
1164 		    data->cd_offset, data->cd_length);			\
1165 		break;							\
1166 	case CRYPTO_DATA_UIO:						\
1167 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1168 		break;							\
1169 	case CRYPTO_DATA_MBLK:						\
1170 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1171 		    data);						\
1172 		break;							\
1173 	default:							\
1174 		ret = CRYPTO_ARGUMENTS_BAD;				\
1175 	}								\
1176 }
1177 
1178 /* ARGSUSED */
1179 static int
1180 sha2_mac_atomic(crypto_provider_handle_t provider,
1181     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1182     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1183     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1184 {
1185 	int ret = CRYPTO_SUCCESS;
1186 	uchar_t digest[SHA512_DIGEST_LENGTH];
1187 	sha2_hmac_ctx_t sha2_hmac_ctx;
1188 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1189 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1190 
1191 	/*
	 * Set the digest length and block size to values appropriate to the
1193 	 * mechanism
1194 	 */
1195 	switch (mechanism->cm_type) {
1196 	case SHA256_HMAC_MECH_INFO_TYPE:
1197 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1198 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1199 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1200 		break;
1201 	case SHA384_HMAC_MECH_INFO_TYPE:
1202 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1203 	case SHA512_HMAC_MECH_INFO_TYPE:
1204 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1205 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1206 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1207 		break;
1208 	default:
1209 		return (CRYPTO_MECHANISM_INVALID);
1210 	}
1211 
1212 	/* Add support for key by attributes (RFE 4706552) */
1213 	if (key->ck_format != CRYPTO_KEY_RAW)
1214 		return (CRYPTO_ARGUMENTS_BAD);
1215 
1216 	if (ctx_template != NULL) {
1217 		/* reuse context template */
1218 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1219 	} else {
1220 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1221 		/* no context template, initialize context */
1222 		if (keylen_in_bytes > sha_hmac_block_size) {
1223 			/*
1224 			 * Hash the passed-in key to get a smaller key.
1225 			 * The inner context is used since it hasn't been
1226 			 * initialized yet.
1227 			 */
1228 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1229 			    &sha2_hmac_ctx.hc_icontext,
1230 			    key->ck_data, keylen_in_bytes, digest);
1231 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1232 			    sha_digest_len);
1233 		} else {
1234 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1235 			    keylen_in_bytes);
1236 		}
1237 	}
1238 
1239 	/* get the mechanism parameters, if applicable */
1240 	if ((mechanism->cm_type % 3) == 2) {
1241 		if (mechanism->cm_param == NULL ||
1242 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1243 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1244 			goto bail;
1245 		}
1246 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1247 		if (digest_len > sha_digest_len) {
1248 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1249 			goto bail;
1250 		}
1251 	}
1252 
1253 	/* do a SHA2 update of the inner context using the specified data */
1254 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1255 	if (ret != CRYPTO_SUCCESS)
1256 		/* the update failed, free context and bail */
1257 		goto bail;
1258 
1259 	/*
1260 	 * Do a SHA2 final on the inner context.
1261 	 */
1262 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1263 
1264 	/*
1265 	 * Do an SHA2 update on the outer context, feeding the inner
1266 	 * digest as data.
1267 	 *
1268 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1269 	 * bytes of the inner hash value.
1270 	 */
1271 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1272 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1273 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1274 		    SHA384_DIGEST_LENGTH);
1275 	else
1276 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1277 
1278 	/*
1279 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1281 	 */
1282 	switch (mac->cd_format) {
1283 	case CRYPTO_DATA_RAW:
1284 		if (digest_len != sha_digest_len) {
1285 			/*
1286 			 * The caller requested a short digest. Digest
1287 			 * into a scratch buffer and return to
1288 			 * the user only what was requested.
1289 			 */
1290 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1291 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1292 			    mac->cd_offset, digest_len);
1293 		} else {
1294 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1295 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1296 		}
1297 		break;
1298 	case CRYPTO_DATA_UIO:
1299 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1300 		    digest_len, digest);
1301 		break;
1302 	case CRYPTO_DATA_MBLK:
1303 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1304 		    digest_len, digest);
1305 		break;
1306 	default:
1307 		ret = CRYPTO_ARGUMENTS_BAD;
1308 	}
1309 
1310 	if (ret == CRYPTO_SUCCESS) {
1311 		mac->cd_length = digest_len;
1312 		return (CRYPTO_SUCCESS);
1313 	}
1314 bail:
1315 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1316 	mac->cd_length = 0;
1317 	return (ret);
1318 }
1319 
1320 /* ARGSUSED */
1321 static int
1322 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1323     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1324     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1325     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1326 {
1327 	int ret = CRYPTO_SUCCESS;
1328 	uchar_t digest[SHA512_DIGEST_LENGTH];
1329 	sha2_hmac_ctx_t sha2_hmac_ctx;
1330 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1331 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1332 
1333 	/*
	 * Set the digest length and block size to values appropriate to the
1335 	 * mechanism
1336 	 */
1337 	switch (mechanism->cm_type) {
1338 	case SHA256_HMAC_MECH_INFO_TYPE:
1339 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1340 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1341 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1342 		break;
1343 	case SHA384_HMAC_MECH_INFO_TYPE:
1344 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1345 	case SHA512_HMAC_MECH_INFO_TYPE:
1346 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1347 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1348 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1349 		break;
1350 	default:
1351 		return (CRYPTO_MECHANISM_INVALID);
1352 	}
1353 
1354 	/* Add support for key by attributes (RFE 4706552) */
1355 	if (key->ck_format != CRYPTO_KEY_RAW)
1356 		return (CRYPTO_ARGUMENTS_BAD);
1357 
1358 	if (ctx_template != NULL) {
1359 		/* reuse context template */
1360 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1361 	} else {
1362 		/* no context template, initialize context */
1363 		if (keylen_in_bytes > sha_hmac_block_size) {
1364 			/*
1365 			 * Hash the passed-in key to get a smaller key.
1366 			 * The inner context is used since it hasn't been
1367 			 * initialized yet.
1368 			 */
1369 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1370 			    &sha2_hmac_ctx.hc_icontext,
1371 			    key->ck_data, keylen_in_bytes, digest);
1372 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1373 			    sha_digest_len);
1374 		} else {
1375 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1376 			    keylen_in_bytes);
1377 		}
1378 	}
1379 
1380 	/* get the mechanism parameters, if applicable */
1381 	if (mechanism->cm_type % 3 == 2) {
1382 		if (mechanism->cm_param == NULL ||
1383 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1384 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1385 			goto bail;
1386 		}
1387 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1388 		if (digest_len > sha_digest_len) {
1389 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1390 			goto bail;
1391 		}
1392 	}
1393 
1394 	if (mac->cd_length != digest_len) {
1395 		ret = CRYPTO_INVALID_MAC;
1396 		goto bail;
1397 	}
1398 
1399 	/* do a SHA2 update of the inner context using the specified data */
1400 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1401 	if (ret != CRYPTO_SUCCESS)
1402 		/* the update failed, free context and bail */
1403 		goto bail;
1404 
1405 	/* do a SHA2 final on the inner context */
1406 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1407 
1408 	/*
1409 	 * Do an SHA2 update on the outer context, feeding the inner
1410 	 * digest as data.
1411 	 *
1412 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1413 	 * bytes of the inner hash value.
1414 	 */
1415 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1416 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1417 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1418 		    SHA384_DIGEST_LENGTH);
1419 	else
1420 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1421 
1422 	/*
1423 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1425 	 */
1426 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1427 
1428 	/*
1429 	 * Compare the computed digest against the expected digest passed
1430 	 * as argument.
1431 	 */
1432 
1433 	switch (mac->cd_format) {
1434 
1435 	case CRYPTO_DATA_RAW:
1436 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1437 		    mac->cd_offset, digest_len) != 0)
1438 			ret = CRYPTO_INVALID_MAC;
1439 		break;
1440 
1441 	case CRYPTO_DATA_UIO: {
1442 		off_t offset = mac->cd_offset;
1443 		uint_t vec_idx;
1444 		off_t scratch_offset = 0;
1445 		size_t length = digest_len;
1446 		size_t cur_len;
1447 
1448 		/* we support only kernel buffer */
1449 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1450 			return (CRYPTO_ARGUMENTS_BAD);
1451 
1452 		/* jump to the first iovec containing the expected digest */
1453 		for (vec_idx = 0;
		    vec_idx < mac->cd_uio->uio_iovcnt &&
		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1456 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1457 			;
1458 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1459 			/*
1460 			 * The caller specified an offset that is
1461 			 * larger than the total size of the buffers
1462 			 * it provided.
1463 			 */
1464 			ret = CRYPTO_DATA_LEN_RANGE;
1465 			break;
1466 		}
1467 
1468 		/* do the comparison of computed digest vs specified one */
1469 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1470 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1471 			    offset, length);
1472 
1473 			if (bcmp(digest + scratch_offset,
1474 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1475 			    cur_len) != 0) {
1476 				ret = CRYPTO_INVALID_MAC;
1477 				break;
1478 			}
1479 
1480 			length -= cur_len;
1481 			vec_idx++;
1482 			scratch_offset += cur_len;
1483 			offset = 0;
1484 		}
1485 		break;
1486 	}
1487 
1488 	case CRYPTO_DATA_MBLK: {
1489 		off_t offset = mac->cd_offset;
1490 		mblk_t *mp;
1491 		off_t scratch_offset = 0;
1492 		size_t length = digest_len;
1493 		size_t cur_len;
1494 
1495 		/* jump to the first mblk_t containing the expected digest */
1496 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1497 		    offset -= MBLKL(mp), mp = mp->b_cont)
1498 			;
1499 		if (mp == NULL) {
1500 			/*
1501 			 * The caller specified an offset that is larger than
1502 			 * the total size of the buffers it provided.
1503 			 */
1504 			ret = CRYPTO_DATA_LEN_RANGE;
1505 			break;
1506 		}
1507 
1508 		while (mp != NULL && length > 0) {
1509 			cur_len = MIN(MBLKL(mp) - offset, length);
1510 			if (bcmp(digest + scratch_offset,
1511 			    mp->b_rptr + offset, cur_len) != 0) {
1512 				ret = CRYPTO_INVALID_MAC;
1513 				break;
1514 			}
1515 
1516 			length -= cur_len;
1517 			mp = mp->b_cont;
1518 			scratch_offset += cur_len;
1519 			offset = 0;
1520 		}
1521 		break;
1522 	}
1523 
1524 	default:
1525 		ret = CRYPTO_ARGUMENTS_BAD;
1526 	}
1527 
1528 	return (ret);
1529 bail:
1530 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1531 	mac->cd_length = 0;
1532 	return (ret);
1533 }
1534 
1535 /*
1536  * KCF software provider context management entry points.
1537  */
1538 
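/*
 * A context template caches the keyed inner and outer SHA2 contexts so that
 * repeated HMAC operations with the same key can skip the key setup. The
 * framework uses these entry points roughly as follows (illustrative only):
 *
 *	sha2_create_ctx_template(..., key, &tmpl, &tmpl_size, req);
 *	sha2_mac_init(ctx, mech, key, tmpl, req);
 *	sha2_mac_update(ctx, data, req);
 *	sha2_mac_final(ctx, mac, req);
 *
 * Note that sha2_ctx_ops has no template-free entry point; the provider only
 * reports the template size, and releasing the template is presumably left
 * to the framework.
 */
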
1539 /* ARGSUSED */
1540 static int
1541 sha2_create_ctx_template(crypto_provider_handle_t provider,
1542     crypto_mechanism_t *mechanism, crypto_key_t *key,
1543     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1544     crypto_req_handle_t req)
1545 {
1546 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1547 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1548 	uint32_t sha_digest_len, sha_hmac_block_size;
1549 
1550 	/*
	 * Set the digest length and block size to values appropriate to the
1552 	 * mechanism
1553 	 */
1554 	switch (mechanism->cm_type) {
1555 	case SHA256_HMAC_MECH_INFO_TYPE:
1556 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1557 		sha_digest_len = SHA256_DIGEST_LENGTH;
1558 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1559 		break;
1560 	case SHA384_HMAC_MECH_INFO_TYPE:
1561 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1562 	case SHA512_HMAC_MECH_INFO_TYPE:
1563 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1564 		sha_digest_len = SHA512_DIGEST_LENGTH;
1565 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1566 		break;
1567 	default:
1568 		return (CRYPTO_MECHANISM_INVALID);
1569 	}
1570 
1571 	/* Add support for key by attributes (RFE 4706552) */
1572 	if (key->ck_format != CRYPTO_KEY_RAW)
1573 		return (CRYPTO_ARGUMENTS_BAD);
1574 
1575 	/*
	 * Allocate and initialize a SHA2-HMAC context template.
1577 	 */
1578 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1579 	    crypto_kmflag(req));
1580 	if (sha2_hmac_ctx_tmpl == NULL)
1581 		return (CRYPTO_HOST_MEMORY);
1582 
1583 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1584 
1585 	if (keylen_in_bytes > sha_hmac_block_size) {
1586 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1587 
1588 		/*
1589 		 * Hash the passed-in key to get a smaller key.
1590 		 * The inner context is used since it hasn't been
1591 		 * initialized yet.
1592 		 */
1593 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1594 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1595 		    key->ck_data, keylen_in_bytes, digested_key);
1596 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1597 		    sha_digest_len);
1598 	} else {
1599 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1600 		    keylen_in_bytes);
1601 	}
1602 
1603 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1604 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1605 
1606 	return (CRYPTO_SUCCESS);
1607 }
1608 
1609 static int
1610 sha2_free_context(crypto_ctx_t *ctx)
1611 {
1612 	uint_t ctx_len;
1613 
1614 	if (ctx->cc_provider_private == NULL)
1615 		return (CRYPTO_SUCCESS);
1616 
1617 	/*
1618 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1619 	 * have different lengths.
1620 	 *
1621 	 * Note: Below is dependent on the mechanism ordering.
1622 	 */
1623 
1624 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1625 		ctx_len = sizeof (sha2_ctx_t);
1626 	else
1627 		ctx_len = sizeof (sha2_hmac_ctx_t);
1628 
1629 	bzero(ctx->cc_provider_private, ctx_len);
1630 	kmem_free(ctx->cc_provider_private, ctx_len);
1631 	ctx->cc_provider_private = NULL;
1632 
1633 	return (CRYPTO_SUCCESS);
1634 }
1635