xref: /titanic_41/usr/src/common/crypto/sha2/sha2.c (revision 7c478bd95313f5f23a4c958a745db2134aa03244)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License, Version 1.0 only
6  * (the "License").  You may not use this file except in compliance
7  * with the License.
8  *
9  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10  * or http://www.opensolaris.org/os/licensing.
11  * See the License for the specific language governing permissions
12  * and limitations under the License.
13  *
14  * When distributing Covered Code, include this CDDL HEADER in each
15  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16  * If applicable, add the following below this CDDL HEADER, with the
17  * fields enclosed by brackets "[]" replaced with your own identifying
18  * information: Portions Copyright [yyyy] [name of copyright owner]
19  *
20  * CDDL HEADER END
21  */
22 /*
23  * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #pragma ident	"%Z%%M%	%I%	%E% SMI"
28 
29 
30 /*
31  * The basic framework for this code came from the reference
32  * implementation for MD5.  That implementation is Copyright (C)
33  * 1991-2, RSA Data Security, Inc. Created 1991. All rights reserved.
34  *
35  * License to copy and use this software is granted provided that it
36  * is identified as the "RSA Data Security, Inc. MD5 Message-Digest
37  * Algorithm" in all material mentioning or referencing this software
38  * or this function.
39  *
40  * License is also granted to make and use derivative works provided
41  * that such works are identified as "derived from the RSA Data
42  * Security, Inc. MD5 Message-Digest Algorithm" in all material
43  * mentioning or referencing the derived work.
44  *
45  * RSA Data Security, Inc. makes no representations concerning either
46  * the merchantability of this software or the suitability of this
47  * software for any particular purpose. It is provided "as is"
48  * without express or implied warranty of any kind.
49  *
50  * These notices must be retained in any copies of any part of this
51  * documentation and/or software.
52  *
53  * NOTE: Cleaned-up and optimized version of SHA2, based on the FIPS 180-2
54  * standard, available at http://www.itl.nist.gov/div897/pubs/fip180-2.htm
55  * Not as fast as one would like -- further optimizations are encouraged
56  * and appreciated.
57  */
58 
59 #include <sys/types.h>
60 #include <sys/param.h>
61 #include <sys/systm.h>
62 #include <sys/sysmacros.h>
63 #include <sys/sha2.h>
64 #include <sys/sha2_consts.h>
65 
66 #ifdef _KERNEL
67 
68 #include <sys/modctl.h>
69 #include <sys/cmn_err.h>
70 #include <sys/crypto/common.h>
71 #include <sys/crypto/spi.h>
72 #include <sys/strsun.h>
73 
74 /*
75  * The sha2 module is created with two modlinkages:
76  * - a modlmisc that allows consumers to directly call the entry points
77  *   SHA2Init, SHA2Update, and SHA2Final.
78  * - a modlcrypto that allows the module to register with the Kernel
79  *   Cryptographic Framework (KCF) as a software provider for the SHA2
80  *   mechanisms.
81  */
82 
83 #else
84 
85 #include <strings.h>
86 #include <stdlib.h>
87 #include <errno.h>
88 
89 
90 #endif	/* !_KERNEL */
91 
/*
 * File-local helpers: Encode/Encode64 presumably serialize 32-/64-bit state
 * words to big-endian bytes (definitions not shown in this view -- confirm),
 * and the Transform routines each compress one input block into the state.
 */
92 static void Encode(uint8_t *, uint32_t *, size_t);
93 static void Encode64(uint8_t *, uint64_t *, size_t);
94 static void SHA256Transform(SHA2_CTX *, const uint8_t *);
95 static void SHA512Transform(SHA2_CTX *, const uint8_t *);
96 
/*
 * Message padding: a single 0x80 byte followed by zeros (static storage
 * zero-initializes the remaining 127 bytes).  Sized at 128 bytes to cover
 * the larger SHA-384/512 block size.
 */
97 static uint8_t PADDING[128] = { 0x80, /* all zeros */ };
98 
/*
 * Ch and Maj are the basic SHA2 logical functions (FIPS 180-2, sec. 4.1):
 * Ch chooses bits from c or d according to b; Maj is the bitwise majority
 * of its three arguments.  Every argument is fully parenthesized in the
 * expansion -- including the operand of ~, which the previous version left
 * bare ((~b) instead of (~(b))), so a compound expression passed as the
 * first argument would have expanded incorrectly.
 */
#define	Ch(b, c, d)	(((b) & (c)) ^ ((~(b)) & (d)))
#define	Maj(b, c, d)	(((b) & (c)) ^ ((b) & (d)) ^ ((c) & (d)))

/*
 * Rotates x right n bits.  n must be non-zero and smaller than the bit
 * width of x, otherwise the complementary left shift is undefined; all
 * uses below satisfy this.
 */
#define	ROTR(x, n)	\
	(((x) >> (n)) | ((x) << ((sizeof (x) * NBBY)-(n))))

/* Shift x right n bits */
#define	SHR(x, n)	((x) >> (n))

/* SHA256 Functions (FIPS 180-2, sec. 4.1.2) */
#define	BIGSIGMA0_256(x)	(ROTR((x), 2) ^ ROTR((x), 13) ^ ROTR((x), 22))
#define	BIGSIGMA1_256(x)	(ROTR((x), 6) ^ ROTR((x), 11) ^ ROTR((x), 25))
#define	SIGMA0_256(x)		(ROTR((x), 7) ^ ROTR((x), 18) ^ SHR((x), 3))
#define	SIGMA1_256(x)		(ROTR((x), 17) ^ ROTR((x), 19) ^ SHR((x), 10))

/*
 * One SHA256 round.  Callers pass the working variables already rotated so
 * the round body never has to move data between them; T1 and T2 are scratch
 * variables declared by the caller.  Wrapped in do/while (0) so the macro
 * behaves as a single statement in every syntactic context.
 */
#define	SHA256ROUND(a, b, c, d, e, f, g, h, i, w)			\
	do {								\
		T1 = h + BIGSIGMA1_256(e) + Ch(e, f, g) +		\
		    SHA256_CONST(i) + w;				\
		d += T1;						\
		T2 = BIGSIGMA0_256(a) + Maj(a, b, c);			\
		h = T1 + T2;						\
	} while (0)

/* SHA384/512 Functions (FIPS 180-2, sec. 4.1.3) */
#define	BIGSIGMA0(x)	(ROTR((x), 28) ^ ROTR((x), 34) ^ ROTR((x), 39))
#define	BIGSIGMA1(x)	(ROTR((x), 14) ^ ROTR((x), 18) ^ ROTR((x), 41))
#define	SIGMA0(x)	(ROTR((x), 1) ^ ROTR((x), 8) ^ SHR((x), 7))
#define	SIGMA1(x)	(ROTR((x), 19) ^ ROTR((x), 61) ^ SHR((x), 6))

/* One SHA384/512 round; same rotating-register convention as SHA256ROUND. */
#define	SHA512ROUND(a, b, c, d, e, f, g, h, i, w)			\
	do {								\
		T1 = h + BIGSIGMA1(e) + Ch(e, f, g) +			\
		    SHA512_CONST(i) + w;				\
		d += T1;						\
		T2 = BIGSIGMA0(a) + Maj(a, b, c);			\
		h = T1 + T2;						\
	} while (0)
132 
133 #ifdef _KERNEL
134 
/* Misc linkage: lets consumers call the SHA2 entry points directly. */
135 static struct modlmisc modlmisc = {
136 	&mod_miscops,
137 	"SHA2 Message-Digest Algorithm"
138 };
139 
/*
 * Crypto linkage: registers the module with KCF as a software provider.
 * "%I%" is an SCCS keyword replaced with the file revision at build time.
 */
140 static struct modlcrypto modlcrypto = {
141 	&mod_cryptoops,
142 	"SHA2 Kernel SW Provider %I%"
143 };
144 
/* Both linkages are handed to mod_install() via this modlinkage. */
145 static struct modlinkage modlinkage = {
146 	MODREV_1, &modlmisc, &modlcrypto, NULL
147 };
148 
149 /*
150  * CSPI information (entry points, provider info, etc.)
151  */
152 
153 #endif /* _KERNEL */
154 
155 /*
156  * List of supported mechanisms in this module.
157  *
158  * It is important to note that in the module, division or modulus calculations
159  * are used on the enumerated type to determine which mechanism is being used;
160  * therefore, changing the order or adding additional mechanisms should be
161  * done carefully.
162  */
/*
 * Mechanism identifiers, grouped in threes per digest size (raw digest,
 * HMAC, HMAC_GENERAL).  The ordinal values are significant: module code
 * derives the algorithm from them by division/modulus, so the order must
 * not change and new entries must be appended with care.
 */
163 typedef enum sha2_mech_type {
164 	SHA256_MECH_INFO_TYPE,		/* SUN_CKM_SHA256 */
165 	SHA256_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC */
166 	SHA256_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC_GENERAL */
167 	SHA384_MECH_INFO_TYPE,		/* SUN_CKM_SHA384 */
168 	SHA384_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC */
169 	SHA384_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC_GENERAL */
170 	SHA512_MECH_INFO_TYPE,		/* SUN_CKM_SHA512 */
171 	SHA512_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA512_HMAC */
172 	SHA512_HMAC_GEN_MECH_INFO_TYPE	/* SUN_CKM_SHA512_HMAC_GENERAL */
173 } sha2_mech_type_t;
174 
175 #ifdef _KERNEL
176 
177 #define	SHA2_HMAC_MIN_KEY_LEN	8	/* SHA2-HMAC min key length in bits */
178 #define	SHA2_HMAC_MAX_KEY_LEN	INT_MAX /* SHA2-HMAC max key length in bits */
179 
180 #define	SHA256_DIGEST_LENGTH	32	/* SHA256 digest length in bytes */
181 #define	SHA384_DIGEST_LENGTH	48	/* SHA384 digest length in bytes */
182 #define	SHA512_DIGEST_LENGTH	64	/* SHA512 digest length in bytes */
183 
/*
 * No separate SHA384 block size is defined; NOTE(review): SHA384 presumably
 * shares the 128-byte SHA512 block size -- confirm at the use sites.
 */
184 #define	SHA256_HMAC_BLOCK_SIZE	64	/* SHA256-HMAC block size */
185 #define	SHA512_HMAC_BLOCK_SIZE	128	/* SHA512-HMAC block size */
186 
187 /*
188  * Context for SHA2 mechanism.
189  */
190 typedef struct sha2_ctx {
191 	sha2_mech_type_t	sc_mech_type;	/* type of context */
192 	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
193 } sha2_ctx_t;
194 
195 /*
196  * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
197  * Keeps separate inner and outer SHA2 contexts for the two HMAC passes.
198  */
198 typedef struct sha2_hmac_ctx {
199 	sha2_mech_type_t	hc_mech_type;	/* type of context */
200 	uint32_t		hc_digest_len;	/* digest len in bytes */
201 	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
202 	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
203 } sha2_hmac_ctx_t;
204 
205 /*
206  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
207  * by KCF to one of the entry points.
208  */
209 
210 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
211 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
212 
213 /* to extract the digest length passed as mechanism parameter */
/*
 * Reads the ulong_t digest length from (m)->cm_param, falling back to
 * bcopy() when the parameter is not suitably aligned for a direct load.
 * NOTE(review): this is a braced-block macro, not do/while (0); it is only
 * safe where a bare compound statement is valid (e.g. not as the sole body
 * of an if that has an else) -- verify the call sites before reuse.
 */
214 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
215 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
216 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
217 	else {								\
218 		ulong_t tmp_ulong;					\
219 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
220 		(len) = (uint32_t)tmp_ulong;				\
221 	}								\
222 }
223 
/*
 * One-shot digest of a key: Init + Update + Final on the given context.
 * Same braced-block caveat as above.
 */
224 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
225 	SHA2Init(mech, ctx);				\
226 	SHA2Update(ctx, key, len);			\
227 	SHA2Final(digest, ctx);				\
228 }
229 
230 /*
231  * Mechanism info structure passed to KCF during registration.
232  */
/*
 * One entry per sha2_mech_type_t value, in the same order as the enum.
 * Digest mechanisms advertise single-part and atomic digesting and take no
 * key (0 min/max); HMAC mechanisms advertise MAC and atomic MAC with the
 * shared key-length limits, expressed in bits.
 */
233 static crypto_mech_info_t sha2_mech_info_tab[] = {
234 	/* SHA256 */
235 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
236 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
237 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
238 	/* SHA256-HMAC */
239 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
240 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
241 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
242 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
243 	/* SHA256-HMAC GENERAL */
244 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
245 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
246 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
247 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
248 	/* SHA384 */
249 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
250 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
251 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
252 	/* SHA384-HMAC */
253 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
254 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
255 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
256 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
257 	/* SHA384-HMAC GENERAL */
258 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
259 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
260 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
261 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
262 	/* SHA512 */
263 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
264 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
265 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
266 	/* SHA512-HMAC */
267 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
268 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
269 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
270 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
271 	/* SHA512-HMAC GENERAL */
272 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
273 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
274 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
275 	    CRYPTO_KEYSIZE_UNIT_IN_BITS}
276 };
277 
/*
 * SHA2 entry points exported through the misc linkage.  NOTE(review): the
 * first SHA2Init argument carries the mechanism type in a uint64_t --
 * confirm against the prototype in <sys/sha2.h>.
 */
278 void SHA2Init(uint64_t, SHA2_CTX *);
279 void SHA2Update(SHA2_CTX *, const uint8_t *, uint32_t);
280 void SHA2Final(uint8_t *, SHA2_CTX *);
281 
282 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
283 
284 static crypto_control_ops_t sha2_control_ops = {
285 	sha2_provider_status
286 };
287 
/* KCF digest entry points implemented by this provider. */
288 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
289     crypto_req_handle_t);
290 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
291     crypto_req_handle_t);
292 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
293     crypto_req_handle_t);
294 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
295     crypto_req_handle_t);
296 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
297     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
298     crypto_req_handle_t);
299 
/*
 * NOTE(review): the NULL slot leaves one crypto_digest_ops_t operation
 * unimplemented -- presumably digest_key; confirm against
 * <sys/crypto/spi.h>.
 */
300 static crypto_digest_ops_t sha2_digest_ops = {
301 	sha2_digest_init,
302 	sha2_digest,
303 	sha2_digest_update,
304 	NULL,
305 	sha2_digest_final,
306 	sha2_digest_atomic
307 };
308 
/* KCF MAC (HMAC) entry points implemented by this provider. */
309 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
310     crypto_spi_ctx_template_t, crypto_req_handle_t);
311 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
312     crypto_req_handle_t);
313 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
314 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
315     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
316     crypto_spi_ctx_template_t, crypto_req_handle_t);
317 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
318     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
319     crypto_spi_ctx_template_t, crypto_req_handle_t);
320 
/*
 * NOTE(review): the NULL slot leaves one crypto_mac_ops_t operation
 * unimplemented -- presumably the single-part mac entry; confirm against
 * <sys/crypto/spi.h>.
 */
321 static crypto_mac_ops_t sha2_mac_ops = {
322 	sha2_mac_init,
323 	NULL,
324 	sha2_mac_update,
325 	sha2_mac_final,
326 	sha2_mac_atomic,
327 	sha2_mac_verify_atomic
328 };
329 
/* Context-template management (used to pre-compute HMAC key schedules). */
330 static int sha2_create_ctx_template(crypto_provider_handle_t,
331     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
332     size_t *, crypto_req_handle_t);
333 static int sha2_free_context(crypto_ctx_t *);
334 
335 static crypto_ctx_ops_t sha2_ctx_ops = {
336 	sha2_create_ctx_template,
337 	sha2_free_context
338 };
339 
/*
 * Full operations vector handed to KCF: only control, digest, MAC and
 * context ops are provided.  NOTE(review): the NULL slots correspond to
 * the op groups this provider does not implement (cipher, sign, verify,
 * etc.); confirm the slot order against crypto_ops_t in
 * <sys/crypto/spi.h>.
 */
340 static crypto_ops_t sha2_crypto_ops = {
341 	&sha2_control_ops,
342 	&sha2_digest_ops,
343 	NULL,
344 	&sha2_mac_ops,
345 	NULL,
346 	NULL,
347 	NULL,
348 	NULL,
349 	NULL,
350 	NULL,
351 	NULL,
352 	NULL,
353 	NULL,
354 	&sha2_ctx_ops
355 };
356 
/*
 * Provider description registered with KCF; the mechanism count is derived
 * from the table size so the two stay in sync automatically.
 */
357 static crypto_provider_info_t sha2_prov_info = {
358 	CRYPTO_SPI_VERSION_1,
359 	"SHA2 Software Provider",
360 	CRYPTO_SW_PROVIDER,
361 	{&modlinkage},
362 	NULL,
363 	&sha2_crypto_ops,
364 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
365 	sha2_mech_info_tab
366 };
367 
/* Handle returned by crypto_register_provider(); NULL until registration. */
368 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
369 
/*
 * Module load entry point: install both linkages, then register with KCF.
 * A KCF registration failure is deliberately non-fatal (warning only),
 * because the direct misc/sha2 entry points remain usable -- hence the
 * unconditional return (0) after a successful mod_install().
 */
370 int
371 _init()
372 {
373 	int ret;
374 
375 	if ((ret = mod_install(&modlinkage)) != 0)
376 		return (ret);
377 
378 	/*
379 	 * Register with KCF. If the registration fails, log an
380 	 * error but do not uninstall the module, since the functionality
381 	 * provided by misc/sha2 should still be available.
382 	 */
383 	if ((ret = crypto_register_provider(&sha2_prov_info,
384 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
385 		cmn_err(CE_WARN, "sha2 _init: "
386 		    "crypto_register_provider() failed (0x%x)", ret);
387 
388 	return (0);
389 }
390 
/* Module information entry point: standard mod_info() passthrough. */
391 int
392 _info(struct modinfo *modinfop)
393 {
394 	return (mod_info(&modlinkage, modinfop));
395 }
396 
397 #endif /* _KERNEL */
398 
399 
400 /*
401  * sparc optimization:
402  *
403  * on the sparc, we can load big endian 32-bit data easily.  note that
404  * special care must be taken to ensure the address is 32-bit aligned.
405  * in the interest of speed, we don't check to make sure, since
406  * careful programming can guarantee this for us.
407  */
408 
#if	defined(_BIG_ENDIAN)

/*
 * Big-endian: load the 32-bit word directly.  NOTE(review): this
 * type-punned access assumes 'addr' is 4-byte aligned; the transform
 * routines copy unaligned blocks into an aligned buffer before use.
 */
#define	LOAD_BIG_32(addr)	(*(uint32_t *)(addr))

#else	/* little endian -- will work on big endian, but slowly */

/*
 * Byte-wise big-endian load.  Each byte is widened to uint32_t *before*
 * shifting: a uint8_t operand would otherwise promote to (signed) int, and
 * left-shifting a byte >= 0x80 by 24 would shift into the sign bit, which
 * is undefined behavior in C.  (The 64-bit macro below already did this.)
 */
#define	LOAD_BIG_32(addr)	\
	(((uint32_t)(addr)[0] << 24) | ((uint32_t)(addr)[1] << 16) |	\
	    ((uint32_t)(addr)[2] << 8) | (uint32_t)(addr)[3])
#endif


#if	defined(_BIG_ENDIAN)

/*
 * Big-endian: load the 64-bit word directly.  NOTE(review): assumes
 * 8-byte alignment, guaranteed by the callers' bcopy fallback.
 */
#define	LOAD_BIG_64(addr)	(*(uint64_t *)(addr))

#else	/* little endian -- will work on big endian, but slowly */

/* Byte-wise big-endian 64-bit load; operands are widened before shifting. */
#define	LOAD_BIG_64(addr)	\
	(((uint64_t)(addr)[0] << 56) | ((uint64_t)(addr)[1] << 48) |	\
	    ((uint64_t)(addr)[2] << 40) | ((uint64_t)(addr)[3] << 32) |	\
	    ((uint64_t)(addr)[4] << 24) | ((uint64_t)(addr)[5] << 16) |	\
	    ((uint64_t)(addr)[6] << 8) | (uint64_t)(addr)[7])

#endif
433 
434 
435 /* SHA256 Transform */
436 
/*
 * SHA256Transform() compresses one 64-byte block into the 256-bit chaining
 * state in ctx->state.s32[].  All 64 rounds are fully unrolled (the round
 * constants SHA256_CONST(i) require literal indices), the 16-word message
 * schedule lives in registers w0..w15 and is recomputed in place every 16
 * rounds, and the a..h working variables are rotated textually by permuting
 * the SHA256ROUND argument order rather than moving data.
 */
437 static void
438 SHA256Transform(SHA2_CTX *ctx, const uint8_t *blk)
439 {
440 
441 	uint32_t a = ctx->state.s32[0];
442 	uint32_t b = ctx->state.s32[1];
443 	uint32_t c = ctx->state.s32[2];
444 	uint32_t d = ctx->state.s32[3];
445 	uint32_t e = ctx->state.s32[4];
446 	uint32_t f = ctx->state.s32[5];
447 	uint32_t g = ctx->state.s32[6];
448 	uint32_t h = ctx->state.s32[7];
449 
450 	uint32_t w0, w1, w2, w3, w4, w5, w6, w7;
451 	uint32_t w8, w9, w10, w11, w12, w13, w14, w15;
452 	uint32_t T1, T2;
453 
454 #if	defined(__sparc)
	/* On sparc, SHA256_CONST(i) indexes this table of round constants. */
455 	static const uint32_t sha256_consts[] = {
456 		SHA256_CONST_0, SHA256_CONST_1, SHA256_CONST_2,
457 		SHA256_CONST_3, SHA256_CONST_4, SHA256_CONST_5,
458 		SHA256_CONST_6, SHA256_CONST_7, SHA256_CONST_8,
459 		SHA256_CONST_9, SHA256_CONST_10, SHA256_CONST_11,
460 		SHA256_CONST_12, SHA256_CONST_13, SHA256_CONST_14,
461 		SHA256_CONST_15, SHA256_CONST_16, SHA256_CONST_17,
462 		SHA256_CONST_18, SHA256_CONST_19, SHA256_CONST_20,
463 		SHA256_CONST_21, SHA256_CONST_22, SHA256_CONST_23,
464 		SHA256_CONST_24, SHA256_CONST_25, SHA256_CONST_26,
465 		SHA256_CONST_27, SHA256_CONST_28, SHA256_CONST_29,
466 		SHA256_CONST_30, SHA256_CONST_31, SHA256_CONST_32,
467 		SHA256_CONST_33, SHA256_CONST_34, SHA256_CONST_35,
468 		SHA256_CONST_36, SHA256_CONST_37, SHA256_CONST_38,
469 		SHA256_CONST_39, SHA256_CONST_40, SHA256_CONST_41,
470 		SHA256_CONST_42, SHA256_CONST_43, SHA256_CONST_44,
471 		SHA256_CONST_45, SHA256_CONST_46, SHA256_CONST_47,
472 		SHA256_CONST_48, SHA256_CONST_49, SHA256_CONST_50,
473 		SHA256_CONST_51, SHA256_CONST_52, SHA256_CONST_53,
474 		SHA256_CONST_54, SHA256_CONST_55, SHA256_CONST_56,
475 		SHA256_CONST_57, SHA256_CONST_58, SHA256_CONST_59,
476 		SHA256_CONST_60, SHA256_CONST_61, SHA256_CONST_62,
477 		SHA256_CONST_63
478 	};
479 #endif
480 
	/* Unaligned input is staged through the aligned context buffer. */
481 	if ((uintptr_t)blk & 0x3) {		/* not 4-byte aligned? */
482 		bcopy(blk, ctx->buf_un.buf32,  sizeof (ctx->buf_un.buf32));
483 		blk = (uint8_t *)ctx->buf_un.buf32;
484 	}
485 
	/* Rounds 0-15: load the big-endian message words as they are used. */
486 	w0 =  LOAD_BIG_32(blk + 4 * 0);
487 	SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0);
488 	w1 =  LOAD_BIG_32(blk + 4 * 1);
489 	SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1);
490 	w2 =  LOAD_BIG_32(blk + 4 * 2);
491 	SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2);
492 	w3 =  LOAD_BIG_32(blk + 4 * 3);
493 	SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3);
494 	w4 =  LOAD_BIG_32(blk + 4 * 4);
495 	SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4);
496 	w5 =  LOAD_BIG_32(blk + 4 * 5);
497 	SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5);
498 	w6 =  LOAD_BIG_32(blk + 4 * 6);
499 	SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6);
500 	w7 =  LOAD_BIG_32(blk + 4 * 7);
501 	SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7);
502 	w8 =  LOAD_BIG_32(blk + 4 * 8);
503 	SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8);
504 	w9 =  LOAD_BIG_32(blk + 4 * 9);
505 	SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9);
506 	w10 =  LOAD_BIG_32(blk + 4 * 10);
507 	SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10);
508 	w11 =  LOAD_BIG_32(blk + 4 * 11);
509 	SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11);
510 	w12 =  LOAD_BIG_32(blk + 4 * 12);
511 	SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12);
512 	w13 =  LOAD_BIG_32(blk + 4 * 13);
513 	SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13);
514 	w14 =  LOAD_BIG_32(blk + 4 * 14);
515 	SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14);
516 	w15 =  LOAD_BIG_32(blk + 4 * 15);
517 	SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15);
518 
	/*
	 * Rounds 16-63: message schedule expansion computed in place,
	 * w[t] = SIGMA1(w[t-2]) + w[t-7] + SIGMA0(w[t-15]) + w[t-16]
	 * (indices taken mod 16 into the w0..w15 registers).
	 */
519 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
520 	SHA256ROUND(a, b, c, d, e, f, g, h, 16, w0);
521 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
522 	SHA256ROUND(h, a, b, c, d, e, f, g, 17, w1);
523 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
524 	SHA256ROUND(g, h, a, b, c, d, e, f, 18, w2);
525 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
526 	SHA256ROUND(f, g, h, a, b, c, d, e, 19, w3);
527 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
528 	SHA256ROUND(e, f, g, h, a, b, c, d, 20, w4);
529 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
530 	SHA256ROUND(d, e, f, g, h, a, b, c, 21, w5);
531 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
532 	SHA256ROUND(c, d, e, f, g, h, a, b, 22, w6);
533 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
534 	SHA256ROUND(b, c, d, e, f, g, h, a, 23, w7);
535 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
536 	SHA256ROUND(a, b, c, d, e, f, g, h, 24, w8);
537 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
538 	SHA256ROUND(h, a, b, c, d, e, f, g, 25, w9);
539 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
540 	SHA256ROUND(g, h, a, b, c, d, e, f, 26, w10);
541 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
542 	SHA256ROUND(f, g, h, a, b, c, d, e, 27, w11);
543 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
544 	SHA256ROUND(e, f, g, h, a, b, c, d, 28, w12);
545 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
546 	SHA256ROUND(d, e, f, g, h, a, b, c, 29, w13);
547 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
548 	SHA256ROUND(c, d, e, f, g, h, a, b, 30, w14);
549 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
550 	SHA256ROUND(b, c, d, e, f, g, h, a, 31, w15);
551 
552 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
553 	SHA256ROUND(a, b, c, d, e, f, g, h, 32, w0);
554 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
555 	SHA256ROUND(h, a, b, c, d, e, f, g, 33, w1);
556 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
557 	SHA256ROUND(g, h, a, b, c, d, e, f, 34, w2);
558 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
559 	SHA256ROUND(f, g, h, a, b, c, d, e, 35, w3);
560 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
561 	SHA256ROUND(e, f, g, h, a, b, c, d, 36, w4);
562 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
563 	SHA256ROUND(d, e, f, g, h, a, b, c, 37, w5);
564 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
565 	SHA256ROUND(c, d, e, f, g, h, a, b, 38, w6);
566 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
567 	SHA256ROUND(b, c, d, e, f, g, h, a, 39, w7);
568 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
569 	SHA256ROUND(a, b, c, d, e, f, g, h, 40, w8);
570 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
571 	SHA256ROUND(h, a, b, c, d, e, f, g, 41, w9);
572 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
573 	SHA256ROUND(g, h, a, b, c, d, e, f, 42, w10);
574 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
575 	SHA256ROUND(f, g, h, a, b, c, d, e, 43, w11);
576 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
577 	SHA256ROUND(e, f, g, h, a, b, c, d, 44, w12);
578 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
579 	SHA256ROUND(d, e, f, g, h, a, b, c, 45, w13);
580 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
581 	SHA256ROUND(c, d, e, f, g, h, a, b, 46, w14);
582 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
583 	SHA256ROUND(b, c, d, e, f, g, h, a, 47, w15);
584 
585 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
586 	SHA256ROUND(a, b, c, d, e, f, g, h, 48, w0);
587 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
588 	SHA256ROUND(h, a, b, c, d, e, f, g, 49, w1);
589 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
590 	SHA256ROUND(g, h, a, b, c, d, e, f, 50, w2);
591 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
592 	SHA256ROUND(f, g, h, a, b, c, d, e, 51, w3);
593 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
594 	SHA256ROUND(e, f, g, h, a, b, c, d, 52, w4);
595 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
596 	SHA256ROUND(d, e, f, g, h, a, b, c, 53, w5);
597 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
598 	SHA256ROUND(c, d, e, f, g, h, a, b, 54, w6);
599 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
600 	SHA256ROUND(b, c, d, e, f, g, h, a, 55, w7);
601 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
602 	SHA256ROUND(a, b, c, d, e, f, g, h, 56, w8);
603 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
604 	SHA256ROUND(h, a, b, c, d, e, f, g, 57, w9);
605 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
606 	SHA256ROUND(g, h, a, b, c, d, e, f, 58, w10);
607 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
608 	SHA256ROUND(f, g, h, a, b, c, d, e, 59, w11);
609 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
610 	SHA256ROUND(e, f, g, h, a, b, c, d, 60, w12);
611 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
612 	SHA256ROUND(d, e, f, g, h, a, b, c, 61, w13);
613 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
614 	SHA256ROUND(c, d, e, f, g, h, a, b, 62, w14);
615 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
616 	SHA256ROUND(b, c, d, e, f, g, h, a, 63, w15);
617 
	/* Fold the working variables back into the chaining state. */
618 	ctx->state.s32[0] += a;
619 	ctx->state.s32[1] += b;
620 	ctx->state.s32[2] += c;
621 	ctx->state.s32[3] += d;
622 	ctx->state.s32[4] += e;
623 	ctx->state.s32[5] += f;
624 	ctx->state.s32[6] += g;
625 	ctx->state.s32[7] += h;
626 }
627 
628 
629 /* SHA384 and SHA512 Transform */
630 
631 static void
632 SHA512Transform(SHA2_CTX *ctx, const uint8_t *blk)
633 {
634 
635 	uint64_t a = ctx->state.s64[0];
636 	uint64_t b = ctx->state.s64[1];
637 	uint64_t c = ctx->state.s64[2];
638 	uint64_t d = ctx->state.s64[3];
639 	uint64_t e = ctx->state.s64[4];
640 	uint64_t f = ctx->state.s64[5];
641 	uint64_t g = ctx->state.s64[6];
642 	uint64_t h = ctx->state.s64[7];
643 
644 	uint64_t w0, w1, w2, w3, w4, w5, w6, w7;
645 	uint64_t w8, w9, w10, w11, w12, w13, w14, w15;
646 	uint64_t T1, T2;
647 
648 #if	defined(__sparc)
649 	static const uint64_t sha512_consts[] = {
650 		SHA512_CONST_0, SHA512_CONST_1, SHA512_CONST_2,
651 		SHA512_CONST_3, SHA512_CONST_4, SHA512_CONST_5,
652 		SHA512_CONST_6, SHA512_CONST_7, SHA512_CONST_8,
653 		SHA512_CONST_9, SHA512_CONST_10, SHA512_CONST_11,
654 		SHA512_CONST_12, SHA512_CONST_13, SHA512_CONST_14,
655 		SHA512_CONST_15, SHA512_CONST_16, SHA512_CONST_17,
656 		SHA512_CONST_18, SHA512_CONST_19, SHA512_CONST_20,
657 		SHA512_CONST_21, SHA512_CONST_22, SHA512_CONST_23,
658 		SHA512_CONST_24, SHA512_CONST_25, SHA512_CONST_26,
659 		SHA512_CONST_27, SHA512_CONST_28, SHA512_CONST_29,
660 		SHA512_CONST_30, SHA512_CONST_31, SHA512_CONST_32,
661 		SHA512_CONST_33, SHA512_CONST_34, SHA512_CONST_35,
662 		SHA512_CONST_36, SHA512_CONST_37, SHA512_CONST_38,
663 		SHA512_CONST_39, SHA512_CONST_40, SHA512_CONST_41,
664 		SHA512_CONST_42, SHA512_CONST_43, SHA512_CONST_44,
665 		SHA512_CONST_45, SHA512_CONST_46, SHA512_CONST_47,
666 		SHA512_CONST_48, SHA512_CONST_49, SHA512_CONST_50,
667 		SHA512_CONST_51, SHA512_CONST_52, SHA512_CONST_53,
668 		SHA512_CONST_54, SHA512_CONST_55, SHA512_CONST_56,
669 		SHA512_CONST_57, SHA512_CONST_58, SHA512_CONST_59,
670 		SHA512_CONST_60, SHA512_CONST_61, SHA512_CONST_62,
671 		SHA512_CONST_63, SHA512_CONST_64, SHA512_CONST_65,
672 		SHA512_CONST_66, SHA512_CONST_67, SHA512_CONST_68,
673 		SHA512_CONST_69, SHA512_CONST_70, SHA512_CONST_71,
674 		SHA512_CONST_72, SHA512_CONST_73, SHA512_CONST_74,
675 		SHA512_CONST_75, SHA512_CONST_76, SHA512_CONST_77,
676 		SHA512_CONST_78, SHA512_CONST_79
677 	};
678 #endif
679 
680 
681 	if ((uintptr_t)blk & 0x7) {		/* not 8-byte aligned? */
682 		bcopy(blk, ctx->buf_un.buf64,  sizeof (ctx->buf_un.buf64));
683 		blk = (uint8_t *)ctx->buf_un.buf64;
684 	}
685 
686 	w0 =  LOAD_BIG_64(blk + 8 * 0);
687 	SHA512ROUND(a, b, c, d, e, f, g, h, 0, w0);
688 	w1 =  LOAD_BIG_64(blk + 8 * 1);
689 	SHA512ROUND(h, a, b, c, d, e, f, g, 1, w1);
690 	w2 =  LOAD_BIG_64(blk + 8 * 2);
691 	SHA512ROUND(g, h, a, b, c, d, e, f, 2, w2);
692 	w3 =  LOAD_BIG_64(blk + 8 * 3);
693 	SHA512ROUND(f, g, h, a, b, c, d, e, 3, w3);
694 	w4 =  LOAD_BIG_64(blk + 8 * 4);
695 	SHA512ROUND(e, f, g, h, a, b, c, d, 4, w4);
696 	w5 =  LOAD_BIG_64(blk + 8 * 5);
697 	SHA512ROUND(d, e, f, g, h, a, b, c, 5, w5);
698 	w6 =  LOAD_BIG_64(blk + 8 * 6);
699 	SHA512ROUND(c, d, e, f, g, h, a, b, 6, w6);
700 	w7 =  LOAD_BIG_64(blk + 8 * 7);
701 	SHA512ROUND(b, c, d, e, f, g, h, a, 7, w7);
702 	w8 =  LOAD_BIG_64(blk + 8 * 8);
703 	SHA512ROUND(a, b, c, d, e, f, g, h, 8, w8);
704 	w9 =  LOAD_BIG_64(blk + 8 * 9);
705 	SHA512ROUND(h, a, b, c, d, e, f, g, 9, w9);
706 	w10 =  LOAD_BIG_64(blk + 8 * 10);
707 	SHA512ROUND(g, h, a, b, c, d, e, f, 10, w10);
708 	w11 =  LOAD_BIG_64(blk + 8 * 11);
709 	SHA512ROUND(f, g, h, a, b, c, d, e, 11, w11);
710 	w12 =  LOAD_BIG_64(blk + 8 * 12);
711 	SHA512ROUND(e, f, g, h, a, b, c, d, 12, w12);
712 	w13 =  LOAD_BIG_64(blk + 8 * 13);
713 	SHA512ROUND(d, e, f, g, h, a, b, c, 13, w13);
714 	w14 =  LOAD_BIG_64(blk + 8 * 14);
715 	SHA512ROUND(c, d, e, f, g, h, a, b, 14, w14);
716 	w15 =  LOAD_BIG_64(blk + 8 * 15);
717 	SHA512ROUND(b, c, d, e, f, g, h, a, 15, w15);
718 
719 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
720 	SHA512ROUND(a, b, c, d, e, f, g, h, 16, w0);
721 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
722 	SHA512ROUND(h, a, b, c, d, e, f, g, 17, w1);
723 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
724 	SHA512ROUND(g, h, a, b, c, d, e, f, 18, w2);
725 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
726 	SHA512ROUND(f, g, h, a, b, c, d, e, 19, w3);
727 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
728 	SHA512ROUND(e, f, g, h, a, b, c, d, 20, w4);
729 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
730 	SHA512ROUND(d, e, f, g, h, a, b, c, 21, w5);
731 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
732 	SHA512ROUND(c, d, e, f, g, h, a, b, 22, w6);
733 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
734 	SHA512ROUND(b, c, d, e, f, g, h, a, 23, w7);
735 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
736 	SHA512ROUND(a, b, c, d, e, f, g, h, 24, w8);
737 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
738 	SHA512ROUND(h, a, b, c, d, e, f, g, 25, w9);
739 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
740 	SHA512ROUND(g, h, a, b, c, d, e, f, 26, w10);
741 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
742 	SHA512ROUND(f, g, h, a, b, c, d, e, 27, w11);
743 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
744 	SHA512ROUND(e, f, g, h, a, b, c, d, 28, w12);
745 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
746 	SHA512ROUND(d, e, f, g, h, a, b, c, 29, w13);
747 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
748 	SHA512ROUND(c, d, e, f, g, h, a, b, 30, w14);
749 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
750 	SHA512ROUND(b, c, d, e, f, g, h, a, 31, w15);
751 
752 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
753 	SHA512ROUND(a, b, c, d, e, f, g, h, 32, w0);
754 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
755 	SHA512ROUND(h, a, b, c, d, e, f, g, 33, w1);
756 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
757 	SHA512ROUND(g, h, a, b, c, d, e, f, 34, w2);
758 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
759 	SHA512ROUND(f, g, h, a, b, c, d, e, 35, w3);
760 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
761 	SHA512ROUND(e, f, g, h, a, b, c, d, 36, w4);
762 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
763 	SHA512ROUND(d, e, f, g, h, a, b, c, 37, w5);
764 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
765 	SHA512ROUND(c, d, e, f, g, h, a, b, 38, w6);
766 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
767 	SHA512ROUND(b, c, d, e, f, g, h, a, 39, w7);
768 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
769 	SHA512ROUND(a, b, c, d, e, f, g, h, 40, w8);
770 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
771 	SHA512ROUND(h, a, b, c, d, e, f, g, 41, w9);
772 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
773 	SHA512ROUND(g, h, a, b, c, d, e, f, 42, w10);
774 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
775 	SHA512ROUND(f, g, h, a, b, c, d, e, 43, w11);
776 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
777 	SHA512ROUND(e, f, g, h, a, b, c, d, 44, w12);
778 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
779 	SHA512ROUND(d, e, f, g, h, a, b, c, 45, w13);
780 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
781 	SHA512ROUND(c, d, e, f, g, h, a, b, 46, w14);
782 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
783 	SHA512ROUND(b, c, d, e, f, g, h, a, 47, w15);
784 
785 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
786 	SHA512ROUND(a, b, c, d, e, f, g, h, 48, w0);
787 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
788 	SHA512ROUND(h, a, b, c, d, e, f, g, 49, w1);
789 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
790 	SHA512ROUND(g, h, a, b, c, d, e, f, 50, w2);
791 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
792 	SHA512ROUND(f, g, h, a, b, c, d, e, 51, w3);
793 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
794 	SHA512ROUND(e, f, g, h, a, b, c, d, 52, w4);
795 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
796 	SHA512ROUND(d, e, f, g, h, a, b, c, 53, w5);
797 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
798 	SHA512ROUND(c, d, e, f, g, h, a, b, 54, w6);
799 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
800 	SHA512ROUND(b, c, d, e, f, g, h, a, 55, w7);
801 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
802 	SHA512ROUND(a, b, c, d, e, f, g, h, 56, w8);
803 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
804 	SHA512ROUND(h, a, b, c, d, e, f, g, 57, w9);
805 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
806 	SHA512ROUND(g, h, a, b, c, d, e, f, 58, w10);
807 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
808 	SHA512ROUND(f, g, h, a, b, c, d, e, 59, w11);
809 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
810 	SHA512ROUND(e, f, g, h, a, b, c, d, 60, w12);
811 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
812 	SHA512ROUND(d, e, f, g, h, a, b, c, 61, w13);
813 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
814 	SHA512ROUND(c, d, e, f, g, h, a, b, 62, w14);
815 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
816 	SHA512ROUND(b, c, d, e, f, g, h, a, 63, w15);
817 
818 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
819 	SHA512ROUND(a, b, c, d, e, f, g, h, 64, w0);
820 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
821 	SHA512ROUND(h, a, b, c, d, e, f, g, 65, w1);
822 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
823 	SHA512ROUND(g, h, a, b, c, d, e, f, 66, w2);
824 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
825 	SHA512ROUND(f, g, h, a, b, c, d, e, 67, w3);
826 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
827 	SHA512ROUND(e, f, g, h, a, b, c, d, 68, w4);
828 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
829 	SHA512ROUND(d, e, f, g, h, a, b, c, 69, w5);
830 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
831 	SHA512ROUND(c, d, e, f, g, h, a, b, 70, w6);
832 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
833 	SHA512ROUND(b, c, d, e, f, g, h, a, 71, w7);
834 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
835 	SHA512ROUND(a, b, c, d, e, f, g, h, 72, w8);
836 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
837 	SHA512ROUND(h, a, b, c, d, e, f, g, 73, w9);
838 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
839 	SHA512ROUND(g, h, a, b, c, d, e, f, 74, w10);
840 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
841 	SHA512ROUND(f, g, h, a, b, c, d, e, 75, w11);
842 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
843 	SHA512ROUND(e, f, g, h, a, b, c, d, 76, w12);
844 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
845 	SHA512ROUND(d, e, f, g, h, a, b, c, 77, w13);
846 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
847 	SHA512ROUND(c, d, e, f, g, h, a, b, 78, w14);
848 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
849 	SHA512ROUND(b, c, d, e, f, g, h, a, 79, w15);
850 
851 	ctx->state.s64[0] += a;
852 	ctx->state.s64[1] += b;
853 	ctx->state.s64[2] += c;
854 	ctx->state.s64[3] += d;
855 	ctx->state.s64[4] += e;
856 	ctx->state.s64[5] += f;
857 	ctx->state.s64[6] += g;
858 	ctx->state.s64[7] += h;
859 
860 }
861 
862 
863 /*
864  * devpro compiler optimization:
865  *
866  * the compiler can generate better code if it knows that `input' and
867  * `output' do not point to the same source.  there is no portable
868  * way to tell the compiler this, but the sun compiler recognizes the
869  * `_Restrict' keyword to indicate this condition.  use it if possible.
870  */
871 
872 #ifdef	__RESTRICT
873 #define	restrict	_Restrict
874 #else
875 #define	restrict	/* nothing */
876 #endif
877 
878 /*
879  * Encode()
880  *
881  * purpose: to convert a list of numbers from little endian to big endian
882  *   input: uint8_t *	: place to store the converted big endian numbers
883  *	    uint32_t *	: place to get numbers to convert from
884  *          size_t	: the length of the input in bytes
885  *  output: void
886  */
887 
static void
Encode(uint8_t *restrict output, uint32_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint32_t))) {
		/* aligned big-endian target: store words directly */
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			/* LINTED: pointer alignment */
			*((uint32_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* little endian -- will work on big endian, but slowly */
		/* emit each word most-significant byte first */
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			uint32_t v = input[word];

			output[byte]	 = (v >> 24) & 0xff;
			output[byte + 1] = (v >> 16) & 0xff;
			output[byte + 2] = (v >>  8) & 0xff;
			output[byte + 3] = v & 0xff;
		}
#if	defined(__sparc)
	}
#endif
}
911 
static void
Encode64(uint8_t *restrict output, uint64_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint64_t))) {
		/* aligned big-endian target: store doublewords directly */
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			/* LINTED: pointer alignment */
			*((uint64_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* little endian -- will work on big endian, but slowly */
		/* emit each doubleword most-significant byte first */
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			uint64_t v = input[word];
			int	b;

			for (b = 0; b < 8; b++)
				output[byte + b] =
				    (v >> (56 - 8 * b)) & 0xff;
		}
#if	defined(__sparc)
	}
#endif
}
940 
941 
942 #ifdef _KERNEL
943 
944 /*
945  * KCF software provider control entry points.
946  */
/* ARGSUSED */
static void
sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* This software provider has no hardware to fail; always ready. */
	*status = CRYPTO_PROVIDER_READY;
}
953 
954 /*
955  * KCF software provider digest entry points.
956  */
957 
958 static int
959 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
960     crypto_req_handle_t req)
961 {
962 
963 	/*
964 	 * Allocate and initialize SHA2 context.
965 	 */
966 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
967 	    crypto_kmflag(req));
968 	if (ctx->cc_provider_private == NULL)
969 		return (CRYPTO_HOST_MEMORY);
970 
971 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
972 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
973 
974 	return (CRYPTO_SUCCESS);
975 }
976 
977 /*
978  * Helper SHA2 digest update function for uio data.
979  */
980 static int
981 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
982 {
983 	off_t offset = data->cd_offset;
984 	size_t length = data->cd_length;
985 	uint_t vec_idx;
986 	size_t cur_len;
987 
988 	/* we support only kernel buffer */
989 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
990 		return (CRYPTO_ARGUMENTS_BAD);
991 
992 	/*
993 	 * Jump to the first iovec containing data to be
994 	 * digested.
995 	 */
996 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
997 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
998 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len);
999 	if (vec_idx == data->cd_uio->uio_iovcnt) {
1000 		/*
1001 		 * The caller specified an offset that is larger than the
1002 		 * total size of the buffers it provided.
1003 		 */
1004 		return (CRYPTO_DATA_LEN_RANGE);
1005 	}
1006 
1007 	/*
1008 	 * Now do the digesting on the iovecs.
1009 	 */
1010 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
1011 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
1012 		    offset, length);
1013 
1014 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
1015 		    uio_iov[vec_idx].iov_base + offset, cur_len);
1016 		length -= cur_len;
1017 		vec_idx++;
1018 		offset = 0;
1019 	}
1020 
1021 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
1022 		/*
1023 		 * The end of the specified iovec's was reached but
1024 		 * the length requested could not be processed, i.e.
1025 		 * The caller requested to digest more data than it provided.
1026 		 */
1027 		return (CRYPTO_DATA_LEN_RANGE);
1028 	}
1029 
1030 	return (CRYPTO_SUCCESS);
1031 }
1032 
1033 /*
1034  * Helper SHA2 digest final function for uio data.
1035  * digest_len is the length of the desired digest. If digest_len
1036  * is smaller than the default SHA2 digest length, the caller
1037  * must pass a scratch buffer, digest_scratch, which must
1038  * be at least the algorithm's digest length bytes.
1039  */
1040 static int
1041 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
1042     ulong_t digest_len, uchar_t *digest_scratch)
1043 {
1044 	off_t offset = digest->cd_offset;
1045 	uint_t vec_idx;
1046 
1047 	/* we support only kernel buffer */
1048 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
1049 		return (CRYPTO_ARGUMENTS_BAD);
1050 
1051 	/*
1052 	 * Jump to the first iovec containing ptr to the digest to
1053 	 * be returned.
1054 	 */
1055 	for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
1056 	    vec_idx < digest->cd_uio->uio_iovcnt;
1057 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len);
1058 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
1059 		/*
1060 		 * The caller specified an offset that is
1061 		 * larger than the total size of the buffers
1062 		 * it provided.
1063 		 */
1064 		return (CRYPTO_DATA_LEN_RANGE);
1065 	}
1066 
1067 	if (offset + digest_len <=
1068 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
1069 		/*
1070 		 * The computed SHA2 digest will fit in the current
1071 		 * iovec.
1072 		 */
1073 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1074 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
1075 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1076 			(digest_len != SHA512_DIGEST_LENGTH))) {
1077 			/*
1078 			 * The caller requested a short digest. Digest
1079 			 * into a scratch buffer and return to
1080 			 * the user only what was requested.
1081 			 */
1082 			SHA2Final(digest_scratch, sha2_ctx);
1083 
1084 			bcopy(digest_scratch, (uchar_t *)digest->
1085 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1086 			    digest_len);
1087 		} else {
1088 			SHA2Final((uchar_t *)digest->
1089 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1090 			    sha2_ctx);
1091 
1092 		}
1093 	} else {
1094 		/*
1095 		 * The computed digest will be crossing one or more iovec's.
1096 		 * This is bad performance-wise but we need to support it.
1097 		 * Allocate a small scratch buffer on the stack and
1098 		 * copy it piece meal to the specified digest iovec's.
1099 		 */
1100 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
1101 		off_t scratch_offset = 0;
1102 		size_t length = digest_len;
1103 		size_t cur_len;
1104 
1105 		SHA2Final(digest_tmp, sha2_ctx);
1106 
1107 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
1108 			cur_len =
1109 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
1110 				    offset, length);
1111 			bcopy(digest_tmp + scratch_offset,
1112 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
1113 			    cur_len);
1114 
1115 			length -= cur_len;
1116 			vec_idx++;
1117 			scratch_offset += cur_len;
1118 			offset = 0;
1119 		}
1120 
1121 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
1122 			/*
1123 			 * The end of the specified iovec's was reached but
1124 			 * the length requested could not be processed, i.e.
1125 			 * The caller requested to digest more data than it
1126 			 * provided.
1127 			 */
1128 			return (CRYPTO_DATA_LEN_RANGE);
1129 		}
1130 	}
1131 
1132 	return (CRYPTO_SUCCESS);
1133 }
1134 
1135 /*
1136  * Helper SHA2 digest update for mblk's.
1137  */
1138 static int
1139 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
1140 {
1141 	off_t offset = data->cd_offset;
1142 	size_t length = data->cd_length;
1143 	mblk_t *mp;
1144 	size_t cur_len;
1145 
1146 	/*
1147 	 * Jump to the first mblk_t containing data to be digested.
1148 	 */
1149 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
1150 	    offset -= MBLKL(mp), mp = mp->b_cont);
1151 	if (mp == NULL) {
1152 		/*
1153 		 * The caller specified an offset that is larger than the
1154 		 * total size of the buffers it provided.
1155 		 */
1156 		return (CRYPTO_DATA_LEN_RANGE);
1157 	}
1158 
1159 	/*
1160 	 * Now do the digesting on the mblk chain.
1161 	 */
1162 	while (mp != NULL && length > 0) {
1163 		cur_len = MIN(MBLKL(mp) - offset, length);
1164 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
1165 		length -= cur_len;
1166 		offset = 0;
1167 		mp = mp->b_cont;
1168 	}
1169 
1170 	if (mp == NULL && length > 0) {
1171 		/*
1172 		 * The end of the mblk was reached but the length requested
1173 		 * could not be processed, i.e. The caller requested
1174 		 * to digest more data than it provided.
1175 		 */
1176 		return (CRYPTO_DATA_LEN_RANGE);
1177 	}
1178 
1179 	return (CRYPTO_SUCCESS);
1180 }
1181 
1182 /*
1183  * Helper SHA2 digest final for mblk's.
1184  * digest_len is the length of the desired digest. If digest_len
1185  * is smaller than the default SHA2 digest length, the caller
1186  * must pass a scratch buffer, digest_scratch, which must
1187  * be at least the algorithm's digest length bytes.
1188  */
static int
sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
	off_t offset = digest->cd_offset;
	mblk_t *mp;

	/*
	 * Jump to the first mblk_t that will be used to store the digest.
	 * (mp is tested before MBLKL(mp), so an oversized offset simply
	 * walks off the end of the chain and is caught below.)
	 */
	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
	    offset -= MBLKL(mp), mp = mp->b_cont);
	if (mp == NULL) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + digest_len <= MBLKL(mp)) {
		/*
		 * The computed SHA2 digest will fit in the current mblk.
		 * Do the SHA2Final() in-place.
		 */
		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA256_DIGEST_LENGTH)) ||
		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
			(digest_len != SHA512_DIGEST_LENGTH))) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest_scratch, sha2_ctx);
			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
		} else {
			SHA2Final(mp->b_rptr + offset, sha2_ctx);
		}
	} else {
		/*
		 * The computed digest will be crossing one or more mblk's.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piece meal to the specified digest iovec's.
		 */
		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		SHA2Final(digest_tmp, sha2_ctx);

		/* Copy the finished digest out, one mblk at a time. */
		while (mp != NULL && length > 0) {
			cur_len = MIN(MBLKL(mp) - offset, length);
			bcopy(digest_tmp + scratch_offset,
			    mp->b_rptr + offset, cur_len);

			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}

		if (mp == NULL && length > 0) {
			/*
			 * The end of the specified mblk was reached but
			 * the length requested could not be processed, i.e.
			 * The caller requested to digest more data than it
			 * provided.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
1266 
/*
 * Single-part digest: digest all of `data' and place the result in
 * `digest'.  The per-request context is destroyed on every exit path
 * except the output-length query (CRYPTO_BUFFER_TOO_SMALL) below.
 */
/* ARGSUSED */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero context on failure, since SHA2Final()
		 * does it for us.
		 */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
1369 
1370 /* ARGSUSED */
1371 static int
1372 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
1373     crypto_req_handle_t req)
1374 {
1375 	int ret = CRYPTO_SUCCESS;
1376 
1377 	ASSERT(ctx->cc_provider_private != NULL);
1378 
1379 	/*
1380 	 * Do the SHA2 update on the specified input data.
1381 	 */
1382 	switch (data->cd_format) {
1383 	case CRYPTO_DATA_RAW:
1384 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1385 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1386 		    data->cd_length);
1387 		break;
1388 	case CRYPTO_DATA_UIO:
1389 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1390 		    data);
1391 		break;
1392 	case CRYPTO_DATA_MBLK:
1393 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1394 		    data);
1395 		break;
1396 	default:
1397 		ret = CRYPTO_ARGUMENTS_BAD;
1398 	}
1399 
1400 	return (ret);
1401 }
1402 
/*
 * Multi-part digest finalization: emit the digest accumulated by prior
 * sha2_digest_update() calls into `digest'.  The per-request context is
 * destroyed on every exit path except the output-length query below.
 */
/* ARGSUSED */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero the context on failure, since SHA2Final()
		 * does it for us.
		 */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
1475 
/*
 * Atomic (single-call) digest: init, update and final in one entry
 * point, using a stack-allocated SHA2 context.
 */
/* ARGSUSED */
static int
sha2_digest_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	SHA2_CTX sha2_ctx;
	uint32_t sha_digest_len;

	/*
	 * Do the SHA inits.
	 */

	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/* Do the SHA updates on the specified input data. */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx, (uint8_t *)data->
		    cd_raw.iov_base + data->cd_offset, data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * If the update failed, scrub the context and bail before
	 * computing the final digest.
	 */

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, bail */
		bzero(&sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
		return (ret);

	}

	/* SHA256-family mechanisms sort below the SHA512-family ones. */
	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
		sha_digest_len = SHA256_DIGEST_LENGTH;
	else
		sha_digest_len = SHA512_DIGEST_LENGTH;

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero context on failure, since SHA2Final()
		 * does it for us.
		 */
		bzero(&sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	return (ret);
}
1559 
1560 /*
1561  * KCF software provider mac entry points.
1562  *
1563  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
1564  *
1565  * Init:
1566  * The initialization routine initializes what we denote
1567  * as the inner and outer contexts by doing
1568  * - for inner context: SHA2(key XOR ipad)
1569  * - for outer context: SHA2(key XOR opad)
1570  *
1571  * Update:
1572  * Each subsequent SHA2 HMAC update will result in an
1573  * update of the inner context with the specified data.
1574  *
1575  * Final:
1576  * The SHA2 HMAC final will do a SHA2 final operation on the
1577  * inner context, and the resulting digest will be used
1578  * as the data for an update on the outer context. Last
1579  * but not least, a SHA2 final on the outer context will
1580  * be performed to obtain the SHA2 HMAC digest to return
1581  * to the user.
1582  */
1583 
1584 /*
1585  * Initialize a SHA2-HMAC context.
1586  */
1587 static void
1588 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
1589 {
1590 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
1591 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
1592 	int i, block_size, blocks_per_int64;
1593 
1594 	/* Determine the block size */
1595 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
1596 		block_size = SHA256_HMAC_BLOCK_SIZE;
1597 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
1598 	} else {
1599 		block_size = SHA512_HMAC_BLOCK_SIZE;
1600 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
1601 	}
1602 
1603 	(void) bzero(ipad, block_size);
1604 	(void) bzero(opad, block_size);
1605 	(void) bcopy(keyval, ipad, length_in_bytes);
1606 	(void) bcopy(keyval, opad, length_in_bytes);
1607 
1608 	/* XOR key with ipad (0x36) and opad (0x5c) */
1609 	for (i = 0; i < blocks_per_int64; i ++) {
1610 		ipad[i] ^= 0x3636363636363636;
1611 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
1612 	}
1613 
1614 	/* perform SHA2 on ipad */
1615 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
1616 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
1617 
1618 	/* perform SHA2 on opad */
1619 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
1620 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
1621 
1622 }
1623 
1624 /*
1625  */
1626 static int
1627 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
1628     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
1629     crypto_req_handle_t req)
1630 {
1631 	int ret = CRYPTO_SUCCESS;
1632 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1633 	uint_t sha_digest_len, sha_hmac_block_size;
1634 
1635 	/*
1636 	 * Set the digest length and block size to values approriate to the
1637 	 * mechanism
1638 	 */
1639 	switch (mechanism->cm_type) {
1640 	case SHA256_HMAC_MECH_INFO_TYPE:
1641 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1642 		sha_digest_len = SHA256_DIGEST_LENGTH;
1643 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1644 		break;
1645 	case SHA384_HMAC_MECH_INFO_TYPE:
1646 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1647 	case SHA512_HMAC_MECH_INFO_TYPE:
1648 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1649 		sha_digest_len = SHA512_DIGEST_LENGTH;
1650 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1651 		break;
1652 	default:
1653 		return (CRYPTO_MECHANISM_INVALID);
1654 	}
1655 
1656 	if (key->ck_format != CRYPTO_KEY_RAW)
1657 		return (CRYPTO_ARGUMENTS_BAD);
1658 
1659 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1660 	    crypto_kmflag(req));
1661 	if (ctx->cc_provider_private == NULL)
1662 		return (CRYPTO_HOST_MEMORY);
1663 
1664 	if (ctx_template != NULL) {
1665 		/* reuse context template */
1666 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
1667 		    sizeof (sha2_hmac_ctx_t));
1668 	} else {
1669 		/* no context template, compute context */
1670 		if (keylen_in_bytes > sha_hmac_block_size) {
1671 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
1672 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
1673 
1674 			/*
1675 			 * Hash the passed-in key to get a smaller key.
1676 			 * The inner context is used since it hasn't been
1677 			 * initialized yet.
1678 			 */
1679 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1680 			    &hmac_ctx->hc_icontext,
1681 			    key->ck_data, keylen_in_bytes, digested_key);
1682 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1683 			    digested_key, sha_digest_len);
1684 		} else {
1685 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1686 			    key->ck_data, keylen_in_bytes);
1687 		}
1688 	}
1689 
1690 	/*
1691 	 * Get the mechanism parameters, if applicable.
1692 	 */
1693 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
1694 	if (mechanism->cm_type % 3 == 2) {
1695 		if (mechanism->cm_param == NULL ||
1696 		    mechanism->cm_param_len != sizeof (ulong_t))
1697 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1698 		PROV_SHA2_GET_DIGEST_LEN(mechanism,
1699 		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
1700 		if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
1701 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1702 	}
1703 
1704 	if (ret != CRYPTO_SUCCESS) {
1705 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1706 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1707 		ctx->cc_provider_private = NULL;
1708 	}
1709 
1710 	return (ret);
1711 }
1712 
1713 /* ARGSUSED */
1714 static int
1715 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1716     crypto_req_handle_t req)
1717 {
1718 	int ret = CRYPTO_SUCCESS;
1719 
1720 	ASSERT(ctx->cc_provider_private != NULL);
1721 
1722 	/*
1723 	 * Do a SHA2 update of the inner context using the specified
1724 	 * data.
1725 	 */
1726 	switch (data->cd_format) {
1727 	case CRYPTO_DATA_RAW:
1728 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1729 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1730 		    data->cd_length);
1731 		break;
1732 	case CRYPTO_DATA_UIO:
1733 		ret = sha2_digest_update_uio(
1734 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1735 		break;
1736 	case CRYPTO_DATA_MBLK:
1737 		ret = sha2_digest_update_mblk(
1738 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1739 		break;
1740 	default:
1741 		ret = CRYPTO_ARGUMENTS_BAD;
1742 	}
1743 
1744 	return (ret);
1745 }
1746 
1747 /* ARGSUSED */
1748 static int
1749 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1750 {
1751 	int ret = CRYPTO_SUCCESS;
1752 	uchar_t digest[SHA512_DIGEST_LENGTH];
1753 	uint32_t digest_len, sha_digest_len;
1754 
1755 	ASSERT(ctx->cc_provider_private != NULL);
1756 
1757 	/* Set the digest lengths to values approriate to the mechanism */
1758 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1759 	case SHA256_HMAC_MECH_INFO_TYPE:
1760 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1761 		break;
1762 	case SHA384_HMAC_MECH_INFO_TYPE:
1763 	case SHA512_HMAC_MECH_INFO_TYPE:
1764 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1765 		break;
1766 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1767 		sha_digest_len = SHA256_DIGEST_LENGTH;
1768 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1769 		break;
1770 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1771 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1772 		sha_digest_len = SHA512_DIGEST_LENGTH;
1773 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1774 		break;
1775 	}
1776 
1777 	/*
1778 	 * We need to just return the length needed to store the output.
1779 	 * We should not destroy the context for the following cases.
1780 	 */
1781 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1782 		mac->cd_length = digest_len;
1783 		return (CRYPTO_BUFFER_TOO_SMALL);
1784 	}
1785 
1786 	/*
1787 	 * Do a SHA2 final on the inner context.
1788 	 */
1789 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1790 
1791 	/*
1792 	 * Do a SHA2 update on the outer context, feeding the inner
1793 	 * digest as data.
1794 	 */
1795 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1796 	    sha_digest_len);
1797 
1798 	/*
1799 	 * Do a SHA2 final on the outer context, storing the computing
1800 	 * digest in the users buffer.
1801 	 */
1802 	switch (mac->cd_format) {
1803 	case CRYPTO_DATA_RAW:
1804 		if (digest_len != sha_digest_len) {
1805 			/*
1806 			 * The caller requested a short digest. Digest
1807 			 * into a scratch buffer and return to
1808 			 * the user only what was requested.
1809 			 */
1810 			SHA2Final(digest,
1811 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1812 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1813 			    mac->cd_offset, digest_len);
1814 		} else {
1815 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1816 			    mac->cd_offset,
1817 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1818 		}
1819 		break;
1820 	case CRYPTO_DATA_UIO:
1821 		ret = sha2_digest_final_uio(
1822 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1823 		    digest_len, digest);
1824 		break;
1825 	case CRYPTO_DATA_MBLK:
1826 		ret = sha2_digest_final_mblk(
1827 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1828 		    digest_len, digest);
1829 		break;
1830 	default:
1831 		ret = CRYPTO_ARGUMENTS_BAD;
1832 	}
1833 
1834 	if (ret == CRYPTO_SUCCESS) {
1835 		mac->cd_length = digest_len;
1836 	} else {
1837 		/*
1838 		 * Only bzero outer context on failure, since SHA2Final()
1839 		 * does it for us.
1840 		 * We don't have to bzero the inner context since we
1841 		 * always invoke a SHA2Final() on it.
1842 		 */
1843 		bzero(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext,
1844 		    sizeof (SHA2_CTX));
1845 		mac->cd_length = 0;
1846 	}
1847 
1848 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1849 	ctx->cc_provider_private = NULL;
1850 
1851 	return (ret);
1852 }
1853 
/*
 * SHA2_MAC_UPDATE(data, ctx, ret)
 *
 * Feed the caller-supplied data into the *inner* SHA2 context of the
 * given sha2_hmac_ctx_t, dispatching on the crypto_data_t format
 * (raw buffer, uio, or mblk chain).  On an unsupported cd_format, or
 * when one of the uio/mblk update helpers fails, "ret" is set to the
 * corresponding CRYPTO_* error code; on the raw path "ret" is left
 * untouched (SHA2Update() cannot fail).
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	case CRYPTO_DATA_MBLK:						\
		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
		    data);						\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
1872 
/*
 * KCF software provider mac entry point: single-part HMAC-SHA2.
 *
 * Computes the HMAC of 'data' under 'key' in one call, storing the
 * (possibly truncated, for the _GEN mechanisms) result in 'mac'.
 * The HMAC context lives on the stack; it is explicitly cleared on
 * every failure path since it contains key-derived state.
 *
 * Returns CRYPTO_SUCCESS or a CRYPTO_* error code; on failure
 * mac->cd_length is set to 0.
 */
/* ARGSUSED */
static int
sha2_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values approriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 *
			 * cm_type / 3 maps the HMAC mechanism back to its
			 * plain digest mechanism; this depends on the
			 * mechanism ordering (see sha2_free_context()).
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.  cm_type % 3 == 2
	 * identifies the _GEN (general-length, i.e. truncated-output)
	 * mechanisms; this also depends on the mechanism ordering.
	 */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * SHA384 needs special handling: sha_digest_len was set to the
	 * SHA512 digest length (64 bytes) above, but SHA2Final() on a
	 * SHA384 inner context only produces a 48-byte digest, so only
	 * SHA384_DIGEST_LENGTH bytes may be fed to the outer context.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the users buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
	} else {
		/*
		 * Only bzero outer context on failure, since SHA2Final()
		 * does it for us.
		 * We don't have to bzero the inner context since we
		 * always invoke a SHA2Final() on it.
		 */
		bzero(&sha2_hmac_ctx.hc_ocontext, sizeof (SHA2_CTX));
		mac->cd_length = 0;
	}

	return (ret);
bail:
	/* clear the key-derived HMAC state before returning an error */
	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
2024 
2025 /* ARGSUSED */
2026 static int
2027 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
2028     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
2029     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
2030     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
2031 {
2032 	int ret = CRYPTO_SUCCESS;
2033 	uchar_t digest[SHA512_DIGEST_LENGTH];
2034 	sha2_hmac_ctx_t sha2_hmac_ctx;
2035 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
2036 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
2037 
2038 	/*
2039 	 * Set the digest length and block size to values approriate to the
2040 	 * mechanism
2041 	 */
2042 	switch (mechanism->cm_type) {
2043 	case SHA256_HMAC_MECH_INFO_TYPE:
2044 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2045 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
2046 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
2047 		break;
2048 	case SHA384_HMAC_MECH_INFO_TYPE:
2049 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2050 	case SHA512_HMAC_MECH_INFO_TYPE:
2051 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2052 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
2053 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
2054 		break;
2055 	default:
2056 		return (CRYPTO_MECHANISM_INVALID);
2057 	}
2058 
2059 	/* Add support for key by attributes (RFE 4706552) */
2060 	if (key->ck_format != CRYPTO_KEY_RAW)
2061 		return (CRYPTO_ARGUMENTS_BAD);
2062 
2063 	if (ctx_template != NULL) {
2064 		/* reuse context template */
2065 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
2066 	} else {
2067 		/* no context template, initialize context */
2068 		if (keylen_in_bytes > sha_hmac_block_size) {
2069 			/*
2070 			 * Hash the passed-in key to get a smaller key.
2071 			 * The inner context is used since it hasn't been
2072 			 * initialized yet.
2073 			 */
2074 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
2075 			    &sha2_hmac_ctx.hc_icontext,
2076 			    key->ck_data, keylen_in_bytes, digest);
2077 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
2078 			    sha_digest_len);
2079 		} else {
2080 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
2081 			    keylen_in_bytes);
2082 		}
2083 	}
2084 
2085 	/* get the mechanism parameters, if applicable */
2086 	if (mechanism->cm_type % 3 == 2) {
2087 		if (mechanism->cm_param == NULL ||
2088 		    mechanism->cm_param_len != sizeof (ulong_t)) {
2089 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
2090 			goto bail;
2091 		}
2092 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
2093 		if (digest_len > sha_digest_len) {
2094 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
2095 			goto bail;
2096 		}
2097 	}
2098 
2099 	if (mac->cd_length != digest_len) {
2100 		ret = CRYPTO_INVALID_MAC;
2101 		goto bail;
2102 	}
2103 
2104 	/* do a SHA2 update of the inner context using the specified data */
2105 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
2106 	if (ret != CRYPTO_SUCCESS)
2107 		/* the update failed, free context and bail */
2108 		goto bail;
2109 
2110 	/* do a SHA2 final on the inner context */
2111 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
2112 
2113 	/*
2114 	 * Do an SHA2 update on the outer context, feeding the inner
2115 	 * digest as data.
2116 	 */
2117 	SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
2118 
2119 	/*
2120 	 * Do a SHA2 final on the outer context, storing the computed
2121 	 * digest in the users buffer.
2122 	 */
2123 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
2124 
2125 	/*
2126 	 * Compare the computed digest against the expected digest passed
2127 	 * as argument.
2128 	 */
2129 
2130 	switch (mac->cd_format) {
2131 
2132 	case CRYPTO_DATA_RAW:
2133 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
2134 		    mac->cd_offset, digest_len) != 0)
2135 			ret = CRYPTO_INVALID_MAC;
2136 		break;
2137 
2138 	case CRYPTO_DATA_UIO: {
2139 		off_t offset = mac->cd_offset;
2140 		uint_t vec_idx;
2141 		off_t scratch_offset = 0;
2142 		size_t length = digest_len;
2143 		size_t cur_len;
2144 
2145 		/* we support only kernel buffer */
2146 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
2147 			return (CRYPTO_ARGUMENTS_BAD);
2148 
2149 		/* jump to the first iovec containing the expected digest */
2150 		for (vec_idx = 0;
2151 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
2152 		    vec_idx < mac->cd_uio->uio_iovcnt;
2153 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len);
2154 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
2155 			/*
2156 			 * The caller specified an offset that is
2157 			 * larger than the total size of the buffers
2158 			 * it provided.
2159 			 */
2160 			ret = CRYPTO_DATA_LEN_RANGE;
2161 			break;
2162 		}
2163 
2164 		/* do the comparison of computed digest vs specified one */
2165 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
2166 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
2167 			    offset, length);
2168 
2169 			if (bcmp(digest + scratch_offset,
2170 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
2171 			    cur_len) != 0) {
2172 				ret = CRYPTO_INVALID_MAC;
2173 				break;
2174 			}
2175 
2176 			length -= cur_len;
2177 			vec_idx++;
2178 			scratch_offset += cur_len;
2179 			offset = 0;
2180 		}
2181 		break;
2182 	}
2183 
2184 	case CRYPTO_DATA_MBLK: {
2185 		off_t offset = mac->cd_offset;
2186 		mblk_t *mp;
2187 		off_t scratch_offset = 0;
2188 		size_t length = digest_len;
2189 		size_t cur_len;
2190 
2191 		/* jump to the first mblk_t containing the expected digest */
2192 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
2193 		    offset -= MBLKL(mp), mp = mp->b_cont);
2194 		if (mp == NULL) {
2195 			/*
2196 			 * The caller specified an offset that is larger than
2197 			 * the total size of the buffers it provided.
2198 			 */
2199 			ret = CRYPTO_DATA_LEN_RANGE;
2200 			break;
2201 		}
2202 
2203 		while (mp != NULL && length > 0) {
2204 			cur_len = MIN(MBLKL(mp) - offset, length);
2205 			if (bcmp(digest + scratch_offset,
2206 			    mp->b_rptr + offset, cur_len) != 0) {
2207 				ret = CRYPTO_INVALID_MAC;
2208 				break;
2209 			}
2210 
2211 			length -= cur_len;
2212 			mp = mp->b_cont;
2213 			scratch_offset += cur_len;
2214 			offset = 0;
2215 		}
2216 		break;
2217 	}
2218 
2219 	default:
2220 		ret = CRYPTO_ARGUMENTS_BAD;
2221 	}
2222 
2223 	return (ret);
2224 bail:
2225 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
2226 	mac->cd_length = 0;
2227 	return (ret);
2228 }
2229 
2230 /*
2231  * KCF software provider context management entry points.
2232  */
2233 
2234 /* ARGSUSED */
2235 static int
2236 sha2_create_ctx_template(crypto_provider_handle_t provider,
2237     crypto_mechanism_t *mechanism, crypto_key_t *key,
2238     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
2239     crypto_req_handle_t req)
2240 {
2241 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
2242 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
2243 	uint32_t sha_digest_len, sha_hmac_block_size;
2244 
2245 	/*
2246 	 * Set the digest length and block size to values approriate to the
2247 	 * mechanism
2248 	 */
2249 	switch (mechanism->cm_type) {
2250 	case SHA256_HMAC_MECH_INFO_TYPE:
2251 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2252 		sha_digest_len = SHA256_DIGEST_LENGTH;
2253 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
2254 		break;
2255 	case SHA384_HMAC_MECH_INFO_TYPE:
2256 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2257 	case SHA512_HMAC_MECH_INFO_TYPE:
2258 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2259 		sha_digest_len = SHA512_DIGEST_LENGTH;
2260 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
2261 		break;
2262 	default:
2263 		return (CRYPTO_MECHANISM_INVALID);
2264 	}
2265 
2266 	/* Add support for key by attributes (RFE 4706552) */
2267 	if (key->ck_format != CRYPTO_KEY_RAW)
2268 		return (CRYPTO_ARGUMENTS_BAD);
2269 
2270 	/*
2271 	 * Allocate and initialize SHA2 context.
2272 	 */
2273 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
2274 	    crypto_kmflag(req));
2275 	if (sha2_hmac_ctx_tmpl == NULL)
2276 		return (CRYPTO_HOST_MEMORY);
2277 
2278 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
2279 
2280 	if (keylen_in_bytes > sha_hmac_block_size) {
2281 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
2282 
2283 		/*
2284 		 * Hash the passed-in key to get a smaller key.
2285 		 * The inner context is used since it hasn't been
2286 		 * initialized yet.
2287 		 */
2288 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
2289 		    &sha2_hmac_ctx_tmpl->hc_icontext,
2290 		    key->ck_data, keylen_in_bytes, digested_key);
2291 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
2292 		    sha_digest_len);
2293 	} else {
2294 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
2295 		    keylen_in_bytes);
2296 	}
2297 
2298 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
2299 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
2300 
2301 	return (CRYPTO_SUCCESS);
2302 }
2303 
2304 static int
2305 sha2_free_context(crypto_ctx_t *ctx)
2306 {
2307 	uint_t ctx_len;
2308 
2309 	if (ctx->cc_provider_private == NULL)
2310 		return (CRYPTO_SUCCESS);
2311 
2312 	/*
2313 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
2314 	 * have different lengths.
2315 	 *
2316 	 * Note: Below is dependent on the mechanism ordering.
2317 	 */
2318 
2319 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
2320 		ctx_len = sizeof (sha2_ctx_t);
2321 	else
2322 		ctx_len = sizeof (sha2_hmac_ctx_t);
2323 
2324 	bzero(ctx->cc_provider_private, ctx_len);
2325 	kmem_free(ctx->cc_provider_private, ctx_len);
2326 	ctx->cc_provider_private = NULL;
2327 
2328 	return (CRYPTO_SUCCESS);
2329 }
2330 
2331 #endif /* _KERNEL */
2332 
2333 void
2334 SHA2Init(uint64_t mech, SHA2_CTX *ctx)
2335 {
2336 
2337 	switch (mech) {
2338 	case SHA256_MECH_INFO_TYPE:
2339 	case SHA256_HMAC_MECH_INFO_TYPE:
2340 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2341 		ctx->state.s32[0] = 0x6a09e667U;
2342 		ctx->state.s32[1] = 0xbb67ae85U;
2343 		ctx->state.s32[2] = 0x3c6ef372U;
2344 		ctx->state.s32[3] = 0xa54ff53aU;
2345 		ctx->state.s32[4] = 0x510e527fU;
2346 		ctx->state.s32[5] = 0x9b05688cU;
2347 		ctx->state.s32[6] = 0x1f83d9abU;
2348 		ctx->state.s32[7] = 0x5be0cd19U;
2349 		break;
2350 	case SHA384_MECH_INFO_TYPE:
2351 	case SHA384_HMAC_MECH_INFO_TYPE:
2352 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2353 		ctx->state.s64[0] = 0xcbbb9d5dc1059ed8ULL;
2354 		ctx->state.s64[1] = 0x629a292a367cd507ULL;
2355 		ctx->state.s64[2] = 0x9159015a3070dd17ULL;
2356 		ctx->state.s64[3] = 0x152fecd8f70e5939ULL;
2357 		ctx->state.s64[4] = 0x67332667ffc00b31ULL;
2358 		ctx->state.s64[5] = 0x8eb44a8768581511ULL;
2359 		ctx->state.s64[6] = 0xdb0c2e0d64f98fa7ULL;
2360 		ctx->state.s64[7] = 0x47b5481dbefa4fa4ULL;
2361 		break;
2362 	case SHA512_MECH_INFO_TYPE:
2363 	case SHA512_HMAC_MECH_INFO_TYPE:
2364 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2365 		ctx->state.s64[0] = 0x6a09e667f3bcc908ULL;
2366 		ctx->state.s64[1] = 0xbb67ae8584caa73bULL;
2367 		ctx->state.s64[2] = 0x3c6ef372fe94f82bULL;
2368 		ctx->state.s64[3] = 0xa54ff53a5f1d36f1ULL;
2369 		ctx->state.s64[4] = 0x510e527fade682d1ULL;
2370 		ctx->state.s64[5] = 0x9b05688c2b3e6c1fULL;
2371 		ctx->state.s64[6] = 0x1f83d9abfb41bd6bULL;
2372 		ctx->state.s64[7] = 0x5be0cd19137e2179ULL;
2373 		break;
2374 #ifdef _KERNEL
2375 	default:
2376 		cmn_err(CE_WARN, "sha2_init: "
2377 		    "failed to find a supported algorithm: 0x%x",
2378 		    (uint32_t)mech);
2379 
2380 #endif /* _KERNEL */
2381 	}
2382 
2383 	ctx->algotype = mech;
2384 	ctx->count.c64[0] = ctx->count.c64[1] = 0;
2385 }
2386 
2387 /*
2388  * SHA2Update()
2389  *
2390  * purpose: continues an sha2 digest operation, using the message block
2391  *          to update the context.
2392  *   input: SHA2_CTX *	: the context to update
2393  *          uint8_t *	: the message block
2394  *          uint32_t    : the length of the message block in bytes
2395  *  output: void
2396  */
2397 
2398 void
2399 SHA2Update(SHA2_CTX *ctx, const uint8_t *input, uint32_t input_len)
2400 {
2401 	uint32_t i, buf_index, buf_len, buf_limit;
2402 
2403 	/* check for noop */
2404 	if (input_len == 0)
2405 		return;
2406 
2407 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2408 		buf_limit = 64;
2409 
2410 		/* compute number of bytes mod 64 */
2411 		buf_index = (ctx->count.c32[1] >> 3) & 0x3F;
2412 
2413 		/* update number of bits */
2414 		if ((ctx->count.c32[1] += (input_len << 3)) < (input_len << 3))
2415 			ctx->count.c32[0]++;
2416 
2417 		ctx->count.c32[0] += (input_len >> 29);
2418 
2419 	} else {
2420 		buf_limit = 128;
2421 
2422 		/* compute number of bytes mod 128 */
2423 		buf_index = (ctx->count.c64[1] >> 3) & 0x7F;
2424 
2425 		/* update number of bits */
2426 		if ((ctx->count.c64[1] += (input_len << 3)) < (input_len << 3))
2427 			ctx->count.c64[0]++;
2428 
2429 		ctx->count.c64[0] += (input_len >> 29);
2430 	}
2431 
2432 	buf_len = buf_limit - buf_index;
2433 
2434 	/* transform as many times as possible */
2435 	i = 0;
2436 	if (input_len >= buf_len) {
2437 
2438 		/*
2439 		 * general optimization:
2440 		 *
2441 		 * only do initial bcopy() and SHA2Transform() if
2442 		 * buf_index != 0.  if buf_index == 0, we're just
2443 		 * wasting our time doing the bcopy() since there
2444 		 * wasn't any data left over from a previous call to
2445 		 * SHA2Update().
2446 		 */
2447 		if (buf_index) {
2448 			bcopy(input, &ctx->buf_un.buf8[buf_index], buf_len);
2449 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2450 				SHA256Transform(ctx, ctx->buf_un.buf8);
2451 			else
2452 				SHA512Transform(ctx, ctx->buf_un.buf8);
2453 
2454 			i = buf_len;
2455 		}
2456 
2457 
2458 		for (; i + buf_limit - 1 < input_len; i += buf_limit) {
2459 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2460 				SHA256Transform(ctx, &input[i]);
2461 			else
2462 				SHA512Transform(ctx, &input[i]);
2463 		}
2464 
2465 		/*
2466 		 * general optimization:
2467 		 *
2468 		 * if i and input_len are the same, return now instead
2469 		 * of calling bcopy(), since the bcopy() in this case
2470 		 * will be an expensive nop.
2471 		 */
2472 
2473 		if (input_len == i)
2474 			return;
2475 
2476 		buf_index = 0;
2477 	}
2478 
2479 	/* buffer remaining input */
2480 	bcopy(&input[i], &ctx->buf_un.buf8[buf_index], input_len - i);
2481 }
2482 
2483 
2484 /*
2485  * SHA2Final()
2486  *
2487  * purpose: ends an sha2 digest operation, finalizing the message digest and
2488  *          zeroing the context.
2489  *   input: uint8_t *	: a buffer to store the digest in
2490  *          SHA2_CTX *  : the context to finalize, save, and zero
2491  *  output: void
2492  */
2493 
2494 
2495 void
2496 SHA2Final(uint8_t *digest, SHA2_CTX *ctx)
2497 {
2498 	uint8_t		bitcount_be[sizeof (ctx->count.c32)];
2499 	uint8_t		bitcount_be64[sizeof (ctx->count.c64)];
2500 	uint32_t	index;
2501 
2502 
2503 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2504 		index  = (ctx->count.c32[1] >> 3) & 0x3f;
2505 		Encode(bitcount_be, ctx->count.c32, sizeof (bitcount_be));
2506 		SHA2Update(ctx, PADDING, ((index < 56) ? 56 : 120) - index);
2507 		SHA2Update(ctx, bitcount_be, sizeof (bitcount_be));
2508 		Encode(digest, ctx->state.s32, sizeof (ctx->state.s32));
2509 
2510 	} else {
2511 		index  = (ctx->count.c64[1] >> 3) & 0x7f;
2512 		Encode64(bitcount_be64, ctx->count.c64,
2513 		    sizeof (bitcount_be64));
2514 		SHA2Update(ctx, PADDING, ((index < 112) ? 112 : 240) - index);
2515 		SHA2Update(ctx, bitcount_be64, sizeof (bitcount_be64));
2516 		if (ctx->algotype <= SHA384_HMAC_GEN_MECH_INFO_TYPE) {
2517 			ctx->state.s64[6] = ctx->state.s64[7] = 0;
2518 			Encode64(digest, ctx->state.s64,
2519 			    sizeof (uint64_t) * 6);
2520 		} else
2521 			Encode64(digest, ctx->state.s64,
2522 			    sizeof (ctx->state.s64));
2523 	}
2524 }
2525