xref: /illumos-gate/usr/src/common/crypto/sha2/sha2.c (revision 032624d56c174c5c55126582b32e314a6af15522)
1 /*
2  * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
3  * Use is subject to license terms.
4  */
5 
6 #pragma ident	"%Z%%M%	%I%	%E% SMI"
7 
8 
9 /*
10  * The basic framework for this code came from the reference
11  * implementation for MD5.  That implementation is Copyright (C)
12  * 1991-2, RSA Data Security, Inc. Created 1991. All rights reserved.
13  *
14  * License to copy and use this software is granted provided that it
15  * is identified as the "RSA Data Security, Inc. MD5 Message-Digest
16  * Algorithm" in all material mentioning or referencing this software
17  * or this function.
18  *
19  * License is also granted to make and use derivative works provided
20  * that such works are identified as "derived from the RSA Data
21  * Security, Inc. MD5 Message-Digest Algorithm" in all material
22  * mentioning or referencing the derived work.
23  *
24  * RSA Data Security, Inc. makes no representations concerning either
25  * the merchantability of this software or the suitability of this
26  * software for any particular purpose. It is provided "as is"
27  * without express or implied warranty of any kind.
28  *
29  * These notices must be retained in any copies of any part of this
30  * documentation and/or software.
31  *
32  * NOTE: Cleaned-up and optimized, version of SHA2, based on the FIPS 180-2
33  * standard, available at http://www.itl.nist.gov/div897/pubs/fip180-2.htm
34  * Not as fast as one would like -- further optimizations are encouraged
35  * and appreciated.
36  */
37 
38 #include <sys/types.h>
39 #include <sys/param.h>
40 #include <sys/systm.h>
41 #include <sys/sysmacros.h>
42 #include <sys/sha2.h>
43 #include <sys/sha2_consts.h>
44 
45 #ifdef _KERNEL
46 
47 #include <sys/modctl.h>
48 #include <sys/cmn_err.h>
49 #include <sys/crypto/common.h>
50 #include <sys/crypto/spi.h>
51 #include <sys/strsun.h>
52 
53 /*
54  * The sha2 module is created with two modlinkages:
55  * - a modlmisc that allows consumers to directly call the entry points
56  *   SHA2Init, SHA2Update, and SHA2Final.
57  * - a modlcrypto that allows the module to register with the Kernel
58  *   Cryptographic Framework (KCF) as a software provider for the SHA2
59  *   mechanisms.
60  */
61 
62 #else
63 
64 #include <strings.h>
65 #include <stdlib.h>
66 #include <errno.h>
67 
68 
69 #endif	/* !_KERNEL */
70 
71 static void Encode(uint8_t *, uint32_t *, size_t);
72 static void Encode64(uint8_t *, uint64_t *, size_t);
73 static void SHA256Transform(SHA2_CTX *, const uint8_t *);
74 static void SHA512Transform(SHA2_CTX *, const uint8_t *);
75 
76 static uint8_t PADDING[128] = { 0x80, /* all zeros */ };
77 
78 /* Ch and Maj are the basic SHA2 functions. */
79 #define	Ch(b, c, d)	(((b) & (c)) ^ ((~b) & (d)))
80 #define	Maj(b, c, d)	(((b) & (c)) ^ ((b) & (d)) ^ ((c) & (d)))
81 
82 /* Rotates x right n bits. */
83 #define	ROTR(x, n)	\
84 	(((x) >> (n)) | ((x) << ((sizeof (x) * NBBY)-(n))))
85 
86 /* Shift x right n bits */
87 #define	SHR(x, n)	((x) >> (n))
88 
89 /* SHA256 Functions */
90 #define	BIGSIGMA0_256(x)	(ROTR((x), 2) ^ ROTR((x), 13) ^ ROTR((x), 22))
91 #define	BIGSIGMA1_256(x)	(ROTR((x), 6) ^ ROTR((x), 11) ^ ROTR((x), 25))
92 #define	SIGMA0_256(x)		(ROTR((x), 7) ^ ROTR((x), 18) ^ SHR((x), 3))
93 #define	SIGMA1_256(x)		(ROTR((x), 17) ^ ROTR((x), 19) ^ SHR((x), 10))
94 
95 #define	SHA256ROUND(a, b, c, d, e, f, g, h, i, w)			\
96 	T1 = h + BIGSIGMA1_256(e) + Ch(e, f, g) + SHA256_CONST(i) + w;	\
97 	d += T1;							\
98 	T2 = BIGSIGMA0_256(a) + Maj(a, b, c);				\
99 	h = T1 + T2
100 
101 /* SHA384/512 Functions */
102 #define	BIGSIGMA0(x)	(ROTR((x), 28) ^ ROTR((x), 34) ^ ROTR((x), 39))
103 #define	BIGSIGMA1(x)	(ROTR((x), 14) ^ ROTR((x), 18) ^ ROTR((x), 41))
104 #define	SIGMA0(x)	(ROTR((x), 1) ^ ROTR((x), 8) ^ SHR((x), 7))
105 #define	SIGMA1(x)	(ROTR((x), 19) ^ ROTR((x), 61) ^ SHR((x), 6))
106 #define	SHA512ROUND(a, b, c, d, e, f, g, h, i, w)			\
107 	T1 = h + BIGSIGMA1(e) + Ch(e, f, g) + SHA512_CONST(i) + w;	\
108 	d += T1;							\
109 	T2 = BIGSIGMA0(a) + Maj(a, b, c);				\
110 	h = T1 + T2
111 
112 #ifdef _KERNEL
113 
114 static struct modlmisc modlmisc = {
115 	&mod_miscops,
116 	"SHA2 Message-Digest Algorithm"
117 };
118 
119 static struct modlcrypto modlcrypto = {
120 	&mod_cryptoops,
121 	"SHA2 Kernel SW Provider %I%"
122 };
123 
124 static struct modlinkage modlinkage = {
125 	MODREV_1, &modlmisc, &modlcrypto, NULL
126 };
127 
128 /*
129  * CSPI information (entry points, provider info, etc.)
130  */
131 
132 #endif /* _KERNEL */
133 
/*
 * List of supported mechanisms in this module.
 *
 * It is important to note that in the module, division and modulus
 * calculations are performed on the enumerated type to determine which
 * mechanism is being used; therefore, changing the order of the entries
 * or adding new mechanisms should be done carefully.
 */
142 typedef enum sha2_mech_type {
143 	SHA256_MECH_INFO_TYPE,		/* SUN_CKM_SHA256 */
144 	SHA256_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC */
145 	SHA256_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC_GENERAL */
146 	SHA384_MECH_INFO_TYPE,		/* SUN_CKM_SHA384 */
147 	SHA384_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC */
148 	SHA384_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC_GENERAL */
149 	SHA512_MECH_INFO_TYPE,		/* SUN_CKM_SHA512 */
150 	SHA512_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA512_HMAC */
151 	SHA512_HMAC_GEN_MECH_INFO_TYPE	/* SUN_CKM_SHA512_HMAC_GENERAL */
152 } sha2_mech_type_t;
153 
154 #ifdef _KERNEL
155 
156 #define	SHA2_HMAC_MIN_KEY_LEN	8	/* SHA2-HMAC min key length in bits */
157 #define	SHA2_HMAC_MAX_KEY_LEN	INT_MAX /* SHA2-HMAC max key length in bits */
158 
159 #define	SHA256_DIGEST_LENGTH	32	/* SHA256 digest length in bytes */
160 #define	SHA384_DIGEST_LENGTH	48	/* SHA384 digest length in bytes */
161 #define	SHA512_DIGEST_LENGTH	64	/* SHA512 digest length in bytes */
162 
163 #define	SHA256_HMAC_BLOCK_SIZE	64	/* SHA256-HMAC block size */
164 #define	SHA512_HMAC_BLOCK_SIZE	128	/* SHA512-HMAC block size */
165 
166 /*
167  * Context for SHA2 mechanism.
168  */
169 typedef struct sha2_ctx {
170 	sha2_mech_type_t	sc_mech_type;	/* type of context */
171 	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
172 } sha2_ctx_t;
173 
174 /*
175  * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
176  */
177 typedef struct sha2_hmac_ctx {
178 	sha2_mech_type_t	hc_mech_type;	/* type of context */
179 	uint32_t		hc_digest_len;	/* digest len in bytes */
180 	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
181 	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
182 } sha2_hmac_ctx_t;
183 
184 /*
185  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
186  * by KCF to one of the entry points.
187  */
188 
189 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
190 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
191 
192 /* to extract the digest length passed as mechanism parameter */
193 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
194 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
195 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
196 	else {								\
197 		ulong_t tmp_ulong;					\
198 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
199 		(len) = (uint32_t)tmp_ulong;				\
200 	}								\
201 }
202 
203 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
204 	SHA2Init(mech, ctx);				\
205 	SHA2Update(ctx, key, len);			\
206 	SHA2Final(digest, ctx);				\
207 }
208 
209 /*
210  * Mechanism info structure passed to KCF during registration.
211  */
212 static crypto_mech_info_t sha2_mech_info_tab[] = {
213 	/* SHA256 */
214 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
215 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
216 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
217 	/* SHA256-HMAC */
218 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
219 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
220 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
221 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
222 	/* SHA256-HMAC GENERAL */
223 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
224 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
225 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
226 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
227 	/* SHA384 */
228 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
229 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
230 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
231 	/* SHA384-HMAC */
232 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
233 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
234 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
235 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
236 	/* SHA384-HMAC GENERAL */
237 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
238 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
239 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
240 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
241 	/* SHA512 */
242 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
243 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
244 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
245 	/* SHA512-HMAC */
246 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
247 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
248 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
249 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
250 	/* SHA512-HMAC GENERAL */
251 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
252 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
253 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
254 	    CRYPTO_KEYSIZE_UNIT_IN_BITS}
255 };
256 
257 void SHA2Init(uint64_t, SHA2_CTX *);
258 void SHA2Update(SHA2_CTX *, const uint8_t *, uint32_t);
259 void SHA2Final(uint8_t *, SHA2_CTX *);
260 
261 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
262 
263 static crypto_control_ops_t sha2_control_ops = {
264 	sha2_provider_status
265 };
266 
267 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
268     crypto_req_handle_t);
269 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
270     crypto_req_handle_t);
271 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
272     crypto_req_handle_t);
273 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
274     crypto_req_handle_t);
275 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
276     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
277     crypto_req_handle_t);
278 
279 static crypto_digest_ops_t sha2_digest_ops = {
280 	sha2_digest_init,
281 	sha2_digest,
282 	sha2_digest_update,
283 	NULL,
284 	sha2_digest_final,
285 	sha2_digest_atomic
286 };
287 
288 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
289     crypto_spi_ctx_template_t, crypto_req_handle_t);
290 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
291     crypto_req_handle_t);
292 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
293 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
294     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
295     crypto_spi_ctx_template_t, crypto_req_handle_t);
296 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
297     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
298     crypto_spi_ctx_template_t, crypto_req_handle_t);
299 
300 static crypto_mac_ops_t sha2_mac_ops = {
301 	sha2_mac_init,
302 	NULL,
303 	sha2_mac_update,
304 	sha2_mac_final,
305 	sha2_mac_atomic,
306 	sha2_mac_verify_atomic
307 };
308 
309 static int sha2_create_ctx_template(crypto_provider_handle_t,
310     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
311     size_t *, crypto_req_handle_t);
312 static int sha2_free_context(crypto_ctx_t *);
313 
314 static crypto_ctx_ops_t sha2_ctx_ops = {
315 	sha2_create_ctx_template,
316 	sha2_free_context
317 };
318 
319 static crypto_ops_t sha2_crypto_ops = {
320 	&sha2_control_ops,
321 	&sha2_digest_ops,
322 	NULL,
323 	&sha2_mac_ops,
324 	NULL,
325 	NULL,
326 	NULL,
327 	NULL,
328 	NULL,
329 	NULL,
330 	NULL,
331 	NULL,
332 	NULL,
333 	&sha2_ctx_ops
334 };
335 
336 static crypto_provider_info_t sha2_prov_info = {
337 	CRYPTO_SPI_VERSION_1,
338 	"SHA2 Software Provider",
339 	CRYPTO_SW_PROVIDER,
340 	{&modlinkage},
341 	NULL,
342 	&sha2_crypto_ops,
343 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
344 	sha2_mech_info_tab
345 };
346 
347 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
348 
349 int
350 _init()
351 {
352 	int ret;
353 
354 	if ((ret = mod_install(&modlinkage)) != 0)
355 		return (ret);
356 
357 	/*
358 	 * Register with KCF. If the registration fails, log an
359 	 * error but do not uninstall the module, since the functionality
360 	 * provided by misc/sha2 should still be available.
361 	 */
362 	if ((ret = crypto_register_provider(&sha2_prov_info,
363 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
364 		cmn_err(CE_WARN, "sha2 _init: "
365 		    "crypto_register_provider() failed (0x%x)", ret);
366 
367 	return (0);
368 }
369 
370 int
371 _info(struct modinfo *modinfop)
372 {
373 	return (mod_info(&modlinkage, modinfop));
374 }
375 
376 #endif /* _KERNEL */
377 
378 
379 /*
380  * sparc optimization:
381  *
382  * on the sparc, we can load big endian 32-bit data easily.  note that
383  * special care must be taken to ensure the address is 32-bit aligned.
384  * in the interest of speed, we don't check to make sure, since
385  * careful programming can guarantee this for us.
386  */
387 
388 #if	defined(_BIG_ENDIAN)
389 
390 #define	LOAD_BIG_32(addr)	(*(uint32_t *)(addr))
391 
392 #else	/* little endian -- will work on big endian, but slowly */
393 
394 #define	LOAD_BIG_32(addr)	\
395 	(((addr)[0] << 24) | ((addr)[1] << 16) | ((addr)[2] << 8) | (addr)[3])
396 #endif
397 
398 
399 #if	defined(_BIG_ENDIAN)
400 
401 #define	LOAD_BIG_64(addr)	(*(uint64_t *)(addr))
402 
403 #else	/* little endian -- will work on big endian, but slowly */
404 
405 #define	LOAD_BIG_64(addr)	\
406 	(((uint64_t)(addr)[0] << 56) | ((uint64_t)(addr)[1] << 48) |	\
407 	    ((uint64_t)(addr)[2] << 40) | ((uint64_t)(addr)[3] << 32) |	\
408 	    ((uint64_t)(addr)[4] << 24) | ((uint64_t)(addr)[5] << 16) |	\
409 	    ((uint64_t)(addr)[6] << 8) | (uint64_t)(addr)[7])
410 
411 #endif
412 
413 
414 /* SHA256 Transform */
415 
416 static void
417 SHA256Transform(SHA2_CTX *ctx, const uint8_t *blk)
418 {
419 
420 	uint32_t a = ctx->state.s32[0];
421 	uint32_t b = ctx->state.s32[1];
422 	uint32_t c = ctx->state.s32[2];
423 	uint32_t d = ctx->state.s32[3];
424 	uint32_t e = ctx->state.s32[4];
425 	uint32_t f = ctx->state.s32[5];
426 	uint32_t g = ctx->state.s32[6];
427 	uint32_t h = ctx->state.s32[7];
428 
429 	uint32_t w0, w1, w2, w3, w4, w5, w6, w7;
430 	uint32_t w8, w9, w10, w11, w12, w13, w14, w15;
431 	uint32_t T1, T2;
432 
433 #if	defined(__sparc)
434 	static const uint32_t sha256_consts[] = {
435 		SHA256_CONST_0, SHA256_CONST_1, SHA256_CONST_2,
436 		SHA256_CONST_3, SHA256_CONST_4, SHA256_CONST_5,
437 		SHA256_CONST_6, SHA256_CONST_7, SHA256_CONST_8,
438 		SHA256_CONST_9, SHA256_CONST_10, SHA256_CONST_11,
439 		SHA256_CONST_12, SHA256_CONST_13, SHA256_CONST_14,
440 		SHA256_CONST_15, SHA256_CONST_16, SHA256_CONST_17,
441 		SHA256_CONST_18, SHA256_CONST_19, SHA256_CONST_20,
442 		SHA256_CONST_21, SHA256_CONST_22, SHA256_CONST_23,
443 		SHA256_CONST_24, SHA256_CONST_25, SHA256_CONST_26,
444 		SHA256_CONST_27, SHA256_CONST_28, SHA256_CONST_29,
445 		SHA256_CONST_30, SHA256_CONST_31, SHA256_CONST_32,
446 		SHA256_CONST_33, SHA256_CONST_34, SHA256_CONST_35,
447 		SHA256_CONST_36, SHA256_CONST_37, SHA256_CONST_38,
448 		SHA256_CONST_39, SHA256_CONST_40, SHA256_CONST_41,
449 		SHA256_CONST_42, SHA256_CONST_43, SHA256_CONST_44,
450 		SHA256_CONST_45, SHA256_CONST_46, SHA256_CONST_47,
451 		SHA256_CONST_48, SHA256_CONST_49, SHA256_CONST_50,
452 		SHA256_CONST_51, SHA256_CONST_52, SHA256_CONST_53,
453 		SHA256_CONST_54, SHA256_CONST_55, SHA256_CONST_56,
454 		SHA256_CONST_57, SHA256_CONST_58, SHA256_CONST_59,
455 		SHA256_CONST_60, SHA256_CONST_61, SHA256_CONST_62,
456 		SHA256_CONST_63
457 	};
458 #endif
459 
460 	if ((uintptr_t)blk & 0x3) {		/* not 4-byte aligned? */
461 		bcopy(blk, ctx->buf_un.buf32,  sizeof (ctx->buf_un.buf32));
462 		blk = (uint8_t *)ctx->buf_un.buf32;
463 	}
464 
465 	w0 =  LOAD_BIG_32(blk + 4 * 0);
466 	SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0);
467 	w1 =  LOAD_BIG_32(blk + 4 * 1);
468 	SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1);
469 	w2 =  LOAD_BIG_32(blk + 4 * 2);
470 	SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2);
471 	w3 =  LOAD_BIG_32(blk + 4 * 3);
472 	SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3);
473 	w4 =  LOAD_BIG_32(blk + 4 * 4);
474 	SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4);
475 	w5 =  LOAD_BIG_32(blk + 4 * 5);
476 	SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5);
477 	w6 =  LOAD_BIG_32(blk + 4 * 6);
478 	SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6);
479 	w7 =  LOAD_BIG_32(blk + 4 * 7);
480 	SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7);
481 	w8 =  LOAD_BIG_32(blk + 4 * 8);
482 	SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8);
483 	w9 =  LOAD_BIG_32(blk + 4 * 9);
484 	SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9);
485 	w10 =  LOAD_BIG_32(blk + 4 * 10);
486 	SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10);
487 	w11 =  LOAD_BIG_32(blk + 4 * 11);
488 	SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11);
489 	w12 =  LOAD_BIG_32(blk + 4 * 12);
490 	SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12);
491 	w13 =  LOAD_BIG_32(blk + 4 * 13);
492 	SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13);
493 	w14 =  LOAD_BIG_32(blk + 4 * 14);
494 	SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14);
495 	w15 =  LOAD_BIG_32(blk + 4 * 15);
496 	SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15);
497 
498 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
499 	SHA256ROUND(a, b, c, d, e, f, g, h, 16, w0);
500 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
501 	SHA256ROUND(h, a, b, c, d, e, f, g, 17, w1);
502 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
503 	SHA256ROUND(g, h, a, b, c, d, e, f, 18, w2);
504 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
505 	SHA256ROUND(f, g, h, a, b, c, d, e, 19, w3);
506 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
507 	SHA256ROUND(e, f, g, h, a, b, c, d, 20, w4);
508 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
509 	SHA256ROUND(d, e, f, g, h, a, b, c, 21, w5);
510 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
511 	SHA256ROUND(c, d, e, f, g, h, a, b, 22, w6);
512 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
513 	SHA256ROUND(b, c, d, e, f, g, h, a, 23, w7);
514 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
515 	SHA256ROUND(a, b, c, d, e, f, g, h, 24, w8);
516 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
517 	SHA256ROUND(h, a, b, c, d, e, f, g, 25, w9);
518 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
519 	SHA256ROUND(g, h, a, b, c, d, e, f, 26, w10);
520 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
521 	SHA256ROUND(f, g, h, a, b, c, d, e, 27, w11);
522 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
523 	SHA256ROUND(e, f, g, h, a, b, c, d, 28, w12);
524 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
525 	SHA256ROUND(d, e, f, g, h, a, b, c, 29, w13);
526 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
527 	SHA256ROUND(c, d, e, f, g, h, a, b, 30, w14);
528 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
529 	SHA256ROUND(b, c, d, e, f, g, h, a, 31, w15);
530 
531 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
532 	SHA256ROUND(a, b, c, d, e, f, g, h, 32, w0);
533 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
534 	SHA256ROUND(h, a, b, c, d, e, f, g, 33, w1);
535 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
536 	SHA256ROUND(g, h, a, b, c, d, e, f, 34, w2);
537 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
538 	SHA256ROUND(f, g, h, a, b, c, d, e, 35, w3);
539 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
540 	SHA256ROUND(e, f, g, h, a, b, c, d, 36, w4);
541 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
542 	SHA256ROUND(d, e, f, g, h, a, b, c, 37, w5);
543 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
544 	SHA256ROUND(c, d, e, f, g, h, a, b, 38, w6);
545 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
546 	SHA256ROUND(b, c, d, e, f, g, h, a, 39, w7);
547 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
548 	SHA256ROUND(a, b, c, d, e, f, g, h, 40, w8);
549 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
550 	SHA256ROUND(h, a, b, c, d, e, f, g, 41, w9);
551 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
552 	SHA256ROUND(g, h, a, b, c, d, e, f, 42, w10);
553 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
554 	SHA256ROUND(f, g, h, a, b, c, d, e, 43, w11);
555 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
556 	SHA256ROUND(e, f, g, h, a, b, c, d, 44, w12);
557 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
558 	SHA256ROUND(d, e, f, g, h, a, b, c, 45, w13);
559 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
560 	SHA256ROUND(c, d, e, f, g, h, a, b, 46, w14);
561 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
562 	SHA256ROUND(b, c, d, e, f, g, h, a, 47, w15);
563 
564 	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
565 	SHA256ROUND(a, b, c, d, e, f, g, h, 48, w0);
566 	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
567 	SHA256ROUND(h, a, b, c, d, e, f, g, 49, w1);
568 	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
569 	SHA256ROUND(g, h, a, b, c, d, e, f, 50, w2);
570 	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
571 	SHA256ROUND(f, g, h, a, b, c, d, e, 51, w3);
572 	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
573 	SHA256ROUND(e, f, g, h, a, b, c, d, 52, w4);
574 	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
575 	SHA256ROUND(d, e, f, g, h, a, b, c, 53, w5);
576 	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
577 	SHA256ROUND(c, d, e, f, g, h, a, b, 54, w6);
578 	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
579 	SHA256ROUND(b, c, d, e, f, g, h, a, 55, w7);
580 	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
581 	SHA256ROUND(a, b, c, d, e, f, g, h, 56, w8);
582 	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
583 	SHA256ROUND(h, a, b, c, d, e, f, g, 57, w9);
584 	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
585 	SHA256ROUND(g, h, a, b, c, d, e, f, 58, w10);
586 	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
587 	SHA256ROUND(f, g, h, a, b, c, d, e, 59, w11);
588 	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
589 	SHA256ROUND(e, f, g, h, a, b, c, d, 60, w12);
590 	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
591 	SHA256ROUND(d, e, f, g, h, a, b, c, 61, w13);
592 	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
593 	SHA256ROUND(c, d, e, f, g, h, a, b, 62, w14);
594 	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
595 	SHA256ROUND(b, c, d, e, f, g, h, a, 63, w15);
596 
597 	ctx->state.s32[0] += a;
598 	ctx->state.s32[1] += b;
599 	ctx->state.s32[2] += c;
600 	ctx->state.s32[3] += d;
601 	ctx->state.s32[4] += e;
602 	ctx->state.s32[5] += f;
603 	ctx->state.s32[6] += g;
604 	ctx->state.s32[7] += h;
605 }
606 
607 
608 /* SHA384 and SHA512 Transform */
609 
610 static void
611 SHA512Transform(SHA2_CTX *ctx, const uint8_t *blk)
612 {
613 
614 	uint64_t a = ctx->state.s64[0];
615 	uint64_t b = ctx->state.s64[1];
616 	uint64_t c = ctx->state.s64[2];
617 	uint64_t d = ctx->state.s64[3];
618 	uint64_t e = ctx->state.s64[4];
619 	uint64_t f = ctx->state.s64[5];
620 	uint64_t g = ctx->state.s64[6];
621 	uint64_t h = ctx->state.s64[7];
622 
623 	uint64_t w0, w1, w2, w3, w4, w5, w6, w7;
624 	uint64_t w8, w9, w10, w11, w12, w13, w14, w15;
625 	uint64_t T1, T2;
626 
627 #if	defined(__sparc)
628 	static const uint64_t sha512_consts[] = {
629 		SHA512_CONST_0, SHA512_CONST_1, SHA512_CONST_2,
630 		SHA512_CONST_3, SHA512_CONST_4, SHA512_CONST_5,
631 		SHA512_CONST_6, SHA512_CONST_7, SHA512_CONST_8,
632 		SHA512_CONST_9, SHA512_CONST_10, SHA512_CONST_11,
633 		SHA512_CONST_12, SHA512_CONST_13, SHA512_CONST_14,
634 		SHA512_CONST_15, SHA512_CONST_16, SHA512_CONST_17,
635 		SHA512_CONST_18, SHA512_CONST_19, SHA512_CONST_20,
636 		SHA512_CONST_21, SHA512_CONST_22, SHA512_CONST_23,
637 		SHA512_CONST_24, SHA512_CONST_25, SHA512_CONST_26,
638 		SHA512_CONST_27, SHA512_CONST_28, SHA512_CONST_29,
639 		SHA512_CONST_30, SHA512_CONST_31, SHA512_CONST_32,
640 		SHA512_CONST_33, SHA512_CONST_34, SHA512_CONST_35,
641 		SHA512_CONST_36, SHA512_CONST_37, SHA512_CONST_38,
642 		SHA512_CONST_39, SHA512_CONST_40, SHA512_CONST_41,
643 		SHA512_CONST_42, SHA512_CONST_43, SHA512_CONST_44,
644 		SHA512_CONST_45, SHA512_CONST_46, SHA512_CONST_47,
645 		SHA512_CONST_48, SHA512_CONST_49, SHA512_CONST_50,
646 		SHA512_CONST_51, SHA512_CONST_52, SHA512_CONST_53,
647 		SHA512_CONST_54, SHA512_CONST_55, SHA512_CONST_56,
648 		SHA512_CONST_57, SHA512_CONST_58, SHA512_CONST_59,
649 		SHA512_CONST_60, SHA512_CONST_61, SHA512_CONST_62,
650 		SHA512_CONST_63, SHA512_CONST_64, SHA512_CONST_65,
651 		SHA512_CONST_66, SHA512_CONST_67, SHA512_CONST_68,
652 		SHA512_CONST_69, SHA512_CONST_70, SHA512_CONST_71,
653 		SHA512_CONST_72, SHA512_CONST_73, SHA512_CONST_74,
654 		SHA512_CONST_75, SHA512_CONST_76, SHA512_CONST_77,
655 		SHA512_CONST_78, SHA512_CONST_79
656 	};
657 #endif
658 
659 
660 	if ((uintptr_t)blk & 0x7) {		/* not 8-byte aligned? */
661 		bcopy(blk, ctx->buf_un.buf64,  sizeof (ctx->buf_un.buf64));
662 		blk = (uint8_t *)ctx->buf_un.buf64;
663 	}
664 
665 	w0 =  LOAD_BIG_64(blk + 8 * 0);
666 	SHA512ROUND(a, b, c, d, e, f, g, h, 0, w0);
667 	w1 =  LOAD_BIG_64(blk + 8 * 1);
668 	SHA512ROUND(h, a, b, c, d, e, f, g, 1, w1);
669 	w2 =  LOAD_BIG_64(blk + 8 * 2);
670 	SHA512ROUND(g, h, a, b, c, d, e, f, 2, w2);
671 	w3 =  LOAD_BIG_64(blk + 8 * 3);
672 	SHA512ROUND(f, g, h, a, b, c, d, e, 3, w3);
673 	w4 =  LOAD_BIG_64(blk + 8 * 4);
674 	SHA512ROUND(e, f, g, h, a, b, c, d, 4, w4);
675 	w5 =  LOAD_BIG_64(blk + 8 * 5);
676 	SHA512ROUND(d, e, f, g, h, a, b, c, 5, w5);
677 	w6 =  LOAD_BIG_64(blk + 8 * 6);
678 	SHA512ROUND(c, d, e, f, g, h, a, b, 6, w6);
679 	w7 =  LOAD_BIG_64(blk + 8 * 7);
680 	SHA512ROUND(b, c, d, e, f, g, h, a, 7, w7);
681 	w8 =  LOAD_BIG_64(blk + 8 * 8);
682 	SHA512ROUND(a, b, c, d, e, f, g, h, 8, w8);
683 	w9 =  LOAD_BIG_64(blk + 8 * 9);
684 	SHA512ROUND(h, a, b, c, d, e, f, g, 9, w9);
685 	w10 =  LOAD_BIG_64(blk + 8 * 10);
686 	SHA512ROUND(g, h, a, b, c, d, e, f, 10, w10);
687 	w11 =  LOAD_BIG_64(blk + 8 * 11);
688 	SHA512ROUND(f, g, h, a, b, c, d, e, 11, w11);
689 	w12 =  LOAD_BIG_64(blk + 8 * 12);
690 	SHA512ROUND(e, f, g, h, a, b, c, d, 12, w12);
691 	w13 =  LOAD_BIG_64(blk + 8 * 13);
692 	SHA512ROUND(d, e, f, g, h, a, b, c, 13, w13);
693 	w14 =  LOAD_BIG_64(blk + 8 * 14);
694 	SHA512ROUND(c, d, e, f, g, h, a, b, 14, w14);
695 	w15 =  LOAD_BIG_64(blk + 8 * 15);
696 	SHA512ROUND(b, c, d, e, f, g, h, a, 15, w15);
697 
698 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
699 	SHA512ROUND(a, b, c, d, e, f, g, h, 16, w0);
700 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
701 	SHA512ROUND(h, a, b, c, d, e, f, g, 17, w1);
702 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
703 	SHA512ROUND(g, h, a, b, c, d, e, f, 18, w2);
704 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
705 	SHA512ROUND(f, g, h, a, b, c, d, e, 19, w3);
706 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
707 	SHA512ROUND(e, f, g, h, a, b, c, d, 20, w4);
708 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
709 	SHA512ROUND(d, e, f, g, h, a, b, c, 21, w5);
710 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
711 	SHA512ROUND(c, d, e, f, g, h, a, b, 22, w6);
712 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
713 	SHA512ROUND(b, c, d, e, f, g, h, a, 23, w7);
714 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
715 	SHA512ROUND(a, b, c, d, e, f, g, h, 24, w8);
716 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
717 	SHA512ROUND(h, a, b, c, d, e, f, g, 25, w9);
718 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
719 	SHA512ROUND(g, h, a, b, c, d, e, f, 26, w10);
720 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
721 	SHA512ROUND(f, g, h, a, b, c, d, e, 27, w11);
722 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
723 	SHA512ROUND(e, f, g, h, a, b, c, d, 28, w12);
724 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
725 	SHA512ROUND(d, e, f, g, h, a, b, c, 29, w13);
726 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
727 	SHA512ROUND(c, d, e, f, g, h, a, b, 30, w14);
728 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
729 	SHA512ROUND(b, c, d, e, f, g, h, a, 31, w15);
730 
731 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
732 	SHA512ROUND(a, b, c, d, e, f, g, h, 32, w0);
733 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
734 	SHA512ROUND(h, a, b, c, d, e, f, g, 33, w1);
735 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
736 	SHA512ROUND(g, h, a, b, c, d, e, f, 34, w2);
737 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
738 	SHA512ROUND(f, g, h, a, b, c, d, e, 35, w3);
739 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
740 	SHA512ROUND(e, f, g, h, a, b, c, d, 36, w4);
741 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
742 	SHA512ROUND(d, e, f, g, h, a, b, c, 37, w5);
743 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
744 	SHA512ROUND(c, d, e, f, g, h, a, b, 38, w6);
745 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
746 	SHA512ROUND(b, c, d, e, f, g, h, a, 39, w7);
747 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
748 	SHA512ROUND(a, b, c, d, e, f, g, h, 40, w8);
749 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
750 	SHA512ROUND(h, a, b, c, d, e, f, g, 41, w9);
751 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
752 	SHA512ROUND(g, h, a, b, c, d, e, f, 42, w10);
753 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
754 	SHA512ROUND(f, g, h, a, b, c, d, e, 43, w11);
755 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
756 	SHA512ROUND(e, f, g, h, a, b, c, d, 44, w12);
757 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
758 	SHA512ROUND(d, e, f, g, h, a, b, c, 45, w13);
759 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
760 	SHA512ROUND(c, d, e, f, g, h, a, b, 46, w14);
761 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
762 	SHA512ROUND(b, c, d, e, f, g, h, a, 47, w15);
763 
764 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
765 	SHA512ROUND(a, b, c, d, e, f, g, h, 48, w0);
766 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
767 	SHA512ROUND(h, a, b, c, d, e, f, g, 49, w1);
768 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
769 	SHA512ROUND(g, h, a, b, c, d, e, f, 50, w2);
770 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
771 	SHA512ROUND(f, g, h, a, b, c, d, e, 51, w3);
772 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
773 	SHA512ROUND(e, f, g, h, a, b, c, d, 52, w4);
774 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
775 	SHA512ROUND(d, e, f, g, h, a, b, c, 53, w5);
776 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
777 	SHA512ROUND(c, d, e, f, g, h, a, b, 54, w6);
778 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
779 	SHA512ROUND(b, c, d, e, f, g, h, a, 55, w7);
780 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
781 	SHA512ROUND(a, b, c, d, e, f, g, h, 56, w8);
782 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
783 	SHA512ROUND(h, a, b, c, d, e, f, g, 57, w9);
784 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
785 	SHA512ROUND(g, h, a, b, c, d, e, f, 58, w10);
786 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
787 	SHA512ROUND(f, g, h, a, b, c, d, e, 59, w11);
788 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
789 	SHA512ROUND(e, f, g, h, a, b, c, d, 60, w12);
790 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
791 	SHA512ROUND(d, e, f, g, h, a, b, c, 61, w13);
792 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
793 	SHA512ROUND(c, d, e, f, g, h, a, b, 62, w14);
794 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
795 	SHA512ROUND(b, c, d, e, f, g, h, a, 63, w15);
796 
797 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
798 	SHA512ROUND(a, b, c, d, e, f, g, h, 64, w0);
799 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
800 	SHA512ROUND(h, a, b, c, d, e, f, g, 65, w1);
801 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
802 	SHA512ROUND(g, h, a, b, c, d, e, f, 66, w2);
803 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
804 	SHA512ROUND(f, g, h, a, b, c, d, e, 67, w3);
805 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
806 	SHA512ROUND(e, f, g, h, a, b, c, d, 68, w4);
807 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
808 	SHA512ROUND(d, e, f, g, h, a, b, c, 69, w5);
809 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
810 	SHA512ROUND(c, d, e, f, g, h, a, b, 70, w6);
811 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
812 	SHA512ROUND(b, c, d, e, f, g, h, a, 71, w7);
813 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
814 	SHA512ROUND(a, b, c, d, e, f, g, h, 72, w8);
815 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
816 	SHA512ROUND(h, a, b, c, d, e, f, g, 73, w9);
817 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
818 	SHA512ROUND(g, h, a, b, c, d, e, f, 74, w10);
819 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
820 	SHA512ROUND(f, g, h, a, b, c, d, e, 75, w11);
821 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
822 	SHA512ROUND(e, f, g, h, a, b, c, d, 76, w12);
823 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
824 	SHA512ROUND(d, e, f, g, h, a, b, c, 77, w13);
825 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
826 	SHA512ROUND(c, d, e, f, g, h, a, b, 78, w14);
827 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
828 	SHA512ROUND(b, c, d, e, f, g, h, a, 79, w15);
829 
830 	ctx->state.s64[0] += a;
831 	ctx->state.s64[1] += b;
832 	ctx->state.s64[2] += c;
833 	ctx->state.s64[3] += d;
834 	ctx->state.s64[4] += e;
835 	ctx->state.s64[5] += f;
836 	ctx->state.s64[6] += g;
837 	ctx->state.s64[7] += h;
838 
839 }
840 
841 
842 /*
843  * devpro compiler optimization:
844  *
845  * the compiler can generate better code if it knows that `input' and
846  * `output' do not point to the same source.  there is no portable
847  * way to tell the compiler this, but the sun compiler recognizes the
848  * `_Restrict' keyword to indicate this condition.  use it if possible.
849  */
850 
851 #ifdef	__RESTRICT
852 #define	restrict	_Restrict
853 #else
854 #define	restrict	/* nothing */
855 #endif
856 
/*
 * Encode()
 *
 * purpose: serialize an array of 32-bit words into a big-endian byte
 *	    stream, as required by the SHA2 padding/length encoding
 *   input: uint8_t *	: destination for the big-endian bytes
 *	    uint32_t *	: source words in host order
 *	    size_t	: number of output bytes (a multiple of 4)
 *  output: void
 */

static void
Encode(uint8_t *restrict output, uint32_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint32_t))) {
		/* big-endian CPU with aligned destination: store directly */
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			/* LINTED: pointer alignment */
			*((uint32_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* little endian -- will work on big endian, but slowly */
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			uint32_t w = input[word];

			output[byte]		= (w >> 24) & 0xff;
			output[byte + 1]	= (w >> 16) & 0xff;
			output[byte + 2]	= (w >> 8) & 0xff;
			output[byte + 3]	= w & 0xff;
		}
#if	defined(__sparc)
	}
#endif
}
890 
/*
 * Encode64()
 *
 * Serialize an array of 64-bit words into a big-endian byte stream;
 * the 64-bit counterpart of Encode(), used by the SHA384/SHA512 paths.
 * `len' is the number of output bytes (a multiple of 8).
 */
static void
Encode64(uint8_t *restrict output, uint64_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint64_t))) {
		/* big-endian CPU with aligned destination: store directly */
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			/* LINTED: pointer alignment */
			*((uint64_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* little endian -- will work on big endian, but slowly */
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			uint64_t w = input[word];

			output[byte]		= (w >> 56) & 0xff;
			output[byte + 1]	= (w >> 48) & 0xff;
			output[byte + 2]	= (w >> 40) & 0xff;
			output[byte + 3]	= (w >> 32) & 0xff;
			output[byte + 4]	= (w >> 24) & 0xff;
			output[byte + 5]	= (w >> 16) & 0xff;
			output[byte + 6]	= (w >> 8) & 0xff;
			output[byte + 7]	= w & 0xff;
		}
#if	defined(__sparc)
	}
#endif
}
919 
920 
921 #ifdef _KERNEL
922 
923 /*
924  * KCF software provider control entry points.
925  */
926 /* ARGSUSED */
static void
sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* Software provider: always ready, nothing to probe. */
	*status = CRYPTO_PROVIDER_READY;
}
932 
933 /*
934  * KCF software provider digest entry points.
935  */
936 
937 static int
938 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
939     crypto_req_handle_t req)
940 {
941 
942 	/*
943 	 * Allocate and initialize SHA2 context.
944 	 */
945 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
946 	    crypto_kmflag(req));
947 	if (ctx->cc_provider_private == NULL)
948 		return (CRYPTO_HOST_MEMORY);
949 
950 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
951 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
952 
953 	return (CRYPTO_SUCCESS);
954 }
955 
956 /*
957  * Helper SHA2 digest update function for uio data.
958  */
959 static int
960 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
961 {
962 	off_t offset = data->cd_offset;
963 	size_t length = data->cd_length;
964 	uint_t vec_idx;
965 	size_t cur_len;
966 
967 	/* we support only kernel buffer */
968 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
969 		return (CRYPTO_ARGUMENTS_BAD);
970 
971 	/*
972 	 * Jump to the first iovec containing data to be
973 	 * digested.
974 	 */
975 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
976 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
977 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len);
978 	if (vec_idx == data->cd_uio->uio_iovcnt) {
979 		/*
980 		 * The caller specified an offset that is larger than the
981 		 * total size of the buffers it provided.
982 		 */
983 		return (CRYPTO_DATA_LEN_RANGE);
984 	}
985 
986 	/*
987 	 * Now do the digesting on the iovecs.
988 	 */
989 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
990 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
991 		    offset, length);
992 
993 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
994 		    uio_iov[vec_idx].iov_base + offset, cur_len);
995 		length -= cur_len;
996 		vec_idx++;
997 		offset = 0;
998 	}
999 
1000 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
1001 		/*
1002 		 * The end of the specified iovec's was reached but
1003 		 * the length requested could not be processed, i.e.
1004 		 * The caller requested to digest more data than it provided.
1005 		 */
1006 		return (CRYPTO_DATA_LEN_RANGE);
1007 	}
1008 
1009 	return (CRYPTO_SUCCESS);
1010 }
1011 
1012 /*
1013  * Helper SHA2 digest final function for uio data.
1014  * digest_len is the length of the desired digest. If digest_len
1015  * is smaller than the default SHA2 digest length, the caller
1016  * must pass a scratch buffer, digest_scratch, which must
1017  * be at least the algorithm's digest length bytes.
1018  */
1019 static int
1020 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
1021     ulong_t digest_len, uchar_t *digest_scratch)
1022 {
1023 	off_t offset = digest->cd_offset;
1024 	uint_t vec_idx;
1025 
1026 	/* we support only kernel buffer */
1027 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
1028 		return (CRYPTO_ARGUMENTS_BAD);
1029 
1030 	/*
1031 	 * Jump to the first iovec containing ptr to the digest to
1032 	 * be returned.
1033 	 */
1034 	for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
1035 	    vec_idx < digest->cd_uio->uio_iovcnt;
1036 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len);
1037 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
1038 		/*
1039 		 * The caller specified an offset that is
1040 		 * larger than the total size of the buffers
1041 		 * it provided.
1042 		 */
1043 		return (CRYPTO_DATA_LEN_RANGE);
1044 	}
1045 
1046 	if (offset + digest_len <=
1047 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
1048 		/*
1049 		 * The computed SHA2 digest will fit in the current
1050 		 * iovec.
1051 		 */
1052 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1053 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
1054 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1055 			(digest_len != SHA512_DIGEST_LENGTH))) {
1056 			/*
1057 			 * The caller requested a short digest. Digest
1058 			 * into a scratch buffer and return to
1059 			 * the user only what was requested.
1060 			 */
1061 			SHA2Final(digest_scratch, sha2_ctx);
1062 
1063 			bcopy(digest_scratch, (uchar_t *)digest->
1064 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1065 			    digest_len);
1066 		} else {
1067 			SHA2Final((uchar_t *)digest->
1068 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1069 			    sha2_ctx);
1070 
1071 		}
1072 	} else {
1073 		/*
1074 		 * The computed digest will be crossing one or more iovec's.
1075 		 * This is bad performance-wise but we need to support it.
1076 		 * Allocate a small scratch buffer on the stack and
1077 		 * copy it piece meal to the specified digest iovec's.
1078 		 */
1079 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
1080 		off_t scratch_offset = 0;
1081 		size_t length = digest_len;
1082 		size_t cur_len;
1083 
1084 		SHA2Final(digest_tmp, sha2_ctx);
1085 
1086 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
1087 			cur_len =
1088 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
1089 				    offset, length);
1090 			bcopy(digest_tmp + scratch_offset,
1091 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
1092 			    cur_len);
1093 
1094 			length -= cur_len;
1095 			vec_idx++;
1096 			scratch_offset += cur_len;
1097 			offset = 0;
1098 		}
1099 
1100 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
1101 			/*
1102 			 * The end of the specified iovec's was reached but
1103 			 * the length requested could not be processed, i.e.
1104 			 * The caller requested to digest more data than it
1105 			 * provided.
1106 			 */
1107 			return (CRYPTO_DATA_LEN_RANGE);
1108 		}
1109 	}
1110 
1111 	return (CRYPTO_SUCCESS);
1112 }
1113 
1114 /*
1115  * Helper SHA2 digest update for mblk's.
1116  */
1117 static int
1118 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
1119 {
1120 	off_t offset = data->cd_offset;
1121 	size_t length = data->cd_length;
1122 	mblk_t *mp;
1123 	size_t cur_len;
1124 
1125 	/*
1126 	 * Jump to the first mblk_t containing data to be digested.
1127 	 */
1128 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
1129 	    offset -= MBLKL(mp), mp = mp->b_cont);
1130 	if (mp == NULL) {
1131 		/*
1132 		 * The caller specified an offset that is larger than the
1133 		 * total size of the buffers it provided.
1134 		 */
1135 		return (CRYPTO_DATA_LEN_RANGE);
1136 	}
1137 
1138 	/*
1139 	 * Now do the digesting on the mblk chain.
1140 	 */
1141 	while (mp != NULL && length > 0) {
1142 		cur_len = MIN(MBLKL(mp) - offset, length);
1143 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
1144 		length -= cur_len;
1145 		offset = 0;
1146 		mp = mp->b_cont;
1147 	}
1148 
1149 	if (mp == NULL && length > 0) {
1150 		/*
1151 		 * The end of the mblk was reached but the length requested
1152 		 * could not be processed, i.e. The caller requested
1153 		 * to digest more data than it provided.
1154 		 */
1155 		return (CRYPTO_DATA_LEN_RANGE);
1156 	}
1157 
1158 	return (CRYPTO_SUCCESS);
1159 }
1160 
1161 /*
1162  * Helper SHA2 digest final for mblk's.
1163  * digest_len is the length of the desired digest. If digest_len
1164  * is smaller than the default SHA2 digest length, the caller
1165  * must pass a scratch buffer, digest_scratch, which must
1166  * be at least the algorithm's digest length bytes.
1167  */
static int
sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
	off_t offset = digest->cd_offset;
	mblk_t *mp;

	/*
	 * Jump to the first mblk_t that will be used to store the digest.
	 */
	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
	    offset -= MBLKL(mp), mp = mp->b_cont);
	if (mp == NULL) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + digest_len <= MBLKL(mp)) {
		/*
		 * The computed SHA2 digest will fit in the current mblk.
		 * Do the SHA2Final() in-place.
		 *
		 * A digest_len that differs from the mechanism's natural
		 * digest length means the caller asked for a truncated
		 * digest (the *_GEN mechanisms pass hc_digest_len here;
		 * see sha2_mac_final()).
		 */
		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA256_DIGEST_LENGTH)) ||
		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
			(digest_len != SHA512_DIGEST_LENGTH))) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest_scratch, sha2_ctx);
			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
		} else {
			SHA2Final(mp->b_rptr + offset, sha2_ctx);
		}
	} else {
		/*
		 * The computed digest will be crossing one or more mblk's.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piece meal to the specified digest iovec's.
		 */
		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		SHA2Final(digest_tmp, sha2_ctx);

		while (mp != NULL && length > 0) {
			cur_len = MIN(MBLKL(mp) - offset, length);
			bcopy(digest_tmp + scratch_offset,
			    mp->b_rptr + offset, cur_len);

			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}

		if (mp == NULL && length > 0) {
			/*
			 * The end of the specified mblk was reached but
			 * the length requested could not be processed, i.e.
			 * The caller requested to digest more data than it
			 * provided.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
1245 
/* ARGSUSED */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the session's mechanism to its output digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases:
	 * the caller can re-issue the call with a big enough buffer.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		/* full-length digest: no scratch buffer needed */
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero the context on failure; on success
		 * SHA2Final() has already zeroized it for us.
		 */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
1348 
1349 /* ARGSUSED */
1350 static int
1351 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
1352     crypto_req_handle_t req)
1353 {
1354 	int ret = CRYPTO_SUCCESS;
1355 
1356 	ASSERT(ctx->cc_provider_private != NULL);
1357 
1358 	/*
1359 	 * Do the SHA2 update on the specified input data.
1360 	 */
1361 	switch (data->cd_format) {
1362 	case CRYPTO_DATA_RAW:
1363 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1364 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1365 		    data->cd_length);
1366 		break;
1367 	case CRYPTO_DATA_UIO:
1368 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1369 		    data);
1370 		break;
1371 	case CRYPTO_DATA_MBLK:
1372 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
1373 		    data);
1374 		break;
1375 	default:
1376 		ret = CRYPTO_ARGUMENTS_BAD;
1377 	}
1378 
1379 	return (ret);
1380 }
1381 
/* ARGSUSED */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the session's mechanism to its output digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases:
	 * the caller can re-issue the call with a big enough buffer.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		/* full-length digest: no scratch buffer needed */
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero the context on failure; on success
		 * SHA2Final() has already zeroized it for us.
		 */
		bzero(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
1454 
/* ARGSUSED */
static int
sha2_digest_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	SHA2_CTX sha2_ctx;	/* stack context: single-shot operation */
	uint32_t sha_digest_len;

	/*
	 * Do the SHA inits.
	 */

	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA updates on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx, (uint8_t *)data->
		    cd_raw.iov_base + data->cd_offset, data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, bail */
		bzero(&sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
		return (ret);

	}

	/*
	 * Mechanisms at or below SHA256_HMAC_GEN use the SHA256 digest
	 * length; everything above uses the SHA512 length.
	 * NOTE(review): for the SHA384 mechanisms this selects the
	 * SHA512 length (64 bytes) — confirm that cd_length is meant
	 * to reflect that here.
	 */
	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
		sha_digest_len = SHA256_DIGEST_LENGTH;
	else
		sha_digest_len = SHA512_DIGEST_LENGTH;

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		digest->cd_length = sha_digest_len;
	} else {
		/*
		 * Only bzero context on failure, since SHA2Final()
		 * does it for us.
		 */
		bzero(&sha2_ctx, sizeof (SHA2_CTX));
		digest->cd_length = 0;
	}

	return (ret);
}
1538 
1539 /*
1540  * KCF software provider mac entry points.
1541  *
1542  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
1543  *
1544  * Init:
1545  * The initialization routine initializes what we denote
1546  * as the inner and outer contexts by doing
1547  * - for inner context: SHA2(key XOR ipad)
1548  * - for outer context: SHA2(key XOR opad)
1549  *
1550  * Update:
1551  * Each subsequent SHA2 HMAC update will result in an
1552  * update of the inner context with the specified data.
1553  *
1554  * Final:
1555  * The SHA2 HMAC final will do a SHA2 final operation on the
1556  * inner context, and the resulting digest will be used
1557  * as the data for an update on the outer context. Last
1558  * but not least, a SHA2 final on the outer context will
1559  * be performed to obtain the SHA2 HMAC digest to return
1560  * to the user.
1561  */
1562 
/*
 * Initialize a SHA2-HMAC context: derive the inner (key XOR ipad) and
 * outer (key XOR opad) SHA2 contexts from the raw key.
 *
 *   ctx			- HMAC context; hc_mech_type must be set
 *   keyval			- raw key bytes
 *   length_in_bytes		- key length; callers pre-digest keys
 *				  longer than the HMAC block size
 */
static void
sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
{
	/* pads sized for the largest (SHA512) HMAC block */
	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
	int i, block_size, blocks_per_int64;

	/* Determine the block size */
	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
		block_size = SHA256_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	} else {
		block_size = SHA512_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	}

	/* Zero-pad the key out to block_size before XORing. */
	(void) bzero(ipad, block_size);
	(void) bzero(opad, block_size);
	(void) bcopy(keyval, ipad, length_in_bytes);
	(void) bcopy(keyval, opad, length_in_bytes);

	/* XOR key with ipad (0x36) and opad (0x5c), 8 bytes at a time */
	for (i = 0; i < blocks_per_int64; i ++) {
		ipad[i] ^= 0x3636363636363636;
		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
	}

	/* perform SHA2 on ipad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);

	/* perform SHA2 on opad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);

}
1602 
1603 /*
1604  */
1605 static int
1606 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
1607     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
1608     crypto_req_handle_t req)
1609 {
1610 	int ret = CRYPTO_SUCCESS;
1611 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1612 	uint_t sha_digest_len, sha_hmac_block_size;
1613 
1614 	/*
1615 	 * Set the digest length and block size to values approriate to the
1616 	 * mechanism
1617 	 */
1618 	switch (mechanism->cm_type) {
1619 	case SHA256_HMAC_MECH_INFO_TYPE:
1620 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1621 		sha_digest_len = SHA256_DIGEST_LENGTH;
1622 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1623 		break;
1624 	case SHA384_HMAC_MECH_INFO_TYPE:
1625 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1626 	case SHA512_HMAC_MECH_INFO_TYPE:
1627 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1628 		sha_digest_len = SHA512_DIGEST_LENGTH;
1629 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1630 		break;
1631 	default:
1632 		return (CRYPTO_MECHANISM_INVALID);
1633 	}
1634 
1635 	if (key->ck_format != CRYPTO_KEY_RAW)
1636 		return (CRYPTO_ARGUMENTS_BAD);
1637 
1638 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1639 	    crypto_kmflag(req));
1640 	if (ctx->cc_provider_private == NULL)
1641 		return (CRYPTO_HOST_MEMORY);
1642 
1643 	if (ctx_template != NULL) {
1644 		/* reuse context template */
1645 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
1646 		    sizeof (sha2_hmac_ctx_t));
1647 	} else {
1648 		/* no context template, compute context */
1649 		if (keylen_in_bytes > sha_hmac_block_size) {
1650 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
1651 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
1652 
1653 			/*
1654 			 * Hash the passed-in key to get a smaller key.
1655 			 * The inner context is used since it hasn't been
1656 			 * initialized yet.
1657 			 */
1658 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1659 			    &hmac_ctx->hc_icontext,
1660 			    key->ck_data, keylen_in_bytes, digested_key);
1661 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1662 			    digested_key, sha_digest_len);
1663 		} else {
1664 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1665 			    key->ck_data, keylen_in_bytes);
1666 		}
1667 	}
1668 
1669 	/*
1670 	 * Get the mechanism parameters, if applicable.
1671 	 */
1672 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
1673 	if (mechanism->cm_type % 3 == 2) {
1674 		if (mechanism->cm_param == NULL ||
1675 		    mechanism->cm_param_len != sizeof (ulong_t))
1676 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1677 		PROV_SHA2_GET_DIGEST_LEN(mechanism,
1678 		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
1679 		if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
1680 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1681 	}
1682 
1683 	if (ret != CRYPTO_SUCCESS) {
1684 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1685 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1686 		ctx->cc_provider_private = NULL;
1687 	}
1688 
1689 	return (ret);
1690 }
1691 
1692 /* ARGSUSED */
1693 static int
1694 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1695     crypto_req_handle_t req)
1696 {
1697 	int ret = CRYPTO_SUCCESS;
1698 
1699 	ASSERT(ctx->cc_provider_private != NULL);
1700 
1701 	/*
1702 	 * Do a SHA2 update of the inner context using the specified
1703 	 * data.
1704 	 */
1705 	switch (data->cd_format) {
1706 	case CRYPTO_DATA_RAW:
1707 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1708 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1709 		    data->cd_length);
1710 		break;
1711 	case CRYPTO_DATA_UIO:
1712 		ret = sha2_digest_update_uio(
1713 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1714 		break;
1715 	case CRYPTO_DATA_MBLK:
1716 		ret = sha2_digest_update_mblk(
1717 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1718 		break;
1719 	default:
1720 		ret = CRYPTO_ARGUMENTS_BAD;
1721 	}
1722 
1723 	return (ret);
1724 }
1725 
1726 /* ARGSUSED */
1727 static int
1728 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1729 {
1730 	int ret = CRYPTO_SUCCESS;
1731 	uchar_t digest[SHA512_DIGEST_LENGTH];
1732 	uint32_t digest_len, sha_digest_len;
1733 
1734 	ASSERT(ctx->cc_provider_private != NULL);
1735 
1736 	/* Set the digest lengths to values approriate to the mechanism */
1737 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1738 	case SHA256_HMAC_MECH_INFO_TYPE:
1739 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1740 		break;
1741 	case SHA384_HMAC_MECH_INFO_TYPE:
1742 	case SHA512_HMAC_MECH_INFO_TYPE:
1743 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1744 		break;
1745 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1746 		sha_digest_len = SHA256_DIGEST_LENGTH;
1747 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1748 		break;
1749 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1750 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1751 		sha_digest_len = SHA512_DIGEST_LENGTH;
1752 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1753 		break;
1754 	}
1755 
1756 	/*
1757 	 * We need to just return the length needed to store the output.
1758 	 * We should not destroy the context for the following cases.
1759 	 */
1760 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1761 		mac->cd_length = digest_len;
1762 		return (CRYPTO_BUFFER_TOO_SMALL);
1763 	}
1764 
1765 	/*
1766 	 * Do a SHA2 final on the inner context.
1767 	 */
1768 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1769 
1770 	/*
1771 	 * Do a SHA2 update on the outer context, feeding the inner
1772 	 * digest as data.
1773 	 */
1774 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1775 	    sha_digest_len);
1776 
1777 	/*
1778 	 * Do a SHA2 final on the outer context, storing the computing
1779 	 * digest in the users buffer.
1780 	 */
1781 	switch (mac->cd_format) {
1782 	case CRYPTO_DATA_RAW:
1783 		if (digest_len != sha_digest_len) {
1784 			/*
1785 			 * The caller requested a short digest. Digest
1786 			 * into a scratch buffer and return to
1787 			 * the user only what was requested.
1788 			 */
1789 			SHA2Final(digest,
1790 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1791 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1792 			    mac->cd_offset, digest_len);
1793 		} else {
1794 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1795 			    mac->cd_offset,
1796 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1797 		}
1798 		break;
1799 	case CRYPTO_DATA_UIO:
1800 		ret = sha2_digest_final_uio(
1801 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1802 		    digest_len, digest);
1803 		break;
1804 	case CRYPTO_DATA_MBLK:
1805 		ret = sha2_digest_final_mblk(
1806 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1807 		    digest_len, digest);
1808 		break;
1809 	default:
1810 		ret = CRYPTO_ARGUMENTS_BAD;
1811 	}
1812 
1813 	if (ret == CRYPTO_SUCCESS) {
1814 		mac->cd_length = digest_len;
1815 	} else {
1816 		/*
1817 		 * Only bzero outer context on failure, since SHA2Final()
1818 		 * does it for us.
1819 		 * We don't have to bzero the inner context since we
1820 		 * always invoke a SHA2Final() on it.
1821 		 */
1822 		bzero(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext,
1823 		    sizeof (SHA2_CTX));
1824 		mac->cd_length = 0;
1825 	}
1826 
1827 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1828 	ctx->cc_provider_private = NULL;
1829 
1830 	return (ret);
1831 }
1832 
/*
 * Run a SHA2 update of the inner HMAC context over the supplied
 * crypto_data_t, dispatching on its format (raw buffer, uio, or mblk).
 * On an unsupported format, "ret" is set to CRYPTO_ARGUMENTS_BAD;
 * otherwise "ret" receives the update routine's return value (raw
 * updates cannot fail and leave "ret" untouched).
 *
 * Wrapped in do/while (0) so that the macro plus the caller's trailing
 * semicolon expand to exactly one statement, making it safe inside
 * unbraced if/else bodies.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) do {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	case CRYPTO_DATA_MBLK:						\
		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
		    data);						\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
} while (0)
1851 
/*
 * KCF software provider mac entry point for atomic (single-part)
 * operations.  Computes HMAC-SHA2(key, data) and stores the result
 * (possibly truncated, for the general-length _GEN_ mechanisms) into
 * "mac".  On failure, mac->cd_length is set to 0 and any key-derived
 * state in the on-stack context is zeroed.
 */
/* ARGSUSED */
static int
sha2_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to
	 * the mechanism.  Note that SHA384 deliberately shares the SHA512
	 * values here; its shorter 48-byte inner digest is special-cased
	 * further down.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	if (ctx_template != NULL) {
		/* reuse context template (already keyed ipad/opad state) */
		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key,
			 * per the HMAC definition for oversized keys.
			 * The inner context is used as scratch since it
			 * hasn't been initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.  Mechanism numbers
	 * come in groups of three; (cm_type % 3) == 2 identifies the
	 * general-length (_GEN_) variants, which carry the requested MAC
	 * length as a ulong_t parameter.
	 */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * SHA384 must be handled specially: its inner digest is only
	 * 48 bytes, shorter than sha_digest_len (which was set to the
	 * SHA512 length above), so only SHA384_DIGEST_LENGTH bytes may
	 * be fed to the outer context.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the users buffer.
	 *
	 * NOTE(review): for plain SHA384_HMAC, digest_len equals the
	 * SHA512 length (64) but SHA2Final() only writes 48 bytes, so
	 * the RAW else-branch leaves the last 16 bytes of the caller's
	 * buffer unwritten while cd_length is set to 64 — confirm
	 * whether callers rely on this.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
	} else {
		/*
		 * Only bzero outer context on failure, since SHA2Final()
		 * does it for us.
		 * We don't have to bzero the inner context since we
		 * always invoke a SHA2Final() on it.
		 *
		 * NOTE(review): on success the on-stack HMAC context
		 * (which holds key-derived state) is not scrubbed before
		 * return — confirm whether that is intended.
		 */
		bzero(&sha2_hmac_ctx.hc_ocontext, sizeof (SHA2_CTX));
		mac->cd_length = 0;
	}

	return (ret);
bail:
	/* scrub key-derived state before returning the error */
	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
2003 
2004 /* ARGSUSED */
2005 static int
2006 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
2007     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
2008     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
2009     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
2010 {
2011 	int ret = CRYPTO_SUCCESS;
2012 	uchar_t digest[SHA512_DIGEST_LENGTH];
2013 	sha2_hmac_ctx_t sha2_hmac_ctx;
2014 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
2015 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
2016 
2017 	/*
2018 	 * Set the digest length and block size to values approriate to the
2019 	 * mechanism
2020 	 */
2021 	switch (mechanism->cm_type) {
2022 	case SHA256_HMAC_MECH_INFO_TYPE:
2023 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2024 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
2025 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
2026 		break;
2027 	case SHA384_HMAC_MECH_INFO_TYPE:
2028 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2029 	case SHA512_HMAC_MECH_INFO_TYPE:
2030 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2031 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
2032 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
2033 		break;
2034 	default:
2035 		return (CRYPTO_MECHANISM_INVALID);
2036 	}
2037 
2038 	/* Add support for key by attributes (RFE 4706552) */
2039 	if (key->ck_format != CRYPTO_KEY_RAW)
2040 		return (CRYPTO_ARGUMENTS_BAD);
2041 
2042 	if (ctx_template != NULL) {
2043 		/* reuse context template */
2044 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
2045 	} else {
2046 		/* no context template, initialize context */
2047 		if (keylen_in_bytes > sha_hmac_block_size) {
2048 			/*
2049 			 * Hash the passed-in key to get a smaller key.
2050 			 * The inner context is used since it hasn't been
2051 			 * initialized yet.
2052 			 */
2053 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
2054 			    &sha2_hmac_ctx.hc_icontext,
2055 			    key->ck_data, keylen_in_bytes, digest);
2056 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
2057 			    sha_digest_len);
2058 		} else {
2059 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
2060 			    keylen_in_bytes);
2061 		}
2062 	}
2063 
2064 	/* get the mechanism parameters, if applicable */
2065 	if (mechanism->cm_type % 3 == 2) {
2066 		if (mechanism->cm_param == NULL ||
2067 		    mechanism->cm_param_len != sizeof (ulong_t)) {
2068 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
2069 			goto bail;
2070 		}
2071 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
2072 		if (digest_len > sha_digest_len) {
2073 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
2074 			goto bail;
2075 		}
2076 	}
2077 
2078 	if (mac->cd_length != digest_len) {
2079 		ret = CRYPTO_INVALID_MAC;
2080 		goto bail;
2081 	}
2082 
2083 	/* do a SHA2 update of the inner context using the specified data */
2084 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
2085 	if (ret != CRYPTO_SUCCESS)
2086 		/* the update failed, free context and bail */
2087 		goto bail;
2088 
2089 	/* do a SHA2 final on the inner context */
2090 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
2091 
2092 	/*
2093 	 * Do an SHA2 update on the outer context, feeding the inner
2094 	 * digest as data.
2095 	 */
2096 	SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
2097 
2098 	/*
2099 	 * Do a SHA2 final on the outer context, storing the computed
2100 	 * digest in the users buffer.
2101 	 */
2102 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
2103 
2104 	/*
2105 	 * Compare the computed digest against the expected digest passed
2106 	 * as argument.
2107 	 */
2108 
2109 	switch (mac->cd_format) {
2110 
2111 	case CRYPTO_DATA_RAW:
2112 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
2113 		    mac->cd_offset, digest_len) != 0)
2114 			ret = CRYPTO_INVALID_MAC;
2115 		break;
2116 
2117 	case CRYPTO_DATA_UIO: {
2118 		off_t offset = mac->cd_offset;
2119 		uint_t vec_idx;
2120 		off_t scratch_offset = 0;
2121 		size_t length = digest_len;
2122 		size_t cur_len;
2123 
2124 		/* we support only kernel buffer */
2125 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
2126 			return (CRYPTO_ARGUMENTS_BAD);
2127 
2128 		/* jump to the first iovec containing the expected digest */
2129 		for (vec_idx = 0;
2130 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
2131 		    vec_idx < mac->cd_uio->uio_iovcnt;
2132 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len);
2133 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
2134 			/*
2135 			 * The caller specified an offset that is
2136 			 * larger than the total size of the buffers
2137 			 * it provided.
2138 			 */
2139 			ret = CRYPTO_DATA_LEN_RANGE;
2140 			break;
2141 		}
2142 
2143 		/* do the comparison of computed digest vs specified one */
2144 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
2145 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
2146 			    offset, length);
2147 
2148 			if (bcmp(digest + scratch_offset,
2149 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
2150 			    cur_len) != 0) {
2151 				ret = CRYPTO_INVALID_MAC;
2152 				break;
2153 			}
2154 
2155 			length -= cur_len;
2156 			vec_idx++;
2157 			scratch_offset += cur_len;
2158 			offset = 0;
2159 		}
2160 		break;
2161 	}
2162 
2163 	case CRYPTO_DATA_MBLK: {
2164 		off_t offset = mac->cd_offset;
2165 		mblk_t *mp;
2166 		off_t scratch_offset = 0;
2167 		size_t length = digest_len;
2168 		size_t cur_len;
2169 
2170 		/* jump to the first mblk_t containing the expected digest */
2171 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
2172 		    offset -= MBLKL(mp), mp = mp->b_cont);
2173 		if (mp == NULL) {
2174 			/*
2175 			 * The caller specified an offset that is larger than
2176 			 * the total size of the buffers it provided.
2177 			 */
2178 			ret = CRYPTO_DATA_LEN_RANGE;
2179 			break;
2180 		}
2181 
2182 		while (mp != NULL && length > 0) {
2183 			cur_len = MIN(MBLKL(mp) - offset, length);
2184 			if (bcmp(digest + scratch_offset,
2185 			    mp->b_rptr + offset, cur_len) != 0) {
2186 				ret = CRYPTO_INVALID_MAC;
2187 				break;
2188 			}
2189 
2190 			length -= cur_len;
2191 			mp = mp->b_cont;
2192 			scratch_offset += cur_len;
2193 			offset = 0;
2194 		}
2195 		break;
2196 	}
2197 
2198 	default:
2199 		ret = CRYPTO_ARGUMENTS_BAD;
2200 	}
2201 
2202 	return (ret);
2203 bail:
2204 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
2205 	mac->cd_length = 0;
2206 	return (ret);
2207 }
2208 
2209 /*
2210  * KCF software provider context management entry points.
2211  */
2212 
2213 /* ARGSUSED */
2214 static int
2215 sha2_create_ctx_template(crypto_provider_handle_t provider,
2216     crypto_mechanism_t *mechanism, crypto_key_t *key,
2217     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
2218     crypto_req_handle_t req)
2219 {
2220 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
2221 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
2222 	uint32_t sha_digest_len, sha_hmac_block_size;
2223 
2224 	/*
2225 	 * Set the digest length and block size to values approriate to the
2226 	 * mechanism
2227 	 */
2228 	switch (mechanism->cm_type) {
2229 	case SHA256_HMAC_MECH_INFO_TYPE:
2230 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2231 		sha_digest_len = SHA256_DIGEST_LENGTH;
2232 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
2233 		break;
2234 	case SHA384_HMAC_MECH_INFO_TYPE:
2235 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2236 	case SHA512_HMAC_MECH_INFO_TYPE:
2237 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2238 		sha_digest_len = SHA512_DIGEST_LENGTH;
2239 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
2240 		break;
2241 	default:
2242 		return (CRYPTO_MECHANISM_INVALID);
2243 	}
2244 
2245 	/* Add support for key by attributes (RFE 4706552) */
2246 	if (key->ck_format != CRYPTO_KEY_RAW)
2247 		return (CRYPTO_ARGUMENTS_BAD);
2248 
2249 	/*
2250 	 * Allocate and initialize SHA2 context.
2251 	 */
2252 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
2253 	    crypto_kmflag(req));
2254 	if (sha2_hmac_ctx_tmpl == NULL)
2255 		return (CRYPTO_HOST_MEMORY);
2256 
2257 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
2258 
2259 	if (keylen_in_bytes > sha_hmac_block_size) {
2260 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
2261 
2262 		/*
2263 		 * Hash the passed-in key to get a smaller key.
2264 		 * The inner context is used since it hasn't been
2265 		 * initialized yet.
2266 		 */
2267 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
2268 		    &sha2_hmac_ctx_tmpl->hc_icontext,
2269 		    key->ck_data, keylen_in_bytes, digested_key);
2270 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
2271 		    sha_digest_len);
2272 	} else {
2273 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
2274 		    keylen_in_bytes);
2275 	}
2276 
2277 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
2278 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
2279 
2280 	return (CRYPTO_SUCCESS);
2281 }
2282 
2283 static int
2284 sha2_free_context(crypto_ctx_t *ctx)
2285 {
2286 	uint_t ctx_len;
2287 
2288 	if (ctx->cc_provider_private == NULL)
2289 		return (CRYPTO_SUCCESS);
2290 
2291 	/*
2292 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
2293 	 * have different lengths.
2294 	 *
2295 	 * Note: Below is dependent on the mechanism ordering.
2296 	 */
2297 
2298 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
2299 		ctx_len = sizeof (sha2_ctx_t);
2300 	else
2301 		ctx_len = sizeof (sha2_hmac_ctx_t);
2302 
2303 	bzero(ctx->cc_provider_private, ctx_len);
2304 	kmem_free(ctx->cc_provider_private, ctx_len);
2305 	ctx->cc_provider_private = NULL;
2306 
2307 	return (CRYPTO_SUCCESS);
2308 }
2309 
2310 #endif /* _KERNEL */
2311 
2312 void
2313 SHA2Init(uint64_t mech, SHA2_CTX *ctx)
2314 {
2315 
2316 	switch (mech) {
2317 	case SHA256_MECH_INFO_TYPE:
2318 	case SHA256_HMAC_MECH_INFO_TYPE:
2319 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2320 		ctx->state.s32[0] = 0x6a09e667U;
2321 		ctx->state.s32[1] = 0xbb67ae85U;
2322 		ctx->state.s32[2] = 0x3c6ef372U;
2323 		ctx->state.s32[3] = 0xa54ff53aU;
2324 		ctx->state.s32[4] = 0x510e527fU;
2325 		ctx->state.s32[5] = 0x9b05688cU;
2326 		ctx->state.s32[6] = 0x1f83d9abU;
2327 		ctx->state.s32[7] = 0x5be0cd19U;
2328 		break;
2329 	case SHA384_MECH_INFO_TYPE:
2330 	case SHA384_HMAC_MECH_INFO_TYPE:
2331 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2332 		ctx->state.s64[0] = 0xcbbb9d5dc1059ed8ULL;
2333 		ctx->state.s64[1] = 0x629a292a367cd507ULL;
2334 		ctx->state.s64[2] = 0x9159015a3070dd17ULL;
2335 		ctx->state.s64[3] = 0x152fecd8f70e5939ULL;
2336 		ctx->state.s64[4] = 0x67332667ffc00b31ULL;
2337 		ctx->state.s64[5] = 0x8eb44a8768581511ULL;
2338 		ctx->state.s64[6] = 0xdb0c2e0d64f98fa7ULL;
2339 		ctx->state.s64[7] = 0x47b5481dbefa4fa4ULL;
2340 		break;
2341 	case SHA512_MECH_INFO_TYPE:
2342 	case SHA512_HMAC_MECH_INFO_TYPE:
2343 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2344 		ctx->state.s64[0] = 0x6a09e667f3bcc908ULL;
2345 		ctx->state.s64[1] = 0xbb67ae8584caa73bULL;
2346 		ctx->state.s64[2] = 0x3c6ef372fe94f82bULL;
2347 		ctx->state.s64[3] = 0xa54ff53a5f1d36f1ULL;
2348 		ctx->state.s64[4] = 0x510e527fade682d1ULL;
2349 		ctx->state.s64[5] = 0x9b05688c2b3e6c1fULL;
2350 		ctx->state.s64[6] = 0x1f83d9abfb41bd6bULL;
2351 		ctx->state.s64[7] = 0x5be0cd19137e2179ULL;
2352 		break;
2353 #ifdef _KERNEL
2354 	default:
2355 		cmn_err(CE_WARN, "sha2_init: "
2356 		    "failed to find a supported algorithm: 0x%x",
2357 		    (uint32_t)mech);
2358 
2359 #endif /* _KERNEL */
2360 	}
2361 
2362 	ctx->algotype = mech;
2363 	ctx->count.c64[0] = ctx->count.c64[1] = 0;
2364 }
2365 
2366 /*
2367  * SHA2Update()
2368  *
2369  * purpose: continues an sha2 digest operation, using the message block
2370  *          to update the context.
2371  *   input: SHA2_CTX *	: the context to update
2372  *          uint8_t *	: the message block
2373  *          uint32_t    : the length of the message block in bytes
2374  *  output: void
2375  */
2376 
2377 void
2378 SHA2Update(SHA2_CTX *ctx, const uint8_t *input, uint32_t input_len)
2379 {
2380 	uint32_t i, buf_index, buf_len, buf_limit;
2381 
2382 	/* check for noop */
2383 	if (input_len == 0)
2384 		return;
2385 
2386 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2387 		buf_limit = 64;
2388 
2389 		/* compute number of bytes mod 64 */
2390 		buf_index = (ctx->count.c32[1] >> 3) & 0x3F;
2391 
2392 		/* update number of bits */
2393 		if ((ctx->count.c32[1] += (input_len << 3)) < (input_len << 3))
2394 			ctx->count.c32[0]++;
2395 
2396 		ctx->count.c32[0] += (input_len >> 29);
2397 
2398 	} else {
2399 		buf_limit = 128;
2400 
2401 		/* compute number of bytes mod 128 */
2402 		buf_index = (ctx->count.c64[1] >> 3) & 0x7F;
2403 
2404 		/* update number of bits */
2405 		if ((ctx->count.c64[1] += (input_len << 3)) < (input_len << 3))
2406 			ctx->count.c64[0]++;
2407 
2408 		ctx->count.c64[0] += (input_len >> 29);
2409 	}
2410 
2411 	buf_len = buf_limit - buf_index;
2412 
2413 	/* transform as many times as possible */
2414 	i = 0;
2415 	if (input_len >= buf_len) {
2416 
2417 		/*
2418 		 * general optimization:
2419 		 *
2420 		 * only do initial bcopy() and SHA2Transform() if
2421 		 * buf_index != 0.  if buf_index == 0, we're just
2422 		 * wasting our time doing the bcopy() since there
2423 		 * wasn't any data left over from a previous call to
2424 		 * SHA2Update().
2425 		 */
2426 		if (buf_index) {
2427 			bcopy(input, &ctx->buf_un.buf8[buf_index], buf_len);
2428 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2429 				SHA256Transform(ctx, ctx->buf_un.buf8);
2430 			else
2431 				SHA512Transform(ctx, ctx->buf_un.buf8);
2432 
2433 			i = buf_len;
2434 		}
2435 
2436 
2437 		for (; i + buf_limit - 1 < input_len; i += buf_limit) {
2438 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2439 				SHA256Transform(ctx, &input[i]);
2440 			else
2441 				SHA512Transform(ctx, &input[i]);
2442 		}
2443 
2444 		/*
2445 		 * general optimization:
2446 		 *
2447 		 * if i and input_len are the same, return now instead
2448 		 * of calling bcopy(), since the bcopy() in this case
2449 		 * will be an expensive nop.
2450 		 */
2451 
2452 		if (input_len == i)
2453 			return;
2454 
2455 		buf_index = 0;
2456 	}
2457 
2458 	/* buffer remaining input */
2459 	bcopy(&input[i], &ctx->buf_un.buf8[buf_index], input_len - i);
2460 }
2461 
2462 
2463 /*
2464  * SHA2Final()
2465  *
2466  * purpose: ends an sha2 digest operation, finalizing the message digest and
2467  *          zeroing the context.
2468  *   input: uint8_t *	: a buffer to store the digest in
2469  *          SHA2_CTX *  : the context to finalize, save, and zero
2470  *  output: void
2471  */
2472 
2473 
2474 void
2475 SHA2Final(uint8_t *digest, SHA2_CTX *ctx)
2476 {
2477 	uint8_t		bitcount_be[sizeof (ctx->count.c32)];
2478 	uint8_t		bitcount_be64[sizeof (ctx->count.c64)];
2479 	uint32_t	index;
2480 
2481 
2482 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2483 		index  = (ctx->count.c32[1] >> 3) & 0x3f;
2484 		Encode(bitcount_be, ctx->count.c32, sizeof (bitcount_be));
2485 		SHA2Update(ctx, PADDING, ((index < 56) ? 56 : 120) - index);
2486 		SHA2Update(ctx, bitcount_be, sizeof (bitcount_be));
2487 		Encode(digest, ctx->state.s32, sizeof (ctx->state.s32));
2488 
2489 	} else {
2490 		index  = (ctx->count.c64[1] >> 3) & 0x7f;
2491 		Encode64(bitcount_be64, ctx->count.c64,
2492 		    sizeof (bitcount_be64));
2493 		SHA2Update(ctx, PADDING, ((index < 112) ? 112 : 240) - index);
2494 		SHA2Update(ctx, bitcount_be64, sizeof (bitcount_be64));
2495 		if (ctx->algotype <= SHA384_HMAC_GEN_MECH_INFO_TYPE) {
2496 			ctx->state.s64[6] = ctx->state.s64[7] = 0;
2497 			Encode64(digest, ctx->state.s64,
2498 			    sizeof (uint64_t) * 6);
2499 		} else
2500 			Encode64(digest, ctx->state.s64,
2501 			    sizeof (ctx->state.s64));
2502 	}
2503 }
2504