xref: /illumos-gate/usr/src/common/crypto/sha2/sha2.c (revision fd3af1c7b4cc69da990babda7f07c7c5bf1e1597)
1 /*
2  * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
3  * Use is subject to license terms.
4  */
5 
6 #pragma ident	"%Z%%M%	%I%	%E% SMI"
7 
8 
9 /*
10  * The basic framework for this code came from the reference
11  * implementation for MD5.  That implementation is Copyright (C)
12  * 1991-2, RSA Data Security, Inc. Created 1991. All rights reserved.
13  *
14  * License to copy and use this software is granted provided that it
15  * is identified as the "RSA Data Security, Inc. MD5 Message-Digest
16  * Algorithm" in all material mentioning or referencing this software
17  * or this function.
18  *
19  * License is also granted to make and use derivative works provided
20  * that such works are identified as "derived from the RSA Data
21  * Security, Inc. MD5 Message-Digest Algorithm" in all material
22  * mentioning or referencing the derived work.
23  *
24  * RSA Data Security, Inc. makes no representations concerning either
25  * the merchantability of this software or the suitability of this
26  * software for any particular purpose. It is provided "as is"
27  * without express or implied warranty of any kind.
28  *
29  * These notices must be retained in any copies of any part of this
30  * documentation and/or software.
31  *
32  * NOTE: Cleaned-up and optimized, version of SHA2, based on the FIPS 180-2
33  * standard, available at http://www.itl.nist.gov/div897/pubs/fip180-2.htm
34  * Not as fast as one would like -- further optimizations are encouraged
35  * and appreciated.
36  */
37 
38 #include <sys/types.h>
39 #include <sys/param.h>
40 #include <sys/systm.h>
41 #include <sys/sysmacros.h>
42 #include <sys/sha2.h>
43 #include <sys/sha2_consts.h>
44 
45 #ifdef _KERNEL
46 
47 #include <sys/modctl.h>
48 #include <sys/cmn_err.h>
49 #include <sys/crypto/common.h>
50 #include <sys/crypto/spi.h>
51 #include <sys/strsun.h>
52 
53 /*
54  * The sha2 module is created with two modlinkages:
55  * - a modlmisc that allows consumers to directly call the entry points
56  *   SHA2Init, SHA2Update, and SHA2Final.
57  * - a modlcrypto that allows the module to register with the Kernel
58  *   Cryptographic Framework (KCF) as a software provider for the SHA2
59  *   mechanisms.
60  */
61 
62 #else
63 
64 #include <strings.h>
65 #include <stdlib.h>
66 #include <errno.h>
67 
68 #endif	/* !_KERNEL */
69 
/* Forward declarations for the helpers defined later in this file. */
static void Encode(uint8_t *, uint32_t *, size_t);
static void Encode64(uint8_t *, uint64_t *, size_t);
static void SHA256Transform(SHA2_CTX *, const uint8_t *);
static void SHA512Transform(SHA2_CTX *, const uint8_t *);

/*
 * Message padding: a single 0x80 byte followed by zeros (implicit
 * static initialization).  Sized 128 bytes, the block size of the
 * largest supported hashes (SHA-384/512).
 */
static uint8_t PADDING[128] = { 0x80, /* all zeros */ };
76 
/*
 * Ch and Maj are the basic SHA2 functions (FIPS 180-2, sec. 4.1):
 * Ch chooses between c and d based on b; Maj is the bitwise majority
 * of b, c, and d.  Every argument use is parenthesized — including the
 * operand of ~ — so the macros are safe with arbitrary expressions.
 * (The previous (~b) expansion would mis-parse Ch(x ^ y, ...).)
 */
#define	Ch(b, c, d)	(((b) & (c)) ^ ((~(b)) & (d)))
#define	Maj(b, c, d)	(((b) & (c)) ^ ((b) & (d)) ^ ((c) & (d)))

/*
 * Rotates x right n bits.  Callers must keep 0 < n < bit-width of x,
 * otherwise the (width - n) shift is undefined; all uses below comply.
 */
#define	ROTR(x, n)	\
	(((x) >> (n)) | ((x) << ((sizeof (x) * NBBY)-(n))))

/* Shift x right n bits */
#define	SHR(x, n)	((x) >> (n))

/* SHA256 Functions (FIPS 180-2, sec. 4.1.2) */
#define	BIGSIGMA0_256(x)	(ROTR((x), 2) ^ ROTR((x), 13) ^ ROTR((x), 22))
#define	BIGSIGMA1_256(x)	(ROTR((x), 6) ^ ROTR((x), 11) ^ ROTR((x), 25))
#define	SIGMA0_256(x)		(ROTR((x), 7) ^ ROTR((x), 18) ^ SHR((x), 3))
#define	SIGMA1_256(x)		(ROTR((x), 17) ^ ROTR((x), 19) ^ SHR((x), 10))

/*
 * One SHA-256 round; relies on uint32_t temporaries T1 and T2 declared
 * in the calling scope.  Deliberately not do/while(0)-wrapped: it must
 * be invoked as a plain statement (as SHA256Transform below does).
 */
#define	SHA256ROUND(a, b, c, d, e, f, g, h, i, w)			\
	T1 = h + BIGSIGMA1_256(e) + Ch(e, f, g) + SHA256_CONST(i) + w;	\
	d += T1;							\
	T2 = BIGSIGMA0_256(a) + Maj(a, b, c);				\
	h = T1 + T2
99 
/* SHA384/512 Functions (FIPS 180-2, sec. 4.1.3; 64-bit words) */
#define	BIGSIGMA0(x)	(ROTR((x), 28) ^ ROTR((x), 34) ^ ROTR((x), 39))
#define	BIGSIGMA1(x)	(ROTR((x), 14) ^ ROTR((x), 18) ^ ROTR((x), 41))
#define	SIGMA0(x)	(ROTR((x), 1) ^ ROTR((x), 8) ^ SHR((x), 7))
#define	SIGMA1(x)	(ROTR((x), 19) ^ ROTR((x), 61) ^ SHR((x), 6))
/*
 * One SHA-512 round; relies on uint64_t temporaries T1 and T2 declared
 * in the calling scope.  Not do/while(0)-wrapped, so it must be used
 * as a plain statement (as SHA512Transform below does).
 */
#define	SHA512ROUND(a, b, c, d, e, f, g, h, i, w)			\
	T1 = h + BIGSIGMA1(e) + Ch(e, f, g) + SHA512_CONST(i) + w;	\
	d += T1;							\
	T2 = BIGSIGMA0(a) + Maj(a, b, c);				\
	h = T1 + T2
110 
#ifdef _KERNEL

/* misc linkage: lets consumers call SHA2Init/SHA2Update/SHA2Final directly. */
static struct modlmisc modlmisc = {
	&mod_miscops,
	"SHA2 Message-Digest Algorithm"
};

/* crypto linkage: registers this module with KCF as a SW provider. */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"SHA2 Kernel SW Provider %I%"
};

/* Single module exporting both linkages (see block comment above). */
static struct modlinkage modlinkage = {
	MODREV_1, &modlmisc, &modlcrypto, NULL
};

/*
 * CSPI information (entry points, provider info, etc.)
 */

#endif /* _KERNEL */
132 
/*
 * List of mechanisms supported by this module.
 *
 * It is important to note that in this module, division and modulus
 * calculations are performed on the enumerated type to determine which
 * mechanism is being used; therefore, changing the order of, or adding,
 * mechanisms must be done carefully.
 */
typedef enum sha2_mech_type {
	SHA256_MECH_INFO_TYPE,		/* SUN_CKM_SHA256 */
	SHA256_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC */
	SHA256_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA256_HMAC_GENERAL */
	SHA384_MECH_INFO_TYPE,		/* SUN_CKM_SHA384 */
	SHA384_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC */
	SHA384_HMAC_GEN_MECH_INFO_TYPE,	/* SUN_CKM_SHA384_HMAC_GENERAL */
	SHA512_MECH_INFO_TYPE,		/* SUN_CKM_SHA512 */
	SHA512_HMAC_MECH_INFO_TYPE,	/* SUN_CKM_SHA512_HMAC */
	SHA512_HMAC_GEN_MECH_INFO_TYPE	/* SUN_CKM_SHA512_HMAC_GENERAL */
} sha2_mech_type_t;	/* three entries (digest, HMAC, HMAC general) per hash */
152 
#ifdef _KERNEL


/*
 * Context for SHA2 mechanism (plain digest, no key).
 */
typedef struct sha2_ctx {
	sha2_mech_type_t	sc_mech_type;	/* type of context */
	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
} sha2_ctx_t;

/*
 * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
 * NOTE(review): inner/outer contexts presumably hold the key^ipad and
 * key^opad states of RFC 2104 HMAC -- confirm in the init code below.
 */
typedef struct sha2_hmac_ctx {
	sha2_mech_type_t	hc_mech_type;	/* type of context */
	uint32_t		hc_digest_len;	/* digest len in bytes */
	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
} sha2_hmac_ctx_t;
173 
/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.
 */

#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)

/*
 * to extract the digest length passed as mechanism parameter.
 * cm_param may not be ulong_t-aligned, so an unaligned parameter is
 * bcopy'd into a local before the narrowing read.
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
	else {								\
		ulong_t tmp_ulong;					\
		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
}

/*
 * One-shot digest of a key: init/update/final on the given context.
 * Brace-block macro (not do/while(0)) -- invoke as a full statement.
 */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
	SHA2Init(mech, ctx);				\
	SHA2Update(ctx, key, len);			\
	SHA2Final(digest, ctx);				\
}
198 
/*
 * Mechanism info structure passed to KCF during registration.
 * Each entry: mechanism name, mechanism type, supported function-group
 * flags, min/max key sizes, and the unit of those sizes.  The plain
 * digest mechanisms take no key, hence the 0, 0 key sizes.
 */
static crypto_mech_info_t sha2_mech_info_tab[] = {
	/* SHA256 */
	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA256-HMAC */
	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA256-HMAC GENERAL */
	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA384 */
	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA384-HMAC */
	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA384-HMAC GENERAL */
	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA512 */
	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA512-HMAC */
	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA512-HMAC GENERAL */
	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BITS}
};
246 
/* Public entry points, defined later in this file (also exported to misc/sha2 consumers). */
void SHA2Init(uint64_t, SHA2_CTX *);
void SHA2Update(SHA2_CTX *, const uint8_t *, uint32_t);
void SHA2Final(uint8_t *, SHA2_CTX *);

static void sha2_provider_status(crypto_provider_handle_t, uint_t *);

/* KCF control ops: provider status query only. */
static crypto_control_ops_t sha2_control_ops = {
	sha2_provider_status
};
256 
static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_req_handle_t);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);

/*
 * KCF digest ops.  The NULL slot is left unimplemented -- NOTE(review):
 * presumably digest_key per crypto_digest_ops_t ordering; confirm
 * against sys/crypto/spi.h.
 */
static crypto_digest_ops_t sha2_digest_ops = {
	sha2_digest_init,
	sha2_digest,
	sha2_digest_update,
	NULL,
	sha2_digest_final,
	sha2_digest_atomic
};
277 
static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/*
 * KCF MAC (HMAC) ops.  The NULL slot is unimplemented -- NOTE(review):
 * presumably the single-part mac entry per crypto_mac_ops_t ordering;
 * confirm against sys/crypto/spi.h.
 */
static crypto_mac_ops_t sha2_mac_ops = {
	sha2_mac_init,
	NULL,
	sha2_mac_update,
	sha2_mac_final,
	sha2_mac_atomic,
	sha2_mac_verify_atomic
};
298 
static int sha2_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int sha2_free_context(crypto_ctx_t *);

/* KCF context ops: pre-computed HMAC templates and context teardown. */
static crypto_ctx_ops_t sha2_ctx_ops = {
	sha2_create_ctx_template,
	sha2_free_context
};
308 
/*
 * Aggregate ops vector handed to KCF.  Only control, digest, MAC, and
 * context ops are provided; the remaining NULL slots are op groups this
 * provider does not implement (cipher, sign, verify, etc. -- see the
 * crypto_ops_t definition in sys/crypto/spi.h for the exact ordering).
 */
static crypto_ops_t sha2_crypto_ops = {
	&sha2_control_ops,
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&sha2_ctx_ops
};
325 
/* Provider description registered with KCF in _init(). */
static crypto_provider_info_t sha2_prov_info = {
	CRYPTO_SPI_VERSION_1,
	"SHA2 Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
};

/* Handle returned by crypto_register_provider(); set in _init(). */
static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
338 
339 int
340 _init()
341 {
342 	int ret;
343 
344 	if ((ret = mod_install(&modlinkage)) != 0)
345 		return (ret);
346 
347 	/*
348 	 * Register with KCF. If the registration fails, log an
349 	 * error but do not uninstall the module, since the functionality
350 	 * provided by misc/sha2 should still be available.
351 	 */
352 	if ((ret = crypto_register_provider(&sha2_prov_info,
353 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
354 		cmn_err(CE_WARN, "sha2 _init: "
355 		    "crypto_register_provider() failed (0x%x)", ret);
356 
357 	return (0);
358 }
359 
360 int
361 _info(struct modinfo *modinfop)
362 {
363 	return (mod_info(&modlinkage, modinfop));
364 }
365 
366 #endif /* _KERNEL */
367 
368 
369 /*
370  * sparc optimization:
371  *
372  * on the sparc, we can load big endian 32-bit data easily.  note that
373  * special care must be taken to ensure the address is 32-bit aligned.
374  * in the interest of speed, we don't check to make sure, since
375  * careful programming can guarantee this for us.
376  */
377 
#if	defined(_BIG_ENDIAN)

#define	LOAD_BIG_32(addr)	(*(uint32_t *)(addr))

#else	/* little endian -- will work on big endian, but slowly */

/*
 * Assemble a big-endian 32-bit word a byte at a time.  Each byte is
 * cast to uint32_t before shifting: without the cast, (addr)[0] is
 * promoted to (signed) int, and left-shifting a byte value >= 0x80 by
 * 24 into the sign bit is undefined behavior in C.
 */
#define	LOAD_BIG_32(addr)	\
	(((uint32_t)(addr)[0] << 24) | ((uint32_t)(addr)[1] << 16) |	\
	    ((uint32_t)(addr)[2] << 8) | (uint32_t)(addr)[3])
#endif


#if	defined(_BIG_ENDIAN)

#define	LOAD_BIG_64(addr)	(*(uint64_t *)(addr))

#else	/* little endian -- will work on big endian, but slowly */

/* Assemble a big-endian 64-bit word; bytes already widened to uint64_t. */
#define	LOAD_BIG_64(addr)	\
	(((uint64_t)(addr)[0] << 56) | ((uint64_t)(addr)[1] << 48) |	\
	    ((uint64_t)(addr)[2] << 40) | ((uint64_t)(addr)[3] << 32) |	\
	    ((uint64_t)(addr)[4] << 24) | ((uint64_t)(addr)[5] << 16) |	\
	    ((uint64_t)(addr)[6] << 8) | (uint64_t)(addr)[7])

#endif
402 
403 
/* SHA256 Transform */

/*
 * Compress a single 64-byte message block into the eight 32-bit
 * chaining variables in ctx->state.s32, per FIPS 180-2 sec. 6.2.2.
 * The 64 rounds are fully unrolled; the 16-word message schedule is
 * kept in locals w0..w15 and recycled in place every 16 rounds.
 *
 * If blk is not 4-byte aligned it is first copied into the context's
 * aligned buffer, since LOAD_BIG_32 dereferences it as uint32_t on
 * big-endian builds.
 */
static void
SHA256Transform(SHA2_CTX *ctx, const uint8_t *blk)
{

	/* Working variables a..h, loaded from the current hash state. */
	uint32_t a = ctx->state.s32[0];
	uint32_t b = ctx->state.s32[1];
	uint32_t c = ctx->state.s32[2];
	uint32_t d = ctx->state.s32[3];
	uint32_t e = ctx->state.s32[4];
	uint32_t f = ctx->state.s32[5];
	uint32_t g = ctx->state.s32[6];
	uint32_t h = ctx->state.s32[7];

	uint32_t w0, w1, w2, w3, w4, w5, w6, w7;
	uint32_t w8, w9, w10, w11, w12, w13, w14, w15;
	uint32_t T1, T2;	/* round temporaries used by SHA256ROUND */

#if	defined(__sparc)
	/*
	 * Round constants K[0..63].  NOTE(review): on sparc,
	 * SHA256_CONST() presumably indexes this local table (see
	 * sys/sha2_consts.h) so the constants stay in cache -- confirm.
	 */
	static const uint32_t sha256_consts[] = {
		SHA256_CONST_0, SHA256_CONST_1, SHA256_CONST_2,
		SHA256_CONST_3, SHA256_CONST_4, SHA256_CONST_5,
		SHA256_CONST_6, SHA256_CONST_7, SHA256_CONST_8,
		SHA256_CONST_9, SHA256_CONST_10, SHA256_CONST_11,
		SHA256_CONST_12, SHA256_CONST_13, SHA256_CONST_14,
		SHA256_CONST_15, SHA256_CONST_16, SHA256_CONST_17,
		SHA256_CONST_18, SHA256_CONST_19, SHA256_CONST_20,
		SHA256_CONST_21, SHA256_CONST_22, SHA256_CONST_23,
		SHA256_CONST_24, SHA256_CONST_25, SHA256_CONST_26,
		SHA256_CONST_27, SHA256_CONST_28, SHA256_CONST_29,
		SHA256_CONST_30, SHA256_CONST_31, SHA256_CONST_32,
		SHA256_CONST_33, SHA256_CONST_34, SHA256_CONST_35,
		SHA256_CONST_36, SHA256_CONST_37, SHA256_CONST_38,
		SHA256_CONST_39, SHA256_CONST_40, SHA256_CONST_41,
		SHA256_CONST_42, SHA256_CONST_43, SHA256_CONST_44,
		SHA256_CONST_45, SHA256_CONST_46, SHA256_CONST_47,
		SHA256_CONST_48, SHA256_CONST_49, SHA256_CONST_50,
		SHA256_CONST_51, SHA256_CONST_52, SHA256_CONST_53,
		SHA256_CONST_54, SHA256_CONST_55, SHA256_CONST_56,
		SHA256_CONST_57, SHA256_CONST_58, SHA256_CONST_59,
		SHA256_CONST_60, SHA256_CONST_61, SHA256_CONST_62,
		SHA256_CONST_63
	};
#endif

	if ((uintptr_t)blk & 0x3) {		/* not 4-byte aligned? */
		bcopy(blk, ctx->buf_un.buf32,  sizeof (ctx->buf_un.buf32));
		blk = (uint8_t *)ctx->buf_un.buf32;
	}

	/* Rounds 0-15: load big-endian schedule words directly from blk. */
#if	defined(__sparc)
	/*LINTED*/
	w0 =  LOAD_BIG_32(blk + 4 * 0);
	SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0);
	/*LINTED*/
	w1 =  LOAD_BIG_32(blk + 4 * 1);
	SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1);
	/*LINTED*/
	w2 =  LOAD_BIG_32(blk + 4 * 2);
	SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2);
	/*LINTED*/
	w3 =  LOAD_BIG_32(blk + 4 * 3);
	SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3);
	/*LINTED*/
	w4 =  LOAD_BIG_32(blk + 4 * 4);
	SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4);
	/*LINTED*/
	w5 =  LOAD_BIG_32(blk + 4 * 5);
	SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5);
	/*LINTED*/
	w6 =  LOAD_BIG_32(blk + 4 * 6);
	SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6);
	/*LINTED*/
	w7 =  LOAD_BIG_32(blk + 4 * 7);
	SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7);
	/*LINTED*/
	w8 =  LOAD_BIG_32(blk + 4 * 8);
	SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8);
	/*LINTED*/
	w9 =  LOAD_BIG_32(blk + 4 * 9);
	SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9);
	/*LINTED*/
	w10 =  LOAD_BIG_32(blk + 4 * 10);
	SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10);
	/*LINTED*/
	w11 =  LOAD_BIG_32(blk + 4 * 11);
	SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11);
	/*LINTED*/
	w12 =  LOAD_BIG_32(blk + 4 * 12);
	SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12);
	/*LINTED*/
	w13 =  LOAD_BIG_32(blk + 4 * 13);
	SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13);
	/*LINTED*/
	w14 =  LOAD_BIG_32(blk + 4 * 14);
	SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14);
	/*LINTED*/
	w15 =  LOAD_BIG_32(blk + 4 * 15);
	SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15);

#else

	w0 =  LOAD_BIG_32(blk + 4 * 0);
	SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0);
	w1 =  LOAD_BIG_32(blk + 4 * 1);
	SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1);
	w2 =  LOAD_BIG_32(blk + 4 * 2);
	SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2);
	w3 =  LOAD_BIG_32(blk + 4 * 3);
	SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3);
	w4 =  LOAD_BIG_32(blk + 4 * 4);
	SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4);
	w5 =  LOAD_BIG_32(blk + 4 * 5);
	SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5);
	w6 =  LOAD_BIG_32(blk + 4 * 6);
	SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6);
	w7 =  LOAD_BIG_32(blk + 4 * 7);
	SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7);
	w8 =  LOAD_BIG_32(blk + 4 * 8);
	SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8);
	w9 =  LOAD_BIG_32(blk + 4 * 9);
	SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9);
	w10 =  LOAD_BIG_32(blk + 4 * 10);
	SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10);
	w11 =  LOAD_BIG_32(blk + 4 * 11);
	SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11);
	w12 =  LOAD_BIG_32(blk + 4 * 12);
	SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12);
	w13 =  LOAD_BIG_32(blk + 4 * 13);
	SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13);
	w14 =  LOAD_BIG_32(blk + 4 * 14);
	SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14);
	w15 =  LOAD_BIG_32(blk + 4 * 15);
	SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15);

#endif

	/*
	 * Rounds 16-63: each schedule word is expanded in place from the
	 * four older words per FIPS 180-2:
	 * W[t] = SIGMA1(W[t-2]) + W[t-7] + SIGMA0(W[t-15]) + W[t-16].
	 */
	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
	SHA256ROUND(a, b, c, d, e, f, g, h, 16, w0);
	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
	SHA256ROUND(h, a, b, c, d, e, f, g, 17, w1);
	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
	SHA256ROUND(g, h, a, b, c, d, e, f, 18, w2);
	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
	SHA256ROUND(f, g, h, a, b, c, d, e, 19, w3);
	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
	SHA256ROUND(e, f, g, h, a, b, c, d, 20, w4);
	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
	SHA256ROUND(d, e, f, g, h, a, b, c, 21, w5);
	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
	SHA256ROUND(c, d, e, f, g, h, a, b, 22, w6);
	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
	SHA256ROUND(b, c, d, e, f, g, h, a, 23, w7);
	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
	SHA256ROUND(a, b, c, d, e, f, g, h, 24, w8);
	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
	SHA256ROUND(h, a, b, c, d, e, f, g, 25, w9);
	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
	SHA256ROUND(g, h, a, b, c, d, e, f, 26, w10);
	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
	SHA256ROUND(f, g, h, a, b, c, d, e, 27, w11);
	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
	SHA256ROUND(e, f, g, h, a, b, c, d, 28, w12);
	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
	SHA256ROUND(d, e, f, g, h, a, b, c, 29, w13);
	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
	SHA256ROUND(c, d, e, f, g, h, a, b, 30, w14);
	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
	SHA256ROUND(b, c, d, e, f, g, h, a, 31, w15);

	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
	SHA256ROUND(a, b, c, d, e, f, g, h, 32, w0);
	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
	SHA256ROUND(h, a, b, c, d, e, f, g, 33, w1);
	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
	SHA256ROUND(g, h, a, b, c, d, e, f, 34, w2);
	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
	SHA256ROUND(f, g, h, a, b, c, d, e, 35, w3);
	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
	SHA256ROUND(e, f, g, h, a, b, c, d, 36, w4);
	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
	SHA256ROUND(d, e, f, g, h, a, b, c, 37, w5);
	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
	SHA256ROUND(c, d, e, f, g, h, a, b, 38, w6);
	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
	SHA256ROUND(b, c, d, e, f, g, h, a, 39, w7);
	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
	SHA256ROUND(a, b, c, d, e, f, g, h, 40, w8);
	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
	SHA256ROUND(h, a, b, c, d, e, f, g, 41, w9);
	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
	SHA256ROUND(g, h, a, b, c, d, e, f, 42, w10);
	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
	SHA256ROUND(f, g, h, a, b, c, d, e, 43, w11);
	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
	SHA256ROUND(e, f, g, h, a, b, c, d, 44, w12);
	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
	SHA256ROUND(d, e, f, g, h, a, b, c, 45, w13);
	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
	SHA256ROUND(c, d, e, f, g, h, a, b, 46, w14);
	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
	SHA256ROUND(b, c, d, e, f, g, h, a, 47, w15);

	w0 = SIGMA1_256(w14) + w9 + SIGMA0_256(w1) + w0;
	SHA256ROUND(a, b, c, d, e, f, g, h, 48, w0);
	w1 = SIGMA1_256(w15) + w10 + SIGMA0_256(w2) + w1;
	SHA256ROUND(h, a, b, c, d, e, f, g, 49, w1);
	w2 = SIGMA1_256(w0) + w11 + SIGMA0_256(w3) + w2;
	SHA256ROUND(g, h, a, b, c, d, e, f, 50, w2);
	w3 = SIGMA1_256(w1) + w12 + SIGMA0_256(w4) + w3;
	SHA256ROUND(f, g, h, a, b, c, d, e, 51, w3);
	w4 = SIGMA1_256(w2) + w13 + SIGMA0_256(w5) + w4;
	SHA256ROUND(e, f, g, h, a, b, c, d, 52, w4);
	w5 = SIGMA1_256(w3) + w14 + SIGMA0_256(w6) + w5;
	SHA256ROUND(d, e, f, g, h, a, b, c, 53, w5);
	w6 = SIGMA1_256(w4) + w15 + SIGMA0_256(w7) + w6;
	SHA256ROUND(c, d, e, f, g, h, a, b, 54, w6);
	w7 = SIGMA1_256(w5) + w0 + SIGMA0_256(w8) + w7;
	SHA256ROUND(b, c, d, e, f, g, h, a, 55, w7);
	w8 = SIGMA1_256(w6) + w1 + SIGMA0_256(w9) + w8;
	SHA256ROUND(a, b, c, d, e, f, g, h, 56, w8);
	w9 = SIGMA1_256(w7) + w2 + SIGMA0_256(w10) + w9;
	SHA256ROUND(h, a, b, c, d, e, f, g, 57, w9);
	w10 = SIGMA1_256(w8) + w3 + SIGMA0_256(w11) + w10;
	SHA256ROUND(g, h, a, b, c, d, e, f, 58, w10);
	w11 = SIGMA1_256(w9) + w4 + SIGMA0_256(w12) + w11;
	SHA256ROUND(f, g, h, a, b, c, d, e, 59, w11);
	w12 = SIGMA1_256(w10) + w5 + SIGMA0_256(w13) + w12;
	SHA256ROUND(e, f, g, h, a, b, c, d, 60, w12);
	w13 = SIGMA1_256(w11) + w6 + SIGMA0_256(w14) + w13;
	SHA256ROUND(d, e, f, g, h, a, b, c, 61, w13);
	w14 = SIGMA1_256(w12) + w7 + SIGMA0_256(w15) + w14;
	SHA256ROUND(c, d, e, f, g, h, a, b, 62, w14);
	w15 = SIGMA1_256(w13) + w8 + SIGMA0_256(w0) + w15;
	SHA256ROUND(b, c, d, e, f, g, h, a, 63, w15);

	/* Fold the working variables back into the chaining state. */
	ctx->state.s32[0] += a;
	ctx->state.s32[1] += b;
	ctx->state.s32[2] += c;
	ctx->state.s32[3] += d;
	ctx->state.s32[4] += e;
	ctx->state.s32[5] += f;
	ctx->state.s32[6] += g;
	ctx->state.s32[7] += h;
}
650 
651 
652 /* SHA384 and SHA512 Transform */
653 
654 static void
655 SHA512Transform(SHA2_CTX *ctx, const uint8_t *blk)
656 {
657 
658 	uint64_t a = ctx->state.s64[0];
659 	uint64_t b = ctx->state.s64[1];
660 	uint64_t c = ctx->state.s64[2];
661 	uint64_t d = ctx->state.s64[3];
662 	uint64_t e = ctx->state.s64[4];
663 	uint64_t f = ctx->state.s64[5];
664 	uint64_t g = ctx->state.s64[6];
665 	uint64_t h = ctx->state.s64[7];
666 
667 	uint64_t w0, w1, w2, w3, w4, w5, w6, w7;
668 	uint64_t w8, w9, w10, w11, w12, w13, w14, w15;
669 	uint64_t T1, T2;
670 
671 #if	defined(__sparc)
672 	static const uint64_t sha512_consts[] = {
673 		SHA512_CONST_0, SHA512_CONST_1, SHA512_CONST_2,
674 		SHA512_CONST_3, SHA512_CONST_4, SHA512_CONST_5,
675 		SHA512_CONST_6, SHA512_CONST_7, SHA512_CONST_8,
676 		SHA512_CONST_9, SHA512_CONST_10, SHA512_CONST_11,
677 		SHA512_CONST_12, SHA512_CONST_13, SHA512_CONST_14,
678 		SHA512_CONST_15, SHA512_CONST_16, SHA512_CONST_17,
679 		SHA512_CONST_18, SHA512_CONST_19, SHA512_CONST_20,
680 		SHA512_CONST_21, SHA512_CONST_22, SHA512_CONST_23,
681 		SHA512_CONST_24, SHA512_CONST_25, SHA512_CONST_26,
682 		SHA512_CONST_27, SHA512_CONST_28, SHA512_CONST_29,
683 		SHA512_CONST_30, SHA512_CONST_31, SHA512_CONST_32,
684 		SHA512_CONST_33, SHA512_CONST_34, SHA512_CONST_35,
685 		SHA512_CONST_36, SHA512_CONST_37, SHA512_CONST_38,
686 		SHA512_CONST_39, SHA512_CONST_40, SHA512_CONST_41,
687 		SHA512_CONST_42, SHA512_CONST_43, SHA512_CONST_44,
688 		SHA512_CONST_45, SHA512_CONST_46, SHA512_CONST_47,
689 		SHA512_CONST_48, SHA512_CONST_49, SHA512_CONST_50,
690 		SHA512_CONST_51, SHA512_CONST_52, SHA512_CONST_53,
691 		SHA512_CONST_54, SHA512_CONST_55, SHA512_CONST_56,
692 		SHA512_CONST_57, SHA512_CONST_58, SHA512_CONST_59,
693 		SHA512_CONST_60, SHA512_CONST_61, SHA512_CONST_62,
694 		SHA512_CONST_63, SHA512_CONST_64, SHA512_CONST_65,
695 		SHA512_CONST_66, SHA512_CONST_67, SHA512_CONST_68,
696 		SHA512_CONST_69, SHA512_CONST_70, SHA512_CONST_71,
697 		SHA512_CONST_72, SHA512_CONST_73, SHA512_CONST_74,
698 		SHA512_CONST_75, SHA512_CONST_76, SHA512_CONST_77,
699 		SHA512_CONST_78, SHA512_CONST_79
700 	};
701 #endif
702 
703 
704 	if ((uintptr_t)blk & 0x7) {		/* not 8-byte aligned? */
705 		bcopy(blk, ctx->buf_un.buf64,  sizeof (ctx->buf_un.buf64));
706 		blk = (uint8_t *)ctx->buf_un.buf64;
707 	}
708 
709 #if	defined(__sparc)
710 	/*LINTED*/
711 	w0 =  LOAD_BIG_64(blk + 8 * 0);
712 	SHA512ROUND(a, b, c, d, e, f, g, h, 0, w0);
713 	/*LINTED*/
714 	w1 =  LOAD_BIG_64(blk + 8 * 1);
715 	SHA512ROUND(h, a, b, c, d, e, f, g, 1, w1);
716 	/*LINTED*/
717 	w2 =  LOAD_BIG_64(blk + 8 * 2);
718 	SHA512ROUND(g, h, a, b, c, d, e, f, 2, w2);
719 	/*LINTED*/
720 	w3 =  LOAD_BIG_64(blk + 8 * 3);
721 	SHA512ROUND(f, g, h, a, b, c, d, e, 3, w3);
722 	/*LINTED*/
723 	w4 =  LOAD_BIG_64(blk + 8 * 4);
724 	SHA512ROUND(e, f, g, h, a, b, c, d, 4, w4);
725 	/*LINTED*/
726 	w5 =  LOAD_BIG_64(blk + 8 * 5);
727 	SHA512ROUND(d, e, f, g, h, a, b, c, 5, w5);
728 	/*LINTED*/
729 	w6 =  LOAD_BIG_64(blk + 8 * 6);
730 	SHA512ROUND(c, d, e, f, g, h, a, b, 6, w6);
731 	/*LINTED*/
732 	w7 =  LOAD_BIG_64(blk + 8 * 7);
733 	SHA512ROUND(b, c, d, e, f, g, h, a, 7, w7);
734 	/*LINTED*/
735 	w8 =  LOAD_BIG_64(blk + 8 * 8);
736 	SHA512ROUND(a, b, c, d, e, f, g, h, 8, w8);
737 	/*LINTED*/
738 	w9 =  LOAD_BIG_64(blk + 8 * 9);
739 	SHA512ROUND(h, a, b, c, d, e, f, g, 9, w9);
740 	/*LINTED*/
741 	w10 =  LOAD_BIG_64(blk + 8 * 10);
742 	SHA512ROUND(g, h, a, b, c, d, e, f, 10, w10);
743 	/*LINTED*/
744 	w11 =  LOAD_BIG_64(blk + 8 * 11);
745 	SHA512ROUND(f, g, h, a, b, c, d, e, 11, w11);
746 	/*LINTED*/
747 	w12 =  LOAD_BIG_64(blk + 8 * 12);
748 	SHA512ROUND(e, f, g, h, a, b, c, d, 12, w12);
749 	/*LINTED*/
750 	w13 =  LOAD_BIG_64(blk + 8 * 13);
751 	SHA512ROUND(d, e, f, g, h, a, b, c, 13, w13);
752 	/*LINTED*/
753 	w14 =  LOAD_BIG_64(blk + 8 * 14);
754 	SHA512ROUND(c, d, e, f, g, h, a, b, 14, w14);
755 	/*LINTED*/
756 	w15 =  LOAD_BIG_64(blk + 8 * 15);
757 	SHA512ROUND(b, c, d, e, f, g, h, a, 15, w15);
758 
759 #else
760 
761 	w0 =  LOAD_BIG_64(blk + 8 * 0);
762 	SHA512ROUND(a, b, c, d, e, f, g, h, 0, w0);
763 	w1 =  LOAD_BIG_64(blk + 8 * 1);
764 	SHA512ROUND(h, a, b, c, d, e, f, g, 1, w1);
765 	w2 =  LOAD_BIG_64(blk + 8 * 2);
766 	SHA512ROUND(g, h, a, b, c, d, e, f, 2, w2);
767 	w3 =  LOAD_BIG_64(blk + 8 * 3);
768 	SHA512ROUND(f, g, h, a, b, c, d, e, 3, w3);
769 	w4 =  LOAD_BIG_64(blk + 8 * 4);
770 	SHA512ROUND(e, f, g, h, a, b, c, d, 4, w4);
771 	w5 =  LOAD_BIG_64(blk + 8 * 5);
772 	SHA512ROUND(d, e, f, g, h, a, b, c, 5, w5);
773 	w6 =  LOAD_BIG_64(blk + 8 * 6);
774 	SHA512ROUND(c, d, e, f, g, h, a, b, 6, w6);
775 	w7 =  LOAD_BIG_64(blk + 8 * 7);
776 	SHA512ROUND(b, c, d, e, f, g, h, a, 7, w7);
777 	w8 =  LOAD_BIG_64(blk + 8 * 8);
778 	SHA512ROUND(a, b, c, d, e, f, g, h, 8, w8);
779 	w9 =  LOAD_BIG_64(blk + 8 * 9);
780 	SHA512ROUND(h, a, b, c, d, e, f, g, 9, w9);
781 	w10 =  LOAD_BIG_64(blk + 8 * 10);
782 	SHA512ROUND(g, h, a, b, c, d, e, f, 10, w10);
783 	w11 =  LOAD_BIG_64(blk + 8 * 11);
784 	SHA512ROUND(f, g, h, a, b, c, d, e, 11, w11);
785 	w12 =  LOAD_BIG_64(blk + 8 * 12);
786 	SHA512ROUND(e, f, g, h, a, b, c, d, 12, w12);
787 	w13 =  LOAD_BIG_64(blk + 8 * 13);
788 	SHA512ROUND(d, e, f, g, h, a, b, c, 13, w13);
789 	w14 =  LOAD_BIG_64(blk + 8 * 14);
790 	SHA512ROUND(c, d, e, f, g, h, a, b, 14, w14);
791 	w15 =  LOAD_BIG_64(blk + 8 * 15);
792 	SHA512ROUND(b, c, d, e, f, g, h, a, 15, w15);
793 
794 #endif
795 
796 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
797 	SHA512ROUND(a, b, c, d, e, f, g, h, 16, w0);
798 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
799 	SHA512ROUND(h, a, b, c, d, e, f, g, 17, w1);
800 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
801 	SHA512ROUND(g, h, a, b, c, d, e, f, 18, w2);
802 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
803 	SHA512ROUND(f, g, h, a, b, c, d, e, 19, w3);
804 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
805 	SHA512ROUND(e, f, g, h, a, b, c, d, 20, w4);
806 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
807 	SHA512ROUND(d, e, f, g, h, a, b, c, 21, w5);
808 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
809 	SHA512ROUND(c, d, e, f, g, h, a, b, 22, w6);
810 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
811 	SHA512ROUND(b, c, d, e, f, g, h, a, 23, w7);
812 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
813 	SHA512ROUND(a, b, c, d, e, f, g, h, 24, w8);
814 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
815 	SHA512ROUND(h, a, b, c, d, e, f, g, 25, w9);
816 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
817 	SHA512ROUND(g, h, a, b, c, d, e, f, 26, w10);
818 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
819 	SHA512ROUND(f, g, h, a, b, c, d, e, 27, w11);
820 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
821 	SHA512ROUND(e, f, g, h, a, b, c, d, 28, w12);
822 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
823 	SHA512ROUND(d, e, f, g, h, a, b, c, 29, w13);
824 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
825 	SHA512ROUND(c, d, e, f, g, h, a, b, 30, w14);
826 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
827 	SHA512ROUND(b, c, d, e, f, g, h, a, 31, w15);
828 
829 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
830 	SHA512ROUND(a, b, c, d, e, f, g, h, 32, w0);
831 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
832 	SHA512ROUND(h, a, b, c, d, e, f, g, 33, w1);
833 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
834 	SHA512ROUND(g, h, a, b, c, d, e, f, 34, w2);
835 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
836 	SHA512ROUND(f, g, h, a, b, c, d, e, 35, w3);
837 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
838 	SHA512ROUND(e, f, g, h, a, b, c, d, 36, w4);
839 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
840 	SHA512ROUND(d, e, f, g, h, a, b, c, 37, w5);
841 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
842 	SHA512ROUND(c, d, e, f, g, h, a, b, 38, w6);
843 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
844 	SHA512ROUND(b, c, d, e, f, g, h, a, 39, w7);
845 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
846 	SHA512ROUND(a, b, c, d, e, f, g, h, 40, w8);
847 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
848 	SHA512ROUND(h, a, b, c, d, e, f, g, 41, w9);
849 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
850 	SHA512ROUND(g, h, a, b, c, d, e, f, 42, w10);
851 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
852 	SHA512ROUND(f, g, h, a, b, c, d, e, 43, w11);
853 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
854 	SHA512ROUND(e, f, g, h, a, b, c, d, 44, w12);
855 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
856 	SHA512ROUND(d, e, f, g, h, a, b, c, 45, w13);
857 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
858 	SHA512ROUND(c, d, e, f, g, h, a, b, 46, w14);
859 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
860 	SHA512ROUND(b, c, d, e, f, g, h, a, 47, w15);
861 
862 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
863 	SHA512ROUND(a, b, c, d, e, f, g, h, 48, w0);
864 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
865 	SHA512ROUND(h, a, b, c, d, e, f, g, 49, w1);
866 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
867 	SHA512ROUND(g, h, a, b, c, d, e, f, 50, w2);
868 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
869 	SHA512ROUND(f, g, h, a, b, c, d, e, 51, w3);
870 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
871 	SHA512ROUND(e, f, g, h, a, b, c, d, 52, w4);
872 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
873 	SHA512ROUND(d, e, f, g, h, a, b, c, 53, w5);
874 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
875 	SHA512ROUND(c, d, e, f, g, h, a, b, 54, w6);
876 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
877 	SHA512ROUND(b, c, d, e, f, g, h, a, 55, w7);
878 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
879 	SHA512ROUND(a, b, c, d, e, f, g, h, 56, w8);
880 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
881 	SHA512ROUND(h, a, b, c, d, e, f, g, 57, w9);
882 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
883 	SHA512ROUND(g, h, a, b, c, d, e, f, 58, w10);
884 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
885 	SHA512ROUND(f, g, h, a, b, c, d, e, 59, w11);
886 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
887 	SHA512ROUND(e, f, g, h, a, b, c, d, 60, w12);
888 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
889 	SHA512ROUND(d, e, f, g, h, a, b, c, 61, w13);
890 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
891 	SHA512ROUND(c, d, e, f, g, h, a, b, 62, w14);
892 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
893 	SHA512ROUND(b, c, d, e, f, g, h, a, 63, w15);
894 
895 	w0 = SIGMA1(w14) + w9 + SIGMA0(w1) + w0;
896 	SHA512ROUND(a, b, c, d, e, f, g, h, 64, w0);
897 	w1 = SIGMA1(w15) + w10 + SIGMA0(w2) + w1;
898 	SHA512ROUND(h, a, b, c, d, e, f, g, 65, w1);
899 	w2 = SIGMA1(w0) + w11 + SIGMA0(w3) + w2;
900 	SHA512ROUND(g, h, a, b, c, d, e, f, 66, w2);
901 	w3 = SIGMA1(w1) + w12 + SIGMA0(w4) + w3;
902 	SHA512ROUND(f, g, h, a, b, c, d, e, 67, w3);
903 	w4 = SIGMA1(w2) + w13 + SIGMA0(w5) + w4;
904 	SHA512ROUND(e, f, g, h, a, b, c, d, 68, w4);
905 	w5 = SIGMA1(w3) + w14 + SIGMA0(w6) + w5;
906 	SHA512ROUND(d, e, f, g, h, a, b, c, 69, w5);
907 	w6 = SIGMA1(w4) + w15 + SIGMA0(w7) + w6;
908 	SHA512ROUND(c, d, e, f, g, h, a, b, 70, w6);
909 	w7 = SIGMA1(w5) + w0 + SIGMA0(w8) + w7;
910 	SHA512ROUND(b, c, d, e, f, g, h, a, 71, w7);
911 	w8 = SIGMA1(w6) + w1 + SIGMA0(w9) + w8;
912 	SHA512ROUND(a, b, c, d, e, f, g, h, 72, w8);
913 	w9 = SIGMA1(w7) + w2 + SIGMA0(w10) + w9;
914 	SHA512ROUND(h, a, b, c, d, e, f, g, 73, w9);
915 	w10 = SIGMA1(w8) + w3 + SIGMA0(w11) + w10;
916 	SHA512ROUND(g, h, a, b, c, d, e, f, 74, w10);
917 	w11 = SIGMA1(w9) + w4 + SIGMA0(w12) + w11;
918 	SHA512ROUND(f, g, h, a, b, c, d, e, 75, w11);
919 	w12 = SIGMA1(w10) + w5 + SIGMA0(w13) + w12;
920 	SHA512ROUND(e, f, g, h, a, b, c, d, 76, w12);
921 	w13 = SIGMA1(w11) + w6 + SIGMA0(w14) + w13;
922 	SHA512ROUND(d, e, f, g, h, a, b, c, 77, w13);
923 	w14 = SIGMA1(w12) + w7 + SIGMA0(w15) + w14;
924 	SHA512ROUND(c, d, e, f, g, h, a, b, 78, w14);
925 	w15 = SIGMA1(w13) + w8 + SIGMA0(w0) + w15;
926 	SHA512ROUND(b, c, d, e, f, g, h, a, 79, w15);
927 
928 	ctx->state.s64[0] += a;
929 	ctx->state.s64[1] += b;
930 	ctx->state.s64[2] += c;
931 	ctx->state.s64[3] += d;
932 	ctx->state.s64[4] += e;
933 	ctx->state.s64[5] += f;
934 	ctx->state.s64[6] += g;
935 	ctx->state.s64[7] += h;
936 
937 }
938 
939 
940 /*
941  * devpro compiler optimization:
942  *
943  * the compiler can generate better code if it knows that `input' and
944  * `output' do not point to the same source.  there is no portable
945  * way to tell the compiler this, but the sun compiler recognizes the
946  * `_Restrict' keyword to indicate this condition.  use it if possible.
947  */
948 
949 #ifdef	__RESTRICT
950 #define	restrict	_Restrict
951 #else
952 #define	restrict	/* nothing */
953 #endif
954 
955 /*
956  * Encode()
957  *
 * purpose: to convert a list of numbers from host (native) byte order
 *	to big endian, the byte order SHA-2 output is defined in
959  *   input: uint8_t *	: place to store the converted big endian numbers
960  *	    uint32_t *	: place to get numbers to convert from
961  *          size_t	: the length of the input in bytes
962  *  output: void
963  */
964 
/*
 * Serialize an array of 32-bit words as big-endian bytes.  On sparc
 * (already big-endian) an aligned destination permits whole-word
 * stores; otherwise the bytes are extracted one at a time, which is
 * correct on any host byte order.
 */
static void
Encode(uint8_t *restrict output, uint32_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint32_t))) {
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			/* LINTED: pointer alignment */
			*((uint32_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* portable byte-extraction path; works anywhere, just slower */
		for (word = 0, byte = 0; byte < len; word++, byte += 4) {
			uint32_t w = input[word];

			/* most-significant byte first */
			output[byte]	 = (uint8_t)(w >> 24);
			output[byte + 1] = (uint8_t)(w >> 16);
			output[byte + 2] = (uint8_t)(w >> 8);
			output[byte + 3] = (uint8_t)w;
		}
#if	defined(__sparc)
	}
#endif
}
988 
/*
 * Serialize an array of 64-bit words as big-endian bytes; the 64-bit
 * counterpart of Encode() above.  Aligned destinations on sparc take
 * the whole-word store path; everything else extracts byte by byte.
 */
static void
Encode64(uint8_t *restrict output, uint64_t *restrict input, size_t len)
{
	size_t		word, byte;

#if	defined(__sparc)
	if (IS_P2ALIGNED(output, sizeof (uint64_t))) {
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			/* LINTED: pointer alignment */
			*((uint64_t *)(output + byte)) = input[word];
		}
	} else {
#endif	/* portable byte-extraction path; works anywhere, just slower */
		for (word = 0, byte = 0; byte < len; word++, byte += 8) {
			uint64_t w = input[word];
			int shift;

			/* most-significant byte first */
			for (shift = 56; shift >= 0; shift -= 8)
				output[byte + (7 - shift / 8)] =
				    (uint8_t)(w >> shift);
		}
#if	defined(__sparc)
	}
#endif
}
1017 
1018 
1019 #ifdef _KERNEL
1020 
1021 /*
1022  * KCF software provider control entry points.
1023  */
/*
 * KCF provider-management entry point.  A software provider has no
 * hardware state to probe, so it is reported unconditionally ready.
 */
/* ARGSUSED */
static void
sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}
1030 
1031 /*
1032  * KCF software provider digest entry points.
1033  */
1034 
/*
 * KCF digest(9E) init entry point: allocate the per-request SHA2
 * context and seed it for the mechanism the caller selected.
 * Returns CRYPTO_HOST_MEMORY if the context cannot be allocated.
 */
static int
sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_req_handle_t req)
{

	/*
	 * Allocate and initialize SHA2 context.
	 */
	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
	    crypto_kmflag(req));
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	/* remember the mechanism so update/final know the digest width */
	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);

	return (CRYPTO_SUCCESS);
}
1053 
1054 /*
1055  * Helper SHA2 digest update function for uio data.
1056  */
/*
 * Walks the uio's kernel iovecs, skipping cd_offset bytes, and feeds
 * cd_length bytes into the running SHA2 digest.  Returns
 * CRYPTO_DATA_LEN_RANGE when offset or length overrun the iovecs.
 */
static int
sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
	off_t offset = data->cd_offset;
	size_t length = data->cd_length;
	uint_t vec_idx;
	size_t cur_len;

	/* we support only kernel buffer */
	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing data to be
	 * digested.  vec_idx is bounds-checked before uio_iov[]
	 * is indexed.
	 */
	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len);
	if (vec_idx == data->cd_uio->uio_iovcnt) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	/*
	 * Now do the digesting on the iovecs.
	 */
	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
		/* digest no more than what remains in this iovec */
		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
		    offset, length);

		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
		    uio_iov[vec_idx].iov_base + offset, cur_len);
		length -= cur_len;
		vec_idx++;
		/* offset only applies within the first iovec digested */
		offset = 0;
	}

	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
		/*
		 * The end of the specified iovec's was reached but
		 * the length requested could not be processed, i.e.
		 * The caller requested to digest more data than it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	return (CRYPTO_SUCCESS);
}
1109 
1110 /*
1111  * Helper SHA2 digest final function for uio data.
1112  * digest_len is the length of the desired digest. If digest_len
1113  * is smaller than the default SHA2 digest length, the caller
1114  * must pass a scratch buffer, digest_scratch, which must
1115  * be at least the algorithm's digest length bytes.
1116  */
1117 static int
1118 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
1119     ulong_t digest_len, uchar_t *digest_scratch)
1120 {
1121 	off_t offset = digest->cd_offset;
1122 	uint_t vec_idx;
1123 
1124 	/* we support only kernel buffer */
1125 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
1126 		return (CRYPTO_ARGUMENTS_BAD);
1127 
1128 	/*
1129 	 * Jump to the first iovec containing ptr to the digest to
1130 	 * be returned.
1131 	 */
1132 	for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
1133 	    vec_idx < digest->cd_uio->uio_iovcnt;
1134 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len);
1135 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
1136 		/*
1137 		 * The caller specified an offset that is
1138 		 * larger than the total size of the buffers
1139 		 * it provided.
1140 		 */
1141 		return (CRYPTO_DATA_LEN_RANGE);
1142 	}
1143 
1144 	if (offset + digest_len <=
1145 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
1146 		/*
1147 		 * The computed SHA2 digest will fit in the current
1148 		 * iovec.
1149 		 */
1150 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1151 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
1152 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
1153 			(digest_len != SHA512_DIGEST_LENGTH))) {
1154 			/*
1155 			 * The caller requested a short digest. Digest
1156 			 * into a scratch buffer and return to
1157 			 * the user only what was requested.
1158 			 */
1159 			SHA2Final(digest_scratch, sha2_ctx);
1160 
1161 			bcopy(digest_scratch, (uchar_t *)digest->
1162 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1163 			    digest_len);
1164 		} else {
1165 			SHA2Final((uchar_t *)digest->
1166 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
1167 			    sha2_ctx);
1168 
1169 		}
1170 	} else {
1171 		/*
1172 		 * The computed digest will be crossing one or more iovec's.
1173 		 * This is bad performance-wise but we need to support it.
1174 		 * Allocate a small scratch buffer on the stack and
1175 		 * copy it piece meal to the specified digest iovec's.
1176 		 */
1177 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
1178 		off_t scratch_offset = 0;
1179 		size_t length = digest_len;
1180 		size_t cur_len;
1181 
1182 		SHA2Final(digest_tmp, sha2_ctx);
1183 
1184 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
1185 			cur_len =
1186 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
1187 				    offset, length);
1188 			bcopy(digest_tmp + scratch_offset,
1189 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
1190 			    cur_len);
1191 
1192 			length -= cur_len;
1193 			vec_idx++;
1194 			scratch_offset += cur_len;
1195 			offset = 0;
1196 		}
1197 
1198 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
1199 			/*
1200 			 * The end of the specified iovec's was reached but
1201 			 * the length requested could not be processed, i.e.
1202 			 * The caller requested to digest more data than it
1203 			 * provided.
1204 			 */
1205 			return (CRYPTO_DATA_LEN_RANGE);
1206 		}
1207 	}
1208 
1209 	return (CRYPTO_SUCCESS);
1210 }
1211 
1212 /*
1213  * Helper SHA2 digest update for mblk's.
1214  */
/*
 * Walks the mblk chain, skipping cd_offset bytes, and feeds cd_length
 * bytes into the running SHA2 digest.  Returns CRYPTO_DATA_LEN_RANGE
 * when offset or length overrun the chain.
 */
static int
sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
	off_t offset = data->cd_offset;
	size_t length = data->cd_length;
	mblk_t *mp;
	size_t cur_len;

	/*
	 * Jump to the first mblk_t containing data to be digested.
	 * mp is NULL-checked before MBLKL(mp) is evaluated.
	 */
	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
	    offset -= MBLKL(mp), mp = mp->b_cont);
	if (mp == NULL) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	/*
	 * Now do the digesting on the mblk chain.
	 */
	while (mp != NULL && length > 0) {
		/* digest no more than what remains in this mblk */
		cur_len = MIN(MBLKL(mp) - offset, length);
		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
		length -= cur_len;
		/* offset only applies within the first mblk digested */
		offset = 0;
		mp = mp->b_cont;
	}

	if (mp == NULL && length > 0) {
		/*
		 * The end of the mblk was reached but the length requested
		 * could not be processed, i.e. The caller requested
		 * to digest more data than it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	return (CRYPTO_SUCCESS);
}
1258 
1259 /*
1260  * Helper SHA2 digest final for mblk's.
1261  * digest_len is the length of the desired digest. If digest_len
1262  * is smaller than the default SHA2 digest length, the caller
1263  * must pass a scratch buffer, digest_scratch, which must
1264  * be at least the algorithm's digest length bytes.
1265  */
static int
sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
	off_t offset = digest->cd_offset;
	mblk_t *mp;

	/*
	 * Jump to the first mblk_t that will be used to store the digest.
	 * mp is NULL-checked before MBLKL(mp) is evaluated.
	 */
	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
	    offset -= MBLKL(mp), mp = mp->b_cont);
	if (mp == NULL) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + digest_len <= MBLKL(mp)) {
		/*
		 * The computed SHA2 digest will fit in the current mblk.
		 * Do the SHA2Final() in-place.
		 */
		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA256_DIGEST_LENGTH)) ||
		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
			(digest_len != SHA512_DIGEST_LENGTH))) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 *
			 * NOTE(review): SHA384's natural 48-byte digest
			 * also lands here; callers passing it must then
			 * supply a non-NULL digest_scratch -- verify.
			 */
			SHA2Final(digest_scratch, sha2_ctx);
			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
		} else {
			SHA2Final(mp->b_rptr + offset, sha2_ctx);
		}
	} else {
		/*
		 * The computed digest will be crossing one or more mblk's.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piece meal to the specified digest iovec's.
		 */
		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		SHA2Final(digest_tmp, sha2_ctx);

		while (mp != NULL && length > 0) {
			/* copy no more than what fits in this mblk */
			cur_len = MIN(MBLKL(mp) - offset, length);
			bcopy(digest_tmp + scratch_offset,
			    mp->b_rptr + offset, cur_len);

			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}

		if (mp == NULL && length > 0) {
			/*
			 * The end of the specified mblk was reached but
			 * the length requested could not be processed, i.e.
			 * The caller requested to digest more data than it
			 * provided.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
1343 
/*
 * KCF digest(9E) single-part entry point: run the update on `data'
 * and the final into `digest' in one call.  The private context is
 * freed on every exit path except the CRYPTO_BUFFER_TOO_SMALL case,
 * where only the required output length is reported.
 */
/* ARGSUSED */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* map the mechanism chosen at init time to its digest size */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 *
	 * NOTE(review): for SHA384 a 48-byte sha_digest_len reaches the
	 * uio/mblk helpers with a NULL scratch buffer, and their
	 * short-digest path would dereference it -- confirm SHA384
	 * output into uio/mblk destinations is handled.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
1439 
/*
 * KCF digest(9E) multi-part update entry point: accumulate `data'
 * into the session's SHA2 context, dispatching on the container type.
 */
/* ARGSUSED */
static int
sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}
1472 
/*
 * KCF digest(9E) final entry point: emit the digest accumulated by
 * earlier update calls and release the private context (except in
 * the CRYPTO_BUFFER_TOO_SMALL case, where only the required length
 * is reported and the context is kept).
 */
/* ARGSUSED */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* map the mechanism chosen at init time to its digest size */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
1539 
/*
 * KCF digest(9E) atomic entry point: init/update/final in a single
 * call using a stack-allocated context, so there is no session state
 * to allocate or free.
 */
/* ARGSUSED */
static int
sha2_digest_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	SHA2_CTX sha2_ctx;
	uint32_t sha_digest_len;

	/*
	 * Do the SHA inits.
	 */

	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/* digest the input, dispatching on the data container type */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx, (uint8_t *)data->
		    cd_raw.iov_base + data->cd_offset, data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * If the update step failed there is nothing to finalize;
	 * report a zero-length result.
	 */

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, bail */
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * NOTE(review): this coarse split reports SHA512_DIGEST_LENGTH
	 * (64 bytes) for the SHA384 mechanism even though SHA384's
	 * digest is 48 bytes -- compare the per-mechanism switch in
	 * sha2_digest(); confirm whether SHA384 atomic requests are
	 * expected through this path.
	 */
	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
		sha_digest_len = SHA256_DIGEST_LENGTH;
	else
		sha_digest_len = SHA512_DIGEST_LENGTH;

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	return (ret);
}
1615 
1616 /*
1617  * KCF software provider mac entry points.
1618  *
1619  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
1620  *
1621  * Init:
1622  * The initialization routine initializes what we denote
1623  * as the inner and outer contexts by doing
1624  * - for inner context: SHA2(key XOR ipad)
1625  * - for outer context: SHA2(key XOR opad)
1626  *
1627  * Update:
1628  * Each subsequent SHA2 HMAC update will result in an
1629  * update of the inner context with the specified data.
1630  *
1631  * Final:
1632  * The SHA2 HMAC final will do a SHA2 final operation on the
1633  * inner context, and the resulting digest will be used
1634  * as the data for an update on the outer context. Last
1635  * but not least, a SHA2 final on the outer context will
1636  * be performed to obtain the SHA2 HMAC digest to return
1637  * to the user.
1638  */
1639 
1640 /*
1641  * Initialize a SHA2-HMAC context.
1642  */
1643 static void
1644 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
1645 {
1646 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
1647 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
1648 	int i, block_size, blocks_per_int64;
1649 
1650 	/* Determine the block size */
1651 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
1652 		block_size = SHA256_HMAC_BLOCK_SIZE;
1653 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
1654 	} else {
1655 		block_size = SHA512_HMAC_BLOCK_SIZE;
1656 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
1657 	}
1658 
1659 	(void) bzero(ipad, block_size);
1660 	(void) bzero(opad, block_size);
1661 	(void) bcopy(keyval, ipad, length_in_bytes);
1662 	(void) bcopy(keyval, opad, length_in_bytes);
1663 
1664 	/* XOR key with ipad (0x36) and opad (0x5c) */
1665 	for (i = 0; i < blocks_per_int64; i ++) {
1666 		ipad[i] ^= 0x3636363636363636;
1667 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
1668 	}
1669 
1670 	/* perform SHA2 on ipad */
1671 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
1672 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
1673 
1674 	/* perform SHA2 on opad */
1675 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
1676 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
1677 
1678 }
1679 
1680 /*
1681  */
1682 static int
1683 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
1684     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
1685     crypto_req_handle_t req)
1686 {
1687 	int ret = CRYPTO_SUCCESS;
1688 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1689 	uint_t sha_digest_len, sha_hmac_block_size;
1690 
1691 	/*
1692 	 * Set the digest length and block size to values approriate to the
1693 	 * mechanism
1694 	 */
1695 	switch (mechanism->cm_type) {
1696 	case SHA256_HMAC_MECH_INFO_TYPE:
1697 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1698 		sha_digest_len = SHA256_DIGEST_LENGTH;
1699 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1700 		break;
1701 	case SHA384_HMAC_MECH_INFO_TYPE:
1702 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1703 	case SHA512_HMAC_MECH_INFO_TYPE:
1704 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1705 		sha_digest_len = SHA512_DIGEST_LENGTH;
1706 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1707 		break;
1708 	default:
1709 		return (CRYPTO_MECHANISM_INVALID);
1710 	}
1711 
1712 	if (key->ck_format != CRYPTO_KEY_RAW)
1713 		return (CRYPTO_ARGUMENTS_BAD);
1714 
1715 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1716 	    crypto_kmflag(req));
1717 	if (ctx->cc_provider_private == NULL)
1718 		return (CRYPTO_HOST_MEMORY);
1719 
1720 	if (ctx_template != NULL) {
1721 		/* reuse context template */
1722 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
1723 		    sizeof (sha2_hmac_ctx_t));
1724 	} else {
1725 		/* no context template, compute context */
1726 		if (keylen_in_bytes > sha_hmac_block_size) {
1727 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
1728 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
1729 
1730 			/*
1731 			 * Hash the passed-in key to get a smaller key.
1732 			 * The inner context is used since it hasn't been
1733 			 * initialized yet.
1734 			 */
1735 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1736 			    &hmac_ctx->hc_icontext,
1737 			    key->ck_data, keylen_in_bytes, digested_key);
1738 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1739 			    digested_key, sha_digest_len);
1740 		} else {
1741 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
1742 			    key->ck_data, keylen_in_bytes);
1743 		}
1744 	}
1745 
1746 	/*
1747 	 * Get the mechanism parameters, if applicable.
1748 	 */
1749 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
1750 	if (mechanism->cm_type % 3 == 2) {
1751 		if (mechanism->cm_param == NULL ||
1752 		    mechanism->cm_param_len != sizeof (ulong_t))
1753 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1754 		PROV_SHA2_GET_DIGEST_LEN(mechanism,
1755 		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
1756 		if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
1757 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1758 	}
1759 
1760 	if (ret != CRYPTO_SUCCESS) {
1761 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1762 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1763 		ctx->cc_provider_private = NULL;
1764 	}
1765 
1766 	return (ret);
1767 }
1768 
/*
 * KCF mac(9E) update entry point.  An HMAC update only feeds the
 * inner context; the outer context is consumed at final time.
 */
/* ARGSUSED */
static int
sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do a SHA2 update of the inner context using the specified
	 * data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_update_mblk(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}
1802 
1803 /* ARGSUSED */
1804 static int
1805 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1806 {
1807 	int ret = CRYPTO_SUCCESS;
1808 	uchar_t digest[SHA512_DIGEST_LENGTH];
1809 	uint32_t digest_len, sha_digest_len;
1810 
1811 	ASSERT(ctx->cc_provider_private != NULL);
1812 
1813 	/* Set the digest lengths to values approriate to the mechanism */
1814 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1815 	case SHA256_HMAC_MECH_INFO_TYPE:
1816 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1817 		break;
1818 	case SHA384_HMAC_MECH_INFO_TYPE:
1819 	case SHA512_HMAC_MECH_INFO_TYPE:
1820 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1821 		break;
1822 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1823 		sha_digest_len = SHA256_DIGEST_LENGTH;
1824 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1825 		break;
1826 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1827 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1828 		sha_digest_len = SHA512_DIGEST_LENGTH;
1829 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1830 		break;
1831 	}
1832 
1833 	/*
1834 	 * We need to just return the length needed to store the output.
1835 	 * We should not destroy the context for the following cases.
1836 	 */
1837 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1838 		mac->cd_length = digest_len;
1839 		return (CRYPTO_BUFFER_TOO_SMALL);
1840 	}
1841 
1842 	/*
1843 	 * Do a SHA2 final on the inner context.
1844 	 */
1845 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1846 
1847 	/*
1848 	 * Do a SHA2 update on the outer context, feeding the inner
1849 	 * digest as data.
1850 	 */
1851 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1852 	    sha_digest_len);
1853 
1854 	/*
1855 	 * Do a SHA2 final on the outer context, storing the computing
1856 	 * digest in the users buffer.
1857 	 */
1858 	switch (mac->cd_format) {
1859 	case CRYPTO_DATA_RAW:
1860 		if (digest_len != sha_digest_len) {
1861 			/*
1862 			 * The caller requested a short digest. Digest
1863 			 * into a scratch buffer and return to
1864 			 * the user only what was requested.
1865 			 */
1866 			SHA2Final(digest,
1867 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1868 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1869 			    mac->cd_offset, digest_len);
1870 		} else {
1871 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1872 			    mac->cd_offset,
1873 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1874 		}
1875 		break;
1876 	case CRYPTO_DATA_UIO:
1877 		ret = sha2_digest_final_uio(
1878 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1879 		    digest_len, digest);
1880 		break;
1881 	case CRYPTO_DATA_MBLK:
1882 		ret = sha2_digest_final_mblk(
1883 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1884 		    digest_len, digest);
1885 		break;
1886 	default:
1887 		ret = CRYPTO_ARGUMENTS_BAD;
1888 	}
1889 
1890 	if (ret == CRYPTO_SUCCESS)
1891 		mac->cd_length = digest_len;
1892 	else
1893 		mac->cd_length = 0;
1894 
1895 	bzero(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, sizeof (sha2_hmac_ctx_t));
1896 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1897 	ctx->cc_provider_private = NULL;
1898 
1899 	return (ret);
1900 }
1901 
/*
 * SHA2_MAC_UPDATE: feed `data' (a crypto_data_t *) into the inner SHA2
 * context of `ctx' (a sha2_hmac_ctx_t, passed by value-name), setting
 * `ret' to a CRYPTO_* status on the non-RAW paths.  This mirrors
 * sha2_mac_update() for the atomic entry points, which keep the HMAC
 * context on the stack rather than in a crypto_ctx_t.  Note `data' is
 * evaluated multiple times; only pass simple lvalues.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	case CRYPTO_DATA_MBLK:						\
		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
		    data);						\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
1920 
/*
 * KCF one-shot HMAC entry point: compute the full HMAC of `data' with
 * `key' and write the result to `mac' in a single call, using a
 * stack-allocated sha2_hmac_ctx_t rather than a crypto_ctx_t.
 */
/* ARGSUSED */
static int
sha2_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		/*
		 * NOTE(review): SHA384 shares the SHA512 lengths here, so
		 * digest_len becomes 64 even though SHA384 emits 48 bytes
		 * (see the SHA384 special case further down); the caller's
		 * mac->cd_length ends up as 64 in that case — verify
		 * against consumers.
		 */
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.  (cm_type / 3 maps the HMAC
			 * mechanism to its plain-digest mechanism; this
			 * relies on the mechanism numbering.)
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable: cm_type % 3 == 2
	 * selects the _GEN mechanisms, whose parameter is the requested
	 * output length in bytes (again dependent on mechanism numbering).
	 */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * SHA384 is handled specially because its inner hash is only
	 * SHA384_DIGEST_LENGTH (48) bytes, while sha_digest_len was set
	 * to the SHA512 length (64) above.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the users buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	case CRYPTO_DATA_MBLK:
		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * NOTE(review): the success path returns without scrubbing the
	 * stack-resident sha2_hmac_ctx — confirm this is intentional.
	 */
	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
		return (CRYPTO_SUCCESS);
	}
bail:
	/* scrub key-derived state from the stack context on failure */
	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
2062 
/*
 * KCF one-shot HMAC verification: compute the HMAC of `data' with
 * `key' and compare it against the expected MAC supplied in `mac'.
 * Returns CRYPTO_INVALID_MAC when the computed and expected digests
 * differ.
 */
/* ARGSUSED */
static int
sha2_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	} else {
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.  (cm_type / 3 maps the HMAC
			 * mechanism to its plain-digest mechanism.)
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable: cm_type % 3 == 2
	 * selects the _GEN mechanisms, whose parameter is the requested
	 * output length in bytes.
	 */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* the expected MAC must be exactly digest_len bytes */
	if (mac->cd_length != digest_len) {
		ret = CRYPTO_INVALID_MAC;
		goto bail;
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/* do a SHA2 final on the inner context */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 */
	SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the users buffer.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);

	/*
	 * Compare the computed digest against the expected digest passed
	 * as argument.
	 *
	 * NOTE(review): bcmp is not a constant-time comparison — confirm
	 * whether timing-safe comparison is required here.
	 */

	switch (mac->cd_format) {

	case CRYPTO_DATA_RAW:
		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
		    mac->cd_offset, digest_len) != 0)
			ret = CRYPTO_INVALID_MAC;
		break;

	case CRYPTO_DATA_UIO: {
		off_t offset = mac->cd_offset;
		uint_t vec_idx;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* we support only kernel buffer */
		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
			return (CRYPTO_ARGUMENTS_BAD);

		/* jump to the first iovec containing the expected digest */
		for (vec_idx = 0;
		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
		    vec_idx < mac->cd_uio->uio_iovcnt;
		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len);
		if (vec_idx == mac->cd_uio->uio_iovcnt) {
			/*
			 * The caller specified an offset that is
			 * larger than the total size of the buffers
			 * it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		/* do the comparison of computed digest vs specified one */
		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
			    offset, length);

			if (bcmp(digest + scratch_offset,
			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
			    cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}

	case CRYPTO_DATA_MBLK: {
		off_t offset = mac->cd_offset;
		mblk_t *mp;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* jump to the first mblk_t containing the expected digest */
		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
		    offset -= MBLKL(mp), mp = mp->b_cont);
		if (mp == NULL) {
			/*
			 * The caller specified an offset that is larger than
			 * the total size of the buffers it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		/* walk the chain, comparing digest_len bytes in total */
		while (mp != NULL && length > 0) {
			cur_len = MIN(MBLKL(mp) - offset, length);
			if (bcmp(digest + scratch_offset,
			    mp->b_rptr + offset, cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}

	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * NOTE(review): this path (including mismatches detected above)
	 * returns without scrubbing the stack-resident sha2_hmac_ctx —
	 * confirm intentional.
	 */
	return (ret);
bail:
	/* scrub key-derived state from the stack context on failure */
	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
2267 
2268 /*
2269  * KCF software provider context management entry points.
2270  */
2271 
2272 /* ARGSUSED */
2273 static int
2274 sha2_create_ctx_template(crypto_provider_handle_t provider,
2275     crypto_mechanism_t *mechanism, crypto_key_t *key,
2276     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
2277     crypto_req_handle_t req)
2278 {
2279 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
2280 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
2281 	uint32_t sha_digest_len, sha_hmac_block_size;
2282 
2283 	/*
2284 	 * Set the digest length and block size to values approriate to the
2285 	 * mechanism
2286 	 */
2287 	switch (mechanism->cm_type) {
2288 	case SHA256_HMAC_MECH_INFO_TYPE:
2289 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2290 		sha_digest_len = SHA256_DIGEST_LENGTH;
2291 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
2292 		break;
2293 	case SHA384_HMAC_MECH_INFO_TYPE:
2294 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2295 	case SHA512_HMAC_MECH_INFO_TYPE:
2296 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2297 		sha_digest_len = SHA512_DIGEST_LENGTH;
2298 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
2299 		break;
2300 	default:
2301 		return (CRYPTO_MECHANISM_INVALID);
2302 	}
2303 
2304 	/* Add support for key by attributes (RFE 4706552) */
2305 	if (key->ck_format != CRYPTO_KEY_RAW)
2306 		return (CRYPTO_ARGUMENTS_BAD);
2307 
2308 	/*
2309 	 * Allocate and initialize SHA2 context.
2310 	 */
2311 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
2312 	    crypto_kmflag(req));
2313 	if (sha2_hmac_ctx_tmpl == NULL)
2314 		return (CRYPTO_HOST_MEMORY);
2315 
2316 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
2317 
2318 	if (keylen_in_bytes > sha_hmac_block_size) {
2319 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
2320 
2321 		/*
2322 		 * Hash the passed-in key to get a smaller key.
2323 		 * The inner context is used since it hasn't been
2324 		 * initialized yet.
2325 		 */
2326 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
2327 		    &sha2_hmac_ctx_tmpl->hc_icontext,
2328 		    key->ck_data, keylen_in_bytes, digested_key);
2329 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
2330 		    sha_digest_len);
2331 	} else {
2332 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
2333 		    keylen_in_bytes);
2334 	}
2335 
2336 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
2337 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
2338 
2339 	return (CRYPTO_SUCCESS);
2340 }
2341 
2342 static int
2343 sha2_free_context(crypto_ctx_t *ctx)
2344 {
2345 	uint_t ctx_len;
2346 
2347 	if (ctx->cc_provider_private == NULL)
2348 		return (CRYPTO_SUCCESS);
2349 
2350 	/*
2351 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
2352 	 * have different lengths.
2353 	 *
2354 	 * Note: Below is dependent on the mechanism ordering.
2355 	 */
2356 
2357 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
2358 		ctx_len = sizeof (sha2_ctx_t);
2359 	else
2360 		ctx_len = sizeof (sha2_hmac_ctx_t);
2361 
2362 	bzero(ctx->cc_provider_private, ctx_len);
2363 	kmem_free(ctx->cc_provider_private, ctx_len);
2364 	ctx->cc_provider_private = NULL;
2365 
2366 	return (CRYPTO_SUCCESS);
2367 }
2368 
2369 #endif /* _KERNEL */
2370 
2371 void
2372 SHA2Init(uint64_t mech, SHA2_CTX *ctx)
2373 {
2374 
2375 	switch (mech) {
2376 	case SHA256_MECH_INFO_TYPE:
2377 	case SHA256_HMAC_MECH_INFO_TYPE:
2378 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
2379 		ctx->state.s32[0] = 0x6a09e667U;
2380 		ctx->state.s32[1] = 0xbb67ae85U;
2381 		ctx->state.s32[2] = 0x3c6ef372U;
2382 		ctx->state.s32[3] = 0xa54ff53aU;
2383 		ctx->state.s32[4] = 0x510e527fU;
2384 		ctx->state.s32[5] = 0x9b05688cU;
2385 		ctx->state.s32[6] = 0x1f83d9abU;
2386 		ctx->state.s32[7] = 0x5be0cd19U;
2387 		break;
2388 	case SHA384_MECH_INFO_TYPE:
2389 	case SHA384_HMAC_MECH_INFO_TYPE:
2390 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
2391 		ctx->state.s64[0] = 0xcbbb9d5dc1059ed8ULL;
2392 		ctx->state.s64[1] = 0x629a292a367cd507ULL;
2393 		ctx->state.s64[2] = 0x9159015a3070dd17ULL;
2394 		ctx->state.s64[3] = 0x152fecd8f70e5939ULL;
2395 		ctx->state.s64[4] = 0x67332667ffc00b31ULL;
2396 		ctx->state.s64[5] = 0x8eb44a8768581511ULL;
2397 		ctx->state.s64[6] = 0xdb0c2e0d64f98fa7ULL;
2398 		ctx->state.s64[7] = 0x47b5481dbefa4fa4ULL;
2399 		break;
2400 	case SHA512_MECH_INFO_TYPE:
2401 	case SHA512_HMAC_MECH_INFO_TYPE:
2402 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
2403 		ctx->state.s64[0] = 0x6a09e667f3bcc908ULL;
2404 		ctx->state.s64[1] = 0xbb67ae8584caa73bULL;
2405 		ctx->state.s64[2] = 0x3c6ef372fe94f82bULL;
2406 		ctx->state.s64[3] = 0xa54ff53a5f1d36f1ULL;
2407 		ctx->state.s64[4] = 0x510e527fade682d1ULL;
2408 		ctx->state.s64[5] = 0x9b05688c2b3e6c1fULL;
2409 		ctx->state.s64[6] = 0x1f83d9abfb41bd6bULL;
2410 		ctx->state.s64[7] = 0x5be0cd19137e2179ULL;
2411 		break;
2412 #ifdef _KERNEL
2413 	default:
2414 		cmn_err(CE_WARN, "sha2_init: "
2415 		    "failed to find a supported algorithm: 0x%x",
2416 		    (uint32_t)mech);
2417 
2418 #endif /* _KERNEL */
2419 	}
2420 
2421 	ctx->algotype = mech;
2422 	ctx->count.c64[0] = ctx->count.c64[1] = 0;
2423 }
2424 
2425 /*
2426  * SHA2Update()
2427  *
2428  * purpose: continues an sha2 digest operation, using the message block
2429  *          to update the context.
2430  *   input: SHA2_CTX *	: the context to update
2431  *          uint8_t *	: the message block
2432  *          uint32_t    : the length of the message block in bytes
2433  *  output: void
2434  */
2435 
2436 void
2437 SHA2Update(SHA2_CTX *ctx, const uint8_t *input, uint32_t input_len)
2438 {
2439 	uint32_t i, buf_index, buf_len, buf_limit;
2440 
2441 	/* check for noop */
2442 	if (input_len == 0)
2443 		return;
2444 
2445 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2446 		buf_limit = 64;
2447 
2448 		/* compute number of bytes mod 64 */
2449 		buf_index = (ctx->count.c32[1] >> 3) & 0x3F;
2450 
2451 		/* update number of bits */
2452 		if ((ctx->count.c32[1] += (input_len << 3)) < (input_len << 3))
2453 			ctx->count.c32[0]++;
2454 
2455 		ctx->count.c32[0] += (input_len >> 29);
2456 
2457 	} else {
2458 		buf_limit = 128;
2459 
2460 		/* compute number of bytes mod 128 */
2461 		buf_index = (ctx->count.c64[1] >> 3) & 0x7F;
2462 
2463 		/* update number of bits */
2464 		if ((ctx->count.c64[1] += (input_len << 3)) < (input_len << 3))
2465 			ctx->count.c64[0]++;
2466 
2467 		ctx->count.c64[0] += (input_len >> 29);
2468 	}
2469 
2470 	buf_len = buf_limit - buf_index;
2471 
2472 	/* transform as many times as possible */
2473 	i = 0;
2474 	if (input_len >= buf_len) {
2475 
2476 		/*
2477 		 * general optimization:
2478 		 *
2479 		 * only do initial bcopy() and SHA2Transform() if
2480 		 * buf_index != 0.  if buf_index == 0, we're just
2481 		 * wasting our time doing the bcopy() since there
2482 		 * wasn't any data left over from a previous call to
2483 		 * SHA2Update().
2484 		 */
2485 		if (buf_index) {
2486 			bcopy(input, &ctx->buf_un.buf8[buf_index], buf_len);
2487 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2488 				SHA256Transform(ctx, ctx->buf_un.buf8);
2489 			else
2490 				SHA512Transform(ctx, ctx->buf_un.buf8);
2491 
2492 			i = buf_len;
2493 		}
2494 
2495 
2496 		for (; i + buf_limit - 1 < input_len; i += buf_limit) {
2497 			if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
2498 				SHA256Transform(ctx, &input[i]);
2499 			else
2500 				SHA512Transform(ctx, &input[i]);
2501 		}
2502 
2503 		/*
2504 		 * general optimization:
2505 		 *
2506 		 * if i and input_len are the same, return now instead
2507 		 * of calling bcopy(), since the bcopy() in this case
2508 		 * will be an expensive nop.
2509 		 */
2510 
2511 		if (input_len == i)
2512 			return;
2513 
2514 		buf_index = 0;
2515 	}
2516 
2517 	/* buffer remaining input */
2518 	bcopy(&input[i], &ctx->buf_un.buf8[buf_index], input_len - i);
2519 }
2520 
2521 
2522 /*
2523  * SHA2Final()
2524  *
2525  * purpose: ends an sha2 digest operation, finalizing the message digest and
2526  *          zeroing the context.
2527  *   input: uint8_t *	: a buffer to store the digest in
2528  *          SHA2_CTX *  : the context to finalize, save, and zero
2529  *  output: void
2530  */
2531 
2532 
2533 void
2534 SHA2Final(uint8_t *digest, SHA2_CTX *ctx)
2535 {
2536 	uint8_t		bitcount_be[sizeof (ctx->count.c32)];
2537 	uint8_t		bitcount_be64[sizeof (ctx->count.c64)];
2538 	uint32_t	index;
2539 
2540 
2541 	if (ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
2542 		index  = (ctx->count.c32[1] >> 3) & 0x3f;
2543 		Encode(bitcount_be, ctx->count.c32, sizeof (bitcount_be));
2544 		SHA2Update(ctx, PADDING, ((index < 56) ? 56 : 120) - index);
2545 		SHA2Update(ctx, bitcount_be, sizeof (bitcount_be));
2546 		Encode(digest, ctx->state.s32, sizeof (ctx->state.s32));
2547 
2548 	} else {
2549 		index  = (ctx->count.c64[1] >> 3) & 0x7f;
2550 		Encode64(bitcount_be64, ctx->count.c64,
2551 		    sizeof (bitcount_be64));
2552 		SHA2Update(ctx, PADDING, ((index < 112) ? 112 : 240) - index);
2553 		SHA2Update(ctx, bitcount_be64, sizeof (bitcount_be64));
2554 		if (ctx->algotype <= SHA384_HMAC_GEN_MECH_INFO_TYPE) {
2555 			ctx->state.s64[6] = ctx->state.s64[7] = 0;
2556 			Encode64(digest, ctx->state.s64,
2557 			    sizeof (uint64_t) * 6);
2558 		} else
2559 			Encode64(digest, ctx->state.s64,
2560 			    sizeof (ctx->state.s64));
2561 	}
2562 }
2563