/*
 * IDI,NTNU
 *
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 *
 * Copyright (C) 2009, 2010, Jorn Amundsen <jorn.amundsen@ntnu.no>
 * Tweaked Edon-R implementation for SUPERCOP, based on NIST API.
 *
 * $Id: edonr.c 517 2013-02-17 20:34:39Z joern $
 */
/*
 * Portions copyright (c) 2013, Saso Kiselkov, All rights reserved
 */

/* determine where we can get bcopy/bzero declarations */
#ifdef	_KERNEL
#include <sys/systm.h>
#else
#include <strings.h>
#endif
#include <sys/edonr.h>
#include <sys/debug.h>

/* big endian support, provides no-ops if run on little endian hosts */
#include "edonr_byteorder.h"

#define	hashState224(x)	((x)->pipe->p256)
#define	hashState256(x)	((x)->pipe->p256)
#define	hashState384(x)	((x)->pipe->p512)
#define	hashState512(x)	((x)->pipe->p512)

/* shift and rotate shortcuts */
#define	shl(x, n)	((x) << n)
#define	shr(x, n)	((x) >> n)

#define	rotl32(x, n)	(((x) << (n)) | ((x) >> (32 - (n))))
#define	rotr32(x, n)	(((x) >> (n)) | ((x) << (32 - (n))))

#define	rotl64(x, n)	(((x) << (n)) | ((x) >> (64 - (n))))
#define	rotr64(x, n)	(((x) >> (n)) | ((x) << (64 - (n))))

#if !defined(__C99_RESTRICT)
#define	restrict	/* restrict */
#endif

#define	EDONR_VALID_HASHBITLEN(x) \
	((x) == 512 || (x) == 384 || (x) == 256 || (x) == 224)

/* EdonR224 initial double chaining pipe */
static const uint32_t i224p2[16] = {
	0x00010203ul, 0x04050607ul, 0x08090a0bul, 0x0c0d0e0ful,
	0x10111213ul, 0x14151617ul, 0x18191a1bul, 0x1c1d1e1ful,
	0x20212223ul, 0x24252627ul, 0x28292a2bul, 0x2c2d2e2ful,
	0x30313233ul, 0x34353637ul, 0x38393a3bul, 0x3c3d3e3ful,
};

/* EdonR256 initial double chaining pipe */
static const uint32_t i256p2[16] = {
	0x40414243ul, 0x44454647ul, 0x48494a4bul, 0x4c4d4e4ful,
	0x50515253ul, 0x54555657ul, 0x58595a5bul, 0x5c5d5e5ful,
	0x60616263ul, 0x64656667ul, 0x68696a6bul, 0x6c6d6e6ful,
	0x70717273ul, 0x74757677ul, 0x78797a7bul, 0x7c7d7e7ful,
};

/* EdonR384 initial double chaining pipe */
static const uint64_t i384p2[16] = {
	0x0001020304050607ull, 0x08090a0b0c0d0e0full,
	0x1011121314151617ull, 0x18191a1b1c1d1e1full,
	0x2021222324252627ull, 0x28292a2b2c2d2e2full,
	0x3031323334353637ull, 0x38393a3b3c3d3e3full,
	0x4041424344454647ull, 0x48494a4b4c4d4e4full,
	0x5051525354555657ull, 0x58595a5b5c5d5e5full,
	0x6061626364656667ull, 0x68696a6b6c6d6e6full,
	0x7071727374757677ull, 0x78797a7b7c7d7e7full
};

/* EdonR512 initial double chaining pipe */
static const uint64_t i512p2[16] = {
	0x8081828384858687ull, 0x88898a8b8c8d8e8full,
	0x9091929394959697ull, 0x98999a9b9c9d9e9full,
	0xa0a1a2a3a4a5a6a7ull, 0xa8a9aaabacadaeafull,
	0xb0b1b2b3b4b5b6b7ull, 0xb8b9babbbcbdbebfull,
	0xc0c1c2c3c4c5c6c7ull, 0xc8c9cacbcccdcecfull,
	0xd0d1d2d3d4d5d6d7ull, 0xd8d9dadbdcdddedfull,
	0xe0e1e2e3e4e5e6e7ull, 0xe8e9eaebecedeeefull,
	0xf0f1f2f3f4f5f6f7ull, 0xf8f9fafbfcfdfeffull
};

/*
 * First Latin Square
 * 0   7   1   3   2   4   6   5
 * 4   1   7   6   3   0   5   2
 * 7   0   4   2   5   3   1   6
 * 1   4   0   5   6   2   7   3
 * 2   3   6   7   1   5   0   4
 * 5   2   3   1   7   6   4   0
 * 3   6   5   0   4   7   2   1
 * 6   5   2   4   0   1   3   7
 */
#define	LS1_256(c, x0, x1, x2, x3, x4, x5, x6, x7)			\
{									\
	uint32_t x04, x17, x23, x56, x07, x26;				\
	x04 = x0+x4, x17 = x1+x7, x07 = x04+x17;			\
	s0 = c + x07 + x2;						\
	s1 = rotl32(x07 + x3, 4);					\
	s2 = rotl32(x07 + x6, 8);					\
	x23 = x2 + x3;							\
	s5 = rotl32(x04 + x23 + x5, 22);				\
	x56 = x5 + x6;							\
	s6 = rotl32(x17 + x56 + x0, 24);				\
	x26 = x23+x56;							\
	s3 = rotl32(x26 + x7, 13);					\
	s4 = rotl32(x26 + x1, 17);					\
	s7 = rotl32(x26 + x4, 29);					\
}

#define	LS1_512(c, x0, x1, x2, x3, x4, x5, x6, x7)			\
{									\
	uint64_t x04, x17, x23, x56, x07, x26;				\
	x04 = x0+x4, x17 = x1+x7, x07 = x04+x17;			\
	s0 = c + x07 + x2;						\
	s1 = rotl64(x07 + x3, 5);					\
	s2 = rotl64(x07 + x6, 15);					\
	x23 = x2 + x3;							\
	s5 = rotl64(x04 + x23 + x5, 40);				\
	x56 = x5 + x6;							\
	s6 = rotl64(x17 + x56 + x0, 50);				\
	x26 = x23+x56;							\
	s3 = rotl64(x26 + x7, 22);					\
	s4 = rotl64(x26 + x1, 31);					\
	s7 = rotl64(x26 + x4, 59);					\
}

/*
 * Second Orthogonal Latin Square
 * 0   4   2   3   1   6   5   7
 * 7   6   3   2   5   4   1   0
 * 5   3   1   6   0   2   7   4
 * 1   0   5   4   3   7   2   6
 * 2   1   0   7   4   5   6   3
 * 3   5   7   0   6   1   4   2
 * 4   7   6   1   2   0   3   5
 * 6   2   4   5   7   3   0   1
 */
#define	LS2_256(c, y0, y1, y2, y3, y4, y5, y6, y7)			\
{									\
	uint32_t y01, y25, y34, y67, y04, y05, y27, y37;		\
	y01 = y0+y1, y25 = y2+y5, y05 = y01+y25;			\
	t0  = ~c + y05 + y7;						\
	t2 = rotl32(y05 + y3, 9);					\
	y34 = y3+y4, y04 = y01+y34;					\
	t1 = rotl32(y04 + y6, 5);					\
	t4 = rotl32(y04 + y5, 15);					\
	y67 = y6+y7, y37 = y34+y67;					\
	t3 = rotl32(y37 + y2, 11);					\
	t7 = rotl32(y37 + y0, 27);					\
	y27 = y25+y67;							\
	t5 = rotl32(y27 + y4, 20);					\
	t6 = rotl32(y27 + y1, 25);					\
}

#define	LS2_512(c, y0, y1, y2, y3, y4, y5, y6, y7)			\
{									\
	uint64_t y01, y25, y34, y67, y04, y05, y27, y37;		\
	y01 = y0+y1, y25 = y2+y5, y05 = y01+y25;			\
	t0  = ~c + y05 + y7;						\
	t2 = rotl64(y05 + y3, 19);					\
	y34 = y3+y4, y04 = y01+y34;					\
	t1 = rotl64(y04 + y6, 10);					\
	t4 = rotl64(y04 + y5, 36);					\
	y67 = y6+y7, y37 = y34+y67;					\
	t3 = rotl64(y37 + y2, 29);					\
	t7 = rotl64(y37 + y0, 55);					\
	y27 = y25+y67;							\
	t5 = rotl64(y27 + y4, 44);					\
	t6 = rotl64(y27 + y1, 48);					\
}

#define	quasi_exform256(r0, r1, r2, r3, r4, r5, r6, r7)			\
{									\
	uint32_t s04, s17, s23, s56, t01, t25, t34, t67;		\
	s04 = s0 ^ s4, t01 = t0 ^ t1;					\
	r0 = (s04 ^ s1) + (t01 ^ t5);					\
	t67 = t6 ^ t7;							\
	r1 = (s04 ^ s7) + (t2 ^ t67);					\
	s23 = s2 ^ s3;							\
	r7 = (s23 ^ s5) + (t4 ^ t67);					\
	t34 = t3 ^ t4;							\
	r3 = (s23 ^ s4) + (t0 ^ t34);					\
	s56 = s5 ^ s6;							\
	r5 = (s3 ^ s56) + (t34 ^ t6);					\
	t25 = t2 ^ t5;							\
	r6 = (s2 ^ s56) + (t25 ^ t7);					\
	s17 = s1 ^ s7;							\
	r4 = (s0 ^ s17) + (t1 ^ t25);					\
	r2 = (s17 ^ s6) + (t01 ^ t3);					\
}

#define	quasi_exform512(r0, r1, r2, r3, r4, r5, r6, r7)			\
{									\
	uint64_t s04, s17, s23, s56, t01, t25, t34, t67;		\
	s04 = s0 ^ s4, t01 = t0 ^ t1;					\
	r0 = (s04 ^ s1) + (t01 ^ t5);					\
	t67 = t6 ^ t7;							\
	r1 = (s04 ^ s7) + (t2 ^ t67);					\
	s23 = s2 ^ s3;							\
	r7 = (s23 ^ s5) + (t4 ^ t67);					\
	t34 = t3 ^ t4;							\
	r3 = (s23 ^ s4) + (t0 ^ t34);					\
	s56 = s5 ^ s6;							\
	r5 = (s3 ^ s56) + (t34 ^ t6);					\
	t25 = t2 ^ t5;							\
	r6 = (s2 ^ s56) + (t25 ^ t7);					\
	s17 = s1 ^ s7;							\
	r4 = (s0 ^ s17) + (t1 ^ t25);					\
	r2 = (s17 ^ s6) + (t01 ^ t3);					\
}

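/*
 * Q256 is the Edon-R compression loop for the 224/256-bit variants: it
 * consumes as many complete 512-bit (EdonR256_BLOCK_BITSIZE) blocks of
 * "data" as fit in "bitlen", folding each block into the 16-word double
 * chaining pipe "p", and returns the number of bits actually consumed.
 * Any remainder is left for the caller to buffer.
 */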
static size_t
Q256(size_t bitlen, const uint32_t *data, uint32_t *restrict p)
{
	size_t bl;

	for (bl = bitlen; bl >= EdonR256_BLOCK_BITSIZE;
	    bl -= EdonR256_BLOCK_BITSIZE, data += 16) {
		uint32_t s0, s1, s2, s3, s4, s5, s6, s7, t0, t1, t2, t3, t4,
		    t5, t6, t7;
		uint32_t p0, p1, p2, p3, p4, p5, p6, p7, q0, q1, q2, q3, q4,
		    q5, q6, q7;
		const uint32_t defix = 0xaaaaaaaa;
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint32_t swp0, swp1, swp2, swp3, swp4, swp5, swp6, swp7, swp8,
		    swp9, swp10, swp11, swp12, swp13, swp14, swp15;
#define	d(j)	swp ## j
#define	s32(j)	ld_swap32((uint32_t *)data + j, swp ## j)
#else
#define	d(j)	data[j]
#endif

		/* First row of quasigroup e-transformations */
#if defined(MACHINE_IS_BIG_ENDIAN)
		s32(8);
		s32(9);
		s32(10);
		s32(11);
		s32(12);
		s32(13);
		s32(14);
		s32(15);
#endif
		LS1_256(defix, d(15), d(14), d(13), d(12), d(11), d(10), d(9),
		    d(8));
#if defined(MACHINE_IS_BIG_ENDIAN)
		s32(0);
		s32(1);
		s32(2);
		s32(3);
		s32(4);
		s32(5);
		s32(6);
		s32(7);
#undef s32
#endif
		LS2_256(defix, d(0), d(1), d(2), d(3), d(4), d(5), d(6), d(7));
		quasi_exform256(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_256(defix, d(8), d(9), d(10), d(11), d(12), d(13), d(14),
		    d(15));
		quasi_exform256(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Second row of quasigroup e-transformations */
		LS1_256(defix, p[8], p[9], p[10], p[11], p[12], p[13], p[14],
		    p[15]);
		LS2_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform256(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_256(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		quasi_exform256(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Third row of quasigroup e-transformations */
		LS1_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_256(defix, p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7]);
		quasi_exform256(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_256(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		LS2_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform256(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Fourth row of quasigroup e-transformations */
		LS1_256(defix, d(7), d(6), d(5), d(4), d(3), d(2), d(1), d(0));
		LS2_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform256(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_256(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_256(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		quasi_exform256(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Edon-R tweak on the original SHA-3 Edon-R submission. */
		p[0] ^= d(8) ^ p0;
		p[1] ^= d(9) ^ p1;
		p[2] ^= d(10) ^ p2;
		p[3] ^= d(11) ^ p3;
		p[4] ^= d(12) ^ p4;
		p[5] ^= d(13) ^ p5;
		p[6] ^= d(14) ^ p6;
		p[7] ^= d(15) ^ p7;
		p[8] ^= d(0) ^ q0;
		p[9] ^= d(1) ^ q1;
		p[10] ^= d(2) ^ q2;
		p[11] ^= d(3) ^ q3;
		p[12] ^= d(4) ^ q4;
		p[13] ^= d(5) ^ q5;
		p[14] ^= d(6) ^ q6;
		p[15] ^= d(7) ^ q7;
	}

#undef d
	return (bitlen - bl);
}

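/*
 * Q512 is the analogous compression loop for the 384/512-bit variants,
 * operating on 1024-bit (EdonR512_BLOCK_BITSIZE) blocks of 64-bit words.
 * It likewise returns the number of bits consumed from "data".
 */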
#if defined(__IBMC__) && defined(_AIX) && defined(__64BIT__)
static inline size_t
#else
static size_t
#endif
Q512(size_t bitlen, const uint64_t *data, uint64_t *restrict p)
{
	size_t bl;

	for (bl = bitlen; bl >= EdonR512_BLOCK_BITSIZE;
	    bl -= EdonR512_BLOCK_BITSIZE, data += 16) {
		uint64_t s0, s1, s2, s3, s4, s5, s6, s7, t0, t1, t2, t3, t4,
		    t5, t6, t7;
		uint64_t p0, p1, p2, p3, p4, p5, p6, p7, q0, q1, q2, q3, q4,
		    q5, q6, q7;
		const uint64_t defix = 0xaaaaaaaaaaaaaaaaull;
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint64_t swp0, swp1, swp2, swp3, swp4, swp5, swp6, swp7, swp8,
		    swp9, swp10, swp11, swp12, swp13, swp14, swp15;
#define	d(j)	swp##j
#define	s64(j)	ld_swap64((uint64_t *)data+j, swp##j)
#else
#define	d(j)	data[j]
#endif

		/* First row of quasigroup e-transformations */
#if defined(MACHINE_IS_BIG_ENDIAN)
		s64(8);
		s64(9);
		s64(10);
		s64(11);
		s64(12);
		s64(13);
		s64(14);
		s64(15);
#endif
		LS1_512(defix, d(15), d(14), d(13), d(12), d(11), d(10), d(9),
		    d(8));
#if defined(MACHINE_IS_BIG_ENDIAN)
		s64(0);
		s64(1);
		s64(2);
		s64(3);
		s64(4);
		s64(5);
		s64(6);
		s64(7);
#undef s64
#endif
		LS2_512(defix, d(0), d(1), d(2), d(3), d(4), d(5), d(6), d(7));
		quasi_exform512(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_512(defix, d(8), d(9), d(10), d(11), d(12), d(13), d(14),
		    d(15));
		quasi_exform512(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Second row of quasigroup e-transformations */
		LS1_512(defix, p[8], p[9], p[10], p[11], p[12], p[13], p[14],
		    p[15]);
		LS2_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform512(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_512(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		quasi_exform512(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Third row of quasigroup e-transformations */
		LS1_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_512(defix, p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7]);
		quasi_exform512(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_512(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		LS2_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform512(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Fourth row of quasigroup e-transformations */
		LS1_512(defix, d(7), d(6), d(5), d(4), d(3), d(2), d(1), d(0));
		LS2_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		quasi_exform512(p0, p1, p2, p3, p4, p5, p6, p7);

		LS1_512(defix, p0, p1, p2, p3, p4, p5, p6, p7);
		LS2_512(defix, q0, q1, q2, q3, q4, q5, q6, q7);
		quasi_exform512(q0, q1, q2, q3, q4, q5, q6, q7);

		/* Edon-R tweak on the original SHA-3 Edon-R submission. */
		p[0] ^= d(8) ^ p0;
		p[1] ^= d(9) ^ p1;
		p[2] ^= d(10) ^ p2;
		p[3] ^= d(11) ^ p3;
		p[4] ^= d(12) ^ p4;
		p[5] ^= d(13) ^ p5;
		p[6] ^= d(14) ^ p6;
		p[7] ^= d(15) ^ p7;
		p[8] ^= d(0) ^ q0;
		p[9] ^= d(1) ^ q1;
		p[10] ^= d(2) ^ q2;
		p[11] ^= d(3) ^ q3;
		p[12] ^= d(4) ^ q4;
		p[13] ^= d(5) ^ q5;
		p[14] ^= d(6) ^ q6;
		p[15] ^= d(7) ^ q7;
	}

#undef d
	return (bitlen - bl);
}

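/*
 * Initialize "state" for a digest of "hashbitlen" bits; hashbitlen must be
 * one of 224, 256, 384 or 512.  The double chaining pipe is seeded with the
 * size-specific initialization vector.
 */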
void
EdonRInit(EdonRState *state, size_t hashbitlen)
{
	ASSERT(EDONR_VALID_HASHBITLEN(hashbitlen));
	switch (hashbitlen) {
	case 224:
		state->hashbitlen = 224;
		state->bits_processed = 0;
		state->unprocessed_bits = 0;
		bcopy(i224p2, hashState224(state)->DoublePipe,
		    16 * sizeof (uint32_t));
		break;

	case 256:
		state->hashbitlen = 256;
		state->bits_processed = 0;
		state->unprocessed_bits = 0;
		bcopy(i256p2, hashState256(state)->DoublePipe,
		    16 * sizeof (uint32_t));
		break;

	case 384:
		state->hashbitlen = 384;
		state->bits_processed = 0;
		state->unprocessed_bits = 0;
		bcopy(i384p2, hashState384(state)->DoublePipe,
		    16 * sizeof (uint64_t));
		break;

	case 512:
		state->hashbitlen = 512;
		state->bits_processed = 0;
		state->unprocessed_bits = 0;
		bcopy(i512p2, hashState512(state)->DoublePipe,
		    16 * sizeof (uint64_t));
		break;
	}
}


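/*
 * Absorb "databitlen" bits from "data" into the hash state.  Full blocks are
 * compressed immediately via Q256/Q512; any trailing partial block is
 * buffered in LastPart until enough input arrives or EdonRFinal() pads it.
 */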
void
EdonRUpdate(EdonRState *state, const uint8_t *data, size_t databitlen)
{
	uint32_t *data32;
	uint64_t *data64;

	size_t bits_processed;

	ASSERT(EDONR_VALID_HASHBITLEN(state->hashbitlen));
	switch (state->hashbitlen) {
	case 224:
	case 256:
		if (state->unprocessed_bits > 0) {
			/* LastBytes = databitlen / 8 */
			int LastBytes = (int)databitlen >> 3;

			ASSERT(state->unprocessed_bits + databitlen <=
			    EdonR256_BLOCK_SIZE * 8);

			bcopy(data, hashState256(state)->LastPart
			    + (state->unprocessed_bits >> 3), LastBytes);
			state->unprocessed_bits += (int)databitlen;
			databitlen = state->unprocessed_bits;
			/* LINTED E_BAD_PTR_CAST_ALIGN */
			data32 = (uint32_t *)hashState256(state)->LastPart;
		} else
			/* LINTED E_BAD_PTR_CAST_ALIGN */
			data32 = (uint32_t *)data;

		bits_processed = Q256(databitlen, data32,
		    hashState256(state)->DoublePipe);
		state->bits_processed += bits_processed;
		databitlen -= bits_processed;
		state->unprocessed_bits = (int)databitlen;
		if (databitlen > 0) {
			/* LastBytes = Ceil(databitlen / 8) */
			int LastBytes =
			    ((~(((-(int)databitlen) >> 3) & 0x01ff)) +
			    1) & 0x01ff;

			data32 += bits_processed >> 5;	/* skip processed words */
			bcopy(data32, hashState256(state)->LastPart, LastBytes);
		}
		break;

	case 384:
	case 512:
		if (state->unprocessed_bits > 0) {
			/* LastBytes = databitlen / 8 */
			int LastBytes = (int)databitlen >> 3;

			ASSERT(state->unprocessed_bits + databitlen <=
			    EdonR512_BLOCK_SIZE * 8);

			bcopy(data, hashState512(state)->LastPart
			    + (state->unprocessed_bits >> 3), LastBytes);
			state->unprocessed_bits += (int)databitlen;
			databitlen = state->unprocessed_bits;
			/* LINTED E_BAD_PTR_CAST_ALIGN */
			data64 = (uint64_t *)hashState512(state)->LastPart;
		} else
			/* LINTED E_BAD_PTR_CAST_ALIGN */
			data64 = (uint64_t *)data;

		bits_processed = Q512(databitlen, data64,
		    hashState512(state)->DoublePipe);
		state->bits_processed += bits_processed;
		databitlen -= bits_processed;
		state->unprocessed_bits = (int)databitlen;
		if (databitlen > 0) {
			/* LastBytes = Ceil(databitlen / 8) */
			int LastBytes =
			    ((~(((-(int)databitlen) >> 3) & 0x03ff)) +
			    1) & 0x03ff;

			data64 += bits_processed >> 6;	/* skip processed words */
			bcopy(data64, hashState512(state)->LastPart, LastBytes);
		}
		break;
	}
}

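/*
 * Finish the hash: append the "1" padding bit after the buffered input,
 * zero-fill the remainder, store the total message length in bits in the
 * final 64-bit word, run one last compression and copy the digest out of the
 * double chaining pipe into "hashval" (byte-swapping on big endian hosts).
 */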
void
EdonRFinal(EdonRState *state, uint8_t *hashval)
{
	uint32_t *data32;
	uint64_t *data64, num_bits;

	size_t databitlen;
	int LastByte, PadOnePosition;

	num_bits = state->bits_processed + state->unprocessed_bits;
	ASSERT(EDONR_VALID_HASHBITLEN(state->hashbitlen));
	switch (state->hashbitlen) {
	case 224:
	case 256:
		LastByte = (int)state->unprocessed_bits >> 3;
		PadOnePosition = 7 - (state->unprocessed_bits & 0x07);
		hashState256(state)->LastPart[LastByte] =
		    (hashState256(state)->LastPart[LastByte]
		    & (0xff << (PadOnePosition + 1))) ^
		    (0x01 << PadOnePosition);
		/* LINTED E_BAD_PTR_CAST_ALIGN */
		data64 = (uint64_t *)hashState256(state)->LastPart;

		if (state->unprocessed_bits < 448) {
			(void) memset((hashState256(state)->LastPart) +
			    LastByte + 1, 0x00,
			    EdonR256_BLOCK_SIZE - LastByte - 9);
			databitlen = EdonR256_BLOCK_SIZE * 8;
#if defined(MACHINE_IS_BIG_ENDIAN)
			st_swap64(num_bits, data64 + 7);
#else
			data64[7] = num_bits;
#endif
		} else {
			(void) memset((hashState256(state)->LastPart) +
			    LastByte + 1, 0x00,
			    EdonR256_BLOCK_SIZE * 2 - LastByte - 9);
			databitlen = EdonR256_BLOCK_SIZE * 16;
#if defined(MACHINE_IS_BIG_ENDIAN)
			st_swap64(num_bits, data64 + 15);
#else
			data64[15] = num_bits;
#endif
		}

		/* LINTED E_BAD_PTR_CAST_ALIGN */
		data32 = (uint32_t *)hashState256(state)->LastPart;
		state->bits_processed += Q256(databitlen, data32,
		    hashState256(state)->DoublePipe);
		break;

	case 384:
	case 512:
		LastByte = (int)state->unprocessed_bits >> 3;
		PadOnePosition = 7 - (state->unprocessed_bits & 0x07);
		hashState512(state)->LastPart[LastByte] =
		    (hashState512(state)->LastPart[LastByte]
		    & (0xff << (PadOnePosition + 1))) ^
		    (0x01 << PadOnePosition);
		/* LINTED E_BAD_PTR_CAST_ALIGN */
		data64 = (uint64_t *)hashState512(state)->LastPart;

		if (state->unprocessed_bits < 960) {
			(void) memset((hashState512(state)->LastPart) +
			    LastByte + 1, 0x00,
			    EdonR512_BLOCK_SIZE - LastByte - 9);
			databitlen = EdonR512_BLOCK_SIZE * 8;
#if defined(MACHINE_IS_BIG_ENDIAN)
			st_swap64(num_bits, data64 + 15);
#else
			data64[15] = num_bits;
#endif
		} else {
			(void) memset((hashState512(state)->LastPart) +
			    LastByte + 1, 0x00,
			    EdonR512_BLOCK_SIZE * 2 - LastByte - 9);
			databitlen = EdonR512_BLOCK_SIZE * 16;
#if defined(MACHINE_IS_BIG_ENDIAN)
			st_swap64(num_bits, data64 + 31);
#else
			data64[31] = num_bits;
#endif
		}

		state->bits_processed += Q512(databitlen, data64,
		    hashState512(state)->DoublePipe);
		break;
	}

	switch (state->hashbitlen) {
	case 224: {
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint32_t *d32 = (uint32_t *)hashval;
		uint32_t *s32 = hashState224(state)->DoublePipe + 9;
		int j;

		for (j = 0; j < EdonR224_DIGEST_SIZE >> 2; j++)
			st_swap32(s32[j], d32 + j);
#else
		bcopy(hashState256(state)->DoublePipe + 9, hashval,
		    EdonR224_DIGEST_SIZE);
#endif
		break;
	}
	case 256: {
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint32_t *d32 = (uint32_t *)hashval;
		uint32_t *s32 = hashState224(state)->DoublePipe + 8;
		int j;

		for (j = 0; j < EdonR256_DIGEST_SIZE >> 2; j++)
			st_swap32(s32[j], d32 + j);
#else
		bcopy(hashState256(state)->DoublePipe + 8, hashval,
		    EdonR256_DIGEST_SIZE);
#endif
		break;
	}
	case 384: {
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint64_t *d64 = (uint64_t *)hashval;
		uint64_t *s64 = hashState384(state)->DoublePipe + 10;
		int j;

		for (j = 0; j < EdonR384_DIGEST_SIZE >> 3; j++)
			st_swap64(s64[j], d64 + j);
#else
		bcopy(hashState384(state)->DoublePipe + 10, hashval,
		    EdonR384_DIGEST_SIZE);
#endif
		break;
	}
	case 512: {
#if defined(MACHINE_IS_BIG_ENDIAN)
		uint64_t *d64 = (uint64_t *)hashval;
		uint64_t *s64 = hashState512(state)->DoublePipe + 8;
		int j;

		for (j = 0; j < EdonR512_DIGEST_SIZE >> 3; j++)
			st_swap64(s64[j], d64 + j);
#else
		bcopy(hashState512(state)->DoublePipe + 8, hashval,
		    EdonR512_DIGEST_SIZE);
#endif
		break;
	}
	}
}


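/*
 * One-shot convenience wrapper: initialize, absorb "databitlen" bits of
 * "data" and write the finished digest to "hashval".
 */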
void
EdonRHash(size_t hashbitlen, const uint8_t *data, size_t databitlen,
    uint8_t *hashval)
{
	EdonRState state;

	EdonRInit(&state, hashbitlen);
	EdonRUpdate(&state, data, databitlen);
	EdonRFinal(&state, hashval);
}
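
/*
 * Example usage (an illustrative sketch, not part of this file; note that
 * all lengths passed to this API are bit counts, not byte counts):
 *
 *	uint8_t digest[EdonR512_DIGEST_SIZE];
 *	const char *msg = "abc";
 *
 *	EdonRHash(512, (const uint8_t *)msg, strlen(msg) * 8, digest);
 *
 * or, with the streaming interface:
 *
 *	EdonRState state;
 *
 *	EdonRInit(&state, 512);
 *	EdonRUpdate(&state, (const uint8_t *)msg, strlen(msg) * 8);
 *	EdonRFinal(&state, digest);
 */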