/* crypto/ripemd/rmd_dgst.c */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the routines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */

#include <sys/types.h>

#include <stdio.h>
#include <string.h>

#if 0
#include <machine/ansi.h>	/* we use the __ variants of bit-sized types */
#endif
#include <machine/endian.h>

#include "rmd_locl.h"
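
/*
 * rmd_locl.h is expected to supply the pieces used below: the round
 * macros RIP1..RIP5, the message-word and shift schedules (WLnn/SLnn,
 * WRnn/SRnn), the round constants (KL1..KL4, KR0..KR3), and the
 * byte/word conversion helpers (c2l, l2c, c2l_p, p_c2l, p_c2l_p).
 */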

/*
 * The assembly-language code is not position-independent, so don't
 * try to use it in a shared library.
 */
#ifdef PIC
#undef RMD160_ASM
#endif

static char *RMD160_version="RIPEMD160 part of SSLeay 0.9.0b 11-Oct-1998";

#ifdef RMD160_ASM
void ripemd160_block_x86(RIPEMD160_CTX *c, const u_int32_t *p,int num);
#define ripemd160_block ripemd160_block_x86
#else
void ripemd160_block(RIPEMD160_CTX *c, const u_int32_t *p,int num);
#endif

void RIPEMD160_Init(RIPEMD160_CTX *c)
	{
	c->A=RIPEMD160_A;
	c->B=RIPEMD160_B;
	c->C=RIPEMD160_C;
	c->D=RIPEMD160_D;
	c->E=RIPEMD160_E;
	c->Nl=0;
	c->Nh=0;
	c->num=0;
	}

void RIPEMD160_Update(RIPEMD160_CTX *c, const void *in, size_t len)
	{
	u_int32_t *p;
	int sw,sc;
	u_int32_t l;
	const unsigned char *data = in;

	if (len == 0) return;

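	/*
	 * Nh:Nl hold the running message length in bits as a 64-bit
	 * quantity: len<<3 adds the low-order bits, len>>29 carries the
	 * overflow into Nh.
	 */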
	l=(c->Nl+(len<<3))&0xffffffffL;
	if (l < c->Nl) /* overflow */
		c->Nh++;
	c->Nh+=(len>>29);
	c->Nl=l;

	if (c->num != 0)
		{
		p=c->data;
		sw=c->num>>2;
		sc=c->num&0x03;

		if ((c->num+len) >= RIPEMD160_CBLOCK)
			{
			l= p[sw];
			p_c2l(data,l,sc);
			p[sw++]=l;
			for (; sw<RIPEMD160_LBLOCK; sw++)
				{
				c2l(data,l);
				p[sw]=l;
				}
			len-=(RIPEMD160_CBLOCK-c->num);

			ripemd160_block(c,p,64);
			c->num=0;
			/* drop through and do the rest */
			}
		else
			{
			int ew,ec;

			c->num+=(int)len;
			if ((sc+len) < 4) /* ugly, add char's to a word */
				{
				l= p[sw];
				p_c2l_p(data,l,sc,len);
				p[sw]=l;
				}
			else
				{
				ew=(c->num>>2);
				ec=(c->num&0x03);
				l= p[sw];
				p_c2l(data,l,sc);
				p[sw++]=l;
				for (; sw < ew; sw++)
					{ c2l(data,l); p[sw]=l; }
				if (ec)
					{
					c2l_p(data,l,ec);
					p[sw]=l;
					}
				}
			return;
			}
		}
	/* we can now process the input data in blocks of RIPEMD160_CBLOCK
	 * chars and save the leftovers to c->data. */
#if BYTE_ORDER == LITTLE_ENDIAN
	if ((((unsigned long)data)%sizeof(u_int32_t)) == 0)
		{
		sw=(int)len/RIPEMD160_CBLOCK;
		if (sw > 0)
			{
			sw*=RIPEMD160_CBLOCK;
			ripemd160_block(c,(u_int32_t *)data,sw);
			data+=sw;
			len-=sw;
			}
		}
#endif
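	/*
	 * Copy each remaining whole block into c->data, byte-swapping on
	 * big-endian hosts, and hash it from there.
	 */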
	p=c->data;
	while (len >= RIPEMD160_CBLOCK)
		{
#if BYTE_ORDER == LITTLE_ENDIAN || BYTE_ORDER == BIG_ENDIAN
		if (p != (u_int32_t *)data)
			memcpy(p,data,RIPEMD160_CBLOCK);
		data+=RIPEMD160_CBLOCK;
#if BYTE_ORDER == BIG_ENDIAN
		for (sw=(RIPEMD160_LBLOCK/4); sw; sw--)
			{
			Endian_Reverse32(p[0]);
			Endian_Reverse32(p[1]);
			Endian_Reverse32(p[2]);
			Endian_Reverse32(p[3]);
			p+=4;
			}
#endif
#else
		for (sw=(RIPEMD160_LBLOCK/4); sw; sw--)
			{
			c2l(data,l); *(p++)=l;
			c2l(data,l); *(p++)=l;
			c2l(data,l); *(p++)=l;
			c2l(data,l); *(p++)=l;
			}
#endif
		p=c->data;
		ripemd160_block(c,p,64);
		len-=RIPEMD160_CBLOCK;
		}
	sc=(int)len;
	c->num=sc;
	if (sc)
		{
		sw=sc>>2;	/* words to copy */
#if BYTE_ORDER == LITTLE_ENDIAN
		p[sw]=0;
		memcpy(p,data,sc);
#else
		sc&=0x03;
		for ( ; sw; sw--)
			{ c2l(data,l); *(p++)=l; }
		c2l_p(data,l,sc);
		*p=l;
#endif
		}
	}

static void RIPEMD160_Transform(RIPEMD160_CTX *c, unsigned char *b)
	{
	u_int32_t p[16];
#if BYTE_ORDER != LITTLE_ENDIAN
	u_int32_t *q;
	int i;
#endif

#if BYTE_ORDER == BIG_ENDIAN || BYTE_ORDER == LITTLE_ENDIAN
	memcpy(p,b,64);
#if BYTE_ORDER == BIG_ENDIAN
	q=p;
	for (i=(RIPEMD160_LBLOCK/4); i; i--)
		{
		Endian_Reverse32(q[0]);
		Endian_Reverse32(q[1]);
		Endian_Reverse32(q[2]);
		Endian_Reverse32(q[3]);
		q+=4;
		}
#endif
#else
	q=p;
	for (i=(RIPEMD160_LBLOCK/4); i; i--)
		{
		u_int32_t l;
		c2l(b,l); *(q++)=l;
		c2l(b,l); *(q++)=l;
		c2l(b,l); *(q++)=l;
		c2l(b,l); *(q++)=l;
		}
#endif
	ripemd160_block(c,p,64);
	}

#ifndef RMD160_ASM

void ripemd160_block(RIPEMD160_CTX *ctx, const u_int32_t *X, int num)
	{
	u_int32_t A,B,C,D,E;
	u_int32_t a,b,c,d,e;

	for (;;)
		{
		A=ctx->A; B=ctx->B; C=ctx->C; D=ctx->D; E=ctx->E;

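	/* Left line: five rounds of sixteen steps each. */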
	RIP1(A,B,C,D,E,WL00,SL00);
	RIP1(E,A,B,C,D,WL01,SL01);
	RIP1(D,E,A,B,C,WL02,SL02);
	RIP1(C,D,E,A,B,WL03,SL03);
	RIP1(B,C,D,E,A,WL04,SL04);
	RIP1(A,B,C,D,E,WL05,SL05);
	RIP1(E,A,B,C,D,WL06,SL06);
	RIP1(D,E,A,B,C,WL07,SL07);
	RIP1(C,D,E,A,B,WL08,SL08);
	RIP1(B,C,D,E,A,WL09,SL09);
	RIP1(A,B,C,D,E,WL10,SL10);
	RIP1(E,A,B,C,D,WL11,SL11);
	RIP1(D,E,A,B,C,WL12,SL12);
	RIP1(C,D,E,A,B,WL13,SL13);
	RIP1(B,C,D,E,A,WL14,SL14);
	RIP1(A,B,C,D,E,WL15,SL15);

	RIP2(E,A,B,C,D,WL16,SL16,KL1);
	RIP2(D,E,A,B,C,WL17,SL17,KL1);
	RIP2(C,D,E,A,B,WL18,SL18,KL1);
	RIP2(B,C,D,E,A,WL19,SL19,KL1);
	RIP2(A,B,C,D,E,WL20,SL20,KL1);
	RIP2(E,A,B,C,D,WL21,SL21,KL1);
	RIP2(D,E,A,B,C,WL22,SL22,KL1);
	RIP2(C,D,E,A,B,WL23,SL23,KL1);
	RIP2(B,C,D,E,A,WL24,SL24,KL1);
	RIP2(A,B,C,D,E,WL25,SL25,KL1);
	RIP2(E,A,B,C,D,WL26,SL26,KL1);
	RIP2(D,E,A,B,C,WL27,SL27,KL1);
	RIP2(C,D,E,A,B,WL28,SL28,KL1);
	RIP2(B,C,D,E,A,WL29,SL29,KL1);
	RIP2(A,B,C,D,E,WL30,SL30,KL1);
	RIP2(E,A,B,C,D,WL31,SL31,KL1);

	RIP3(D,E,A,B,C,WL32,SL32,KL2);
	RIP3(C,D,E,A,B,WL33,SL33,KL2);
	RIP3(B,C,D,E,A,WL34,SL34,KL2);
	RIP3(A,B,C,D,E,WL35,SL35,KL2);
	RIP3(E,A,B,C,D,WL36,SL36,KL2);
	RIP3(D,E,A,B,C,WL37,SL37,KL2);
	RIP3(C,D,E,A,B,WL38,SL38,KL2);
	RIP3(B,C,D,E,A,WL39,SL39,KL2);
	RIP3(A,B,C,D,E,WL40,SL40,KL2);
	RIP3(E,A,B,C,D,WL41,SL41,KL2);
	RIP3(D,E,A,B,C,WL42,SL42,KL2);
	RIP3(C,D,E,A,B,WL43,SL43,KL2);
	RIP3(B,C,D,E,A,WL44,SL44,KL2);
	RIP3(A,B,C,D,E,WL45,SL45,KL2);
	RIP3(E,A,B,C,D,WL46,SL46,KL2);
	RIP3(D,E,A,B,C,WL47,SL47,KL2);

	RIP4(C,D,E,A,B,WL48,SL48,KL3);
	RIP4(B,C,D,E,A,WL49,SL49,KL3);
	RIP4(A,B,C,D,E,WL50,SL50,KL3);
	RIP4(E,A,B,C,D,WL51,SL51,KL3);
	RIP4(D,E,A,B,C,WL52,SL52,KL3);
	RIP4(C,D,E,A,B,WL53,SL53,KL3);
	RIP4(B,C,D,E,A,WL54,SL54,KL3);
	RIP4(A,B,C,D,E,WL55,SL55,KL3);
	RIP4(E,A,B,C,D,WL56,SL56,KL3);
	RIP4(D,E,A,B,C,WL57,SL57,KL3);
	RIP4(C,D,E,A,B,WL58,SL58,KL3);
	RIP4(B,C,D,E,A,WL59,SL59,KL3);
	RIP4(A,B,C,D,E,WL60,SL60,KL3);
	RIP4(E,A,B,C,D,WL61,SL61,KL3);
	RIP4(D,E,A,B,C,WL62,SL62,KL3);
	RIP4(C,D,E,A,B,WL63,SL63,KL3);

	RIP5(B,C,D,E,A,WL64,SL64,KL4);
	RIP5(A,B,C,D,E,WL65,SL65,KL4);
	RIP5(E,A,B,C,D,WL66,SL66,KL4);
	RIP5(D,E,A,B,C,WL67,SL67,KL4);
	RIP5(C,D,E,A,B,WL68,SL68,KL4);
	RIP5(B,C,D,E,A,WL69,SL69,KL4);
	RIP5(A,B,C,D,E,WL70,SL70,KL4);
	RIP5(E,A,B,C,D,WL71,SL71,KL4);
	RIP5(D,E,A,B,C,WL72,SL72,KL4);
	RIP5(C,D,E,A,B,WL73,SL73,KL4);
	RIP5(B,C,D,E,A,WL74,SL74,KL4);
	RIP5(A,B,C,D,E,WL75,SL75,KL4);
	RIP5(E,A,B,C,D,WL76,SL76,KL4);
	RIP5(D,E,A,B,C,WL77,SL77,KL4);
	RIP5(C,D,E,A,B,WL78,SL78,KL4);
	RIP5(B,C,D,E,A,WL79,SL79,KL4);

	a=A; b=B; c=C; d=D; e=E;
	/* Do other half */
	A=ctx->A; B=ctx->B; C=ctx->C; D=ctx->D; E=ctx->E;

	RIP5(A,B,C,D,E,WR00,SR00,KR0);
	RIP5(E,A,B,C,D,WR01,SR01,KR0);
	RIP5(D,E,A,B,C,WR02,SR02,KR0);
	RIP5(C,D,E,A,B,WR03,SR03,KR0);
	RIP5(B,C,D,E,A,WR04,SR04,KR0);
	RIP5(A,B,C,D,E,WR05,SR05,KR0);
	RIP5(E,A,B,C,D,WR06,SR06,KR0);
	RIP5(D,E,A,B,C,WR07,SR07,KR0);
	RIP5(C,D,E,A,B,WR08,SR08,KR0);
	RIP5(B,C,D,E,A,WR09,SR09,KR0);
	RIP5(A,B,C,D,E,WR10,SR10,KR0);
	RIP5(E,A,B,C,D,WR11,SR11,KR0);
	RIP5(D,E,A,B,C,WR12,SR12,KR0);
	RIP5(C,D,E,A,B,WR13,SR13,KR0);
	RIP5(B,C,D,E,A,WR14,SR14,KR0);
	RIP5(A,B,C,D,E,WR15,SR15,KR0);

	RIP4(E,A,B,C,D,WR16,SR16,KR1);
	RIP4(D,E,A,B,C,WR17,SR17,KR1);
	RIP4(C,D,E,A,B,WR18,SR18,KR1);
	RIP4(B,C,D,E,A,WR19,SR19,KR1);
	RIP4(A,B,C,D,E,WR20,SR20,KR1);
	RIP4(E,A,B,C,D,WR21,SR21,KR1);
	RIP4(D,E,A,B,C,WR22,SR22,KR1);
	RIP4(C,D,E,A,B,WR23,SR23,KR1);
	RIP4(B,C,D,E,A,WR24,SR24,KR1);
	RIP4(A,B,C,D,E,WR25,SR25,KR1);
	RIP4(E,A,B,C,D,WR26,SR26,KR1);
	RIP4(D,E,A,B,C,WR27,SR27,KR1);
	RIP4(C,D,E,A,B,WR28,SR28,KR1);
	RIP4(B,C,D,E,A,WR29,SR29,KR1);
	RIP4(A,B,C,D,E,WR30,SR30,KR1);
	RIP4(E,A,B,C,D,WR31,SR31,KR1);

	RIP3(D,E,A,B,C,WR32,SR32,KR2);
	RIP3(C,D,E,A,B,WR33,SR33,KR2);
	RIP3(B,C,D,E,A,WR34,SR34,KR2);
	RIP3(A,B,C,D,E,WR35,SR35,KR2);
	RIP3(E,A,B,C,D,WR36,SR36,KR2);
	RIP3(D,E,A,B,C,WR37,SR37,KR2);
	RIP3(C,D,E,A,B,WR38,SR38,KR2);
	RIP3(B,C,D,E,A,WR39,SR39,KR2);
	RIP3(A,B,C,D,E,WR40,SR40,KR2);
	RIP3(E,A,B,C,D,WR41,SR41,KR2);
	RIP3(D,E,A,B,C,WR42,SR42,KR2);
	RIP3(C,D,E,A,B,WR43,SR43,KR2);
	RIP3(B,C,D,E,A,WR44,SR44,KR2);
	RIP3(A,B,C,D,E,WR45,SR45,KR2);
	RIP3(E,A,B,C,D,WR46,SR46,KR2);
	RIP3(D,E,A,B,C,WR47,SR47,KR2);

	RIP2(C,D,E,A,B,WR48,SR48,KR3);
	RIP2(B,C,D,E,A,WR49,SR49,KR3);
	RIP2(A,B,C,D,E,WR50,SR50,KR3);
	RIP2(E,A,B,C,D,WR51,SR51,KR3);
	RIP2(D,E,A,B,C,WR52,SR52,KR3);
	RIP2(C,D,E,A,B,WR53,SR53,KR3);
	RIP2(B,C,D,E,A,WR54,SR54,KR3);
	RIP2(A,B,C,D,E,WR55,SR55,KR3);
	RIP2(E,A,B,C,D,WR56,SR56,KR3);
	RIP2(D,E,A,B,C,WR57,SR57,KR3);
	RIP2(C,D,E,A,B,WR58,SR58,KR3);
	RIP2(B,C,D,E,A,WR59,SR59,KR3);
	RIP2(A,B,C,D,E,WR60,SR60,KR3);
	RIP2(E,A,B,C,D,WR61,SR61,KR3);
	RIP2(D,E,A,B,C,WR62,SR62,KR3);
	RIP2(C,D,E,A,B,WR63,SR63,KR3);

	RIP1(B,C,D,E,A,WR64,SR64);
	RIP1(A,B,C,D,E,WR65,SR65);
	RIP1(E,A,B,C,D,WR66,SR66);
	RIP1(D,E,A,B,C,WR67,SR67);
	RIP1(C,D,E,A,B,WR68,SR68);
	RIP1(B,C,D,E,A,WR69,SR69);
	RIP1(A,B,C,D,E,WR70,SR70);
	RIP1(E,A,B,C,D,WR71,SR71);
	RIP1(D,E,A,B,C,WR72,SR72);
	RIP1(C,D,E,A,B,WR73,SR73);
	RIP1(B,C,D,E,A,WR74,SR74);
	RIP1(A,B,C,D,E,WR75,SR75);
	RIP1(E,A,B,C,D,WR76,SR76);
	RIP1(D,E,A,B,C,WR77,SR77);
	RIP1(C,D,E,A,B,WR78,SR78);
	RIP1(B,C,D,E,A,WR79,SR79);

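	/* Fold the two parallel lines back into the chaining state. */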
	D     =ctx->B+c+D;
	ctx->B=ctx->C+d+E;
	ctx->C=ctx->D+e+A;
	ctx->D=ctx->E+a+B;
	ctx->E=ctx->A+b+C;
	ctx->A=D;

	X+=16;
	num-=64;
	if (num <= 0) break;
		}
	}
#endif

void RIPEMD160_Final(unsigned char *md, RIPEMD160_CTX *c)
	{
	int i,j;
	u_int32_t l;
	u_int32_t *p;
	static unsigned char end[4]={0x80,0x00,0x00,0x00};
	unsigned char *cp=end;

	/* c->num should definitely have room for at least one more byte. */
	p=c->data;
	j=c->num;
	i=j>>2;

	/* purify often complains about the following line as an
	 * Uninitialized Memory Read.  While this can be true, the
	 * following p_c2l macro will reset l when that case is true.
	 * This is because j&0x03 contains the number of 'valid' bytes
	 * already in p[i].  If and only if j&0x03 == 0, the UMR will
	 * occur but this is also the only time p_c2l will do
	 * l= *(cp++) instead of l|= *(cp++)
	 * Many thanks to Alex Tang <altitude@cic.net> for picking up this
	 * 'potential bug' */
#ifdef PURIFY
	if ((j&0x03) == 0) p[i]=0;
#endif
	l=p[i];
	p_c2l(cp,l,j&0x03);
	p[i]=l;
	i++;
	/* i is the next 'undefined word' */
	if (c->num >= RIPEMD160_LAST_BLOCK)
		{
		for (; i<RIPEMD160_LBLOCK; i++)
			p[i]=0;
		ripemd160_block(c,p,64);
		i=0;
		}
	for (; i<(RIPEMD160_LBLOCK-2); i++)
		p[i]=0;
	p[RIPEMD160_LBLOCK-2]=c->Nl;
	p[RIPEMD160_LBLOCK-1]=c->Nh;
	ripemd160_block(c,p,64);
	cp=md;
	l=c->A; l2c(l,cp);
	l=c->B; l2c(l,cp);
	l=c->C; l2c(l,cp);
	l=c->D; l2c(l,cp);
	l=c->E; l2c(l,cp);

	/* Clear the context state */
	explicit_bzero(c, sizeof(*c));
	}

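/*
 * A minimal usage sketch (an editorial addition, not part of the
 * original SSLeay code): it exercises only the RIPEMD160_Init/Update/
 * Final API defined above.  The RMD160_EXAMPLE guard is a hypothetical
 * macro name; the block is disabled by default, like printit() below.
 */
#ifdef RMD160_EXAMPLE
static void ripemd160_example(void)
	{
	RIPEMD160_CTX ctx;
	unsigned char md[20];	/* RIPEMD-160 emits a 160-bit (20-byte) digest */
	int i;

	RIPEMD160_Init(&ctx);
	/* Update may be called any number of times with partial input. */
	RIPEMD160_Update(&ctx, "abc", 3);
	RIPEMD160_Final(md, &ctx);	/* finalizes and wipes the context */

	/* For "abc" this prints the standard test vector
	 * 8eb208f7e05d987a9b044a8e98c6b087f15a0bfc. */
	for (i = 0; i < 20; i++)
		printf("%02x", md[i]);
	printf("\n");
	}
#endif
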
#ifdef undef
void printit(unsigned long *l)
	{
	int i,ii;

	for (i=0; i<2; i++)
		{
		for (ii=0; ii<8; ii++)
			{
			fprintf(stderr,"%08lx ",l[i*8+ii]);
			}
		fprintf(stderr,"\n");
		}
	}
#endif

#ifdef WEAK_REFS
/* When building libmd, provide weak references. Note: this is not
   activated in the context of compiling these sources for internal
   use in libcrypt.
 */
#undef RIPEMD160_Init
__weak_reference(_libmd_RIPEMD160_Init, RIPEMD160_Init);
#undef RIPEMD160_Update
__weak_reference(_libmd_RIPEMD160_Update, RIPEMD160_Update);
#undef RIPEMD160_Final
__weak_reference(_libmd_RIPEMD160_Final, RIPEMD160_Final);
#endif