/*
 *  Copyright (C) 2021 - This file is part of libecc project
 *
 *  Authors:
 *      Ryad BENADJILA <ryadbenadjila@gmail.com>
 *      Arnaud EBALARD <arnaud.ebalard@ssi.gouv.fr>
 *
 *  This software is licensed under a dual BSD and GPL v2 license.
 *  See LICENSE file at the root folder of the project.
 */
#include "sha0.h"

/* Left rotation of a 32-bit word by n bits */
#define ROTL_SHA0(x, n) ((((u32)(x)) << (n)) | (((u32)(x)) >> (32-(n))))

/* All the inner SHA-0 operations */
#define K1_SHA0 0x5a827999
#define K2_SHA0 0x6ed9eba1
#define K3_SHA0 0x8f1bbcdc
#define K4_SHA0 0xca62c1d6

#define F1_SHA0(x, y, z) ((z) ^ ((x) & ((y) ^ (z))))
#define F2_SHA0(x, y, z) ((x) ^ (y) ^ (z))
#define F3_SHA0(x, y, z) (((x) & (y)) | ((z) & ((x) | (y))))
#define F4_SHA0(x, y, z) ((x) ^ (y) ^ (z))

#define SHA0_EXPAND(W, i) (W[i & 15] = (W[i & 15] ^ W[(i - 14) & 15] ^ W[(i - 8) & 15] ^ W[(i - 3) & 15]))
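
/*
 * Note: the message expansion above is the only place where SHA-0 differs
 * from SHA-1. SHA-1 additionally rotates the expanded word left by one bit,
 * SHA-0 does not, which is what makes SHA-0 much weaker against collision
 * attacks.
 */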

#define SHA0_SUBROUND(a, b, c, d, e, F, K, data) do { \
	u32 A_, B_, C_, D_, E_; \
	A_ = (e + ROTL_SHA0(a, 5) + F(b, c, d) + K + data); \
	B_ = a; \
	C_ = ROTL_SHA0(b, 30); \
	D_ = c; \
	E_ = d; \
	/**/ \
	a = A_; b = B_; c = C_; d = D_; e = E_; \
} while(0)
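
/*
 * One SHA-0 round: with F the current round function and K the current round
 * constant, the working state is updated as
 *   (a, b, c, d, e) <- (ROTL5(a) + F(b, c, d) + e + K + W[t], a, ROTL30(b), c, d)
 * which is exactly what SHA0_SUBROUND performs through its temporaries.
 */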

/* SHA-0 core processing. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET static inline int sha0_process(sha0_context *ctx,
					const u8 data[SHA0_BLOCK_SIZE])
{
	u32 A, B, C, D, E;
	u32 W[16];
	int ret;
	unsigned int i;

	MUST_HAVE((data != NULL), ret, err);
	SHA0_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Init our inner variables */
	A = ctx->sha0_state[0];
	B = ctx->sha0_state[1];
	C = ctx->sha0_state[2];
	D = ctx->sha0_state[3];
	E = ctx->sha0_state[4];

	/* Load data */
	for (i = 0; i < 16; i++) {
		GET_UINT32_BE(W[i], data, (4 * i));
	}
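
	/*
	 * 80 rounds, split into four groups of 20 that each use their own
	 * round function (F1..F4) and constant (K1..K4). The first 16 rounds
	 * consume the message words directly, the remaining ones use the
	 * expanded schedule.
	 */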
	for (i = 0; i < 80; i++) {
		if(i <= 15){
			SHA0_SUBROUND(A, B, C, D, E, F1_SHA0, K1_SHA0, W[i]);
		}
		else if((i >= 16) && (i <= 19)){
			SHA0_SUBROUND(A, B, C, D, E, F1_SHA0, K1_SHA0, SHA0_EXPAND(W, i));
		}
		else if((i >= 20) && (i <= 39)){
			SHA0_SUBROUND(A, B, C, D, E, F2_SHA0, K2_SHA0, SHA0_EXPAND(W, i));
		}
		else if((i >= 40) && (i <= 59)){
			SHA0_SUBROUND(A, B, C, D, E, F3_SHA0, K3_SHA0, SHA0_EXPAND(W, i));
		}
		else{
			SHA0_SUBROUND(A, B, C, D, E, F4_SHA0, K4_SHA0, SHA0_EXPAND(W, i));
		}
	}

	/* Update state */
	ctx->sha0_state[0] += A;
	ctx->sha0_state[1] += B;
	ctx->sha0_state[2] += C;
	ctx->sha0_state[3] += D;
	ctx->sha0_state[4] += E;

	ret = 0;

err:
	return ret;
}

/* Init hash function. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int sha0_init(sha0_context *ctx)
{
	int ret;

	MUST_HAVE((ctx != NULL), ret, err);

	/* Sanity check on size */
	MUST_HAVE((SHA0_DIGEST_SIZE <= MAX_DIGEST_SIZE), ret, err);

	/* Initialize the state with the standard SHA-0 IV (identical to SHA-1's) */
	ctx->sha0_total = 0;
	ctx->sha0_state[0] = 0x67452301;
	ctx->sha0_state[1] = 0xefcdab89;
	ctx->sha0_state[2] = 0x98badcfe;
	ctx->sha0_state[3] = 0x10325476;
	ctx->sha0_state[4] = 0xc3d2e1f0;

	/* Tell that we are initialized */
	ctx->magic = SHA0_HASH_MAGIC;

	ret = 0;

err:
	return ret;
}

/* Update hash function with input data. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int sha0_update(sha0_context *ctx, const u8 *input, u32 ilen)
{
	const u8 *data_ptr = input;
	u32 remain_ilen = ilen;
	u16 fill;
	u8 left;
	int ret;

	MUST_HAVE((input != NULL) || (ilen == 0), ret, err);
	SHA0_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Nothing to process, return */
	if (ilen == 0) {
		ret = 0;
		goto err;
	}

	/* Get what's left in our local buffer */
	left = (ctx->sha0_total & 0x3F);
	fill = (u16)(SHA0_BLOCK_SIZE - left);

	ctx->sha0_total += ilen;

	if ((left > 0) && (remain_ilen >= fill)) {
		/* Copy data at the end of the buffer */
		ret = local_memcpy(ctx->sha0_buffer + left, data_ptr, fill); EG(ret, err);
		ret = sha0_process(ctx, ctx->sha0_buffer); EG(ret, err);
		data_ptr += fill;
		remain_ilen -= fill;
		left = 0;
	}

	/* Process all the full blocks directly from the input */
	while (remain_ilen >= SHA0_BLOCK_SIZE) {
		ret = sha0_process(ctx, data_ptr); EG(ret, err);
		data_ptr += SHA0_BLOCK_SIZE;
		remain_ilen -= SHA0_BLOCK_SIZE;
	}

	/* Buffer the remaining bytes until we have a full block */
	if (remain_ilen > 0) {
		ret = local_memcpy(ctx->sha0_buffer + left, data_ptr, remain_ilen); EG(ret, err);
	}

	ret = 0;

err:
	return ret;
}

/* Finalize. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int sha0_final(sha0_context *ctx, u8 output[SHA0_DIGEST_SIZE])
{
	unsigned int block_present = 0;
	u8 last_padded_block[2 * SHA0_BLOCK_SIZE];
	int ret;

	MUST_HAVE((output != NULL), ret, err);
	SHA0_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Fill in our last block with zeroes */
	ret = local_memset(last_padded_block, 0, sizeof(last_padded_block)); EG(ret, err);

	/* This is our final step, so we proceed with the padding */
	block_present = ctx->sha0_total % SHA0_BLOCK_SIZE;
	if (block_present != 0) {
		/* Copy what's left in our temporary context buffer */
		ret = local_memcpy(last_padded_block, ctx->sha0_buffer,
				   block_present); EG(ret, err);
	}

	/* Put the 0x80 byte, beginning of padding */
	last_padded_block[block_present] = 0x80;

	/* Handle possible additional block */
	if (block_present > (SHA0_BLOCK_SIZE - 1 - sizeof(u64))) {
		/* We need an additional block */
		PUT_UINT64_BE(8 * ctx->sha0_total, last_padded_block,
			      (2 * SHA0_BLOCK_SIZE) - sizeof(u64));
		ret = sha0_process(ctx, last_padded_block); EG(ret, err);
		ret = sha0_process(ctx, last_padded_block + SHA0_BLOCK_SIZE); EG(ret, err);
	} else {
		/* We do not need an additional block */
		PUT_UINT64_BE(8 * ctx->sha0_total, last_padded_block,
			      SHA0_BLOCK_SIZE - sizeof(u64));
		ret = sha0_process(ctx, last_padded_block); EG(ret, err);
	}

	/* Output the hash result */
	PUT_UINT32_BE(ctx->sha0_state[0], output, 0);
	PUT_UINT32_BE(ctx->sha0_state[1], output, 4);
	PUT_UINT32_BE(ctx->sha0_state[2], output, 8);
	PUT_UINT32_BE(ctx->sha0_state[3], output, 12);
	PUT_UINT32_BE(ctx->sha0_state[4], output, 16);

	/* Tell that we are uninitialized */
	ctx->magic = WORD(0);

	ret = 0;

err:
	return ret;
}
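
/*
 * Illustrative streaming usage of the init/update/final API (a sketch only;
 * the message split below is an arbitrary example, and EG() jumps to a local
 * err label on failure, as used throughout this file):
 *
 *	sha0_context ctx;
 *	const u8 part1[] = { 'a', 'b', 'c' };
 *	const u8 part2[] = { 'd', 'e', 'f' };
 *	u8 digest[SHA0_DIGEST_SIZE];
 *	int ret;
 *
 *	ret = sha0_init(&ctx); EG(ret, err);
 *	ret = sha0_update(&ctx, part1, sizeof(part1)); EG(ret, err);
 *	ret = sha0_update(&ctx, part2, sizeof(part2)); EG(ret, err);
 *	ret = sha0_final(&ctx, digest); EG(ret, err);
 */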


/*
 * Scattered version performing init/update/finalize on a vector of buffers
 * 'inputs' with the length of each buffer passed via 'ilens'. The function
 * loops on the pointers in 'inputs' until it finds a NULL pointer.
 * Returns 0 on success, -1 on error.
 */
ATTRIBUTE_WARN_UNUSED_RET int sha0_scattered(const u8 **inputs, const u32 *ilens,
					     u8 output[SHA0_DIGEST_SIZE])
{
	sha0_context ctx;
	int ret, pos = 0;

	MUST_HAVE((inputs != NULL) && (ilens != NULL) && (output != NULL), ret, err);

	ret = sha0_init(&ctx); EG(ret, err);

	while (inputs[pos] != NULL) {
		ret = sha0_update(&ctx, inputs[pos], ilens[pos]); EG(ret, err);
		pos += 1;
	}

	ret = sha0_final(&ctx, output);

err:
	return ret;
}
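
/*
 * Illustrative use of the scattered API (a sketch only; the buffers and
 * their contents are arbitrary examples, not part of the library):
 *
 *	const u8 part1[] = { 'a', 'b', 'c' };
 *	const u8 part2[] = { 'd', 'e', 'f' };
 *	const u8 *inputs[] = { part1, part2, NULL };	(NULL-terminated vector)
 *	const u32 ilens[]  = { 3, 3 };
 *	u8 digest[SHA0_DIGEST_SIZE];
 *	int ret;
 *
 *	ret = sha0_scattered(inputs, ilens, digest);
 *	(on success, digest holds the SHA-0 hash of the concatenated buffers)
 */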

/*
 * Single call version performing init/update/final on the given input.
 * Returns 0 on success, -1 on error.
 */
ATTRIBUTE_WARN_UNUSED_RET int sha0(const u8 *input, u32 ilen, u8 output[SHA0_DIGEST_SIZE])
{
	sha0_context ctx;
	int ret;

	ret = sha0_init(&ctx); EG(ret, err);
	ret = sha0_update(&ctx, input, ilen); EG(ret, err);
	ret = sha0_final(&ctx, output);

err:
	return ret;
}
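
/*
 * Illustrative one-shot usage (a sketch only; the message is an arbitrary
 * example):
 *
 *	const u8 msg[] = { 'a', 'b', 'c' };
 *	u8 digest[SHA0_DIGEST_SIZE];
 *	int ret;
 *
 *	ret = sha0(msg, sizeof(msg), digest);
 *	(on success, digest holds the 20-byte SHA-0 digest of the message)
 */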