/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This file is provided under a dual BSD/GPLv2 license.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation provides SipHash2-4, a secure PRF, and
 * HalfSipHash1-3/SipHash1-3, faster but insecure PRFs that are suitable
 * only for hashtables.
 */
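
/*
 * Example usage (an illustrative sketch, not part of this file): hashing a
 * buffer through the siphash() helper declared in <linux/siphash.h>. The
 * names buf and buf_len are hypothetical.
 *
 *	siphash_key_t key;
 *	u64 digest;
 *
 *	get_random_bytes(&key, sizeof(key));
 *	digest = siphash(buf, buf_len, &key);
 */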

#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif

/* One ARX round of SipHash, mixing the four 64-bit state words. */
#define SIPROUND \
	do { \
	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32); \
	v2 += v3; v3 = rol64(v3, 16); v3 ^= v2; \
	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0; \
	v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32); \
	} while (0)

/* Initialize the state from the standard SipHash constants XORed with the
 * key, and stage the total length in the top byte of the final block b.
 */
#define PREAMBLE(len) \
	u64 v0 = 0x736f6d6570736575ULL; \
	u64 v1 = 0x646f72616e646f6dULL; \
	u64 v2 = 0x6c7967656e657261ULL; \
	u64 v3 = 0x7465646279746573ULL; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/* Absorb the final block (2 compression rounds), then xor 0xff into v2 and
 * run the 4 finalization rounds that give SipHash2-4 its name.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;
		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;
		/* fall through */
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16;
		/* fall through */
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;
		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;
		/* fall through */
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;
		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
#endif

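/*
 * Note: callers normally reach the two entry points above through the
 * siphash() helper in <linux/siphash.h>, which (roughly, a sketch rather
 * than a verbatim copy of the header) does:
 *
 *	if (!IS_ALIGNED((unsigned long)data, SIPHASH_ALIGNMENT))
 *		return __siphash_unaligned(data, len, key);
 *	return __siphash_aligned(data, len, key);
 *
 * with the alignment check compiled out on architectures that set
 * CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS.
 */
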
/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);

/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);

/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);

/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: fourth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);

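/*
 * Example (an illustrative sketch): for inputs that are a fixed number of
 * words, the helpers above avoid the generic length loop. Hashing a
 * hypothetical pair of 64-bit addresses, assuming saddr, daddr and key are
 * in scope:
 *
 *	u64 id = siphash_2u64(saddr, daddr, &key);
 */
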
/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);

/**
 * siphash_3u32 - compute 64-bit siphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the siphash key
 */
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);

#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons; callers still receive only a 32-bit result. On
 * 32-bit, below, we actually implement HalfSipHash1-3.
 */

#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
/* Same shape as POSTAMBLE, but with the 1 compression + 3 finalization
 * rounds of SipHash1-3.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;
		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;
		/* fall through */
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16;
		/* fall through */
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
		/* fall through */
	case 6: b |= ((u64)end[5]) << 40;
		/* fall through */
	case 5: b |= ((u64)end[4]) << 32;
		/* fall through */
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;
		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
#endif

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

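/*
 * Example (an illustrative sketch): picking a hashtable bucket, the use
 * case hsiphash is intended for. HT_BITS is a hypothetical table-size
 * constant.
 *
 *	u32 bucket = hsiphash_1u32(id, &key) & ((1U << HT_BITS) - 1);
 */
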
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
/* One ARX round of HalfSipHash, mixing the four 32-bit state words. */
#define HSIPROUND \
	do { \
	v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16); \
	v2 += v3; v3 = rol32(v3, 8); v3 ^= v2; \
	v0 += v3; v3 = rol32(v3, 7); v3 ^= v0; \
	v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16); \
	} while (0)

/* 32-bit analogue of PREAMBLE, using the HalfSipHash constants. */
#define HPREAMBLE(len) \
	u32 v0 = 0; \
	u32 v1 = 0; \
	u32 v2 = 0x6c796765U; \
	u32 v3 = 0x74656462U; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;

u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;
		/* fall through */
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;
		/* fall through */
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
#endif

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif
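
/*
 * Self-test note: assuming the tree this listing was taken from matches
 * mainline of the same era, test vectors for both siphash and hsiphash
 * live in lib/test_siphash.c, built when CONFIG_TEST_HASH is enabled.
 */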