// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
/* Copyright (C) 2016-2022 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */
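
/*
 * Illustrative usage sketch (comment only): it assumes the declarations from
 * <linux/siphash.h> and keys filled with random bytes, e.g. via
 * get_random_bytes(); "key", "hkey", "buf" and "buf_len" are hypothetical
 * names.
 *
 *	siphash_key_t key;
 *	hsiphash_key_t hkey;
 *
 *	get_random_bytes(&key, sizeof(key));
 *	get_random_bytes(&hkey, sizeof(hkey));
 *	u64 tag = siphash(buf, buf_len, &key);     // secure PRF
 *	u32 bkt = hsiphash(buf, buf_len, &hkey);   // hashtable use only
 */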

#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif

#define EXPORT_SYMBOL(name)

#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)

#define PREAMBLE(len) \
	u64 v0 = SIPHASH_CONST_0; \
	u64 v1 = SIPHASH_CONST_1; \
	u64 v2 = SIPHASH_CONST_2; \
	u64 v3 = SIPHASH_CONST_3; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
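
/*
 * Taken together, these macros are the SipHash-2-4 schedule: PREAMBLE keys
 * the four state words and places the input length in the top byte of "b",
 * each message word is absorbed with two SIPROUNDs in the functions below,
 * and POSTAMBLE runs the final two compression rounds plus four finalization
 * rounds before folding the state into the 64-bit result.
 */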

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u64 __siphash_aligned(const void *_data, size_t len, const siphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif

u64 __siphash_unaligned(const void *_data, size_t len, const siphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);

/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);

/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);

/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);

/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: fourth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);

u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);

u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);

#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */

#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
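
/*
 * As the note above says, on 64-bit this HPOSTAMBLE is simply SipHash-1-3:
 * one compression round for the final length-bearing word and three
 * finalization rounds. The 64-bit fold is truncated to 32 bits by the u32
 * return type of the hsiphash functions below.
 */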

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *_data, size_t len, const hsiphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *_data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

#define HPREAMBLE(len) \
	u32 v0 = HSIPHASH_CONST_0; \
	u32 v1 = HSIPHASH_CONST_1; \
	u32 v2 = HSIPHASH_CONST_2; \
	u32 v3 = HSIPHASH_CONST_3; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;
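
/*
 * This is the genuine HalfSipHash-1-3 path: 32-bit state words and constants,
 * the length recorded in the top byte of a 32-bit "b", one compression round
 * per 32-bit word and three finalization rounds, with v1 ^ v3 as the 32-bit
 * result.
 */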

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *_data, size_t len, const hsiphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *_data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *data = _data;
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif