/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This file is provided under a dual BSD/GPLv2 license.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */

#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif

/* One SipHash ARX round over the four 64-bit state words. */
#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)

/* Initialize the four state words from the per-boot constants XORed with the
 * 128-bit key, and seed b with the input length in the top byte (SipHash's
 * length encoding for the final message word).
 */
#define PREAMBLE(len) \
	u64 v0 = SIPHASH_CONST_0; \
	u64 v1 = SIPHASH_CONST_1; \
	u64 v2 = SIPHASH_CONST_2; \
	u64 v3 = SIPHASH_CONST_3; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/* Absorb the final word b with c=2 compression rounds, then run the d=4
 * finalization rounds (the "2-4" in SipHash-2-4) and fold the state to u64.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* SipHash-2-4 of an arbitrary buffer; this variant reads with le64_to_cpup(),
 * so data must be u64-aligned. Only built when the arch lacks efficient
 * unaligned access (otherwise __siphash_unaligned serves both cases).
 */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single word-at-a-time read of the tail; bytemask_from_count()
	 * zeroes the bytes beyond len, and load_unaligned_zeropad() makes
	 * the potential over-read past the buffer safe.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail pickup; after the loop data == end, so the 32/16-bit
	 * loads below read the start of the tail.
	 */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif

/* SipHash-2-4 of an arbitrary buffer with no alignment requirement. */
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0..7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Word-at-a-time tail read; mask clears bytes past len. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);

/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);

/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);

/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);

/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: forth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);

/* 4 bytes fit entirely in the length/tail word b; no compression round. */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);

/* 12 bytes: one full u64 word plus a 4-byte tail carried in b. */
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);

#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */

#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
/* SipHash-1-3 finalization: c=1 compression round, d=3 finalization rounds. */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* hsiphash of a buffer; aligned-read variant (see __siphash_aligned). */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Word-at-a-time tail read; mask clears bytes past len. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* After the loop data == end, so the 32/16-bit loads read the tail. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

/* hsiphash of a buffer with no alignment requirement. */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
/* 32-bit build: genuine HalfSipHash-1-3 over a 4 x u32 state. */
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

/* 32-bit analogue of PREAMBLE: length goes in the top byte of the u32 b. */
#define HPREAMBLE(len) \
	u32 v0 = HSIPHASH_CONST_0; \
	u32 v1 = HSIPHASH_CONST_1; \
	u32 v2 = HSIPHASH_CONST_2; \
	u32 v3 = HSIPHASH_CONST_3; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/* HalfSipHash-1-3 finalization; folds the state to v1 ^ v3. */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* hsiphash of a buffer; aligned-read variant, 4-byte words on 32-bit. */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* end marks the last full 4-byte word; left is the 0..3 byte tail. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* After the loop data == end, so the 16-bit load reads the tail. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

/* hsiphash of a buffer with no alignment requirement (32-bit build). */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif