linux/lib/siphash.c
// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
/* Copyright (C) 2016-2022 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */
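
/*
 * Illustrative usage (a sketch, not part of this file's logic): callers
 * normally generate a random key once and reuse it for many hashes. The
 * siphash() and hsiphash() wrappers are declared in <linux/siphash.h>;
 * "buf" and "len" below are hypothetical caller-supplied values.
 *
 *	siphash_key_t skey;
 *	hsiphash_key_t hkey;
 *
 *	get_random_bytes(&skey, sizeof(skey));
 *	get_random_bytes(&hkey, sizeof(hkey));
 *	u64 tag  = siphash(buf, len, &skey);	// secure 64-bit PRF
 *	u32 slot = hsiphash(buf, len, &hkey);	// hashtable-only PRF
 */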

#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif

#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)
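
/*
 * SIPROUND expands to SIPHASH_PERMUTATION() from <linux/siphash.h>, the
 * standard SipHash ARX round over the four 64-bit lanes, roughly:
 *
 *	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32);
 *	v2 += v3; v3 = rol64(v3, 16); v3 ^= v2;
 *	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0;
 *	v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32);
 *
 * (see the header for the authoritative definition).
 */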

#define PREAMBLE(len) \
        u64 v0 = SIPHASH_CONST_0; \
        u64 v1 = SIPHASH_CONST_1; \
        u64 v2 = SIPHASH_CONST_2; \
        u64 v3 = SIPHASH_CONST_3; \
        u64 b = ((u64)(len)) << 56; \
        v3 ^= key->key[1]; \
        v2 ^= key->key[0]; \
        v1 ^= key->key[1]; \
        v0 ^= key->key[0];

#define POSTAMBLE \
        v3 ^= b; \
        SIPROUND; \
        SIPROUND; \
        v0 ^= b; \
        v2 ^= 0xff; \
        SIPROUND; \
        SIPROUND; \
        SIPROUND; \
        SIPROUND; \
        return (v0 ^ v1) ^ (v2 ^ v3);

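/*
 * The helpers below all follow the same shape: PREAMBLE() xors the key into
 * the four initialization constants and packs the message length into the
 * top byte of b; every 64-bit message word is absorbed with two SIPROUNDs;
 * POSTAMBLE absorbs b the same way, xors 0xff into v2, runs four more
 * rounds and folds the state down to a 64-bit result. Two compression
 * rounds plus four finalization rounds is what makes this SipHash2-4.
 */
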
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        u64 m;
        PREAMBLE(len)
        for (; data != end; data += sizeof(u64)) {
                m = le64_to_cpup(data);
                v3 ^= m;
                SIPROUND;
                SIPROUND;
                v0 ^= m;
        }
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
        if (left)
                b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
                                                  bytemask_from_count(left)));
#else
        switch (left) {
        case 7: b |= ((u64)end[6]) << 48; fallthrough;
        case 6: b |= ((u64)end[5]) << 40; fallthrough;
        case 5: b |= ((u64)end[4]) << 32; fallthrough;
        case 4: b |= le32_to_cpup(data); break;
        case 3: b |= ((u64)end[2]) << 16; fallthrough;
        case 2: b |= le16_to_cpup(data); break;
        case 1: b |= end[0];
        }
#endif
        POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif

u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        u64 m;
        PREAMBLE(len)
        for (; data != end; data += sizeof(u64)) {
                m = get_unaligned_le64(data);
                v3 ^= m;
                SIPROUND;
                SIPROUND;
                v0 ^= m;
        }
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
        if (left)
                b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
                                                  bytemask_from_count(left)));
#else
        switch (left) {
        case 7: b |= ((u64)end[6]) << 48; fallthrough;
        case 6: b |= ((u64)end[5]) << 40; fallthrough;
        case 5: b |= ((u64)end[4]) << 32; fallthrough;
        case 4: b |= get_unaligned_le32(end); break;
        case 3: b |= ((u64)end[2]) << 16; fallthrough;
        case 2: b |= get_unaligned_le16(end); break;
        case 1: b |= end[0];
        }
#endif
        POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
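
/*
 * Callers normally go through the siphash() wrapper in <linux/siphash.h>,
 * which picks between the aligned and unaligned bulk routines above based
 * on buffer alignment; on architectures with efficient unaligned access
 * the aligned variant is not built at all (hence the #ifndef above).
 */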

/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
        PREAMBLE(8)
        v3 ^= first;
        SIPROUND;
        SIPROUND;
        v0 ^= first;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);

/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
        PREAMBLE(16)
        v3 ^= first;
        SIPROUND;
        SIPROUND;
        v0 ^= first;
        v3 ^= second;
        SIPROUND;
        SIPROUND;
        v0 ^= second;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);

/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
                 const siphash_key_t *key)
{
        PREAMBLE(24)
        v3 ^= first;
        SIPROUND;
        SIPROUND;
        v0 ^= first;
        v3 ^= second;
        SIPROUND;
        SIPROUND;
        v0 ^= second;
        v3 ^= third;
        SIPROUND;
        SIPROUND;
        v0 ^= third;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);

/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: fourth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
                 const u64 forth, const siphash_key_t *key)
{
        PREAMBLE(32)
        v3 ^= first;
        SIPROUND;
        SIPROUND;
        v0 ^= first;
        v3 ^= second;
        SIPROUND;
        SIPROUND;
        v0 ^= second;
        v3 ^= third;
        SIPROUND;
        SIPROUND;
        v0 ^= third;
        v3 ^= forth;
        SIPROUND;
        SIPROUND;
        v0 ^= forth;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);

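/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */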
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
        PREAMBLE(4)
        b |= first;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);

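/**
 * siphash_3u32 - compute 64-bit siphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the siphash key
 */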
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
                 const siphash_key_t *key)
{
        u64 combined = (u64)second << 32 | first;
        PREAMBLE(12)
        v3 ^= combined;
        SIPROUND;
        SIPROUND;
        v0 ^= combined;
        b |= third;
        POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);

#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */
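
/*
 * Concretely: in this branch HSIPROUND is the full 64-bit SIPROUND, each
 * message word is absorbed with a single round, and HPOSTAMBLE runs one
 * round for the length block plus three finalization rounds, i.e.
 * SipHash1-3 truncated to 32 bits by the u32 return type.
 */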

#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
#define HPOSTAMBLE \
        v3 ^= b; \
        HSIPROUND; \
        v0 ^= b; \
        v2 ^= 0xff; \
        HSIPROUND; \
        HSIPROUND; \
        HSIPROUND; \
        return (v0 ^ v1) ^ (v2 ^ v3);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        u64 m;
        HPREAMBLE(len)
        for (; data != end; data += sizeof(u64)) {
                m = le64_to_cpup(data);
                v3 ^= m;
                HSIPROUND;
                v0 ^= m;
        }
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
        if (left)
                b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
                                                  bytemask_from_count(left)));
#else
        switch (left) {
        case 7: b |= ((u64)end[6]) << 48; fallthrough;
        case 6: b |= ((u64)end[5]) << 40; fallthrough;
        case 5: b |= ((u64)end[4]) << 32; fallthrough;
        case 4: b |= le32_to_cpup(data); break;
        case 3: b |= ((u64)end[2]) << 16; fallthrough;
        case 2: b |= le16_to_cpup(data); break;
        case 1: b |= end[0];
        }
#endif
        HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *data, size_t len,
                         const hsiphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        u64 m;
        HPREAMBLE(len)
        for (; data != end; data += sizeof(u64)) {
                m = get_unaligned_le64(data);
                v3 ^= m;
                HSIPROUND;
                v0 ^= m;
        }
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
        if (left)
                b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
                                                  bytemask_from_count(left)));
#else
        switch (left) {
        case 7: b |= ((u64)end[6]) << 48; fallthrough;
        case 6: b |= ((u64)end[5]) << 40; fallthrough;
        case 5: b |= ((u64)end[4]) << 32; fallthrough;
        case 4: b |= get_unaligned_le32(end); break;
        case 3: b |= ((u64)end[2]) << 16; fallthrough;
        case 2: b |= get_unaligned_le16(end); break;
        case 1: b |= end[0];
        }
#endif
        HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
        HPREAMBLE(4)
        b |= first;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
        u64 combined = (u64)second << 32 | first;
        HPREAMBLE(8)
        v3 ^= combined;
        HSIPROUND;
        v0 ^= combined;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
                  const hsiphash_key_t *key)
{
        u64 combined = (u64)second << 32 | first;
        HPREAMBLE(12)
        v3 ^= combined;
        HSIPROUND;
        v0 ^= combined;
        b |= third;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
                  const u32 forth, const hsiphash_key_t *key)
{
        u64 combined = (u64)second << 32 | first;
        HPREAMBLE(16)
        v3 ^= combined;
        HSIPROUND;
        v0 ^= combined;
        combined = (u64)forth << 32 | third;
        v3 ^= combined;
        HSIPROUND;
        v0 ^= combined;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

#define HPREAMBLE(len) \
        u32 v0 = HSIPHASH_CONST_0; \
        u32 v1 = HSIPHASH_CONST_1; \
        u32 v2 = HSIPHASH_CONST_2; \
        u32 v3 = HSIPHASH_CONST_3; \
        u32 b = ((u32)(len)) << 24; \
        v3 ^= key->key[1]; \
        v2 ^= key->key[0]; \
        v1 ^= key->key[1]; \
        v0 ^= key->key[0];

#define HPOSTAMBLE \
        v3 ^= b; \
        HSIPROUND; \
        v0 ^= b; \
        v2 ^= 0xff; \
        HSIPROUND; \
        HSIPROUND; \
        HSIPROUND; \
        return v1 ^ v3;

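/*
 * On 32-bit this really is HalfSipHash1-3: 32-bit lanes, the HSIPHASH
 * constants, the 32-bit permutation from <linux/siphash.h>, and the
 * HalfSipHash finalization that returns v1 ^ v3 rather than folding all
 * four lanes.
 */
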
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u32));
        const u8 left = len & (sizeof(u32) - 1);
        u32 m;
        HPREAMBLE(len)
        for (; data != end; data += sizeof(u32)) {
                m = le32_to_cpup(data);
                v3 ^= m;
                HSIPROUND;
                v0 ^= m;
        }
        switch (left) {
        case 3: b |= ((u32)end[2]) << 16; fallthrough;
        case 2: b |= le16_to_cpup(data); break;
        case 1: b |= end[0];
        }
        HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *data, size_t len,
                         const hsiphash_key_t *key)
{
        const u8 *end = data + len - (len % sizeof(u32));
        const u8 left = len & (sizeof(u32) - 1);
        u32 m;
        HPREAMBLE(len)
        for (; data != end; data += sizeof(u32)) {
                m = get_unaligned_le32(data);
                v3 ^= m;
                HSIPROUND;
                v0 ^= m;
        }
        switch (left) {
        case 3: b |= ((u32)end[2]) << 16; fallthrough;
        case 2: b |= get_unaligned_le16(end); break;
        case 1: b |= end[0];
        }
        HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);

/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
        HPREAMBLE(4)
        v3 ^= first;
        HSIPROUND;
        v0 ^= first;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);

/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
        HPREAMBLE(8)
        v3 ^= first;
        HSIPROUND;
        v0 ^= first;
        v3 ^= second;
        HSIPROUND;
        v0 ^= second;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);

/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
                  const hsiphash_key_t *key)
{
        HPREAMBLE(12)
        v3 ^= first;
        HSIPROUND;
        v0 ^= first;
        v3 ^= second;
        HSIPROUND;
        v0 ^= second;
        v3 ^= third;
        HSIPROUND;
        v0 ^= third;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);

/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
                  const u32 forth, const hsiphash_key_t *key)
{
        HPREAMBLE(16)
        v3 ^= first;
        HSIPROUND;
        v0 ^= first;
        v3 ^= second;
        HSIPROUND;
        v0 ^= second;
        v3 ^= third;
        HSIPROUND;
        v0 ^= third;
        v3 ^= forth;
        HSIPROUND;
        v0 ^= forth;
        HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif