/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This file is provided under a dual BSD/GPLv2 license.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a high speed use
 * case, with HalfSipHash1-3 (hsiphash) as a weaker but faster variant for
 * hash tables.
 */
#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif

/* One application of the SipHash permutation to the internal state. */
#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)

/* Initialize the state words from the 128-bit key and fold the input
 * length into the top byte of the final block, as SipHash specifies.
 */
#define PREAMBLE(len) \
	u64 v0 = SIPHASH_CONST_0; \
	u64 v1 = SIPHASH_CONST_1; \
	u64 v2 = SIPHASH_CONST_2; \
	u64 v3 = SIPHASH_CONST_3; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/* Absorb the final block, then run the four finalization rounds of
 * SipHash2-4 and collapse the state to a 64-bit value.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	/* Compress the input one 64-bit word at a time, with the two
	 * c-rounds of SipHash2-4 per word.
	 */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Read the 1-7 tail bytes with a single word load, masking off
	 * the bytes past the end; load_unaligned_zeropad() tolerates
	 * faults on the overread.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif

u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
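/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */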
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);
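/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */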
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);
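/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */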
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);
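/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @fourth: fourth u64
 * @key: the siphash key
 */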
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 fourth, const siphash_key_t *key)
{
	PREAMBLE(32)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= fourth;
	SIPROUND;
	SIPROUND;
	v0 ^= fourth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);

u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);

u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);

#if BITS_PER_LONG == 64
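/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement
 * HalfSipHash1-3 proper.
 */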
#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
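/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */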
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
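/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */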
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
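/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */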
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
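/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @fourth: fourth u32
 * @key: the hsiphash key
 */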
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 fourth, const hsiphash_key_t *key)
{
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)fourth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

#define HPREAMBLE(len) \
	u32 v0 = HSIPHASH_CONST_0; \
	u32 v1 = HSIPHASH_CONST_1; \
	u32 v2 = HSIPHASH_CONST_2; \
	u32 v3 = HSIPHASH_CONST_3; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif

u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
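/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */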
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
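/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */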
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
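/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */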
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
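/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @fourth: fourth u32
 * @key: the hsiphash key
 */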
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 fourth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= fourth;
	HSIPROUND;
	v0 ^= fourth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif