0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012 #ifndef _LINUX_SIPHASH_H
0013 #define _LINUX_SIPHASH_H
0014
0015 #include <linux/types.h>
0016 #include <linux/kernel.h>
0017
/* Keys must be 64-bit aligned so the aligned fast path may load them whole. */
#define SIPHASH_ALIGNMENT __alignof__(u64)

/* A 128-bit SipHash key, stored as two 64-bit words. */
typedef struct {
	u64 key[2];
} siphash_key_t;

/* Declare a siphash key with 16-byte alignment. */
#define siphash_aligned_key_t siphash_key_t __aligned(16)
0024
0025 static inline bool siphash_key_is_zero(const siphash_key_t *key)
0026 {
0027 return !(key->key[0] | key->key[1]);
0028 }
0029
/* Out-of-line implementations for arbitrary-length buffers. */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key);
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key);

/* Fixed-size helpers: hash 1-4 u64 words, or 1/3 u32 words, directly. */
u64 siphash_1u64(const u64 a, const siphash_key_t *key);
u64 siphash_2u64(const u64 a, const u64 b, const siphash_key_t *key);
u64 siphash_3u64(const u64 a, const u64 b, const u64 c,
		 const siphash_key_t *key);
u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
		 const siphash_key_t *key);
u64 siphash_1u32(const u32 a, const siphash_key_t *key);
u64 siphash_3u32(const u32 a, const u32 b, const u32 c,
		 const siphash_key_t *key);
0042
0043 static inline u64 siphash_2u32(const u32 a, const u32 b,
0044 const siphash_key_t *key)
0045 {
0046 return siphash_1u64((u64)b << 32 | a, key);
0047 }
0048 static inline u64 siphash_4u32(const u32 a, const u32 b, const u32 c,
0049 const u32 d, const siphash_key_t *key)
0050 {
0051 return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
0052 }
0053
0054
0055 static inline u64 ___siphash_aligned(const __le64 *data, size_t len,
0056 const siphash_key_t *key)
0057 {
0058 if (__builtin_constant_p(len) && len == 4)
0059 return siphash_1u32(le32_to_cpup((const __le32 *)data), key);
0060 if (__builtin_constant_p(len) && len == 8)
0061 return siphash_1u64(le64_to_cpu(data[0]), key);
0062 if (__builtin_constant_p(len) && len == 16)
0063 return siphash_2u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
0064 key);
0065 if (__builtin_constant_p(len) && len == 24)
0066 return siphash_3u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
0067 le64_to_cpu(data[2]), key);
0068 if (__builtin_constant_p(len) && len == 32)
0069 return siphash_4u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
0070 le64_to_cpu(data[2]), le64_to_cpu(data[3]),
0071 key);
0072 return __siphash_aligned(data, len, key);
0073 }
0074
0075
0076
0077
0078
0079
0080
0081 static inline u64 siphash(const void *data, size_t len,
0082 const siphash_key_t *key)
0083 {
0084 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
0085 !IS_ALIGNED((unsigned long)data, SIPHASH_ALIGNMENT))
0086 return __siphash_unaligned(data, len, key);
0087 return ___siphash_aligned(data, len, key);
0088 }
0089
/* HalfSipHash keys need only native-word alignment. */
#define HSIPHASH_ALIGNMENT __alignof__(unsigned long)

/* A HalfSipHash key: two native words. */
typedef struct {
	unsigned long key[2];
} hsiphash_key_t;

/* Out-of-line implementations for arbitrary-length buffers. */
u32 __hsiphash_aligned(const void *data, size_t len,
		       const hsiphash_key_t *key);
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key);

/* Fixed-size helpers: hash 1-4 u32 words directly. */
u32 hsiphash_1u32(const u32 a, const hsiphash_key_t *key);
u32 hsiphash_2u32(const u32 a, const u32 b, const hsiphash_key_t *key);
u32 hsiphash_3u32(const u32 a, const u32 b, const u32 c,
		  const hsiphash_key_t *key);
u32 hsiphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
		  const hsiphash_key_t *key);
0106
0107 static inline u32 ___hsiphash_aligned(const __le32 *data, size_t len,
0108 const hsiphash_key_t *key)
0109 {
0110 if (__builtin_constant_p(len) && len == 4)
0111 return hsiphash_1u32(le32_to_cpu(data[0]), key);
0112 if (__builtin_constant_p(len) && len == 8)
0113 return hsiphash_2u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
0114 key);
0115 if (__builtin_constant_p(len) && len == 12)
0116 return hsiphash_3u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
0117 le32_to_cpu(data[2]), key);
0118 if (__builtin_constant_p(len) && len == 16)
0119 return hsiphash_4u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
0120 le32_to_cpu(data[2]), le32_to_cpu(data[3]),
0121 key);
0122 return __hsiphash_aligned(data, len, key);
0123 }
0124
0125
0126
0127
0128
0129
0130
0131 static inline u32 hsiphash(const void *data, size_t len,
0132 const hsiphash_key_t *key)
0133 {
0134 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
0135 !IS_ALIGNED((unsigned long)data, HSIPHASH_ALIGNMENT))
0136 return __hsiphash_unaligned(data, len, key);
0137 return ___hsiphash_aligned(data, len, key);
0138 }
0139
0140
0141
0142
0143
0144
0145
/* One SipHash round over the four 64-bit state words a-d. */
#define SIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol64((b), 13), (b) ^= (a), (a) = rol64((a), 32), \
	(c) += (d), (d) = rol64((d), 16), (d) ^= (c), \
	(a) += (d), (d) = rol64((d), 21), (d) ^= (a), \
	(c) += (b), (b) = rol64((b), 17), (b) ^= (c), (c) = rol64((c), 32))

/* SipHash state-initialization constants (ASCII of the reference phrase). */
#define SIPHASH_CONST_0 0x736f6d6570736575ULL
#define SIPHASH_CONST_1 0x646f72616e646f6dULL
#define SIPHASH_CONST_2 0x6c7967656e657261ULL
#define SIPHASH_CONST_3 0x7465646279746573ULL

/* One HalfSipHash round over the four 32-bit state words a-d. */
#define HSIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol32((b), 5), (b) ^= (a), (a) = rol32((a), 16), \
	(c) += (d), (d) = rol32((d), 8), (d) ^= (c), \
	(a) += (d), (d) = rol32((d), 7), (d) ^= (a), \
	(c) += (b), (b) = rol32((b), 13), (b) ^= (c), (c) = rol32((c), 16))

/* HalfSipHash state-initialization constants. */
#define HSIPHASH_CONST_0 0U
#define HSIPHASH_CONST_1 0U
#define HSIPHASH_CONST_2 0x6c796765U
#define HSIPHASH_CONST_3 0x74656462U
0167
0168 #endif