This is a live mirror of the Perl 5 development currently hosted at https://github.com/perl/perl5
Restore "Add new hashing and "hash with state" infrastructure"
[perl5.git] / stadtx_hash.h
CommitLineData
9d5e3f1a
YO
1#ifndef STADTX_HASH_H
2#define STADTX_HASH_H
3
4#ifndef DEBUG_STADTX_HASH
5#define DEBUG_STADTX_HASH 0
6#endif
7
8
#ifndef ROTL64
/* 64-bit rotate-left / rotate-right helpers.
 *
 * PRECONDITION: the rotate count r must satisfy 0 < r < s.  A count of
 * 0 (or s) would shift by the full word width, which is undefined
 * behavior in C.  Every use in this file passes counts in 1..63.
 *
 * FIX(review): the helper macros were previously named _ROTL_SIZED /
 * _ROTR_SIZED; identifiers beginning with an underscore followed by an
 * uppercase letter are reserved for the implementation (C11 7.1.3), so
 * they are renamed with the STADTX_ prefix.  The public ROTL64/ROTR64
 * interface is unchanged. */
#define STADTX_ROTL_SIZED(x,r,s) ( ((x) << (r)) | ((x) >> ((s) - (r))) )
#define STADTX_ROTR_SIZED(x,r,s) ( ((x) << ((s) - (r))) | ((x) >> (r)) )
#define ROTL64(x,r) STADTX_ROTL_SIZED(x,r,64)
#define ROTR64(x,r) STADTX_ROTR_SIZED(x,r,64)
#endif
15
#ifndef PERL_SEEN_HV_FUNC_H

/* Standalone (non-Perl) build: supply the fixed-width integer type
 * names that Perl's hv_func.h would otherwise have defined.  Each is
 * guarded so an embedding project may pre-define its own. */
#if !defined(U64)
    #include <stdint.h>
    #define U64 uint64_t
#endif

#if !defined(U32)
    #define U32 uint32_t
#endif

#if !defined(U8)
    #define U8 unsigned char
#endif

#if !defined(U16)
    #define U16 uint16_t
#endif

/* NOTE(review): 'int' is signed and caps key lengths at INT_MAX; Perl
 * itself supplies an unsigned size type for STRLEN.  Confirm this is
 * adequate for standalone users hashing very large keys. */
#ifndef STRLEN
#define STRLEN int
#endif
#endif
39
/* STADTX_STATIC_INLINE: use Perl's PERL_STATIC_INLINE when building
 * inside the perl core, otherwise fall back to C99 'static inline'. */
#ifndef STADTX_STATIC_INLINE
#ifdef PERL_STATIC_INLINE
#define STADTX_STATIC_INLINE PERL_STATIC_INLINE
#else
#define STADTX_STATIC_INLINE static inline
#endif
#endif

/* Wrap multi-statement macros so they behave like a single statement
 * (the standard do { ... } while(0) idiom, spelled Perl-style). */
#ifndef STMT_START
#define STMT_START do
#define STMT_END while(0)
#endif
52
/* Key-loading macros.  Both variants read 2/4/8 bytes at 'ptr' as a
 * little-endian integer, so the computed hash values are identical on
 * little-endian targets either way; only the load strategy differs.
 *
 * Define STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN=1 on targets known to
 * be little-endian AND to tolerate unaligned loads, to read the key via
 * direct pointer casts instead of byte-by-byte assembly.
 *
 * FIX(review): the guard below previously defined a differently named
 * macro (STADTX_UNALIGNED_AND_LITTLE_ENDIAN) which the #if never
 * tested, so the fast-path knob was dead and the byte-wise branch was
 * always taken.  The names now agree; the default of 0 preserves the
 * behavior the old code actually had. */
#ifndef STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
#define STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 0
#endif

#if STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
    /* Direct loads: strictly speaking these casts violate C's alignment
     * and aliasing rules, so enable only where known-safe. */
    #ifndef U8TO64_LE
    #define U8TO64_LE(ptr)  (*((const U64 *)(ptr)))
    #endif
    #ifndef U8TO32_LE
    #define U8TO32_LE(ptr)  (*((const U32 *)(ptr)))
    #endif
    #ifndef U8TO16_LE
    #define U8TO16_LE(ptr)  (*((const U16 *)(ptr)))
    #endif
#else
    /* Portable little-endian loads built from individual bytes. */
    #ifndef U8TO64_LE
    #define U8TO64_LE(ptr)  (\
        (U64)(ptr)[7] << 56 | \
        (U64)(ptr)[6] << 48 | \
        (U64)(ptr)[5] << 40 | \
        (U64)(ptr)[4] << 32 | \
        (U64)(ptr)[3] << 24 | \
        (U64)(ptr)[2] << 16 | \
        (U64)(ptr)[1] << 8  | \
        (U64)(ptr)[0] \
    )
    #endif
    #ifndef U8TO32_LE
    #define U8TO32_LE(ptr)  (\
        (U32)(ptr)[3] << 24 | \
        (U32)(ptr)[2] << 16 | \
        (U32)(ptr)[1] << 8  | \
        (U32)(ptr)[0] \
    )
    #endif
    #ifndef U8TO16_LE
    #define U8TO16_LE(ptr)  (\
        (U16)(ptr)[1] << 8 | \
        (U16)(ptr)[0] \
    )
    #endif
#endif
95
/* do a marsaglia xor-shift permutation followed by a
 * multiply by a prime (presumably large) and another
 * marsaglia xor-shift permutation.
 * One of these thoroughly changes the bits of the input.
 * Two of these with different primes passes the Strict Avalanche Criteria
 * in all the tests I did.
 *
 * Note that v cannot end up zero after a scramble64 unless it
 * was zero in the first place.
 *
 * NOTE: v must be a modifiable U64 lvalue and is evaluated many times,
 * so do not pass an expression with side effects.
 */
#define STADTX_SCRAMBLE64(v,prime) STMT_START {  \
    v ^= (v >> 13);                        \
    v ^= (v << 35);                        \
    v ^= (v >> 30);                        \
    v *= prime;                            \
    v ^= (v >> 19);                        \
    v ^= (v << 15);                        \
    v ^= (v >> 46);                        \
} STMT_END
115
116
/* Expand a 16-byte seed into the 32 bytes (4 x U64) of hash state that
 * stadtx_hash_with_state() consumes.
 *
 * seed_ch  - points at 16 bytes of seed, read as two U64 words
 * state_ch - points at 32 bytes of output, written as four U64 words
 *
 * NOTE(review): both pointers are cast straight to U64* below, which
 * assumes they are suitably aligned for U64 access — confirm at the
 * call sites (stadtx_hash() passes a genuine U64[4], which is fine).
 */
STADTX_STATIC_INLINE void stadtx_seed_state (
    const U8 *seed_ch,
    U8 *state_ch
) {
    U64 *seed= (U64 *)seed_ch;
    U64 *state= (U64 *)state_ch;
    /* first we apply two masks to each word of the seed, this means that
     * a) at least one of state[0] and state[2] is nonzero,
     * b) at least one of state[1] and state[3] is nonzero
     * c) that state[0] and state[2] are different
     * d) that state[1] and state[3] are different
     * e) that the replacement value for any zero's is a totally different from the seed value.
     *    (iow, if seed[0] is 0x43f6a8885a308d31UL then state[0] becomes 0, which is the replaced
     *    with 1, which is totally different.). */
    /* hex expansion of pi, skipping first two digits. pi= 3.2[43f6...]*/
    /* pi value in hex from here:
     * http://turner.faculty.swau.edu/mathematics/materialslibrary/pi/pibases.html*/
    state[0]= seed[0] ^ 0x43f6a8885a308d31UL;
    state[1]= seed[1] ^ 0x3198a2e03707344aUL;
    state[2]= seed[0] ^ 0x4093822299f31d00UL;
    state[3]= seed[1] ^ 0x82efa98ec4e6c894UL;
    /* distinct replacement values keep the four words pairwise
     * different even if more than one masked to zero */
    if (!state[0]) state[0]=1;
    if (!state[1]) state[1]=2;
    if (!state[2]) state[2]=4;
    if (!state[3]) state[3]=8;
    /* and now for good measure we double scramble all four -
     * a double scramble guarantees a complete avalanche of all the
     * bits in the seed - IOW, by the time we are hashing the
     * four state vectors should be completely different and utterly
     * uncognizable from the input seed bits */
    STADTX_SCRAMBLE64(state[0],0x801178846e899d17UL);
    STADTX_SCRAMBLE64(state[0],0xdd51e5d1c9a5a151UL);
    STADTX_SCRAMBLE64(state[1],0x93a7d6c8c62e4835UL);
    STADTX_SCRAMBLE64(state[1],0x803340f36895c2b5UL);
    STADTX_SCRAMBLE64(state[2],0xbea9344eb7565eebUL);
    STADTX_SCRAMBLE64(state[2],0xcd95d1e509b995cdUL);
    STADTX_SCRAMBLE64(state[3],0x9999791977e30c13UL);
    STADTX_SCRAMBLE64(state[3],0xaab8b6b05abfc6cdUL);
}
156
/* Round constants for the hash proper.  The U64 constants are the odd
 * multipliers used in the short-key path and in length/state mixing;
 * the U32 constants are the per-lane multipliers of the 32-byte bulk
 * loop.  Changing any of them changes every hash value produced. */
#define STADTX_K0_U64 0xb89b0f8e1655514fUL
#define STADTX_K1_U64 0x8c6f736011bd5127UL
#define STADTX_K2_U64 0x8f29bd94edce7b39UL
#define STADTX_K3_U64 0x9c1b8e1e9628323fUL

#define STADTX_K2_U32 0x802910e3
#define STADTX_K3_U32 0x819b13af
#define STADTX_K4_U32 0x91cb27e5
#define STADTX_K5_U32 0xc1a269c1
166
/* Hash key_len bytes at 'key' using the 32-byte state at 'state_ch'
 * (as produced by stadtx_seed_state()); returns the 64-bit hash.
 *
 * Keys shorter than 32 bytes take a two-lane (v0/v1) path; longer keys
 * mix 32 bytes per iteration across four lanes (v0..v3) and then fold
 * the tail.  The key length itself is mixed into the initial lane
 * values, so equal prefixes of different lengths hash differently.
 *
 * NOTE(review): state_ch is cast straight to U64* — assumes suitable
 * alignment for U64 access; confirm at call sites.
 */
STADTX_STATIC_INLINE U64 stadtx_hash_with_state(
    const U8 *state_ch,
    const U8 *key,
    const STRLEN key_len
) {
    U64 *state= (U64 *)state_ch;
    U64 len = key_len;
    U64 v0= state[0] ^ ((key_len+1) * STADTX_K0_U64);
    U64 v1= state[1] ^ ((key_len+2) * STADTX_K1_U64);
    if (len < 32) {
        /* short key: consume whole 8-byte words (at most three) */
        switch(len >> 3) {
            case 3:
                v0 += U8TO64_LE(key) * STADTX_K3_U64;
                v0= ROTR64(v0, 17) ^ v1;
                v1= ROTR64(v1, 53) + v0;
                key += 8;
                /* FALLTHROUGH */
            case 2:
                v0 += U8TO64_LE(key) * STADTX_K3_U64;
                v0= ROTR64(v0, 17) ^ v1;
                v1= ROTR64(v1, 53) + v0;
                key += 8;
                /* FALLTHROUGH */
            case 1:
                v0 += U8TO64_LE(key) * STADTX_K3_U64;
                v0= ROTR64(v0, 17) ^ v1;
                v1= ROTR64(v1, 53) + v0;
                key += 8;
                /* FALLTHROUGH */
            case 0:
            default: break;
        }
        /* then the 0..7 remaining bytes; fallthrough is intentional,
         * each case adds one byte (or a word) at a distinct shift */
        switch ( len & 0x7 ) {
            case 7: v0 += (U64)key[6] << 32;   /* FALLTHROUGH */
            case 6: v1 += (U64)key[5] << 48;   /* FALLTHROUGH */
            case 5: v0 += (U64)key[4] << 16;   /* FALLTHROUGH */
            case 4: v1 += (U64)U8TO32_LE(key);
                    break;
            case 3: v0 += (U64)key[2] << 48;   /* FALLTHROUGH */
            case 2: v1 += (U64)U8TO16_LE(key);
                    break;
            case 1: v0 += (U64)key[0];         /* FALLTHROUGH */
            case 0: v1 = ROTL64(v1, 32) ^ 0xFF;
                    break;
        }
        /* final avalanche for the two-lane path */
        v1 ^= v0;
        v0 = ROTR64(v0,33) + v1;
        v1 = ROTL64(v1,17) ^ v0;
        v0 = ROTL64(v0,43) + v1;
        v1 = ROTL64(v1,31) - v0;
        v0 = ROTL64(v0,13) ^ v1;
        v1 -= v0;
        v0 = ROTL64(v0,41) + v1;
        v1 = ROTL64(v1,37) ^ v0;
        v0 = ROTR64(v0,39) + v1;
        v1 = ROTR64(v1,15) + v0;
        v0 = ROTL64(v0,15) ^ v1;
        v1 = ROTR64(v1, 5);
        return v0 ^ v1;
    } else {
        /* long key: bring in two more lanes, also salted by length */
        U64 v2= state[2] ^ ((key_len+3) * STADTX_K2_U64);
        U64 v3= state[3] ^ ((key_len+4) * STADTX_K3_U64);

        /* bulk loop: 32 bytes per iteration, one word per lane */
        do {
            v0 += (U64)U8TO64_LE(key+ 0) * STADTX_K2_U32; v0= ROTL64(v0,57) ^ v3;
            v1 += (U64)U8TO64_LE(key+ 8) * STADTX_K3_U32; v1= ROTL64(v1,63) ^ v2;
            v2 += (U64)U8TO64_LE(key+16) * STADTX_K4_U32; v2= ROTR64(v2,47) + v0;
            v3 += (U64)U8TO64_LE(key+24) * STADTX_K5_U32; v3= ROTR64(v3,11) - v1;
            key += 32;
            len -= 32;
        } while ( len >= 32 );

        /* 0..3 whole words of tail */
        switch ( len >> 3 ) {
            case 3: v0 += ((U64)U8TO64_LE(key) * STADTX_K2_U32); key += 8; v0= ROTL64(v0,57) ^ v3;  /* FALLTHROUGH */
            case 2: v1 += ((U64)U8TO64_LE(key) * STADTX_K3_U32); key += 8; v1= ROTL64(v1,63) ^ v2;  /* FALLTHROUGH */
            case 1: v2 += ((U64)U8TO64_LE(key) * STADTX_K4_U32); key += 8; v2= ROTR64(v2,47) + v0;  /* FALLTHROUGH */
            case 0: v3 = ROTR64(v3,11) - v1;
        }
        v0 ^= (len+1) * STADTX_K3_U64;
        /* 0..7 trailing bytes; intentional fallthrough as above */
        switch ( len & 0x7 ) {
            case 7: v1 += (U64)key[6];          /* FALLTHROUGH */
            case 6: v2 += (U64)U8TO16_LE(key+4);
                    v3 += (U64)U8TO32_LE(key);
                    break;
            case 5: v1 += (U64)key[4];          /* FALLTHROUGH */
            case 4: v2 += (U64)U8TO32_LE(key);
                    break;
            case 3: v3 += (U64)key[2];          /* FALLTHROUGH */
            case 2: v1 += (U64)U8TO16_LE(key);
                    break;
            case 1: v2 += (U64)key[0];          /* FALLTHROUGH */
            case 0: v3 = ROTL64(v3, 32) ^ 0xFF;
                    break;
        }

        /* final avalanche across all four lanes */
        v1 -= v2;
        v0 = ROTR64(v0,19);
        v1 -= v0;
        v1 = ROTR64(v1,53);
        v3 ^= v1;
        v0 -= v3;
        v3 = ROTL64(v3,43);
        v0 += v3;
        v0 = ROTR64(v0, 3);
        v3 -= v0;
        v2 = ROTR64(v2,43) - v3;
        v2 = ROTL64(v2,55) ^ v0;
        v1 -= v2;
        v3 = ROTR64(v3, 7) - v2;
        v2 = ROTR64(v2,31);
        v3 += v2;
        v2 -= v1;
        v3 = ROTR64(v3,39);
        v2 ^= v3;
        v3 = ROTR64(v3,17) ^ v2;
        v1 += v3;
        v1 = ROTR64(v1, 9);
        v2 ^= v1;
        v2 = ROTL64(v2,24);
        v3 ^= v2;
        v3 = ROTR64(v3,59);
        v0 = ROTR64(v0, 1) - v1;

        return v0 ^ v1 ^ v2 ^ v3;
    }
}
290
291STADTX_STATIC_INLINE U64 stadtx_hash(
292 const U8 *seed_ch,
293 const U8 *key,
294 const STRLEN key_len
295) {
296 U64 state[4];
297 stadtx_seed_state(seed_ch,(U8*)state);
298 return stadtx_hash_with_state((U8*)state,key,key_len);
299}
300
301#endif