xxhash.c
Go to the documentation of this file.
1 /*
2 xxHash - Fast Hash algorithm
3 Copyright (C) 2012-2014, Yann Collet.
4 BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
5 
6 Redistribution and use in source and binary forms, with or without
7 modification, are permitted provided that the following conditions are
8 met:
9 
10 * Redistributions of source code must retain the above copyright
11 notice, this list of conditions and the following disclaimer.
12 * Redistributions in binary form must reproduce the above
13 copyright notice, this list of conditions and the following disclaimer
14 in the documentation and/or other materials provided with the
15 distribution.
16 
17 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 
29 You can contact the author at :
30 - xxHash source repository : http://code.google.com/p/xxhash/
31 */
32 
33 
34 //**************************************
35 // Tuning parameters
36 //**************************************
37 // Unaligned memory access is automatically enabled for "common" CPU, such as x86.
38 // For others CPU, the compiler will be more cautious, and insert extra code to ensure aligned access is respected.
39 // If you know your target CPU supports unaligned memory access, you want to force this option manually to improve performance.
40 // You can also enable this parameter if you know your input data will always be aligned (boundaries of 4, for U32).
41 #if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
42 # define XXH_USE_UNALIGNED_ACCESS 1
43 #endif
44 
45 // XXH_ACCEPT_NULL_INPUT_POINTER :
46 // If the input pointer is a null pointer, xxHash default behavior is to trigger a memory access error, since it is a bad pointer.
47 // When this option is enabled, xxHash output for null input pointers will be the same as a null-length input.
48 // This option has a very small performance cost (only measurable on small inputs).
49 // By default, this option is disabled. To enable it, uncomment below define :
50 //#define XXH_ACCEPT_NULL_INPUT_POINTER 1
51 
52 // XXH_FORCE_NATIVE_FORMAT :
53 // By default, xxHash library provides endian-independent Hash values, based on little-endian convention.
54 // Results are therefore identical for little-endian and big-endian CPU.
55 // This comes at a performance cost for big-endian CPU, since some swapping is required to emulate little-endian format.
56 // Should endian-independence be of no importance for your application, you may set the #define below to 1.
57 // It will improve speed for Big-endian CPU.
58 // This option has no impact on Little_Endian CPU.
59 #define XXH_FORCE_NATIVE_FORMAT 0
60 
61 
62 //**************************************
63 // Compiler Specific Options
64 //**************************************
65 // Disable some Visual warning messages
66 #ifdef _MSC_VER // Visual Studio
67 # pragma warning(disable : 4127) // disable: C4127: conditional expression is constant
68 #endif
69 
70 #ifdef _MSC_VER // Visual Studio
71 # define FORCE_INLINE static __forceinline
72 #else
73 # ifdef __GNUC__
74 # define FORCE_INLINE static inline __attribute__((always_inline))
75 # else
76 # define FORCE_INLINE static inline
77 # endif
78 #endif
79 
80 
81 //**************************************
82 // Includes & Memory related functions
83 //**************************************
84 #include "xxhash.h"
85 // Modify the local functions below should you wish to use some other memory related routines
86 // for malloc(), free()
87 #include <stdlib.h>
88 FORCE_INLINE void* XXH_malloc(size_t s) { return malloc(s); }
89 FORCE_INLINE void XXH_free (void* p) { free(p); }
90 // for memcpy()
91 #include <string.h>
92 FORCE_INLINE void* XXH_memcpy(void* dest, const void* src, size_t size) { return memcpy(dest,src,size); }
93 
94 
95 //**************************************
96 // Basic Types
97 //**************************************
98 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L // C99
99 # include <stdint.h>
100  typedef uint8_t BYTE;
101  typedef uint16_t U16;
102  typedef uint32_t U32;
103  typedef int32_t S32;
104  typedef uint64_t U64;
105 #else
106  typedef unsigned char BYTE;
107  typedef unsigned short U16;
108  typedef unsigned int U32;
109  typedef signed int S32;
110  typedef unsigned long long U64;
111 #endif
112 
113 #if defined(__GNUC__) && !defined(XXH_USE_UNALIGNED_ACCESS)
114 # define _PACKED __attribute__ ((packed))
115 #else
116 # define _PACKED
117 #endif
118 
119 #if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
120 # ifdef __IBMC__
121 # pragma pack(1)
122 # else
123 # pragma pack(push, 1)
124 # endif
125 #endif
126 
127 typedef struct _U32_S { U32 v; } _PACKED U32_S;
128 
129 #if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
130 # pragma pack(pop)
131 #endif
132 
133 #define A32(x) (((U32_S *)(x))->v)
134 
135 
136 //***************************************
137 // Compiler-specific Functions and Macros
138 //***************************************
139 #define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
140 
// Note : although _rotl exists for minGW (GCC under windows), performance seems poor
#if defined(_MSC_VER)
#  define XXH_rotl32(x,r) _rotl(x,r)
#else
// Arguments fully parenthesized so compound expressions expand safely.
#  define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
#endif
147 
148 #if defined(_MSC_VER) // Visual Studio
149 # define XXH_swap32 _byteswap_ulong
150 #elif GCC_VERSION >= 403
151 # define XXH_swap32 __builtin_bswap32
152 #else
153 static inline U32 XXH_swap32 (U32 x) {
154  return ((x << 24) & 0xff000000 ) |
155  ((x << 8) & 0x00ff0000 ) |
156  ((x >> 8) & 0x0000ff00 ) |
157  ((x >> 24) & 0x000000ff );}
158 #endif
159 
160 
161 //**************************************
162 // Constants
163 //**************************************
164 #define PRIME32_1 2654435761U
165 #define PRIME32_2 2246822519U
166 #define PRIME32_3 3266489917U
167 #define PRIME32_4 668265263U
168 #define PRIME32_5 374761393U
169 
170 
171 //**************************************
172 // Architecture Macros
173 //**************************************
// Endianness selector passed (as a compile-time constant) through the
// *_endian internal functions, so the compiler can fold the branches away.
typedef enum { XXH_bigEndian=0, XXH_littleEndian=1 } XXH_endianess;

#ifndef XXH_CPU_LITTLE_ENDIAN   // It is possible to define XXH_CPU_LITTLE_ENDIAN externally, for example using a compiler switch
// Runtime detection : reading the first byte of an int==1 yields 1 only on
// little-endian hardware.
static const int one = 1;
#   define XXH_CPU_LITTLE_ENDIAN   (*(char*)(&one))
#endif
179 
180 
181 //**************************************
182 // Macros
183 //**************************************
184 #define XXH_STATIC_ASSERT(c) { enum { XXH_static_assert = 1/(!!(c)) }; } // use only *after* variable declarations
185 
186 
187 //****************************
188 // Memory reads
189 //****************************
191 
193 {
194  if (align==XXH_unaligned)
195  return endian==XXH_littleEndian ? A32(ptr) : XXH_swap32(A32(ptr));
196  else
197  return endian==XXH_littleEndian ? *ptr : XXH_swap32(*ptr);
198 }
199 
200 FORCE_INLINE U32 XXH_readLE32(const U32* ptr, XXH_endianess endian) { return XXH_readLE32_align(ptr, endian, XXH_unaligned); }
201 
202 
203 //****************************
204 // Simple Hash Functions
205 //****************************
206 FORCE_INLINE U32 XXH32_endian_align(const void* input, int len, U32 seed, XXH_endianess endian, XXH_alignment align)
207 {
208  const BYTE* p = (const BYTE*)input;
209  const BYTE* const bEnd = p + len;
210  U32 h32;
211 
212 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
213  if (p==NULL) { len=0; p=(const BYTE*)(size_t)16; }
214 #endif
215 
216  if (len>=16)
217  {
218  const BYTE* const limit = bEnd - 16;
219  U32 v1 = seed + PRIME32_1 + PRIME32_2;
220  U32 v2 = seed + PRIME32_2;
221  U32 v3 = seed + 0;
222  U32 v4 = seed - PRIME32_1;
223 
224  do
225  {
226  v1 += XXH_readLE32_align((const U32*)p, endian, align) * PRIME32_2; v1 = XXH_rotl32(v1, 13); v1 *= PRIME32_1; p+=4;
227  v2 += XXH_readLE32_align((const U32*)p, endian, align) * PRIME32_2; v2 = XXH_rotl32(v2, 13); v2 *= PRIME32_1; p+=4;
228  v3 += XXH_readLE32_align((const U32*)p, endian, align) * PRIME32_2; v3 = XXH_rotl32(v3, 13); v3 *= PRIME32_1; p+=4;
229  v4 += XXH_readLE32_align((const U32*)p, endian, align) * PRIME32_2; v4 = XXH_rotl32(v4, 13); v4 *= PRIME32_1; p+=4;
230  } while (p<=limit);
231 
232  h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7) + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
233  }
234  else
235  {
236  h32 = seed + PRIME32_5;
237  }
238 
239  h32 += (U32) len;
240 
241  while (p<=bEnd-4)
242  {
243  h32 += XXH_readLE32_align((const U32*)p, endian, align) * PRIME32_3;
244  h32 = XXH_rotl32(h32, 17) * PRIME32_4 ;
245  p+=4;
246  }
247 
248  while (p<bEnd)
249  {
250  h32 += (*p) * PRIME32_5;
251  h32 = XXH_rotl32(h32, 11) * PRIME32_1 ;
252  p++;
253  }
254 
255  h32 ^= h32 >> 15;
256  h32 *= PRIME32_2;
257  h32 ^= h32 >> 13;
258  h32 *= PRIME32_3;
259  h32 ^= h32 >> 16;
260 
261  return h32;
262 }
263 
264 
265 U32 XXH32(const void* input, int len, U32 seed)
266 {
267 #if 0
268  // Simple version, good for code maintenance, but unfortunately slow for small inputs
269  void* state = XXH32_init(seed);
270  XXH32_update(state, input, len);
271  return XXH32_digest(state);
272 #else
274 
275 # if !defined(XXH_USE_UNALIGNED_ACCESS)
276  if ((((size_t)input) & 3)) // Input is aligned, let's leverage the speed advantage
277  {
278  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
279  return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
280  else
281  return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
282  }
283 # endif
284 
285  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
286  return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
287  else
288  return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
289 #endif
290 }
291 
292 
293 //****************************
294 // Advanced Hash Functions
295 //****************************
296 
298 {
305  int memsize;
306  char memory[16];
307 };
308 
309 
311 {
312  XXH_STATIC_ASSERT(XXH32_SIZEOFSTATE >= sizeof(struct XXH_state32_t)); // A compilation error here means XXH32_SIZEOFSTATE is not large enough
313  return sizeof(struct XXH_state32_t);
314 }
315 
316 
318 {
319  struct XXH_state32_t * state = (struct XXH_state32_t *) state_in;
320  state->seed = seed;
321  state->v1 = seed + PRIME32_1 + PRIME32_2;
322  state->v2 = seed + PRIME32_2;
323  state->v3 = seed + 0;
324  state->v4 = seed - PRIME32_1;
325  state->total_len = 0;
326  state->memsize = 0;
327  return XXH_OK;
328 }
329 
330 
332 {
333  void* state = XXH_malloc (sizeof(struct XXH_state32_t));
334  if (state != NULL)
335  {
336  XXH32_resetState(state, seed);
337  }
338  return state;
339 }
340 
341 
342 FORCE_INLINE XXH_errorcode XXH32_update_endian (void* state_in, const void* input, int len, XXH_endianess endian)
343 {
344  struct XXH_state32_t * state = (struct XXH_state32_t *) state_in;
345  const BYTE* p = (const BYTE*)input;
346  const BYTE* const bEnd = p + len;
347 
348 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
349  if (input==NULL) return XXH_ERROR;
350 #endif
351 
352  state->total_len += len;
353 
354  if (state->memsize + len < 16) // fill in tmp buffer
355  {
356  XXH_memcpy(state->memory + state->memsize, input, len);
357  state->memsize += len;
358  return XXH_OK;
359  }
360 
361  if (state->memsize) // some data left from previous update
362  {
363  XXH_memcpy(state->memory + state->memsize, input, 16-state->memsize);
364  {
365  const U32* p32 = (const U32*)state->memory;
366  state->v1 += XXH_readLE32(p32, endian) * PRIME32_2; state->v1 = XXH_rotl32(state->v1, 13); state->v1 *= PRIME32_1; p32++;
367  state->v2 += XXH_readLE32(p32, endian) * PRIME32_2; state->v2 = XXH_rotl32(state->v2, 13); state->v2 *= PRIME32_1; p32++;
368  state->v3 += XXH_readLE32(p32, endian) * PRIME32_2; state->v3 = XXH_rotl32(state->v3, 13); state->v3 *= PRIME32_1; p32++;
369  state->v4 += XXH_readLE32(p32, endian) * PRIME32_2; state->v4 = XXH_rotl32(state->v4, 13); state->v4 *= PRIME32_1; p32++;
370  }
371  p += 16-state->memsize;
372  state->memsize = 0;
373  }
374 
375  if (p <= bEnd-16)
376  {
377  const BYTE* const limit = bEnd - 16;
378  U32 v1 = state->v1;
379  U32 v2 = state->v2;
380  U32 v3 = state->v3;
381  U32 v4 = state->v4;
382 
383  do
384  {
385  v1 += XXH_readLE32((const U32*)p, endian) * PRIME32_2; v1 = XXH_rotl32(v1, 13); v1 *= PRIME32_1; p+=4;
386  v2 += XXH_readLE32((const U32*)p, endian) * PRIME32_2; v2 = XXH_rotl32(v2, 13); v2 *= PRIME32_1; p+=4;
387  v3 += XXH_readLE32((const U32*)p, endian) * PRIME32_2; v3 = XXH_rotl32(v3, 13); v3 *= PRIME32_1; p+=4;
388  v4 += XXH_readLE32((const U32*)p, endian) * PRIME32_2; v4 = XXH_rotl32(v4, 13); v4 *= PRIME32_1; p+=4;
389  } while (p<=limit);
390 
391  state->v1 = v1;
392  state->v2 = v2;
393  state->v3 = v3;
394  state->v4 = v4;
395  }
396 
397  if (p < bEnd)
398  {
399  XXH_memcpy(state->memory, p, bEnd-p);
400  state->memsize = (int)(bEnd-p);
401  }
402 
403  return XXH_OK;
404 }
405 
406 XXH_errorcode XXH32_update (void* state_in, const void* input, int len)
407 {
409 
410  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
411  return XXH32_update_endian(state_in, input, len, XXH_littleEndian);
412  else
413  return XXH32_update_endian(state_in, input, len, XXH_bigEndian);
414 }
415 
416 
417 
419 {
420  struct XXH_state32_t * state = (struct XXH_state32_t *) state_in;
421  const BYTE * p = (const BYTE*)state->memory;
422  BYTE* bEnd = (BYTE*)state->memory + state->memsize;
423  U32 h32;
424 
425  if (state->total_len >= 16)
426  {
427  h32 = XXH_rotl32(state->v1, 1) + XXH_rotl32(state->v2, 7) + XXH_rotl32(state->v3, 12) + XXH_rotl32(state->v4, 18);
428  }
429  else
430  {
431  h32 = state->seed + PRIME32_5;
432  }
433 
434  h32 += (U32) state->total_len;
435 
436  while (p<=bEnd-4)
437  {
438  h32 += XXH_readLE32((const U32*)p, endian) * PRIME32_3;
439  h32 = XXH_rotl32(h32, 17) * PRIME32_4;
440  p+=4;
441  }
442 
443  while (p<bEnd)
444  {
445  h32 += (*p) * PRIME32_5;
446  h32 = XXH_rotl32(h32, 11) * PRIME32_1;
447  p++;
448  }
449 
450  h32 ^= h32 >> 15;
451  h32 *= PRIME32_2;
452  h32 ^= h32 >> 13;
453  h32 *= PRIME32_3;
454  h32 ^= h32 >> 16;
455 
456  return h32;
457 }
458 
459 
461 {
463 
464  if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
466  else
468 }
469 
470 
471 U32 XXH32_digest (void* state_in)
472 {
473  U32 h32 = XXH32_intermediateDigest(state_in);
474 
475  XXH_free(state_in);
476 
477  return h32;
478 }
U32 v
Definition: xxhash.c:127
#define FORCE_INLINE
Definition: xxhash.c:76
U32 XXH32(const void *input, int len, U32 seed)
Definition: xxhash.c:265
FORCE_INLINE U32 XXH32_endian_align(const void *input, int len, U32 seed, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:206
U32 XXH32_digest(void *state_in)
Definition: xxhash.c:471
#define PRIME32_5
Definition: xxhash.c:168
#define XXH32_SIZEOFSTATE
Definition: xxhash.h:149
unsigned char BYTE
Definition: xxhash.c:106
FORCE_INLINE U32 XXH32_intermediateDigest_endian(void *state_in, XXH_endianess endian)
Definition: xxhash.c:418
int XXH32_sizeofState()
Definition: xxhash.c:310
void * XXH32_init(U32 seed)
Definition: xxhash.c:331
#define _PACKED
Definition: xxhash.c:116
XXH_errorcode XXH32_resetState(void *state_in, U32 seed)
Definition: xxhash.c:317
#define PRIME32_4
Definition: xxhash.c:167
#define XXH_CPU_LITTLE_ENDIAN
Definition: xxhash.c:177
static const int one
Definition: xxhash.c:176
XXH_alignment
Definition: xxhash.c:190
#define XXH_rotl32(x, r)
Definition: xxhash.c:145
#define PRIME32_2
Definition: xxhash.c:165
#define A32(x)
Definition: xxhash.c:133
FORCE_INLINE void * XXH_memcpy(void *dest, const void *src, size_t size)
Definition: xxhash.c:92
char memory[16]
Definition: xxhash.c:306
signed int S32
Definition: xxhash.c:109
Definition: xxhash.c:127
#define PRIME32_1
Definition: xxhash.c:164
int memsize
Definition: xxhash.c:305
struct _U32_S U32_S
#define PRIME32_3
Definition: xxhash.c:166
XXH_errorcode
Definition: xxhash.h:70
U32 XXH32_intermediateDigest(void *state_in)
Definition: xxhash.c:460
FORCE_INLINE XXH_errorcode XXH32_update_endian(void *state_in, const void *input, int len, XXH_endianess endian)
Definition: xxhash.c:342
Definition: xxhash.h:70
XXH_errorcode XXH32_update(void *state_in, const void *input, int len)
Definition: xxhash.c:406
#define XXH_STATIC_ASSERT(c)
Definition: xxhash.c:184
unsigned short U16
Definition: xxhash.c:107
unsigned long long U64
Definition: xxhash.c:110
FORCE_INLINE U32 XXH_readLE32_align(const U32 *ptr, XXH_endianess endian, XXH_alignment align)
Definition: xxhash.c:192
FORCE_INLINE void XXH_free(void *p)
Definition: xxhash.c:89
static U32 XXH_swap32(U32 x)
Definition: xxhash.c:153
U64 total_len
Definition: xxhash.c:299
#define XXH_FORCE_NATIVE_FORMAT
Definition: xxhash.c:59
XXH_endianess
Definition: xxhash.c:174
FORCE_INLINE void * XXH_malloc(size_t s)
Definition: xxhash.c:88
unsigned int U32
Definition: xxhash.c:108
FORCE_INLINE U32 XXH_readLE32(const U32 *ptr, XXH_endianess endian)
Definition: xxhash.c:200


roslz4
Author(s): Ben Charrow
autogenerated on Mon Feb 28 2022 23:33:18