52
#ifndef XXH_FORCE_MEMORY_ACCESS
53
# if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) \
54
|| defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) \
55
|| defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
56
# define XXH_FORCE_MEMORY_ACCESS 2
57
# elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || \
58
(defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) \
59
|| defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) \
60
|| defined(__ARM_ARCH_7S__) ))
61
# define XXH_FORCE_MEMORY_ACCESS 1
70
#ifndef XXH_ACCEPT_NULL_INPUT_POINTER
71
# define XXH_ACCEPT_NULL_INPUT_POINTER 0
82
#ifndef XXH_FORCE_NATIVE_FORMAT
83
# define XXH_FORCE_NATIVE_FORMAT 0
93
#ifndef XXH_FORCE_ALIGN_CHECK
94
# if defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
95
# define XXH_FORCE_ALIGN_CHECK 0
97
# define XXH_FORCE_ALIGN_CHECK 1
108
/* Allocation wrapper: single customization point for xxHash heap usage. */
static void* XXH_malloc(size_t s)
{
    return malloc(s);
}
109
/* Deallocation wrapper paired with XXH_malloc. */
static void XXH_free(void* p)
{
    free(p);
}
112
/* Thin memcpy wrapper: keeps all of xxHash's byte copies behind one name. */
static void* XXH_memcpy(void* dest, const void* src, size_t size)
{
    return memcpy(dest, src, size);
}
116
#define XXH_STATIC_LINKING_ONLY
124
# pragma warning(disable : 4127)
125
# define FORCE_INLINE static __forceinline
127
# if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
129
# define FORCE_INLINE static inline __attribute__((always_inline))
131
# define FORCE_INLINE static inline
134
# define FORCE_INLINE static
143
# if !defined (__VMS) \
144
&& (defined (__cplusplus) \
145
|| (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
147
typedef uint8_t BYTE;
148
typedef uint16_t U16;
149
typedef uint32_t U32;
151
typedef unsigned char BYTE;
152
typedef unsigned short U16;
153
typedef unsigned int U32;
157
#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
160
/* Direct cast-and-dereference read.  Fast, but only valid on targets that
 * tolerate unaligned loads, and the cast technically breaks strict aliasing;
 * that is why this path is gated behind XXH_FORCE_MEMORY_ACCESS==2 above. */
static U32 XXH_read32(const void* memPtr) { return *(const U32*) memPtr; }
162
#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
166
/* __attribute__((packed)) tells GCC-family compilers the union may sit at
 * any address, so the member access below compiles to a safe unaligned load
 * (XXH_FORCE_MEMORY_ACCESS==1 path). */
typedef union { U32 u32; } __attribute__((packed)) unalign;
167
static U32 XXH_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
174
static U32 XXH_read32(const void* memPtr)
177
memcpy(&val, memPtr, sizeof(val));
187
/* Combined GCC version, e.g. 407 for gcc 4.7.x.  Only meaningful when
 * __GNUC__ is defined; in #if context it evaluates to 0 on other compilers. */
#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
191
# define XXH_rotl32(x,r) _rotl(x,r)
192
# define XXH_rotl64(x,r) _rotl64(x,r)
194
# define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
195
# define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
199
# define XXH_swap32 _byteswap_ulong
200
#elif GCC_VERSION >= 403
201
# define XXH_swap32 __builtin_bswap32
203
static U32 XXH_swap32 (U32 x)
205
return ((x << 24) & 0xff000000 ) |
206
((x << 8) & 0x00ff0000 ) |
207
((x >> 8) & 0x0000ff00 ) |
208
((x >> 24) & 0x000000ff );
216
/* Endianness tag threaded through the read helpers so the byte-swap
 * decision can be resolved at compile time inside the FORCE_INLINE paths. */
typedef enum { XXH_bigEndian=0, XXH_littleEndian=1 } XXH_endianess;
219
#ifndef XXH_CPU_LITTLE_ENDIAN
220
static int XXH_isLittleEndian(void)
222
const union { U32 u; BYTE c[4]; } one = { 1 };
225
# define XXH_CPU_LITTLE_ENDIAN XXH_isLittleEndian()
232
/* Alignment hint: XXH_aligned lets the read helpers dereference U32/U64
 * pointers directly instead of going through XXH_read32/XXH_read64. */
typedef enum { XXH_aligned, XXH_unaligned } XXH_alignment;
234
/* Read 32 bits from ptr as little-endian.  Unaligned input goes through
 * XXH_read32 (whose implementation is selected by XXH_FORCE_MEMORY_ACCESS);
 * aligned input is dereferenced directly.  Big-endian targets byte-swap. */
FORCE_INLINE U32 XXH_readLE32_align(const void* ptr, XXH_endianess endian, XXH_alignment align)
236
if (align==XXH_unaligned)
237
return endian==XXH_littleEndian ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
239
return endian==XXH_littleEndian ? *(const U32*)ptr : XXH_swap32(*(const U32*)ptr);
242
/* Little-endian 32-bit read, unaligned case (the common call site). */
FORCE_INLINE U32 XXH_readLE32(const void* ptr, XXH_endianess endian)
244
return XXH_readLE32_align(ptr, endian, XXH_unaligned);
247
/* Read 32 bits as big-endian: swap only when the CPU is little-endian.
 * Used for the canonical (big-endian) on-disk representation. */
static U32 XXH_readBE32(const void* ptr)
249
return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(ptr)) : XXH_read32(ptr);
256
/* C89-compatible compile-time assertion: when c is false, the enum
 * initializer divides by zero and the build fails. */
#define XXH_STATIC_ASSERT(c) { enum { XXH_sa = 1/(int)(!!(c)) }; }
257
/* Returns the library version the binary was compiled with
 * (XXH_VERSION_NUMBER, declared in the public header). */
XXH_PUBLIC_API unsigned XXH_versionNumber (void) { return XXH_VERSION_NUMBER; }
263
/* The five 32-bit primes used by the XXH32 mixing rounds and finalization. */
static const U32 PRIME32_1 = 2654435761U;
264
static const U32 PRIME32_2 = 2246822519U;
265
static const U32 PRIME32_3 = 3266489917U;
266
static const U32 PRIME32_4 = 668265263U;
267
static const U32 PRIME32_5 = 374761393U;
269
static U32 XXH32_round(U32 seed, U32 input)
271
seed += input * PRIME32_2;
272
seed = XXH_rotl32(seed, 13);
278
static U32 XXH32_avalanche(U32 h32)
288
#define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)
291
XXH32_finalize(U32 h32, const void* ptr, size_t len,
292
XXH_endianess endian, XXH_alignment align)
295
const BYTE* p = (const BYTE*)ptr;
297
h32 += (*p) * PRIME32_5; \
299
h32 = XXH_rotl32(h32, 11) * PRIME32_1 ;
302
h32 += XXH_get32bits(p) * PRIME32_3; \
304
h32 = XXH_rotl32(h32, 17) * PRIME32_4 ;
313
return XXH32_avalanche(h32);
321
return XXH32_avalanche(h32);
330
return XXH32_avalanche(h32);
344
case 0: return XXH32_avalanche(h32);
352
XXH32_endian_align(const void* input, size_t len, U32 seed,
353
XXH_endianess endian, XXH_alignment align)
355
const BYTE* p = (const BYTE*)input;
356
const BYTE* bEnd = p + len;
359
#if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
362
bEnd=p=(const BYTE*)(size_t)16;
367
const BYTE* const limit = bEnd - 15;
368
U32 v1 = seed + PRIME32_1 + PRIME32_2;
369
U32 v2 = seed + PRIME32_2;
371
U32 v4 = seed - PRIME32_1;
374
v1 = XXH32_round(v1, XXH_get32bits(p)); p+=4;
375
v2 = XXH32_round(v2, XXH_get32bits(p)); p+=4;
376
v3 = XXH32_round(v3, XXH_get32bits(p)); p+=4;
377
v4 = XXH32_round(v4, XXH_get32bits(p)); p+=4;
380
h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7)
381
+ XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
383
h32 = seed + PRIME32_5;
388
return XXH32_finalize(h32, p, len&15, endian, align);
392
XXH_PUBLIC_API unsigned int XXH32 (const void* input, size_t len, unsigned int seed)
397
XXH32_reset(&state, seed);
398
XXH32_update(&state, input, len);
399
return XXH32_digest(&state);
401
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
403
if (XXH_FORCE_ALIGN_CHECK) {
404
if ((((size_t)input) & 3) == 0) {
405
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
406
return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
408
return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
411
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
412
return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
414
return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
422
XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void)
424
return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
426
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr)
432
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dstState, const XXH32_state_t* srcState)
434
memcpy(dstState, srcState, sizeof(*dstState));
437
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t* statePtr, unsigned int seed)
440
memset(&state, 0, sizeof(state));
441
state.v1 = seed + PRIME32_1 + PRIME32_2;
442
state.v2 = seed + PRIME32_2;
444
state.v4 = seed - PRIME32_1;
446
memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved));
452
XXH_errorcode XXH32_update_endian (XXH32_state_t* state, const void* input, size_t len, XXH_endianess endian)
454
const BYTE* p = (const BYTE*)input;
455
const BYTE* const bEnd = p + len;
458
#if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
464
state->total_len_32 += (unsigned)len;
465
state->large_len |= (len>=16) | (state->total_len_32>=16);
467
if (state->memsize + len < 16) {
468
XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, len);
469
state->memsize += (unsigned)len;
473
if (state->memsize) {
474
XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, 16-state->memsize);
475
{ const U32* p32 = state->mem32;
476
state->v1 = XXH32_round(state->v1, XXH_readLE32(p32, endian)); p32++;
477
state->v2 = XXH32_round(state->v2, XXH_readLE32(p32, endian)); p32++;
478
state->v3 = XXH32_round(state->v3, XXH_readLE32(p32, endian)); p32++;
479
state->v4 = XXH32_round(state->v4, XXH_readLE32(p32, endian));
481
p += 16-state->memsize;
486
const BYTE* const limit = bEnd - 16;
493
v1 = XXH32_round(v1, XXH_readLE32(p, endian)); p+=4;
494
v2 = XXH32_round(v2, XXH_readLE32(p, endian)); p+=4;
495
v3 = XXH32_round(v3, XXH_readLE32(p, endian)); p+=4;
496
v4 = XXH32_round(v4, XXH_readLE32(p, endian)); p+=4;
506
XXH_memcpy(state->mem32, p, (size_t)(bEnd-p));
507
state->memsize = (unsigned)(bEnd-p);
514
XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* state_in, const void* input, size_t len)
516
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
518
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
519
return XXH32_update_endian(state_in, input, len, XXH_littleEndian);
521
return XXH32_update_endian(state_in, input, len, XXH_bigEndian);
526
XXH32_digest_endian (const XXH32_state_t* state, XXH_endianess endian)
530
if (state->large_len) {
531
h32 = XXH_rotl32(state->v1, 1)
532
+ XXH_rotl32(state->v2, 7)
533
+ XXH_rotl32(state->v3, 12)
534
+ XXH_rotl32(state->v4, 18);
536
h32 = state->v3 + PRIME32_5;
539
h32 += state->total_len_32;
541
return XXH32_finalize(h32, state->mem32, state->memsize, endian, XXH_aligned);
545
XXH_PUBLIC_API unsigned int XXH32_digest (const XXH32_state_t* state_in)
547
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
549
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
550
return XXH32_digest_endian(state_in, XXH_littleEndian);
552
return XXH32_digest_endian(state_in, XXH_bigEndian);
564
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash)
566
XXH_STATIC_ASSERT(sizeof(XXH32_canonical_t) == sizeof(XXH32_hash_t));
567
if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
568
memcpy(dst, &hash, sizeof(*dst));
571
XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src)
573
return XXH_readBE32(src);
577
#ifndef XXH_NO_LONG_LONG
587
# if !defined (__VMS) \
588
&& (defined (__cplusplus) \
589
|| (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) )
591
typedef uint64_t U64;
594
typedef unsigned long long U64;
599
#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
602
/* Direct cast-and-dereference 64-bit read; same caveats as the 32-bit
 * version (unaligned access + strict aliasing), gated behind
 * XXH_FORCE_MEMORY_ACCESS==2. */
static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; }
604
#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
608
/* Packed union for unaligned 64-bit loads (XXH_FORCE_MEMORY_ACCESS==1 path),
 * mirroring the 32-bit `unalign` type above. */
typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
609
static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; }
617
static U64 XXH_read64(const void* memPtr)
620
memcpy(&val, memPtr, sizeof(val));
627
# define XXH_swap64 _byteswap_uint64
628
#elif GCC_VERSION >= 403
629
# define XXH_swap64 __builtin_bswap64
631
static U64 XXH_swap64 (U64 x)
633
return ((x << 56) & 0xff00000000000000ULL) |
634
((x << 40) & 0x00ff000000000000ULL) |
635
((x << 24) & 0x0000ff0000000000ULL) |
636
((x << 8) & 0x000000ff00000000ULL) |
637
((x >> 8) & 0x00000000ff000000ULL) |
638
((x >> 24) & 0x0000000000ff0000ULL) |
639
((x >> 40) & 0x000000000000ff00ULL) |
640
((x >> 56) & 0x00000000000000ffULL);
644
/* Read 64 bits from ptr as little-endian; 64-bit analogue of
 * XXH_readLE32_align (direct dereference when aligned, XXH_read64 otherwise,
 * byte-swap on big-endian targets). */
FORCE_INLINE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment align)
646
if (align==XXH_unaligned)
647
return endian==XXH_littleEndian ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
649
return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr);
652
/* Little-endian 64-bit read, unaligned case (the common call site). */
FORCE_INLINE U64 XXH_readLE64(const void* ptr, XXH_endianess endian)
654
return XXH_readLE64_align(ptr, endian, XXH_unaligned);
657
/* Read 64 bits as big-endian: swap only on little-endian CPUs.
 * Used when decoding the canonical representation. */
static U64 XXH_readBE64(const void* ptr)
659
return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(ptr)) : XXH_read64(ptr);
665
/* The five 64-bit primes used by the XXH64 mixing rounds and finalization. */
static const U64 PRIME64_1 = 11400714785074694791ULL;
666
static const U64 PRIME64_2 = 14029467366897019727ULL;
667
static const U64 PRIME64_3 = 1609587929392839161ULL;
668
static const U64 PRIME64_4 = 9650029242287828579ULL;
669
static const U64 PRIME64_5 = 2870177450012600261ULL;
671
static U64 XXH64_round(U64 acc, U64 input)
673
acc += input * PRIME64_2;
674
acc = XXH_rotl64(acc, 31);
679
static U64 XXH64_mergeRound(U64 acc, U64 val)
681
val = XXH64_round(0, val);
683
acc = acc * PRIME64_1 + PRIME64_4;
687
static U64 XXH64_avalanche(U64 h64)
698
#define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)
701
XXH64_finalize(U64 h64, const void* ptr, size_t len,
702
XXH_endianess endian, XXH_alignment align)
704
const BYTE* p = (const BYTE*)ptr;
707
h64 ^= (*p) * PRIME64_5; \
709
h64 = XXH_rotl64(h64, 11) * PRIME64_1;
712
h64 ^= (U64)(XXH_get32bits(p)) * PRIME64_1; \
714
h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
716
#define PROCESS8_64 { \
717
U64 const k1 = XXH64_round(0, XXH_get64bits(p)); \
720
h64 = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4; \
724
case 24: PROCESS8_64;
726
case 16: PROCESS8_64;
729
return XXH64_avalanche(h64);
731
case 28: PROCESS8_64;
733
case 20: PROCESS8_64;
735
case 12: PROCESS8_64;
738
return XXH64_avalanche(h64);
740
case 25: PROCESS8_64;
742
case 17: PROCESS8_64;
746
return XXH64_avalanche(h64);
748
case 29: PROCESS8_64;
750
case 21: PROCESS8_64;
752
case 13: PROCESS8_64;
756
return XXH64_avalanche(h64);
758
case 26: PROCESS8_64;
760
case 18: PROCESS8_64;
762
case 10: PROCESS8_64;
765
return XXH64_avalanche(h64);
767
case 30: PROCESS8_64;
769
case 22: PROCESS8_64;
771
case 14: PROCESS8_64;
776
return XXH64_avalanche(h64);
778
case 27: PROCESS8_64;
780
case 19: PROCESS8_64;
782
case 11: PROCESS8_64;
786
return XXH64_avalanche(h64);
788
case 31: PROCESS8_64;
790
case 23: PROCESS8_64;
792
case 15: PROCESS8_64;
802
case 0: return XXH64_avalanche(h64);
811
XXH64_endian_align(const void* input, size_t len, U64 seed,
812
XXH_endianess endian, XXH_alignment align)
814
const BYTE* p = (const BYTE*)input;
815
const BYTE* bEnd = p + len;
818
#if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
821
bEnd=p=(const BYTE*)(size_t)32;
826
const BYTE* const limit = bEnd - 32;
827
U64 v1 = seed + PRIME64_1 + PRIME64_2;
828
U64 v2 = seed + PRIME64_2;
830
U64 v4 = seed - PRIME64_1;
833
v1 = XXH64_round(v1, XXH_get64bits(p)); p+=8;
834
v2 = XXH64_round(v2, XXH_get64bits(p)); p+=8;
835
v3 = XXH64_round(v3, XXH_get64bits(p)); p+=8;
836
v4 = XXH64_round(v4, XXH_get64bits(p)); p+=8;
839
h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
840
h64 = XXH64_mergeRound(h64, v1);
841
h64 = XXH64_mergeRound(h64, v2);
842
h64 = XXH64_mergeRound(h64, v3);
843
h64 = XXH64_mergeRound(h64, v4);
846
h64 = seed + PRIME64_5;
851
return XXH64_finalize(h64, p, len, endian, align);
855
XXH_PUBLIC_API unsigned long long XXH64 (const void* input, size_t len, unsigned long long seed)
860
XXH64_reset(&state, seed);
861
XXH64_update(&state, input, len);
862
return XXH64_digest(&state);
864
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
866
if (XXH_FORCE_ALIGN_CHECK) {
867
if ((((size_t)input) & 7)==0) {
868
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
869
return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
871
return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
874
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
875
return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
877
return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
883
XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void)
885
return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
887
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr)
893
XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t* dstState, const XXH64_state_t* srcState)
895
memcpy(dstState, srcState, sizeof(*dstState));
898
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t* statePtr, unsigned long long seed)
901
memset(&state, 0, sizeof(state));
902
state.v1 = seed + PRIME64_1 + PRIME64_2;
903
state.v2 = seed + PRIME64_2;
905
state.v4 = seed - PRIME64_1;
907
memcpy(statePtr, &state, sizeof(state) - sizeof(state.reserved));
912
XXH_errorcode XXH64_update_endian (XXH64_state_t* state, const void* input, size_t len, XXH_endianess endian)
914
const BYTE* p = (const BYTE*)input;
915
const BYTE* const bEnd = p + len;
918
#if defined(XXH_ACCEPT_NULL_INPUT_POINTER) && (XXH_ACCEPT_NULL_INPUT_POINTER>=1)
924
state->total_len += len;
926
if (state->memsize + len < 32) {
927
XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, len);
928
state->memsize += (U32)len;
932
if (state->memsize) {
933
XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, 32-state->memsize);
934
state->v1 = XXH64_round(state->v1, XXH_readLE64(state->mem64+0, endian));
935
state->v2 = XXH64_round(state->v2, XXH_readLE64(state->mem64+1, endian));
936
state->v3 = XXH64_round(state->v3, XXH_readLE64(state->mem64+2, endian));
937
state->v4 = XXH64_round(state->v4, XXH_readLE64(state->mem64+3, endian));
938
p += 32-state->memsize;
943
const BYTE* const limit = bEnd - 32;
950
v1 = XXH64_round(v1, XXH_readLE64(p, endian)); p+=8;
951
v2 = XXH64_round(v2, XXH_readLE64(p, endian)); p+=8;
952
v3 = XXH64_round(v3, XXH_readLE64(p, endian)); p+=8;
953
v4 = XXH64_round(v4, XXH_readLE64(p, endian)); p+=8;
963
XXH_memcpy(state->mem64, p, (size_t)(bEnd-p));
964
state->memsize = (unsigned)(bEnd-p);
970
XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH64_state_t* state_in, const void* input, size_t len)
972
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
974
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
975
return XXH64_update_endian(state_in, input, len, XXH_littleEndian);
977
return XXH64_update_endian(state_in, input, len, XXH_bigEndian);
980
FORCE_INLINE U64 XXH64_digest_endian (const XXH64_state_t* state, XXH_endianess endian)
984
if (state->total_len >= 32) {
985
U64 const v1 = state->v1;
986
U64 const v2 = state->v2;
987
U64 const v3 = state->v3;
988
U64 const v4 = state->v4;
990
h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
991
h64 = XXH64_mergeRound(h64, v1);
992
h64 = XXH64_mergeRound(h64, v2);
993
h64 = XXH64_mergeRound(h64, v3);
994
h64 = XXH64_mergeRound(h64, v4);
996
h64 = state->v3 + PRIME64_5;
999
h64 += (U64) state->total_len;
1001
return XXH64_finalize(h64, state->mem64, (size_t)state->total_len, endian, XXH_aligned);
1004
XXH_PUBLIC_API unsigned long long XXH64_digest (const XXH64_state_t* state_in)
1006
XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
1008
if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
1009
return XXH64_digest_endian(state_in, XXH_littleEndian);
1011
return XXH64_digest_endian(state_in, XXH_bigEndian);
1017
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t* dst, XXH64_hash_t hash)
1019
XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
1020
if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
1021
memcpy(dst, &hash, sizeof(*dst));
1024
XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t* src)
1026
return XXH_readBE64(src);