path: root/stadtx_hash.h
author     Yves Orton <demerphq@gmail.com>   2017-10-12 17:57:21 +0200
committer  Yves Orton <demerphq@gmail.com>   2017-10-12 17:57:21 +0200
commit     d939098ccc7209faf586b0a2cca8b202f4c297f1 (patch)
tree       000379ada7d3ad3a979540c6d4a8608e1cde4461 /stadtx_hash.h
parent     a68a847ef5e609337bd3f538810884d3b918bfb8 (diff)
download   perl-d939098ccc7209faf586b0a2cca8b202f4c297f1.tar.gz
hopefully better macros for building on x86
Diffstat (limited to 'stadtx_hash.h')
-rw-r--r--   stadtx_hash.h   104
1 file changed, 61 insertions(+), 43 deletions(-)
diff --git a/stadtx_hash.h b/stadtx_hash.h
index db6c406583..a54af2e590 100644
--- a/stadtx_hash.h
+++ b/stadtx_hash.h
@@ -5,14 +5,6 @@
#define DEBUG_STADTX_HASH 0
#endif
-
-#ifndef ROTL64
-#define _ROTL_SIZED(x,r,s) ( ((x) << (r)) | ((x) >> ((s) - (r))) )
-#define _ROTR_SIZED(x,r,s) ( ((x) << ((s) - (r))) | ((x) >> (r)) )
-#define ROTL64(x,r) _ROTL_SIZED(x,r,64)
-#define ROTR64(x,r) _ROTR_SIZED(x,r,64)
-#endif
-
#ifndef PERL_SEEN_HV_FUNC_H
#if !defined(U64)
@@ -35,6 +27,7 @@
#ifndef STRLEN
#define STRLEN int
#endif
+
#endif
#ifndef STADTX_STATIC_INLINE
@@ -51,47 +44,72 @@
#endif
#ifndef STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
+/* STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN only matters if nothing has defined U8TO64_LE etc,
+ * and when built with Perl these should be defined before this file is loaded.
+ */
+#ifdef U32_ALIGNMENT_REQUIRED
+#define STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 0
+#else
#define STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN 1
#endif
+#endif
+
+#ifndef U8TO64_LE
+#if STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
+#define U8TO64_LE(ptr) (*((const U64 *)(ptr)))
+#else
+#define U8TO64_LE(ptr) (\
+ (U64)(ptr)[7] << 56 | \
+ (U64)(ptr)[6] << 48 | \
+ (U64)(ptr)[5] << 40 | \
+ (U64)(ptr)[4] << 32 | \
+ (U64)(ptr)[3] << 24 | \
+ (U64)(ptr)[2] << 16 | \
+ (U64)(ptr)[1] << 8 | \
+ (U64)(ptr)[0] \
+)
+#endif
+#endif
+
+#ifndef U8TO32_LE
+#if STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
+#define U8TO32_LE(ptr) (*((const U32 *)(ptr)))
+#else
+#define U8TO32_LE(ptr) (\
+ (U32)(ptr)[3] << 24 | \
+ (U32)(ptr)[2] << 16 | \
+ (U32)(ptr)[1] << 8 | \
+ (U32)(ptr)[0] \
+)
+#endif
+#endif
+#ifndef U8TO16_LE
#if STADTX_ALLOW_UNALIGNED_AND_LITTLE_ENDIAN
- #ifndef U8TO64_LE
- #define U8TO64_LE(ptr) (*((const U64 *)(ptr)))
- #endif
- #ifndef U8TO32_LE
- #define U8TO32_LE(ptr) (*((const U32 *)(ptr)))
- #endif
- #ifndef U8TO16_LE
- #define U8TO16_LE(ptr) (*((const U16 *)(ptr)))
- #endif
+#define U8TO16_LE(ptr) (*((const U16 *)(ptr)))
#else
- #ifndef U8TO64_LE
- #define U8TO64_LE(ptr) (\
- (U64)(ptr)[7] << 56 | \
- (U64)(ptr)[6] << 48 | \
- (U64)(ptr)[5] << 40 | \
- (U64)(ptr)[4] << 32 | \
- (U64)(ptr)[3] << 24 | \
- (U64)(ptr)[2] << 16 | \
- (U64)(ptr)[1] << 8 | \
- (U64)(ptr)[0] \
- )
- #endif
- #ifndef U8TO32_LE
- #define U8TO32_LE(ptr) (\
- (U32)(ptr)[3] << 24 | \
- (U32)(ptr)[2] << 16 | \
- (U32)(ptr)[1] << 8 | \
- (U32)(ptr)[0] \
- )
- #endif
- #ifndef U8TO16_LE
- #define U8TO16_LE(ptr) (\
- (U16)(ptr)[1] << 8 | \
- (U16)(ptr)[0] \
- )
- #endif
+#define U8TO16_LE(ptr) (\
+ (U16)(ptr)[1] << 8 | \
+ (U16)(ptr)[0] \
+)
#endif
+#endif
+
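
The U8TO64_LE/U8TO32_LE/U8TO16_LE macros added above read little-endian values either through a direct (possibly unaligned) pointer cast, or by assembling the bytes with shifts when U32_ALIGNMENT_REQUIRED forbids unaligned access. A minimal standalone sketch of the byte-wise form, not part of the patch, using <stdint.h> types in place of Perl's U64/U8 typedefs:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative only: the byte-wise little-endian load used when
 * unaligned reads are not allowed. */
static uint64_t u8to64_le_bytes(const uint8_t *p) {
    return  (uint64_t)p[7] << 56 | (uint64_t)p[6] << 48 |
            (uint64_t)p[5] << 40 | (uint64_t)p[4] << 32 |
            (uint64_t)p[3] << 24 | (uint64_t)p[2] << 16 |
            (uint64_t)p[1] <<  8 | (uint64_t)p[0];
}

int main(void) {
    const uint8_t buf[8] = { 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01 };
    uint64_t direct;
    memcpy(&direct, buf, sizeof direct);   /* alignment-safe stand-in for the cast form */
    printf("bytes:  %016llx\n", (unsigned long long)u8to64_le_bytes(buf));
    printf("direct: %016llx\n", (unsigned long long)direct);
    /* both print 0123456789abcdef on a little-endian host */
    return 0;
}
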
+/* Find best way to ROTL32/ROTL64 */
+#if defined(_MSC_VER)
+ #include <stdlib.h> /* Microsoft put _rotl declaration in here */
+ #define ROTL32(x,r) _rotl(x,r)
+ #define ROTR32(x,r) _rotr(x,r)
+ #define ROTL64(x,r) _rotl64(x,r)
+ #define ROTR64(x,r) _rotr64(x,r)
+#else
+ /* gcc recognises this code and generates a rotate instruction for CPUs with one */
+ #define ROTL32(x,r) (((U32)(x) << (r)) | ((U32)(x) >> (32 - (r))))
+ #define ROTR32(x,r) (((U32)(x) << (32 - (r))) | ((U32)(x) >> (r)))
+ #define ROTL64(x,r) ( ( (U64)(x) << (r) ) | ( (U64)(x) >> ( 64 - (r) ) ) )
+ #define ROTR64(x,r) ( ( (U64)(x) << ( 64 - (r) ) ) | ( (U64)(x) >> (r) ) )
+#endif
+
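
The non-MSVC branch above expresses rotation as two shifts and an OR, which gcc and clang typically compile to a single rotate instruction; MSVC instead gets the _rotl/_rotl64 intrinsics from <stdlib.h>. A small standalone sketch of the portable pattern, not part of the patch:

#include <stdint.h>
#include <stdio.h>

/* Illustrative only: the shift-and-or rotate used as the non-MSVC
 * fallback (well defined for 0 < r < 64). */
static uint64_t rotl64(uint64_t x, unsigned r) {
    return (x << r) | (x >> (64 - r));
}

int main(void) {
    /* 0x8000000000000001 rotated left by 1 is 0x0000000000000003 */
    printf("%016llx\n", (unsigned long long)rotl64(0x8000000000000001ULL, 1));
    return 0;
}
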
/* do a marsaglia xor-shift permutation followed by a
* multiply by a prime (presumably large) and another