-rw-r--r--   block-sha1/sha1.c    4
-rw-r--r--   compat/bswap.h      36
-rw-r--r--   git-compat-util.h    2
3 files changed, 40 insertions, 2 deletions
diff --git a/block-sha1/sha1.c b/block-sha1/sha1.c
index 464cb258aa..d31f2e386c 100644
--- a/block-sha1/sha1.c
+++ b/block-sha1/sha1.c
@@ -4,8 +4,8 @@
* and to avoid unnecessary copies into the context array.
*/
-#include <string.h>
-#include <arpa/inet.h>
+/* this is only to get definitions for memcpy(), ntohl() and htonl() */
+#include "../git-compat-util.h"
#include "sha1.h"
diff --git a/compat/bswap.h b/compat/bswap.h
new file mode 100644
index 0000000000..7246a12c6e
--- /dev/null
+++ b/compat/bswap.h
@@ -0,0 +1,36 @@
+/*
+ * Let's make sure we always have a sane definition for ntohl()/htonl().
+ * Some libraries define those as a function call, just to perform byte
+ * shifting, bringing significant overhead to what should be a simple
+ * operation.
+ */
+
+/*
+ * Default version that the compiler ought to optimize properly with
+ * constant values.
+ */
+static inline unsigned int default_swab32(unsigned int val)
+{
+ return (((val & 0xff000000) >> 24) |
+ ((val & 0x00ff0000) >> 8) |
+ ((val & 0x0000ff00) << 8) |
+ ((val & 0x000000ff) << 24));
+}
+
+#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
+
+#define bswap32(x) ({ \
+ unsigned int __res; \
+ if (__builtin_constant_p(x)) { \
+ __res = default_swab32(x); \
+ } else { \
+ __asm__("bswap %0" : "=r" (__res) : "0" (x)); \
+ } \
+ __res; })
+
+#undef ntohl
+#undef htonl
+#define ntohl(x) bswap32(x)
+#define htonl(x) bswap32(x)
+
+#endif
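
The two branches of the macro can be exercised with a small stand-alone program. This is only a sketch (swap-test.c is hypothetical, not part of the patch) and it assumes a GCC build on i386/x86-64, so that the bswap32() path above is actually taken:

/* swap-test.c: hypothetical check of compat/bswap.h, GCC on x86 assumed */
#include <assert.h>
#include <arpa/inet.h>		/* baseline ntohl()/htonl() declarations */
#include "compat/bswap.h"	/* overrides them with bswap32() on GCC/x86 */

int main(void)
{
	volatile unsigned int v = 0x12345678;	/* volatile defeats __builtin_constant_p() */
	unsigned int be;

	/* constant argument: the macro takes the default_swab32() branch
	 * and the compiler folds the whole expression at compile time */
	assert(htonl(0x12345678) == 0x78563412);

	/* non-constant argument: the inline "bswap %0" instruction runs
	 * and produces the same byte-reversed value */
	be = htonl(v);
	assert(be == 0x78563412);

	/* swapping twice is the identity */
	assert(ntohl(be) == v);
	return 0;
}

Built with something like "gcc -O2 swap-test.c" from the top of the source tree, the non-constant case should compile down to a single bswap instruction rather than a call into libc, which is the point of the header.
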
diff --git a/git-compat-util.h b/git-compat-util.h
index 9f941e42b1..000859ed90 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -176,6 +176,8 @@ extern char *gitbasename(char *);
#endif
#endif
+#include "compat/bswap.h"
+
/* General helper functions */
extern void usage(const char *err) NORETURN;
extern void die(const char *err, ...) NORETURN __attribute__((format (printf, 1, 2)));
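
With that include in place, every file that already includes git-compat-util.h picks up the faster definitions without further changes. A hypothetical caller, for illustration only (put_be32 is not a helper added by this patch):

#include "git-compat-util.h"

/*
 * Store a 32-bit value in network (big-endian) byte order.  After this
 * patch, the htonl() below expands to the bswap32() macro on GCC/x86;
 * on every other platform it stays whatever the system headers provide.
 */
static void put_be32(unsigned char *out, unsigned int v)
{
	v = htonl(v);
	memcpy(out, &v, sizeof(v));
}
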