author    zhudacai 00228490 <zhudacai@hisilicon.com>  2019-09-04 12:14:25 +0000
committer zhudacai 00228490 <zhudacai@hisilicon.com>  2019-09-04 12:14:25 +0000
commit    66a5c1cd7e557187bf3a1913e836d0c9be8adbb7 (patch)
tree      6a5b91d397039716529f4418cbd1ec39a1ab342c
parent    d7c25806c0b40c096bbb2ea3cc36d2e0491ec7a5 (diff)
download  redis-66a5c1cd7e557187bf3a1913e836d0c9be8adbb7.tar.gz
The aarch64 architecture supports unaligned accesses to normal memory,
so define UNALIGNED_LE_CPU for aarch64 as well.
-rw-r--r--  src/siphash.c  3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/siphash.c b/src/siphash.c
index 6b9419031..357741132 100644
--- a/src/siphash.c
+++ b/src/siphash.c
@@ -58,7 +58,8 @@ int siptlw(int c) {
/* Test of the CPU is Little Endian and supports not aligned accesses.
* Two interesting conditions to speedup the function that happen to be
* in most of x86 servers. */
-#if defined(__X86_64__) || defined(__x86_64__) || defined (__i386__)
+#if defined(__X86_64__) || defined(__x86_64__) || defined (__i386__) \
+ || defined (__aarch64__) || defined (__arm64__)
#define UNALIGNED_LE_CPU
#endif
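
For context, a minimal sketch of how the UNALIGNED_LE_CPU macro is typically put to use in siphash.c: when it is defined, the 64-bit little-endian load can be a single unaligned pointer dereference instead of assembling the value byte by byte. The U8TO64_LE definition below follows the pattern used in the Redis source, but treat it as an illustration of the technique rather than the verbatim file contents.

#include <stdint.h>

/* Little-endian CPUs that tolerate unaligned loads (now including aarch64). */
#if defined(__X86_64__) || defined(__x86_64__) || defined (__i386__) \
    || defined (__aarch64__) || defined (__arm64__)
#define UNALIGNED_LE_CPU
#endif

#ifdef UNALIGNED_LE_CPU
/* Fast path: read the 8 bytes with one (possibly unaligned) load. */
#define U8TO64_LE(p) (*((uint64_t*)(p)))
#else
/* Portable fallback: assemble the 64-bit value one byte at a time. */
#define U8TO64_LE(p)                                             \
    (((uint64_t)((p)[0]))       | ((uint64_t)((p)[1]) << 8)  |  \
     ((uint64_t)((p)[2]) << 16) | ((uint64_t)((p)[3]) << 24) |  \
     ((uint64_t)((p)[4]) << 32) | ((uint64_t)((p)[5]) << 40) |  \
     ((uint64_t)((p)[6]) << 48) | ((uint64_t)((p)[7]) << 56))
#endif

With the patch applied, aarch64 builds take the single-load branch, which avoids the shift-and-or sequence on every 8-byte word hashed.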