summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJohnny Willemsen <jwillemsen@remedy.nl>2007-03-05 09:21:45 +0000
committerJohnny Willemsen <jwillemsen@remedy.nl>2007-03-05 09:21:45 +0000
commitd8fe2371826c270bddce6a75ff54df740c0ee7ed (patch)
tree2edf3e0e65674f619d6c93edae3050b9768f53c4
parenta0a646e2c83d0de9e3e05175b324b2611187a38d (diff)
downloadATCD-d8fe2371826c270bddce6a75ff54df740c0ee7ed.tar.gz
Mon Mar 5 09:21:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
-rw-r--r--ACE/ChangeLog7
-rw-r--r--ACE/THANKS1
-rw-r--r--ACE/ace/Atomic_Op.cpp24
-rw-r--r--ACE/ace/Atomic_Op.h2
-rw-r--r--ACE/ace/Atomic_Op_Sparc.c76
5 files changed, 101 insertions, 9 deletions
diff --git a/ACE/ChangeLog b/ACE/ChangeLog
index 0c352b68232..79e209ba413 100644
--- a/ACE/ChangeLog
+++ b/ACE/ChangeLog
@@ -1,3 +1,10 @@
+Mon Mar 5 09:21:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
+
+ * ace/Atomic_Op_Sparc.c:
+ * ace/Atomic_Op.{h,cpp}:
+	  Added optimized functions for SunCC on x86. Thanks to
+	  Dennis Chernoivanov <cdi at tbricks dot com> for reporting this.
+
Sat Mar 3 17:35:52 UTC 2007 Ossama Othman <ossama_othman at symantec dot com>
* ace/Basic_Types.h (ACE_UINT64_MAX):
diff --git a/ACE/THANKS b/ACE/THANKS
index 2977867154a..f690849bbe8 100644
--- a/ACE/THANKS
+++ b/ACE/THANKS
@@ -2180,6 +2180,7 @@ Waba <waba-ace at waba dot be>
Scott Mark <sjm at pobox dot com>
Bjoern Rasmussen <bjoern.d.rasmussen at gmail dot com>
Ian C White <Ian_C_White at raytheond dot com>
+Dennis Chernoivanov <cdi at tbricks dot com>
I would particularly like to thank Paul Stephenson, who worked with me
at Ericsson in the early 1990's. Paul devised the recursive Makefile
diff --git a/ACE/ace/Atomic_Op.cpp b/ACE/ace/Atomic_Op.cpp
index 84dd1763565..517a16b8ff2 100644
--- a/ACE/ace/Atomic_Op.cpp
+++ b/ACE/ace/Atomic_Op.cpp
@@ -34,7 +34,8 @@ single_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -51,7 +52,8 @@ single_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -67,7 +69,8 @@ single_cpu_exchange (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -84,7 +87,8 @@ single_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
@@ -121,7 +125,8 @@ multi_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -138,7 +143,8 @@ multi_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -155,7 +161,8 @@ multi_cpu_exchange (volatile long *value, long rhs)
// The XCHG instruction automatically follows LOCK semantics
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -172,7 +179,8 @@ multi_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
diff --git a/ACE/ace/Atomic_Op.h b/ACE/ace/Atomic_Op.h
index 5a984093363..27144b52318 100644
--- a/ACE/ace/Atomic_Op.h
+++ b/ACE/ace/Atomic_Op.h
@@ -40,6 +40,8 @@
# endif /* ACE_HAS_INTERLOCKED_EXCHANGEADD */
# elif defined (__GNUC__) && (defined (ACE_HAS_PENTIUM) || defined (__amd64__))
# define ACE_HAS_BUILTIN_ATOMIC_OP
+# elif defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))
+# define ACE_HAS_BUILTIN_ATOMIC_OP
# endif /* WIN32 */
#endif /* ACE_HAS_THREADS */
diff --git a/ACE/ace/Atomic_Op_Sparc.c b/ACE/ace/Atomic_Op_Sparc.c
index f301de5b85f..6f1faf7aaac 100644
--- a/ACE/ace/Atomic_Op_Sparc.c
+++ b/ACE/ace/Atomic_Op_Sparc.c
@@ -9,7 +9,81 @@
#if defined (ACE_INCLUDE_ATOMIC_OP_SPARC)
-# if defined (__sparcv9)
+#if defined(__i386) && defined(__SUNPRO_C)
+static void
+__sunpro_asm_code() {
+ __asm("\n\
+ .globl ace_atomic_add_long \n\
+ .type ace_atomic_add_long,@function \n\
+ .align 4 \n\
+ace_atomic_add_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ lock; xadd %eax, (%edx) \n\
+	movl        0x00000008(%esp), %eax \n\
+	lock; xadd  %eax, (%edx) \n\
+	addl        0x00000008(%esp), %eax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_long \n\
+ .type ace_atomic_swap_long,@function \n\
+ .align 4 \n\
+ace_atomic_swap_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ xchg %eax, (%edx) \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+	.globl	ace_atomic_swap_add_long \n\
+	.type	ace_atomic_swap_add_long,@function \n\
+ .align 4 \n\
+ace_atomic_swap_add_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ lock; xadd %eax, (%edx) \n\
+ ret \n\
+ ");
+}
+
+#elif defined(__x86_64) && defined(__SUNPRO_C)
+
+static void
+__sunpro_asm_code() {
+ __asm("\n\
+ .globl ace_atomic_add_long \n\
+ .type ace_atomic_add_long,@function \n\
+ .align 16 \n\
+ace_atomic_add_long: \n\
+ movq %rsi, %rax \n\
+ lock; xaddq %rax, (%rdi) \n\
+ addq %rsi, %rax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_long \n\
+ .type ace_atomic_swap_long,@function \n\
+ .align 16 \n\
+ace_atomic_swap_long: \n\
+ xchgq %rsi, (%rdi) \n\
+ movq %rsi, %rax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_add_long \n\
+ .type ace_atomic_swap_add_long,@function \n\
+ .align 16 \n\
+ace_atomic_swap_add_long: \n\
+ lock; xaddq %rsi, (%rdi) \n\
+ movq %rsi, %rax \n\
+ ret \n\
+ ");
+}
+
+# elif defined (__sparcv9)
unsigned long
ace_atomic_add_long (volatile unsigned long *dest, long rhs)