author    Johnny Willemsen <jwillemsen@remedy.nl>  2007-08-17 11:46:13 +0000
committer Johnny Willemsen <jwillemsen@remedy.nl>  2007-08-17 11:46:13 +0000
commit    ec73e249289592fe82959ee10639031191eeaded (patch)
tree      06c3636dbe01f746d61816deeb2e078b2526533a
parent    600e9416e2068802d83715fd8c5008acc61782cb (diff)
download  ATCD-ec73e249289592fe82959ee10639031191eeaded.tar.gz
Fri Aug 17 11:44:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
-rw-r--r--  ACE/ChangeLog               9
-rw-r--r--  ACE/ace/Atomic_Op.cpp      24
-rw-r--r--  ACE/ace/Atomic_Op.h         2
-rw-r--r--  ACE/ace/Atomic_Op_Sparc.c  76
4 files changed, 102 insertions, 9 deletions
diff --git a/ACE/ChangeLog b/ACE/ChangeLog
index 5326e4481c5..4032b0bf513 100644
--- a/ACE/ChangeLog
+++ b/ACE/ChangeLog
@@ -1,3 +1,12 @@
+Fri Aug 17 11:44:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
+
+ * ace/Atomic_Op_Sparc.c:
+ * ace/Atomic_Op.{h,cpp}:
+ Added optimized functions for SunCC on x86. Thanks to
+ Dennis Chernoivanov <cdi at tbricks dot com> for reporting this.
+ If you want to enable these, add atomic_ops_sparc=1 to your
+ platform_macros.GNU file.
+
Fri Aug 17 10:43:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
* include/makeinclude/platform_linux_common.GNU:
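The ChangeLog entry above makes the new SunCC/x86 code paths opt-in via atomic_ops_sparc=1. A minimal sketch of that switch, assuming the stock SunOS/SunCC platform include that ships with ACE (file names here are illustrative; check your own build setup):

    # platform_macros.GNU -- sketch, not a verified configuration
    atomic_ops_sparc = 1
    include $(ACE_ROOT)/include/makeinclude/platform_sunos5_sunc++.GNU

The intent, per the entry above, is that this causes ace/Atomic_Op_Sparc.c to be compiled in, providing the out-of-line ace_atomic_*_long helpers used by the diffs below.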
diff --git a/ACE/ace/Atomic_Op.cpp b/ACE/ace/Atomic_Op.cpp
index d03e4f8de1a..55db49fc096 100644
--- a/ACE/ace/Atomic_Op.cpp
+++ b/ACE/ace/Atomic_Op.cpp
@@ -34,7 +34,8 @@ single_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#elif defined(__GNUC__) && defined(PPC)
@@ -57,7 +58,8 @@ single_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#elif defined(__GNUC__) && defined(PPC)
@@ -79,7 +81,8 @@ single_cpu_exchange (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(PPC)
@@ -101,7 +104,8 @@ single_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(PPC)
@@ -143,7 +147,8 @@ multi_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
@@ -160,7 +165,8 @@ multi_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
@@ -177,7 +183,8 @@ multi_cpu_exchange (volatile long *value, long rhs)
// The XCHG instruction automatically follows LOCK semantics
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
@@ -194,7 +201,8 @@ multi_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun)
+#elif defined (sun) || \
+ (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
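A note on the two return conventions visible in the hunks above: the bare xadd/xchg branches return the operand's previous value, while ace_atomic_add_long returns the updated value (its assembly re-adds the operand after the xadd). A minimal single-threaded reference model of those contracts, with illustrative names that are not ACE APIs (a real implementation must of course be atomic):

    #include <assert.h>

    /* add: returns the NEW value, like ace_atomic_add_long */
    static unsigned long ref_add_long (volatile unsigned long *dest, long rhs)
    { unsigned long nv = *dest + rhs; *dest = nv; return nv; }

    /* swap: returns the OLD value, like ace_atomic_swap_long (xchg) */
    static unsigned long ref_swap_long (volatile unsigned long *dest, unsigned long rhs)
    { unsigned long old = *dest; *dest = rhs; return old; }

    /* swap-add: returns the OLD value, like ace_atomic_swap_add_long (bare xadd) */
    static unsigned long ref_swap_add_long (volatile unsigned long *dest, long rhs)
    { unsigned long old = *dest; *dest = old + rhs; return old; }

    int main (void)
    {
      volatile unsigned long v = 5;
      assert (ref_add_long (&v, 3) == 8);        /* new value */
      assert (ref_swap_long (&v, 42) == 8);      /* old value; v is now 42 */
      assert (ref_swap_add_long (&v, 1) == 42);  /* old value; v is now 43 */
      assert (v == 43);
      return 0;
    }

This is also why single_cpu_increment can return ace_atomic_add_long (value, 1) directly, while the inline-assembly branch has to compute tmp + 1 itself.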
diff --git a/ACE/ace/Atomic_Op.h b/ACE/ace/Atomic_Op.h
index 94c5031cee6..24e2f2a620d 100644
--- a/ACE/ace/Atomic_Op.h
+++ b/ACE/ace/Atomic_Op.h
@@ -44,6 +44,8 @@
# elif (defined (__GNUC__) || defined (__INTEL_COMPILER)) && (defined (ACE_HAS_PENTIUM) || defined (__amd64__) || defined (__x86_64__))
# define ACE_HAS_BUILTIN_ATOMIC_OP
# define ACE_HAS_INTEL_ASSEMBLY
+# elif defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))
+# define ACE_HAS_BUILTIN_ATOMIC_OP
# endif /* WIN32 */
#endif /* ACE_HAS_THREADS */
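The Atomic_Op.h hunk only flips feature macros; which branch fires depends entirely on compiler-predefined macros. A stand-alone probe mirroring that dispatch (compile it as C++, since __SUNPRO_CC is defined by Sun's C++ compiler; ACE_HAS_PENTIUM is ACE configuration, normally supplied by config.h, and appears here only for completeness):

    #include <stdio.h>

    int main (void)
    {
    #if defined (WIN32)
      puts ("WIN32 branch");
    #elif (defined (__GNUC__) || defined (__INTEL_COMPILER)) && \
          (defined (ACE_HAS_PENTIUM) || defined (__amd64__) || defined (__x86_64__))
      puts ("g++/icc on x86: ACE_HAS_BUILTIN_ATOMIC_OP + ACE_HAS_INTEL_ASSEMBLY");
    #elif defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))
      puts ("SunCC on x86: ACE_HAS_BUILTIN_ATOMIC_OP, helpers from Atomic_Op_Sparc.c");
    #else
      puts ("no builtin atomic ops; ACE falls back to its lock-based Atomic_Op");
    #endif
      return 0;
    }

Note that the new SunCC branch defines only ACE_HAS_BUILTIN_ATOMIC_OP, not ACE_HAS_INTEL_ASSEMBLY: the assembly lives out-of-line in Atomic_Op_Sparc.c rather than in inline asm statements.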
diff --git a/ACE/ace/Atomic_Op_Sparc.c b/ACE/ace/Atomic_Op_Sparc.c
index 71af436af38..75d64eb6ac5 100644
--- a/ACE/ace/Atomic_Op_Sparc.c
+++ b/ACE/ace/Atomic_Op_Sparc.c
@@ -9,7 +9,81 @@
#if defined (ACE_INCLUDE_ATOMIC_OP_SPARC)
-#if defined (__sparcv9)
+#if defined(__i386) && defined(__SUNPRO_C)
+static void
+__sunpro_asm_code() {
+ __asm("\n\
+ .globl ace_atomic_add_long \n\
+ .type ace_atomic_add_long,@function \n\
+ .align 4 \n\
+ace_atomic_add_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ lock; xadd %eax, (%edx) \n\
+ addl 0x00000008(%esp), %eax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_long \n\
+ .type ace_atomic_swap_long,@function \n\
+ .align 4 \n\
+ace_atomic_swap_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ xchg %eax, (%edx) \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_add_long \n\
+ .type ace_atomic_swap_add_long,@function \n\
+ .align 4 \n\
+ace_atomic_swap_add_long: \n\
+ movl 0x00000004(%esp), %edx \n\
+ movl 0x00000008(%esp), %eax \n\
+ lock; xadd %eax, (%edx) \n\
+ ret \n\
+ ");
+}
+
+#elif defined(__x86_64) && defined(__SUNPRO_C)
+
+static void
+__sunpro_asm_code() {
+ __asm("\n\
+ .globl ace_atomic_add_long \n\
+ .type ace_atomic_add_long,@function \n\
+ .align 16 \n\
+ace_atomic_add_long: \n\
+ movq %rsi, %rax \n\
+ lock; xaddq %rax, (%rdi) \n\
+ addq %rsi, %rax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_long \n\
+ .type ace_atomic_swap_long,@function \n\
+ .align 16 \n\
+ace_atomic_swap_long: \n\
+ xchgq %rsi, (%rdi) \n\
+ movq %rsi, %rax \n\
+ ret \n\
+ ");
+
+ __asm("\n\
+ .globl ace_atomic_swap_add_long \n\
+ .type ace_atomic_swap_add_long,@function \n\
+ .align 16 \n\
+ace_atomic_swap_add_long: \n\
+ lock; xaddq %rsi, (%rdi) \n\
+ movq %rsi, %rax \n\
+ ret \n\
+ ");
+}
+
+#elif defined (__sparcv9)
unsigned long
ace_atomic_add_long (volatile unsigned long *dest, long rhs)
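For context on the new assembly: the __i386 versions fetch both arguments from the stack per the 32-bit SysV/cdecl convention (first argument at 4(%esp), second at 8(%esp)), while the __x86_64 versions receive them in %rdi and %rsi and return in %rax, per the System V AMD64 ABI. On compilers that provide them, GCC's __sync builtins (GCC 4.1+) express the same three primitives; a sketch for comparison, with illustrative names that are not part of this patch:

    /* Same contracts as the hand-written SunCC assembly above. */
    unsigned long gcc_atomic_add_long (volatile unsigned long *dest, long rhs)
    { return __sync_add_and_fetch (dest, rhs); }     /* returns the NEW value */

    unsigned long gcc_atomic_swap_long (volatile unsigned long *dest, unsigned long rhs)
    { return __sync_lock_test_and_set (dest, rhs); } /* atomic exchange; OLD value */

    unsigned long gcc_atomic_swap_add_long (volatile unsigned long *dest, long rhs)
    { return __sync_fetch_and_add (dest, rhs); }     /* returns the OLD value */

On x86, __sync_lock_test_and_set compiles to the same xchg used above, though its formal guarantee is only an acquire barrier; it is offered here as a comparison sketch, not a drop-in replacement.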