summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJohnny Willemsen <jwillemsen@remedy.nl>2007-03-15 12:07:33 +0000
committerJohnny Willemsen <jwillemsen@remedy.nl>2007-03-15 12:07:33 +0000
commit08dcf0144f5fef8278ae4d2c06fac1c0630c4ea9 (patch)
treec284231dc28d1b97800536e48768411995f918d2
parent355374d93045e18919f1ea6b59a4b4e892cd02aa (diff)
downloadATCD-08dcf0144f5fef8278ae4d2c06fac1c0630c4ea9.tar.gz
Thu Mar 15 12:06:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
-rw-r--r--ACE/ChangeLog16
-rw-r--r--ACE/ace/Atomic_Op.cpp24
-rw-r--r--ACE/ace/Atomic_Op_Sparc.c76
3 files changed, 24 insertions, 92 deletions
diff --git a/ACE/ChangeLog b/ACE/ChangeLog
index 3efa0af8c20..df2fba9e9fb 100644
--- a/ACE/ChangeLog
+++ b/ACE/ChangeLog
@@ -1,4 +1,18 @@
-Thu Mar 15 12:20:31 UTC 2007 Chad Elliott <elliott_c@ociweb.com>
+Thu Mar 15 12:06:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
+
+	Reverted the change below; the test results show that the
+	Atomic_Op test fails when these optimized functions are
+	enabled.
+
+ Mon Mar 5 09:21:12 UTC 2007 Johnny Willemsen <jwillemsen@remedy.nl>
+ * ace/Atomic_Op_Sparc.c:
+ * ace/Atomic_Op.{h,cpp}:
+ Added optimized functions for SunCC on x86. Thanks to
+ Dennis Chernoivanov <cdi at tbricks dot com> for reporting this.
+ If you want to enable these, add atomic_ops_sparc=1 to your
+ platform_macros.GNU file
+
+Thu Mar 15 11:20:31 UTC 2007 Chad Elliott <elliott_c@ociweb.com>
* tests/Reactor_Notify_Test.cpp:
diff --git a/ACE/ace/Atomic_Op.cpp b/ACE/ace/Atomic_Op.cpp
index c0a2b4a3718..634b89d4bfa 100644
--- a/ACE/ace/Atomic_Op.cpp
+++ b/ACE/ace/Atomic_Op.cpp
@@ -34,8 +34,7 @@ single_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#elif defined(__GNUC__) && defined(PPC)
@@ -58,8 +57,7 @@ single_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#elif defined(__GNUC__) && defined(PPC)
@@ -81,8 +79,7 @@ single_cpu_exchange (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(PPC)
@@ -104,8 +101,7 @@ single_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(PPC)
@@ -147,8 +143,7 @@ multi_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -165,8 +160,7 @@ multi_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_add_long (
reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -183,8 +177,7 @@ multi_cpu_exchange (volatile long *value, long rhs)
// The XCHG instruction automatically follows LOCK semantics
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_swap_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* __GNUC__ && ACE_HAS_PENTIUM */
@@ -201,8 +194,7 @@ multi_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64)))
+#elif defined (sun)
return ace_atomic_swap_add_long (
reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
diff --git a/ACE/ace/Atomic_Op_Sparc.c b/ACE/ace/Atomic_Op_Sparc.c
index 7118610872f..df9565fc653 100644
--- a/ACE/ace/Atomic_Op_Sparc.c
+++ b/ACE/ace/Atomic_Op_Sparc.c
@@ -9,81 +9,7 @@
#if defined (ACE_INCLUDE_ATOMIC_OP_SPARC)
-#if defined(__i386) && defined(__SUNPRO_C)
-static void
-__sunpro_asm_code() {
- __asm("\n\
- .globl ace_atomic_add_long \n\
- .type ace_atomic_add_long,@function \n\
- .align 4 \n\
-ace_atomic_add_long: \n\
- movl 0x00000004(%esp), %edx \n\
- movl 0x00000008(%esp), %eax \n\
- lock; xadd %eax, (%edx) \n\
- addl %eax, 0x00000008(%esp) \n\
- ret \n\
- ");
-
- __asm("\n\
- .globl ace_atomic_swap_long \n\
- .type ace_atomic_swap_long,@function \n\
- .align 4 \n\
-ace_atomic_swap_long: \n\
- movl 0x00000004(%esp), %edx \n\
- movl 0x00000008(%esp), %eax \n\
- xchg %eax, (%edx) \n\
- ret \n\
- ");
-
- __asm("\n\
- .globl ace_atomic_swap_add_long \n\
- .type ace_atomic_swap_add_long,@function \n\
- .align 4 \n\
-ace_atomic_swap_add_long: \n\
- movl 0x00000004(%esp), %edx \n\
- movl 0x00000008(%esp), %eax \n\
- lock; xadd %eax, (%edx) \n\
- ret \n\
- ");
-}
-
-#elif defined(__x86_64) && defined(__SUNPRO_C)
-
-static void
-__sunpro_asm_code() {
- __asm("\n\
- .globl ace_atomic_add_long \n\
- .type ace_atomic_add_long,@function \n\
- .align 16 \n\
-ace_atomic_add_long: \n\
- movq %rsi, %rax \n\
- lock; xaddq %rax, (%rdi) \n\
- addq %rsi, %rax \n\
- ret \n\
- ");
-
- __asm("\n\
- .globl ace_atomic_swap_long \n\
- .type ace_atomic_swap_long,@function \n\
- .align 16 \n\
-ace_atomic_swap_long: \n\
- xchgq %rsi, (%rdi) \n\
- movq %rsi, %rax \n\
- ret \n\
- ");
-
- __asm("\n\
- .globl ace_atomic_swap_add_long \n\
- .type ace_atomic_swap_add_long,@function \n\
- .align 16 \n\
-ace_atomic_swap_add_long: \n\
- lock; xaddq %rsi, (%rdi) \n\
- movq %rsi, %rax \n\
- ret \n\
- ");
-}
-
-# elif defined (__sparcv9)
+#if defined (__sparcv9)
unsigned long
ace_atomic_add_long (volatile unsigned long *dest, long rhs)