author     nanbor <nanbor@ae88bc3d-4319-0410-8dbf-d08b4c9d3795>  1999-04-19 22:31:51 +0000
committer  nanbor <nanbor@ae88bc3d-4319-0410-8dbf-d08b4c9d3795>  1999-04-19 22:31:51 +0000
commit     651321242d560ad9ebb65ee2a38cdb9cf25428e9 (patch)
tree       0d6148ef6545d5036feeaef6a0b587f571aad4b1 /performance-tests
parent     08e65834fd95a8aad64a74de5e4034c882e1b888 (diff)
*** empty log message ***
Diffstat (limited to 'performance-tests')
-rwxr-xr-x  performance-tests/Synch-Benchmarks/run_tests.pl             71
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t1.conf     32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t16.conf    32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t2.conf     32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t32.conf    32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t4.conf     32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t64.conf    32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/perf_t8.conf     32
-rw-r--r--  performance-tests/Synch-Benchmarks/svcconf/svc.conf         36
9 files changed, 307 insertions, 24 deletions
diff --git a/performance-tests/Synch-Benchmarks/run_tests.pl b/performance-tests/Synch-Benchmarks/run_tests.pl
index 21d28cc281a..1ef43e69298 100755
--- a/performance-tests/Synch-Benchmarks/run_tests.pl
+++ b/performance-tests/Synch-Benchmarks/run_tests.pl
@@ -17,6 +17,9 @@ $result_dir = "results";
$svcconf_dir = "svcconf";
$conf_ext = ".conf";
+@Null_List = ();
+
+# This suite is selected with "-N baseline".
@Baseline_List = ("base_acquire",
"base_tryacquire",
"base_acquire_read",
@@ -24,23 +27,54 @@ $conf_ext = ".conf";
"base_acquire_write",
"base_tryacquire_write");
+# This suite is selected with "-N perf_thrno".
+@Perf_Thr_Number_List = ("perf_t1",
+ "perf_t2",
+ "perf_t4",
+ "perf_t8",
+ "perf_t16",
+ "perf_t32",
+ "perf_t64");
+
+@Target = @Null_List;
+
while ( $#ARGV >= 0 && $ARGV[0] =~ /^-/ )
{
- if ($ARGV[0] eq '-d') # Run debug
+    if ($ARGV[0] eq '-d')        # Use the debug build
{
- $debug = 1;
$name = "debug";
}
- elsif ($ARGV[0] eq '-D') # Subdir name to put the result
+ elsif ($ARGV[0] eq '-p') # Debug perl script
+ {
+ $debug = 1;
+      print "debug perl script\n";
+ }
+ elsif ($ARGV[0] eq '-D') # Subdir name to put the result
{
shift;
$result_dir = $ARGV[0];
}
- elsif ($ARGV[0] eq '-S') # Subdir to svc.conf files.
+ elsif ($ARGV[0] eq '-S') # Subdir to svc.conf files.
{
shift;
$svcconf_dir = $ARGV[0];
}
+ elsif ($ARGV[0] eq '-N') # Specify test name.
+ {
+ shift;
+ if ($ARGV[0] eq "baseline")
+ {
+ @Target = @Baseline_List;
+ }
+ elsif ($ARGV[0] eq "perf_thrno")
+ {
+ @Target = @Perf_Thr_Number_List;
+ }
+ else
+ {
+ die "Unknown test \"$ARGV[0]\"\n";
+ }
+ }
else
{
warn "$0: unknown option $ARGV[0]\n";
@@ -49,7 +83,8 @@ while ( $#ARGV >= 0 && $ARGV[0] =~ /^-/ )
shift;
}
-@Target = @Baseline_List;
+die "You must specify a test to run\n" if (scalar (@Target) == 0);
+
if ($Win32 != 0)
{
$execname = "$name\\$EXE";
@@ -65,12 +100,32 @@ else
for ($Cntr = 0; $Cntr < scalar (@Target); $Cntr++)
{
- open STDOUT, "> $result_dir$DIR_SEPARATOR$Target[$Cntr].$name";
- open STDERR, ">&STDOUT";
+ $Redirect_Output = "$result_dir$DIR_SEPARATOR$Target[$Cntr].$name";
+ if ($debug != 0) # Only redirect output in actual run
+ {
+      print "Would redirect output to $Redirect_Output\n";
+ }
+ else
+ {
+ open STDOUT, "> $Redirect_Output";
+ open STDERR, ">&STDOUT";
+ }
@args = ("$execname",
"-f",
"$svcconf_dir$DIR_SEPARATOR$Target[$Cntr]$conf_ext");
- system (@args);
+ if ($debug != 0)
+ {
+ print "Debug mode: Executing -- ";
+ for ($args_c = 0; $args_c < scalar (@args); $args_c ++)
+ {
+ print "$args[$args_c] ";
+ }
+ print "\n";
+ }
+ else
+ {
+ system (@args);
+ }
}
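
The heart of the run_tests.pl change is the new "-N <suite>" option: the suite
name selects one of the predefined config lists, and the script now refuses to
run if no suite was named. Below is a minimal standalone sketch of that
dispatch, assuming a hash-based layout and names of my own choosing (the
baseline list is abbreviated to the entries visible in the hunk above):

#!/usr/bin/perl
# Sketch only -- not part of the patch.  Maps a suite name (the "-N" argument
# in the real driver) to the list of config-file basenames it should run.
use strict;
use warnings;

my %suites = (
    # baseline entries as listed in run_tests.pl (abbreviated here)
    baseline   => [ qw(base_acquire base_tryacquire base_acquire_read
                       base_acquire_write base_tryacquire_write) ],
    perf_thrno => [ qw(perf_t1 perf_t2 perf_t4 perf_t8
                       perf_t16 perf_t32 perf_t64) ],
);

my $suite = shift @ARGV
    or die "usage: $0 <suite>  (one of: ", join (', ', sort keys %suites), ")\n";
my $target = $suites{$suite}
    or die "Unknown test \"$suite\"\n";

# The real driver turns each entry into "<svcconf_dir>/<entry>.conf" and hands
# it to the benchmark binary with "-f"; here we only list the files.
print "$_.conf\n" for @{$target};
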
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t1.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t1.conf
new file mode 100644
index 00000000000..1c4c2ad2f01
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t1.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 1"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t16.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t16.conf
new file mode 100644
index 00000000000..f50ae6f4544
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t16.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 16"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t2.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t2.conf
new file mode 100644
index 00000000000..cc08f483a73
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t2.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 2"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t32.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t32.conf
new file mode 100644
index 00000000000..c12d3dc752b
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t32.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 32"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t4.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t4.conf
new file mode 100644
index 00000000000..2d6661d36e1
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t4.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 4"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t64.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t64.conf
new file mode 100644
index 00000000000..0bf358d0191
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t64.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 64"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
diff --git a/performance-tests/Synch-Benchmarks/svcconf/perf_t8.conf b/performance-tests/Synch-Benchmarks/svcconf/perf_t8.conf
new file mode 100644
index 00000000000..cbfe7062fa8
--- /dev/null
+++ b/performance-tests/Synch-Benchmarks/svcconf/perf_t8.conf
@@ -0,0 +1,32 @@
+# Dynamically configure all the tests
+
+dynamic Performance_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Performance_Test() "-s 60 -N -B -t 8"
+dynamic Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
+dynamic Adaptive_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+dynamic Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+dynamic Adaptive_Recursive_Lock_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+dynamic Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Sema_Test()
+dynamic Adaptive_Semaphore_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+dynamic RWRD_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWRD_Test()
+dynamic RWWR_Mutex_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_RWWR_Test()
+dynamic Token_Test
+ Service_Object *
+ Perf_Test/Perf_Test:_make_Token_Test()
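
The seven perf_t*.conf files added above are identical except for the thread
count handed to Performance_Test ("-t 1" through "-t 64"). A hypothetical
generator, not part of the patch, that would reproduce them from one template
(service and factory names are copied from the configs; everything else,
including the indentation, is assumed):

#!/usr/bin/perl
# Sketch only: regenerate perf_t<N>.conf for each benchmark thread count.
use strict;
use warnings;

# Service name => factory function, as they appear in the new .conf files.
my @tests = (
    [ 'Mutex_Test',                   '_make_Mutex_Test'                   ],
    [ 'Adaptive_Mutex_Test',          '_make_Adaptive_Mutex_Test'          ],
    [ 'Recursive_Lock_Test',          '_make_Recursive_Lock_Test'          ],
    [ 'Adaptive_Recursive_Lock_Test', '_make_Adaptive_Recursive_Lock_Test' ],
    [ 'Semaphore_Test',               '_make_Sema_Test'                    ],
    [ 'Adaptive_Semaphore_Test',      '_make_Adaptive_Sema_Test'           ],
    [ 'RWRD_Mutex_Test',              '_make_RWRD_Test'                    ],
    [ 'RWWR_Mutex_Test',              '_make_RWWR_Test'                    ],
    [ 'Token_Test',                   '_make_Token_Test'                   ],
);

for my $threads (1, 2, 4, 8, 16, 32, 64) {
    my $file = "perf_t$threads.conf";
    open my $fh, '>', $file or die "cannot write $file: $!";
    print {$fh} "# Dynamically configure all the tests\n\n";
    print {$fh} "dynamic Performance_Test\n",
                "        Service_Object *\n",
                "        Perf_Test/Perf_Test:_make_Performance_Test()",
                " \"-s 60 -N -B -t $threads\"\n";
    for my $t (@tests) {
        my ($service, $factory) = @{$t};
        print {$fh} "dynamic $service\n",
                    "        Service_Object *\n",
                    "        Perf_Test/Perf_Test:$factory()\n";
    }
    close $fh or die "cannot close $file: $!";
}
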
diff --git a/performance-tests/Synch-Benchmarks/svcconf/svc.conf b/performance-tests/Synch-Benchmarks/svcconf/svc.conf
index 4d762bb4092..ec2d85fbcbf 100644
--- a/performance-tests/Synch-Benchmarks/svcconf/svc.conf
+++ b/performance-tests/Synch-Benchmarks/svcconf/svc.conf
@@ -1,25 +1,29 @@
# Dynamically configure all the tests
-dynamic Performance_Test Service_Object * Perf_Test/Perf_Test:_make_Performance_Test() "-s 3 -t 4"
+dynamic Performance_Test
+ Service_Object
+ *
+ Perf_Test/Perf_Test:_make_Performance_Test()
+ "-s 3 -t 4"
+dynamic Mutex_Test Service_Object
+ *
+ Perf_Test/Perf_Test:_make_Mutex_Test()
#dynamic Guard_Test Service_Object * Perf_Test/Perf_Test:_make_Guard_Test() "-g"
#dynamic SYSVSema_Test Service_Object * Perf_Test/Perf_Test:_make_SYSVSema_Test()
-dynamic Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_Mutex_Test()
-dynamic Adaptive_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
-dynamic Recursive_Lock_Test Service_Object * Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
-dynamic Adaptive_Recursive_Lock_Test Service_Object * Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
-dynamic Semaphore_Test Service_Object * Perf_Test/Perf_Test:_make_Sema_Test()
-dynamic Adaptive_Semaphore_Test Service_Object * Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
-dynamic RWRD_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_RWRD_Test()
-dynamic RWWR_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_RWWR_Test()
-dynamic Token_Test Service_Object * Perf_Test/Perf_Test:_make_Token_Test()
+#dynamic Adaptive_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_Adaptive_Mutex_Test()
+#dynamic Recursive_Lock_Test Service_Object * Perf_Test/Perf_Test:_make_Recursive_Lock_Test()
+#dynamic Adaptive_Recursive_Lock_Test Service_Object *
+# Perf_Test/Perf_Test:_make_Adaptive_Recursive_Lock_Test()
+#dynamic Semaphore_Test Service_Object * Perf_Test/Perf_Test:_make_Sema_Test()
+#dynamic Adaptive_Semaphore_Test Service_Object * Perf_Test/Perf_Test:_make_Adaptive_Sema_Test()
+#dynamic RWRD_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_RWRD_Test()
+#dynamic RWWR_Mutex_Test Service_Object * Perf_Test/Perf_Test:_make_RWWR_Test()
+#dynamic Token_Test Service_Object * Perf_Test/Perf_Test:_make_Token_Test()
#dynamic SYSVSema_Test Service_Object * Perf_Test/Perf_Test:_make_SYSVSema_Test()
#dynamic Context_Test Service_Object * Perf_Test/Perf_Test:_make_Context_Test()
# dynamic Memory_Test Service_Object * Perf_Test/Perf_Test:_make_Memory_Test()
-# dynamic Pipe_Thr_Test Service_Object * Perf_Test/Perf_Test:_make_Pipe_Thr_Test()
+#dynamic Pipe_Thr_Test Service_Object * Perf_Test/Perf_Test:_make_Pipe_Thr_Test()
# dynamic Pipe_Proc_Test Service_Object * Perf_Test/Perf_Test:_make_Pipe_Proc_Test()
# The following two tests don't work correctly yet...
-# dynamic Condition_Broadcast_Test Service_Object * Perf_Test/Perf_Test:_make_Cond_Brdcast_Test()
-# dynamic Condition_Signal_Test Service_Object * Perf_Test/Perf_Test:_make_Cond_Signal_Test()
-
-# ACE_Condition_Thread_Mutex
-# ACE_File_Lock
+#dynamic Condition_Broadcast_Test Service_Object * Perf_Test/Perf_Test:_make_Cond_Brdcast_Test()
+#dynamic Condition_Signal_Test Service_Object * Perf_Test/Perf_Test:_make_Cond_Signal_Test()
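
Putting the pieces together, a run of the new suites presumably looks
something like "perl run_tests.pl -S svcconf -D results -N perf_thrno", with
"-p" added to print the commands instead of executing them; the exact
invocation is inferred from the option handling above, not stated in the
commit. A self-contained sketch of the per-test loop the driver performs
(the binary name, paths, and build label are illustrative assumptions):

#!/usr/bin/perl
# Sketch only: run every config in a suite, redirecting each test's output to
# results/<test>.<build>, or just print the commands in dry-run mode.
use strict;
use warnings;

my ($execname, $svcconf_dir, $result_dir, $build) =
    ('./synch_driver', 'svcconf', 'results', 'release');
my $dry_run = 1;                      # analogous to the new "-p" flag
my @suite   = qw(perf_t1 perf_t2 perf_t4);

for my $test (@suite) {
    my @cmd     = ($execname, '-f', "$svcconf_dir/$test.conf");
    my $outfile = "$result_dir/$test.$build";
    if ($dry_run) {
        print "would run: @cmd > $outfile 2>&1\n";
    }
    else {
        # Redirect both streams to the per-test result file, then run.
        open STDOUT, '>', $outfile   or die "cannot write $outfile: $!";
        open STDERR, '>&', \*STDOUT  or die "cannot dup STDOUT: $!";
        system (@cmd) == 0
            or warn "$test exited with status ", $? >> 8, "\n";
    }
}
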